[Mem0] Update dependencies and make the package lighter (#1708)

Co-authored-by: Dev-Khant <devkhant24@gmail.com>

commit a8ba7abb7d (parent e35786e567)
Author: Deshraj Yadav
Date: 2024-08-14 23:28:07 -07:00
Committed by: GitHub

35 changed files with 634 additions and 1594 deletions


@@ -7,19 +7,23 @@ from openai import OpenAI
 from mem0.llms.base import LLMBase
 from mem0.configs.llms.base import BaseLlmConfig
 
 
 class OpenAILLM(LLMBase):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
         super().__init__(config)
 
         if not self.config.model:
-            self.config.model="gpt-4o"
+            self.config.model = "gpt-4o"
 
-        if os.environ.get("OPENROUTER_API_KEY"): # Use OpenRouter
-            self.client = OpenAI(api_key=os.environ.get("OPENROUTER_API_KEY"), base_url=self.config.openrouter_base_url)
+        if os.environ.get("OPENROUTER_API_KEY"):  # Use OpenRouter
+            self.client = OpenAI(
+                api_key=os.environ.get("OPENROUTER_API_KEY"),
+                base_url=self.config.openrouter_base_url,
+            )
         else:
             api_key = os.getenv("OPENAI_API_KEY") or self.config.api_key
             self.client = OpenAI(api_key=api_key)
 
     def _parse_response(self, response, tools):
         """
         Process the response based on whether tools are used or not.
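
Note on the reformatted constructor: behavior is unchanged, it still prefers OpenRouter whenever OPENROUTER_API_KEY is set and otherwise falls back to OPENAI_API_KEY or config.api_key. A minimal usage sketch, assuming the class lives at mem0/llms/openai.py and that BaseLlmConfig accepts these keyword arguments:

    import os

    from mem0.configs.llms.base import BaseLlmConfig
    from mem0.llms.openai import OpenAILLM  # assumed module path

    # With only OPENAI_API_KEY exported, the constructor builds a plain OpenAI client.
    llm = OpenAILLM(BaseLlmConfig(model="gpt-4o"))

    # Exporting OPENROUTER_API_KEY instead routes the same class through OpenRouter,
    # using config.openrouter_base_url as the base URL.
    os.environ["OPENROUTER_API_KEY"] = "sk-or-..."  # placeholder key
    router_llm = OpenAILLM(BaseLlmConfig())
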
@@ -34,16 +38,18 @@ class OpenAILLM(LLMBase):
         if tools:
             processed_response = {
                 "content": response.choices[0].message.content,
-                "tool_calls": []
+                "tool_calls": [],
             }
 
             if response.choices[0].message.tool_calls:
                 for tool_call in response.choices[0].message.tool_calls:
-                    processed_response["tool_calls"].append({
-                        "name": tool_call.function.name,
-                        "arguments": json.loads(tool_call.function.arguments)
-                    })
+                    processed_response["tool_calls"].append(
+                        {
+                            "name": tool_call.function.name,
+                            "arguments": json.loads(tool_call.function.arguments),
+                        }
+                    )
 
             return processed_response
         else:
             return response.choices[0].message.content
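
The reflowed append does not change the return shape: with tools, _parse_response still yields the raw content plus a list of parsed tool calls, and without tools it returns the bare message string. An illustrative example of the returned dict, with a made-up tool name and arguments:

    # Shape returned when `tools` is truthy (values are hypothetical).
    processed_response = {
        "content": "Noted. I'll remember that.",
        "tool_calls": [
            {
                "name": "add_memory",  # hypothetical tool
                "arguments": {"data": "User prefers dark mode"},
            }
        ],
    }
    # Without tools, the method returns just the message content string.
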
@@ -68,11 +74,11 @@ class OpenAILLM(LLMBase):
             str: The generated response.
         """
         params = {
-            "model": self.config.model,
-            "messages": messages,
-            "temperature": self.config.temperature,
-            "max_tokens": self.config.max_tokens,
-            "top_p": self.config.top_p
+            "model": self.config.model,
+            "messages": messages,
+            "temperature": self.config.temperature,
+            "max_tokens": self.config.max_tokens,
+            "top_p": self.config.top_p,
         }
 
         if os.getenv("OPENROUTER_API_KEY"):
@@ -81,14 +87,14 @@ class OpenAILLM(LLMBase):
openrouter_params["models"] = self.config.models
openrouter_params["route"] = self.config.route
params.pop("model")
if self.config.site_url and self.config.app_name:
extra_headers={
"HTTP-Referer": self.config.site_url,
"X-Title": self.config.app_name
}
openrouter_params["extra_headers"] = extra_headers
extra_headers = {
"HTTP-Referer": self.config.site_url,
"X-Title": self.config.app_name,
}
openrouter_params["extra_headers"] = extra_headers
params.update(**openrouter_params)
if response_format:
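
For clarity on the OpenRouter branch: params starts from the shared model/messages/sampling settings, and when OPENROUTER_API_KEY is present the OpenRouter-specific keys are merged in, with a configured models list plus route replacing the single model and site_url/app_name becoming attribution headers. A rough sketch of the resulting merge, using placeholder values rather than anything from this diff:

    # Illustrative only; the values are assumptions, the merge mirrors the logic above.
    params = {
        "model": "gpt-4o",
        "messages": [{"role": "user", "content": "Hi"}],
        "temperature": 0.1,
        "max_tokens": 1000,
        "top_p": 0.1,
    }
    openrouter_params = {
        "models": ["openai/gpt-4o", "anthropic/claude-3.5-sonnet"],
        "route": "fallback",
        "extra_headers": {
            "HTTP-Referer": "https://example.com",
            "X-Title": "example-app",
        },
    }
    params.pop("model")  # OpenRouter picks from the "models" list instead
    params.update(**openrouter_params)
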