Fix CI issues related to missing dependency (#3096)

Author: Deshraj Yadav
Date: 2025-07-03 18:52:50 -07:00
Committed by: GitHub
Parent: 2c496e6376
Commit: 7484eed4b2
32 changed files with 6150 additions and 828 deletions

View File

@@ -92,10 +92,12 @@ class AWSBedrockLLM(LLMBase):
             if response["output"]["message"]["content"]:
                 for item in response["output"]["message"]["content"]:
                     if "toolUse" in item:
-                        processed_response["tool_calls"].append({
-                            "name": item["toolUse"]["name"],
-                            "arguments": item["toolUse"]["input"],
-                        })
+                        processed_response["tool_calls"].append(
+                            {
+                                "name": item["toolUse"]["name"],
+                                "arguments": item["toolUse"]["input"],
+                            }
+                        )
             return processed_response
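
The hunk above only re-wraps the append call; the parsed structure is unchanged. A minimal sketch of what the loop produces, run against a hand-built Converse-style response (the tool name and input below are made up for illustration):

# Hand-built, Converse-style response used only to illustrate the parsing loop.
response = {
    "output": {
        "message": {
            "content": [
                {"toolUse": {"name": "add_memory", "input": {"text": "likes sci-fi"}}},
            ]
        }
    }
}

processed_response = {"content": "", "tool_calls": []}
for item in response["output"]["message"]["content"]:
    if "toolUse" in item:
        processed_response["tool_calls"].append(
            {
                "name": item["toolUse"]["name"],
                "arguments": item["toolUse"]["input"],
            }
        )

# processed_response["tool_calls"] ==
#   [{"name": "add_memory", "arguments": {"text": "likes sci-fi"}}]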

View File

@@ -165,7 +165,6 @@ class GeminiLLM(LLMBase):
         if system_instruction:
             config_params["system_instruction"] = system_instruction
 
-
         if response_format is not None and response_format["type"] == "json_object":
             config_params["response_mime_type"] = "application/json"
             if "schema" in response_format:
@@ -175,7 +174,6 @@ class GeminiLLM(LLMBase):
             formatted_tools = self._reformat_tools(tools)
             config_params["tools"] = formatted_tools
 
-
         if tool_choice:
             if tool_choice == "auto":
                 mode = types.FunctionCallingConfigMode.AUTO
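
For reference, the config_params dict assembled in this method is handed to the google-genai client. A rough sketch, assuming the google-genai SDK's types module, of how the JSON response format and the AUTO function-calling mode shown above are typically expressed; the exact wiring in gemini.py may differ:

from google.genai import types

# Illustrative generation config mirroring the two branches above:
# JSON output plus automatic function calling (the tools list is omitted here).
config = types.GenerateContentConfig(
    response_mime_type="application/json",
    tool_config=types.ToolConfig(
        function_calling_config=types.FunctionCallingConfig(
            mode=types.FunctionCallingConfigMode.AUTO
        )
    ),
)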

View File

@@ -18,7 +18,7 @@ class SarvamLLM(LLMBase):
         if not self.api_key:
             raise ValueError(
-                "Sarvam API key is required. Set SARVAM_API_KEY environment variable " "or provide api_key in config."
+                "Sarvam API key is required. Set SARVAM_API_KEY environment variable or provide api_key in config."
             )
 
         # Set base URL - use config value or environment or default
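
The Sarvam change is cosmetic: Python joins adjacent string literals at compile time, so both spellings produce the same error message; the new line simply drops the leftover juxtaposition from an earlier line wrap. A quick check:

old = "Sarvam API key is required. Set SARVAM_API_KEY environment variable " "or provide api_key in config."
new = "Sarvam API key is required. Set SARVAM_API_KEY environment variable or provide api_key in config."
assert old == new  # adjacent literals are joined at compile time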

View File

@@ -7,7 +7,6 @@ from openai import OpenAI
 from mem0.configs.llms.base import BaseLlmConfig
 from mem0.llms.base import LLMBase
 from mem0.memory.utils import extract_json
-from openai import OpenAI
 
 
 class VllmLLM(LLMBase):
@@ -41,10 +40,12 @@ class VllmLLM(LLMBase):
         if response.choices[0].message.tool_calls:
             for tool_call in response.choices[0].message.tool_calls:
-                processed_response["tool_calls"].append({
-                    "name": tool_call.function.name,
-                    "arguments": json.loads(extract_json(tool_call.function.arguments)),
-                })
+                processed_response["tool_calls"].append(
+                    {
+                        "name": tool_call.function.name,
+                        "arguments": json.loads(extract_json(tool_call.function.arguments)),
+                    }
+                )
 
             return processed_response
         else:
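
As in the Bedrock hunk, the vllm change removes the duplicate "from openai import OpenAI" and re-wraps the same append; behaviour is unchanged. A small sketch of what the loop yields, using a stand-in object shaped like an OpenAI tool call (the tool name and arguments are made up, and plain json.loads stands in for json.loads(extract_json(...))):

import json
from types import SimpleNamespace

# Stand-in mimicking the shape of response.choices[0].message.tool_calls[0].
tool_call = SimpleNamespace(
    function=SimpleNamespace(
        name="search_memory",
        arguments='{"query": "user preferences"}',
    )
)

processed_response = {"tool_calls": []}
processed_response["tool_calls"].append(
    {
        "name": tool_call.function.name,
        # mem0's extract_json pulls the JSON payload out of the raw string
        # before json.loads; skipped here since the arguments are bare JSON.
        "arguments": json.loads(tool_call.function.arguments),
    }
)

# processed_response == {"tool_calls": [{"name": "search_memory",
#                                        "arguments": {"query": "user preferences"}}]}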