fix/failing-unit-tests (#2486)

This commit is contained in:
Anusha Yella
2025-04-02 10:36:02 +05:30
committed by GitHub
parent 1db1105cad
commit 1b1f02eb57
3 changed files with 30 additions and 30 deletions

View File

@@ -8,27 +8,39 @@ from mem0.llms.lmstudio import LMStudioLLM
@pytest.fixture
def mock_lm_studio_client():
    """Yield a mocked OpenAI-compatible client patched into the lmstudio module.

    LM Studio exposes an OpenAI-compatible HTTP API, so LMStudioLLM talks to it
    through the ``OpenAI`` client class imported in ``mem0.llms.lmstudio``.
    Patching that name (rather than a non-existent ``Client``) is what makes
    the tests exercise the real call path.

    The mock's ``chat.completions.create`` is pre-loaded with a canned
    response whose shape matches the OpenAI SDK return value
    (``response.choices[0].message.content``).
    """
    # Patch the symbol where it is *used* (mem0.llms.lmstudio), not where it
    # is defined — required for unittest.mock.patch to intercept the lookup.
    with patch("mem0.llms.lmstudio.OpenAI") as mock_openai:
        mock_client = Mock()
        # Canned completion mirroring the OpenAI response object structure.
        mock_client.chat.completions.create.return_value = Mock(
            choices=[
                Mock(message=Mock(content="I'm doing well, thank you for asking!"))
            ]
        )
        # LMStudioLLM instantiates OpenAI(...); hand it our mock instance.
        mock_openai.return_value = mock_client
        yield mock_client
def test_generate_response_without_tools(mock_lm_studio_client):
    """generate_response without tools forwards config to the OpenAI-style API.

    Verifies that LMStudioLLM:
      * passes model, messages, and sampling parameters straight through to
        ``chat.completions.create``,
      * requests JSON-object output (``response_format``),
      * returns the text content of the first choice.
    """
    config = BaseLlmConfig(
        model="lmstudio-community/Meta-Llama-3.1-8B-Instruct-GGUF/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf",
        temperature=0.7,
        max_tokens=100,
        top_p=1.0,
    )
    llm = LMStudioLLM(config)

    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello, how are you?"},
    ]

    response = llm.generate_response(messages)

    # The fixture patches the OpenAI client, so the call must go through
    # chat.completions.create with exactly the configured parameters.
    mock_lm_studio_client.chat.completions.create.assert_called_once_with(
        model="lmstudio-community/Meta-Llama-3.1-8B-Instruct-GGUF/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf",
        messages=messages,
        temperature=0.7,
        max_tokens=100,
        top_p=1.0,
        response_format={"type": "json_object"},
    )
    # Canned content set on the fixture's mock response.
    assert response == "I'm doing well, thank you for asking!"