Fix skipped tests (#1385)

Author: Dev Khant
Date: 2024-06-07 22:56:54 +05:30
Committed by: GitHub
parent e0d0f6e94c
commit 827d63d115
4 changed files with 38 additions and 33 deletions


@@ -2,12 +2,17 @@ from unittest.mock import MagicMock, patch
 import pytest
 from langchain.schema import HumanMessage, SystemMessage
 from langchain_google_vertexai import ChatVertexAI

 from embedchain.config import BaseLlmConfig
+from embedchain.core.db.database import database_manager
 from embedchain.llm.vertex_ai import VertexAILlm


+@pytest.fixture(autouse=True)
+def setup_database():
+    database_manager.setup_engine()
+
+
 @pytest.fixture
 def vertexai_llm():
     config = BaseLlmConfig(temperature=0.6, model="chat-bison")
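
Note: the fixture added above is declared with autouse=True, so pytest runs it before every test in the module even though no test requests it by name, which means each test sees an initialized database engine. A minimal, self-contained sketch of that pattern (FakeDatabaseManager and db_manager below are illustrative stand-ins, not embedchain's database_manager):

import pytest


class FakeDatabaseManager:
    """Stand-in for a manager whose engine must be set up before tests run."""

    def __init__(self):
        self.engine_ready = False

    def setup_engine(self):
        self.engine_ready = True


db_manager = FakeDatabaseManager()


@pytest.fixture(autouse=True)
def setup_database():
    # autouse=True: pytest invokes this fixture for every test in scope,
    # even though no test lists it as a parameter.
    db_manager.setup_engine()


def test_engine_ready_without_requesting_fixture():
    # The fixture already ran, so the engine is initialized.
    assert db_manager.engine_ready
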
@@ -22,19 +27,18 @@ def test_get_llm_model_answer(vertexai_llm):
         mock_method.assert_called_once_with(prompt=prompt, config=vertexai_llm.config)


-@pytest.mark.skip(
-    reason="Requires mocking of Google Console Auth. Revisit later since don't want to block users right now."
-)
-def test_get_answer(vertexai_llm, caplog):
-    with patch.object(ChatVertexAI, "invoke", return_value=MagicMock(content="Test Response")) as mock_method:
-        config = vertexai_llm.config
-        prompt = "Test Prompt"
-        messages = vertexai_llm._get_messages(prompt)
-        response = vertexai_llm._get_answer(prompt, config)
-        mock_method.assert_called_once_with(messages)
-        assert response == "Test Response"  # Assertion corrected
-        assert "Config option `top_p` is not supported by this model." not in caplog.text
+@patch("embedchain.llm.vertex_ai.ChatVertexAI")
+def test_get_answer(mock_chat_vertexai, vertexai_llm, caplog):
+    mock_chat_vertexai.return_value.invoke.return_value = MagicMock(content="Test Response")
+    config = vertexai_llm.config
+    prompt = "Test Prompt"
+    messages = vertexai_llm._get_messages(prompt)
+    response = vertexai_llm._get_answer(prompt, config)
+    mock_chat_vertexai.return_value.invoke.assert_called_once_with(messages)
+    assert response == "Test Response"  # Assertion corrected
+    assert "Config option `top_p` is not supported by this model." not in caplog.text


 def test_get_messages(vertexai_llm):
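
The rewritten test drops the skip marker and instead patches ChatVertexAI under the name the LLM module imports it as (embedchain.llm.vertex_ai.ChatVertexAI), so no Google Cloud credentials or console auth are needed. A minimal, self-contained sketch of this decorator-based mocking pattern (Client and answer are illustrative stand-ins, not embedchain's API):

from unittest.mock import MagicMock, patch


class Client:
    """Stand-in for ChatVertexAI: a client the tests must not call for real."""

    def invoke(self, messages):
        raise RuntimeError("would call an external service")


def answer(prompt):
    """Stand-in for VertexAILlm._get_answer: builds a client and invokes it."""
    return Client().invoke([prompt]).content


@patch(__name__ + ".Client")  # patch the name where it is looked up, not where it is defined
def test_answer_uses_mocked_client(mock_client):
    # mock_client replaces the Client class; .return_value is the instance
    # created inside answer(), so invoke() is configured on that instance.
    mock_client.return_value.invoke.return_value = MagicMock(content="Test Response")

    assert answer("Test Prompt") == "Test Response"
    mock_client.return_value.invoke.assert_called_once_with(["Test Prompt"])

Targeting the module-level name rather than the defining library is what lets the decorator intercept the class the code under test actually instantiates, which mirrors why the commit patches "embedchain.llm.vertex_ai.ChatVertexAI" instead of langchain_google_vertexai.ChatVertexAI.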