[Bug fix] Fix vertex ai integration issue (#1257)

This commit is contained in:
Deshraj Yadav
2024-02-14 11:19:32 -08:00
committed by GitHub
parent 036bf3a161
commit 0766a44ccf
7 changed files with 110 additions and 155 deletions

View File

@@ -2,6 +2,7 @@ from unittest.mock import MagicMock, patch
import pytest
from langchain.schema import HumanMessage, SystemMessage
from langchain_google_vertexai import ChatVertexAI
from embedchain.config import BaseLlmConfig
from embedchain.llm.vertex_ai import VertexAILlm
@@ -9,7 +10,7 @@ from embedchain.llm.vertex_ai import VertexAILlm
@pytest.fixture
def vertexai_llm():
config = BaseLlmConfig(temperature=0.6, model="vertexai_model", system_prompt="System Prompt")
config = BaseLlmConfig(temperature=0.6, model="chat-bison")
return VertexAILlm(config)
@@ -21,37 +22,18 @@ def test_get_llm_model_answer(vertexai_llm):
mock_method.assert_called_once_with(prompt=prompt, config=vertexai_llm.config)
def test_get_answer_with_warning(vertexai_llm, caplog):
with patch("langchain_community.chat_models.ChatVertexAI") as mock_chat:
mock_chat_instance = mock_chat.return_value
mock_chat_instance.return_value = MagicMock(content="Test Response")
prompt = "Test Prompt"
@pytest.mark.skip(
reason="Requires mocking of Google Console Auth. Revisit later since don't want to block users right now."
)
def test_get_answer(vertexai_llm, caplog):
with patch.object(ChatVertexAI, "invoke", return_value=MagicMock(content="Test Response")) as mock_method:
config = vertexai_llm.config
config.top_p = 0.5
response = vertexai_llm._get_answer(prompt, config)
assert response == "Test Response"
mock_chat.assert_called_once_with(temperature=config.temperature, model=config.model)
assert "Config option `top_p` is not supported by this model." in caplog.text
def test_get_answer_no_warning(vertexai_llm, caplog):
with patch("langchain_community.chat_models.ChatVertexAI") as mock_chat:
mock_chat_instance = mock_chat.return_value
mock_chat_instance.return_value = MagicMock(content="Test Response")
prompt = "Test Prompt"
config = vertexai_llm.config
config.top_p = 1.0
messages = vertexai_llm._get_messages(prompt)
response = vertexai_llm._get_answer(prompt, config)
mock_method.assert_called_once_with(messages)
assert response == "Test Response"
mock_chat.assert_called_once_with(temperature=config.temperature, model=config.model)
assert response == "Test Response" # Assertion corrected
assert "Config option `top_p` is not supported by this model." not in caplog.text