[Bug fix] Fix Vertex AI integration issue (#1257)
@@ -2,6 +2,9 @@ import importlib
 import logging
 from typing import Optional
 
+from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+from langchain_google_vertexai import ChatVertexAI
+
 from embedchain.config import BaseLlmConfig
 from embedchain.helpers.json_serializable import register_deserializable
 from embedchain.llm.base import BaseLlm
@@ -24,13 +27,17 @@ class VertexAILlm(BaseLlm):
 
     @staticmethod
     def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
-        from langchain_community.chat_models import ChatVertexAI
-
-        chat = ChatVertexAI(temperature=config.temperature, model=config.model)
-
         if config.top_p and config.top_p != 1:
             logging.warning("Config option `top_p` is not supported by this model.")
 
         messages = BaseLlm._get_messages(prompt, system_prompt=config.system_prompt)
 
-        return chat(messages).content
+        if config.stream:
+            callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
+            llm = ChatVertexAI(
+                temperature=config.temperature, model=config.model, callbacks=callbacks, streaming=config.stream
+            )
+        else:
+            llm = ChatVertexAI(temperature=config.temperature, model=config.model)
+
+        return llm.invoke(messages).content
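For reference, the streaming branch added in this patch reduces to the standalone sketch below. It is illustrative only: the model name is a placeholder, and it assumes Google Cloud credentials with Vertex AI access are already configured in the environment.

from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain_google_vertexai import ChatVertexAI

# Same construction as the config.stream branch of _get_answer:
# the stdout callback prints tokens as the model streams them.
llm = ChatVertexAI(
    temperature=0.5,
    model="gemini-pro",  # placeholder model name
    callbacks=[StreamingStdOutCallbackHandler()],
    streaming=True,
)

print(llm.invoke("Summarize what Vertex AI does.").content)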