[Bug fix] Anthropic, Llama2 and VertexAI LLMs dependencies (#820)
@@ -1,4 +1,5 @@
+import logging
 import os
 from typing import Optional
 
 from embedchain.config import BaseLlmConfig
@@ -9,6 +10,8 @@ from embedchain.llm.base import BaseLlm
 @register_deserializable
 class AnthropicLlm(BaseLlm):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
+        if "ANTHROPIC_API_KEY" not in os.environ:
+            raise ValueError("Please set the ANTHROPIC_API_KEY environment variable.")
         super().__init__(config=config)
 
     def get_llm_model_answer(self, prompt):
@@ -18,7 +21,9 @@ class AnthropicLlm(BaseLlm):
     def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
         from langchain.chat_models import ChatAnthropic
 
-        chat = ChatAnthropic(temperature=config.temperature, model=config.model)
+        chat = ChatAnthropic(
+            anthropic_api_key=os.environ["ANTHROPIC_API_KEY"], temperature=config.temperature, model=config.model
+        )
 
         if config.max_tokens and config.max_tokens != 1000:
             logging.warning("Config option `max_tokens` is not supported by this model.")
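
For context, a minimal usage sketch (not part of the commit) of how the patched class behaves: instantiating AnthropicLlm without the environment variable now fails fast with a clear ValueError, instead of failing later inside langchain's ChatAnthropic call, and when the key is present it is forwarded explicitly. The module path embedchain.llm.anthropic and the BaseLlmConfig keyword arguments are assumptions inferred from the diff, not confirmed by it.

# Sketch of the patched behaviour; module path and config kwargs are
# assumptions inferred from the diff, not confirmed by it.
import os

from embedchain.config import BaseLlmConfig
from embedchain.llm.anthropic import AnthropicLlm  # assumed module path

# Without the key, __init__ now raises immediately.
os.environ.pop("ANTHROPIC_API_KEY", None)
try:
    AnthropicLlm()
except ValueError as err:
    print(err)  # "Please set the ANTHROPIC_API_KEY environment variable."

# With the key set, it is passed explicitly to ChatAnthropic.
os.environ["ANTHROPIC_API_KEY"] = "sk-ant-..."  # placeholder, set your own
llm = AnthropicLlm(config=BaseLlmConfig(model="claude-2", temperature=0.5))
print(llm.get_llm_model_answer("What does this commit change?"))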