[Feature] Add support for Together AI API (#1058)

Sukkrit Sharma
2023-12-25 18:36:20 +05:30
committed by GitHub
parent 6be29f5bed
commit 6c1ea7799e
15 changed files with 629 additions and 128 deletions

View File

@@ -12,6 +12,7 @@ class LlmFactory:
"anthropic": "embedchain.llm.anthropic.AnthropicLlm",
"azure_openai": "embedchain.llm.azure_openai.AzureOpenAILlm",
"cohere": "embedchain.llm.cohere.CohereLlm",
"together": "embedchain.llm.together.TogetherLlm",
"gpt4all": "embedchain.llm.gpt4all.GPT4ALLLlm",
"ollama": "embedchain.llm.ollama.OllamaLlm",
"huggingface": "embedchain.llm.huggingface.HuggingFaceLlm",

View File

@@ -0,0 +1,43 @@
import importlib
import os
from typing import Optional

from langchain.llms import Together

from embedchain.config import BaseLlmConfig
from embedchain.helpers.json_serializable import register_deserializable
from embedchain.llm.base import BaseLlm


@register_deserializable
class TogetherLlm(BaseLlm):
    def __init__(self, config: Optional[BaseLlmConfig] = None):
        # The Together API key must be supplied via the environment.
        if "TOGETHER_API_KEY" not in os.environ:
            raise ValueError("Please set the TOGETHER_API_KEY environment variable.")
        # Verify the optional `together` extra is installed before proceeding.
        try:
            importlib.import_module("together")
        except ModuleNotFoundError:
            raise ModuleNotFoundError(
                "The required dependencies for Together are not installed. "
                'Please install with `pip install --upgrade "embedchain[together]"`'
            ) from None
        super().__init__(config=config)

    def get_llm_model_answer(self, prompt):
        if self.config.system_prompt:
            raise ValueError("TogetherLlm does not support `system_prompt`")
        return TogetherLlm._get_answer(prompt=prompt, config=self.config)

    @staticmethod
    def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
        # Delegate to LangChain's Together wrapper with the configured parameters.
        llm = Together(
            together_api_key=os.environ["TOGETHER_API_KEY"],
            model=config.model,
            max_tokens=config.max_tokens,
            temperature=config.temperature,
            top_p=config.top_p,
        )
        return llm(prompt)
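
A minimal usage sketch for the new class, assuming `BaseLlmConfig` accepts these keyword arguments; the API key and model name below are placeholders:

    import os

    from embedchain.config import BaseLlmConfig
    from embedchain.llm.together import TogetherLlm

    os.environ["TOGETHER_API_KEY"] = "your-api-key"  # placeholder; must be set before instantiation

    config = BaseLlmConfig(
        model="mistralai/Mixtral-8x7B-Instruct-v0.1",  # illustrative Together-hosted model
        max_tokens=256,
        temperature=0.7,
        top_p=0.9,
    )
    llm = TogetherLlm(config=config)
    print(llm.get_llm_model_answer("What is Together AI?"))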

View File

@@ -384,6 +384,7 @@ def validate_config(config_data):
"anthropic",
"huggingface",
"cohere",
"together",
"gpt4all",
"ollama",
"jina",