[integration]: Together embedder added (#1995)

This commit is contained in:
Mayank
2024-10-30 22:21:01 +05:30
committed by GitHub
parent efd45c0c4d
commit d928ea4a2b
5 changed files with 72 additions and 1 deletions

View File

@@ -13,7 +13,7 @@ class EmbedderConfig(BaseModel):
@field_validator("config")
def validate_config(cls, v, values):
    """Validate that the configured embedding provider is supported.

    Args:
        v: The provider-specific config payload under validation.
        values: Pydantic validation info; ``values.data["provider"]`` holds
            the provider name chosen elsewhere in the model.

    Returns:
        The config payload unchanged when the provider is supported.

    Raises:
        ValueError: If the provider is not one of the supported backends.
    """
    provider = values.data.get("provider")
    # NOTE(review): the extracted diff contained both the pre- and post-change
    # membership tests back to back; only the post-change list (which adds
    # "together") is kept here.
    if provider in ["openai", "ollama", "huggingface", "azure_openai", "gemini", "vertexai", "together"]:
        return v
    else:
        raise ValueError(f"Unsupported embedding provider: {provider}")

View File

@@ -0,0 +1,31 @@
import os
from typing import Optional
from together import Together
from mem0.configs.embeddings.base import BaseEmbedderConfig
from mem0.embeddings.base import EmbeddingBase
class TogetherEmbedding(EmbeddingBase):
    """Embedding backend that generates vectors via the Together AI API."""

    def __init__(self, config: Optional[BaseEmbedderConfig] = None):
        """Initialize the Together client from config and environment.

        Args:
            config: Optional embedder configuration. The model, embedding
                dimensions, and API key fall back to defaults; the API key
                falls back to the ``TOGETHER_API_KEY`` environment variable.
        """
        super().__init__(config)
        # Default to Together's 8k-context M2-BERT retrieval model.
        self.config.model = self.config.model or "togethercomputer/m2-bert-80M-8k-retrieval"
        api_key = self.config.api_key or os.getenv("TOGETHER_API_KEY")
        # Assumed output size for the default model — TODO(review): confirm
        # against the model's actual embedding dimensionality.
        self.config.embedding_dims = self.config.embedding_dims or 768
        self.client = Together(api_key=api_key)

    def embed(self, text):
        """
        Get the embedding for the given text using Together AI.

        Args:
            text (str): The text to embed.

        Returns:
            list: The embedding vector.
        """
        return self.client.embeddings.create(model=self.config.model, input=text).data[0].embedding