[integration]: Together embedder added (#1995)

docs/components/embedders/models/together.mdx (new file, 38 lines)
@@ -0,0 +1,38 @@
---
title: Together
---

To use Together embedding models, set the `TOGETHER_API_KEY` environment variable. You can obtain the Together API key from the [Together Platform](https://api.together.xyz/settings/api-keys).

### Usage

<Note> The `embedding_model_dims` parameter for `vector_store` should be set to `768` when using the Together embedder; a paired configuration sketch follows the usage example below. </Note>

```python
import os
from mem0 import Memory

os.environ["TOGETHER_API_KEY"] = "your_api_key"

config = {
    "embedder": {
        "provider": "together",
        "config": {
            "model": "togethercomputer/m2-bert-80M-8k-retrieval"
        }
    }
}

m = Memory.from_config(config)
m.add("I'm visiting Paris", user_id="john")
```
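
Because the Note above pins the embedder output to 768 dimensions, an explicitly configured vector store must be created with the same size. A minimal sketch of such a pairing, assuming a Qdrant vector store and that `embedding_model_dims` sits under its `config` block (both details are assumptions, not taken from this commit):

```python
import os

from mem0 import Memory

os.environ["TOGETHER_API_KEY"] = "your_api_key"

config = {
    "embedder": {
        "provider": "together",
        "config": {
            "model": "togethercomputer/m2-bert-80M-8k-retrieval"
        }
    },
    # Assumed vector store block: the provider name and the placement of
    # embedding_model_dims are assumptions; the key point is matching 768.
    "vector_store": {
        "provider": "qdrant",
        "config": {
            "embedding_model_dims": 768
        }
    }
}

m = Memory.from_config(config)
```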

### Config

Here are the parameters available for configuring the Together embedder (a sketch of passing them inline follows the table):

| Parameter | Description | Default Value |
| --- | --- | --- |
| `model` | The name of the embedding model to use | `togethercomputer/m2-bert-80M-8k-retrieval` |
| `embedding_dims` | Dimensions of the embedding model | `768` |
| `api_key` | The Together API key | `None` |
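
All three parameters can also be passed inline instead of relying on the environment variable; a short sketch (the key value is a placeholder):

```python
from mem0 import Memory

config = {
    "embedder": {
        "provider": "together",
        "config": {
            "model": "togethercomputer/m2-bert-80M-8k-retrieval",
            "embedding_dims": 768,
            "api_key": "your_api_key"  # placeholder; otherwise read from TOGETHER_API_KEY
        }
    }
}

m = Memory.from_config(config)
```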

@@ -15,6 +15,7 @@ See the list of supported embedders below.
 <Card title="Hugging Face" href="/components/embedders/models/huggingface"></Card>
 <Card title="Gemini" href="/components/embedders/models/gemini"></Card>
 <Card title="Vertex AI" href="/components/embedders/models/vertexai"></Card>
+<Card title="Together" href="/components/embedders/models/together"></Card>
 </CardGroup>

 ## Usage

@@ -13,7 +13,7 @@ class EmbedderConfig(BaseModel):
     @field_validator("config")
     def validate_config(cls, v, values):
         provider = values.data.get("provider")
-        if provider in ["openai", "ollama", "huggingface", "azure_openai", "gemini", "vertexai"]:
+        if provider in ["openai", "ollama", "huggingface", "azure_openai", "gemini", "vertexai", "together"]:
             return v
         else:
             raise ValueError(f"Unsupported embedding provider: {provider}")
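
For readers unfamiliar with the validator pattern used here: with pydantic v2, the second argument of a `field_validator` is a `ValidationInfo` object, so previously validated fields (here `provider`) are read via `.data`. A self-contained sketch of the same shape (the `ProviderConfig` model is illustrative only, not part of this commit):

```python
from pydantic import BaseModel, field_validator

class ProviderConfig(BaseModel):
    provider: str = "openai"
    config: dict = {}

    @field_validator("config")
    def validate_config(cls, v, values):
        # `values` is a ValidationInfo; fields declared before `config` appear in .data
        provider = values.data.get("provider")
        if provider in ["openai", "together"]:
            return v
        raise ValueError(f"Unsupported embedding provider: {provider}")

# "together" now passes validation; an unknown provider raises a ValidationError.
ProviderConfig(provider="together", config={"model": "togethercomputer/m2-bert-80M-8k-retrieval"})
```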

mem0/embeddings/together.py (new file, 31 lines)
@@ -0,0 +1,31 @@
import os
from typing import Optional

from together import Together

from mem0.configs.embeddings.base import BaseEmbedderConfig
from mem0.embeddings.base import EmbeddingBase


class TogetherEmbedding(EmbeddingBase):
    def __init__(self, config: Optional[BaseEmbedderConfig] = None):
        super().__init__(config)

        self.config.model = self.config.model or "togethercomputer/m2-bert-80M-8k-retrieval"
        api_key = self.config.api_key or os.getenv("TOGETHER_API_KEY")
        # TODO: check if this is correct
        self.config.embedding_dims = self.config.embedding_dims or 768
        self.client = Together(api_key=api_key)

    def embed(self, text):
        """
        Get the embedding for the given text using Together.

        Args:
            text (str): The text to embed.

        Returns:
            list: The embedding vector.
        """
        return self.client.embeddings.create(model=self.config.model, input=text).data[0].embedding
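
A quick sketch of exercising the new class on its own, assuming `BaseEmbedderConfig` accepts `model` as a keyword argument (an assumption based on how the fields are read above, not verified against this commit):

```python
from mem0.configs.embeddings.base import BaseEmbedderConfig
from mem0.embeddings.together import TogetherEmbedding

# Assumed keyword argument; the API key is picked up from TOGETHER_API_KEY otherwise.
embedder = TogetherEmbedding(BaseEmbedderConfig(model="togethercomputer/m2-bert-80M-8k-retrieval"))
vector = embedder.embed("I'm visiting Paris")
print(len(vector))  # 768 for this model, matching the default embedding_dims
```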

@@ -44,6 +44,7 @@ class EmbedderFactory:
         "azure_openai": "mem0.embeddings.azure_openai.AzureOpenAIEmbedding",
         "gemini": "mem0.embeddings.gemini.GoogleGenAIEmbedding",
         "vertexai": "mem0.embeddings.vertexai.VertexAIEmbedding",
+        "together": "mem0.embeddings.together.TogetherEmbedding",
     }

     @classmethod
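
The mapping stores dotted class paths, which the factory presumably resolves by dynamic import before instantiating the embedder. A standalone sketch of that resolution pattern (the `load_class` helper is illustrative, not mem0's API):

```python
import importlib

def load_class(dotted_path: str):
    """Resolve a 'package.module.ClassName' string to the class object."""
    module_path, _, class_name = dotted_path.rpartition(".")
    return getattr(importlib.import_module(module_path), class_name)

# With the new registry entry, the "together" provider resolves to TogetherEmbedding.
EmbedderClass = load_class("mem0.embeddings.together.TogetherEmbedding")
```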