Fix: Gemini Embeddings and LLM (#3050)
@@ -1,7 +1,8 @@
 import os
 from typing import Literal, Optional
 
-import google.genai as genai
+from google import genai
+from google.genai import types
 
 from mem0.configs.embeddings.base import BaseEmbedderConfig
 from mem0.embeddings.base import EmbeddingBase
@@ -16,24 +17,23 @@ class GoogleGenAIEmbedding(EmbeddingBase):
 
         api_key = self.config.api_key or os.getenv("GOOGLE_API_KEY")
 
-        if api_key:
-            self.client = genai.Client(api_key="api_key")
-        else:
-            self.client = genai.Client()
+        self.client = genai.Client(api_key=api_key)
 
     def embed(self, text, memory_action: Optional[Literal["add", "search", "update"]] = None):
         """
         Get the embedding for the given text using Google Generative AI.
         Args:
             text (str): The text to embed.
-            memory_action (optional): The type of embedding to use. (Currently not used by Gemini for task_type)
+            memory_action (optional): The type of embedding to use. Must be one of "add", "search", or "update". Defaults to None.
         Returns:
             list: The embedding vector.
         """
         text = text.replace("\n", " ")
 
-        response = self.client.models.embed_content(
-            model=self.config.model, content=text, output_dimensionality=self.config.embedding_dims
-        )
-
-        return response["embedding"]
+        # Create config for embedding parameters
+        config = types.EmbedContentConfig(output_dimensionality=self.config.embedding_dims)
+
+        # Call the embed_content method with the correct parameters
+        response = self.client.models.embed_content(model=self.config.model, contents=text, config=config)
+
+        return response.embeddings[0].values
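
For anyone trying the fix locally, here is a minimal usage sketch; it is not part of this commit. It assumes the google-genai package is installed, that the embedder class lives at mem0.embeddings.gemini, and that the model name and dimensionality shown are illustrative defaults rather than values mandated by the change:

import os

from mem0.configs.embeddings.base import BaseEmbedderConfig
from mem0.embeddings.gemini import GoogleGenAIEmbedding  # assumed module path

# Illustrative config values; any Gemini embedding model/dimension the SDK supports should work.
config = BaseEmbedderConfig(
    model="models/text-embedding-004",
    embedding_dims=768,
    api_key=os.getenv("GOOGLE_API_KEY"),
)

embedder = GoogleGenAIEmbedding(config)

# With the fix, the client is built with the real api_key value (not the literal
# string "api_key") and embed_content() is called with contents= plus an
# EmbedContentConfig, so the vector is read from response.embeddings[0].values.
vector = embedder.embed("memories make the agent smarter")
print(len(vector))  # e.g. 768

The two behaviours this exercises are exactly what the diff changes: client construction with the actual API key, and the new-SDK call signature (contents= and config=) with attribute-style access to the returned embedding instead of dict-style access.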
|
||||