diff --git a/mem0-ts/src/oss/package.json b/mem0-ts/src/oss/package.json index 4ae36ada..15a88382 100644 --- a/mem0-ts/src/oss/package.json +++ b/mem0-ts/src/oss/package.json @@ -14,6 +14,7 @@ }, "dependencies": { "@anthropic-ai/sdk": "^0.18.0", + "@google/genai": "^0.7.0", "@qdrant/js-client-rest": "^1.13.0", "@types/node": "^20.11.19", "@types/pg": "^8.11.0", diff --git a/mem0-ts/src/oss/src/embeddings/google.ts b/mem0-ts/src/oss/src/embeddings/google.ts new file mode 100644 index 00000000..e75da639 --- /dev/null +++ b/mem0-ts/src/oss/src/embeddings/google.ts @@ -0,0 +1,31 @@ +import { GoogleGenAI } from "@google/genai"; +import { Embedder } from "./base"; +import { EmbeddingConfig } from "../types"; + +export class GoogleEmbedder implements Embedder { + private google: GoogleGenAI; + private model: string; + + constructor(config: EmbeddingConfig) { + this.google = new GoogleGenAI({ apiKey: config.apiKey }); + this.model = config.model || "text-embedding-004"; + } + + async embed(text: string): Promise<number[]> { + const response = await this.google.models.embedContent({ + model: this.model, + contents: text, + config: { outputDimensionality: 768 }, + }); + return response.embeddings![0].values!; + } + + async embedBatch(texts: string[]): Promise<number[][]> { + const response = await this.google.models.embedContent({ + model: this.model, + contents: texts, + config: { outputDimensionality: 768 }, + }); + return response.embeddings!.map((item) => item.values!); + } +} diff --git a/mem0-ts/src/oss/src/index.ts b/mem0-ts/src/oss/src/index.ts index 5bbdcc5d..195d9738 100644 --- a/mem0-ts/src/oss/src/index.ts +++ b/mem0-ts/src/oss/src/index.ts @@ -4,8 +4,10 @@ export * from "./types"; export * from "./embeddings/base"; export * from "./embeddings/openai"; export * from "./embeddings/ollama"; +export * from "./embeddings/google"; export * from "./llms/base"; export * from "./llms/openai"; +export * from "./llms/google"; export * from "./llms/openai_structured"; export * from 
"./llms/anthropic"; export * from "./llms/groq"; diff --git a/mem0-ts/src/oss/src/llms/google.ts b/mem0-ts/src/oss/src/llms/google.ts new file mode 100644 index 00000000..735a2c38 --- /dev/null +++ b/mem0-ts/src/oss/src/llms/google.ts @@ -0,0 +1,54 @@ +import { GoogleGenAI } from "@google/genai"; +import { LLM, LLMResponse } from "./base"; +import { LLMConfig, Message } from "../types"; + +export class GoogleLLM implements LLM { + private google: GoogleGenAI; + private model: string; + + constructor(config: LLMConfig) { + this.google = new GoogleGenAI({ apiKey: config.apiKey }); + this.model = config.model || "gemini-2.0-flash"; + } + + async generateResponse( + messages: Message[], + responseFormat?: { type: string }, + tools?: any[], + ): Promise<string> { + const completion = await this.google.models.generateContent({ + contents: messages.map((msg) => ({ + parts: [ + { + text: + typeof msg.content === "string" + ? msg.content + : JSON.stringify(msg.content), + }, + ], + role: msg.role === "system" ? 
"model" : "user", + })), + + model: this.model, + // config: { + // responseSchema: {}, // Add response schema if needed + // }, + }); + + const text = completion.text?.replace(/^```json\n/, "").replace(/\n```$/, ""); + + return text || ""; + } + + async generateChat(messages: Message[]): Promise<LLMResponse> { + const completion = await this.google.models.generateContent({ + contents: messages, + model: this.model, + }); + const response = completion.candidates![0].content; + return { + content: response!.parts![0].text || "", + role: response!.role!, + }; + } +} diff --git a/mem0-ts/src/oss/src/utils/factory.ts b/mem0-ts/src/oss/src/utils/factory.ts index 62cfc78a..77834417 100644 --- a/mem0-ts/src/oss/src/utils/factory.ts +++ b/mem0-ts/src/oss/src/utils/factory.ts @@ -22,6 +22,8 @@ import { SQLiteManager } from "../storage/SQLiteManager"; import { MemoryHistoryManager } from "../storage/MemoryHistoryManager"; import { SupabaseHistoryManager } from "../storage/SupabaseHistoryManager"; import { HistoryManager } from "../storage/base"; +import { GoogleEmbedder } from "../embeddings/google"; +import { GoogleLLM } from "../llms/google"; export class EmbedderFactory { static create(provider: string, config: EmbeddingConfig): Embedder { @@ -30,6 +32,8 @@ export class EmbedderFactory { return new OpenAIEmbedder(config); case "ollama": return new OllamaEmbedder(config); + case "google": + return new GoogleEmbedder(config); default: throw new Error(`Unsupported embedder provider: ${provider}`); } @@ -49,6 +53,8 @@ export class LLMFactory { return new GroqLLM(config); case "ollama": return new OllamaLLM(config); + case "google": + return new GoogleLLM(config); default: throw new Error(`Unsupported LLM provider: ${provider}`); }