Feature/google as new llm and embedder in mem0-ts (#2468)
Co-authored-by: Saket Aryan <94069182+whysosaket@users.noreply.github.com>
@@ -14,6 +14,7 @@
   },
   "dependencies": {
     "@anthropic-ai/sdk": "^0.18.0",
+    "@google/genai": "^0.7.0",
     "@qdrant/js-client-rest": "^1.13.0",
     "@types/node": "^20.11.19",
     "@types/pg": "^8.11.0",
mem0-ts/src/oss/src/embeddings/google.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
+import { GoogleGenAI } from "@google/genai";
+import { Embedder } from "./base";
+import { EmbeddingConfig } from "../types";
+
+export class GoogleEmbedder implements Embedder {
+  private google: GoogleGenAI;
+  private model: string;
+
+  constructor(config: EmbeddingConfig) {
+    this.google = new GoogleGenAI({ apiKey: config.apiKey });
+    this.model = config.model || "text-embedding-004";
+  }
+
+  async embed(text: string): Promise<number[]> {
+    const response = await this.google.models.embedContent({
+      model: this.model,
+      contents: text,
+      config: { outputDimensionality: 768 },
+    });
+    return response.embeddings![0].values!;
+  }
+
+  async embedBatch(texts: string[]): Promise<number[][]> {
+    const response = await this.google.models.embedContent({
+      model: this.model,
+      contents: texts,
+      config: { outputDimensionality: 768 },
+    });
+    return response.embeddings!.map((item) => item.values!);
+  }
+}
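Below is a minimal consumer-side sketch of the new embedder, not part of the commit itself. It assumes `apiKey` and `model` are the only `EmbeddingConfig` fields the constructor needs (as read in the code above), and the `GEMINI_API_KEY` environment variable name is hypothetical:

import { GoogleEmbedder } from "./embeddings/google";

async function demo() {
  const embedder = new GoogleEmbedder({
    apiKey: process.env.GEMINI_API_KEY!, // hypothetical env var name
    model: "text-embedding-004", // optional; this is the default above
  });

  // embed() returns a single vector; outputDimensionality is pinned to 768.
  const vector = await embedder.embed("memory is a vector now");
  console.log(vector.length); // 768

  // embedBatch() returns one vector per input text, in order.
  const vectors = await embedder.embedBatch(["first note", "second note"]);
  console.log(vectors.length); // 2
}

demo().catch(console.error);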
@@ -4,8 +4,10 @@ export * from "./types";
 export * from "./embeddings/base";
 export * from "./embeddings/openai";
 export * from "./embeddings/ollama";
+export * from "./embeddings/google";
 export * from "./llms/base";
 export * from "./llms/openai";
+export * from "./llms/google";
 export * from "./llms/openai_structured";
 export * from "./llms/anthropic";
 export * from "./llms/groq";
mem0-ts/src/oss/src/llms/google.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
+import { GoogleGenAI } from "@google/genai";
+import { LLM, LLMResponse } from "./base";
+import { LLMConfig, Message } from "../types";
+
+export class GoogleLLM implements LLM {
+  private google: GoogleGenAI;
+  private model: string;
+
+  constructor(config: LLMConfig) {
+    this.google = new GoogleGenAI({ apiKey: config.apiKey });
+    this.model = config.model || "gemini-2.0-flash";
+  }
+
+  async generateResponse(
+    messages: Message[],
+    responseFormat?: { type: string },
+    tools?: any[],
+  ): Promise<string | LLMResponse> {
+    const completion = await this.google.models.generateContent({
+      contents: messages.map((msg) => ({
+        parts: [
+          {
+            text:
+              typeof msg.content === "string"
+                ? msg.content
+                : JSON.stringify(msg.content),
+          },
+        ],
+        role: msg.role === "system" ? "model" : "user",
+      })),
+
+      model: this.model,
+      // config: {
+      //   responseSchema: {}, // Add response schema if needed
+      // },
+    });
+
+    const text = completion.text?.replace(/^```json\n/, "").replace(/\n```$/, "");
+
+    return text || "";
+  }
+
+  async generateChat(messages: Message[]): Promise<LLMResponse> {
+    const completion = await this.google.models.generateContent({
+      contents: messages,
+      model: this.model,
+    });
+    const response = completion.candidates![0].content;
+    return {
+      content: response!.parts![0].text || "",
+      role: response!.role!,
+    };
+  }
+}
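A minimal call sketch for the wrapper above, again outside the commit. Two behaviors are visible in the code: system messages are remapped to Gemini's "model" role (the contents array only accepts "user" and "model"), and generateResponse strips a surrounding ```json fence from the reply. The env var name is hypothetical:

import { GoogleLLM } from "./llms/google";

async function demo() {
  const llm = new GoogleLLM({
    apiKey: process.env.GEMINI_API_KEY!, // hypothetical env var name
    model: "gemini-2.0-flash", // optional; this is the default above
  });

  // "system" is remapped to the "model" role; all other roles become "user".
  const reply = await llm.generateResponse([
    { role: "system", content: "Answer in one short sentence." },
    { role: "user", content: "What does an embedder do?" },
  ]);
  console.log(reply); // plain string; any ```json fence has been stripped
}

demo().catch(console.error);

Note that generateChat forwards `messages` to `generateContent` unchanged, so it only works when the `Message` shape already matches Gemini's `Content` objects.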
@@ -22,6 +22,8 @@ import { SQLiteManager } from "../storage/SQLiteManager";
 import { MemoryHistoryManager } from "../storage/MemoryHistoryManager";
 import { SupabaseHistoryManager } from "../storage/SupabaseHistoryManager";
 import { HistoryManager } from "../storage/base";
+import { GoogleEmbedder } from "../embeddings/google";
+import { GoogleLLM } from "../llms/google";
 
 export class EmbedderFactory {
   static create(provider: string, config: EmbeddingConfig): Embedder {
@@ -30,6 +32,8 @@ export class EmbedderFactory {
         return new OpenAIEmbedder(config);
       case "ollama":
         return new OllamaEmbedder(config);
+      case "google":
+        return new GoogleEmbedder(config);
       default:
         throw new Error(`Unsupported embedder provider: ${provider}`);
     }
@@ -49,6 +53,8 @@ export class LLMFactory {
         return new GroqLLM(config);
       case "ollama":
         return new OllamaLLM(config);
+      case "google":
+        return new GoogleLLM(config);
       default:
         throw new Error(`Unsupported LLM provider: ${provider}`);
     }
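With both factories updated, "google" becomes a selectable provider string end to end. A sketch of direct factory usage, assuming the factory module's import path and that `LLMFactory.create` shares the `(provider, config)` signature shown above for `EmbedderFactory`:

// The import path is an assumption; point it at the actual factory module.
import { EmbedderFactory, LLMFactory } from "./utils/factory";

const apiKey = process.env.GEMINI_API_KEY!; // hypothetical env var name

// Falls back to "text-embedding-004" / "gemini-2.0-flash" when model is omitted.
const embedder = EmbedderFactory.create("google", { apiKey });
const llm = LLMFactory.create("google", { apiKey });

// Any other provider string still throws the "Unsupported ... provider" error.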