feat: Add Gemini support to TypeScript SDK (#3093)

This commit is contained in:
Saket Aryan
2025-07-03 22:23:52 +05:30
committed by GitHub
parent b336cdf018
commit 5b0f1a7cf8
5 changed files with 37 additions and 3 deletions

View File

@@ -409,6 +409,11 @@ mode: "wide"
<Tab title="TypeScript">
<Update label="2025-07-03" description="v2.1.34">
**New Features:**
- **OSS:** Added Gemini support
</Update>
<Update label="2025-06-24" description="v2.1.33">
**Improvement:**
- **Client:** Added `immutable` param to `add` method.

View File

@@ -12,7 +12,8 @@ To use the Gemini model, set the `GEMINI_API_KEY` environment variable. You can
## Usage
```python
<CodeGroup>
```python Python
import os
from mem0 import Memory
@@ -43,6 +44,32 @@ messages = [
m.add(messages, user_id="alice", metadata={"category": "movies"})
```
```typescript TypeScript
import { Memory } from "mem0ai/oss";
const config = {
llm: {
// You can also use "google" as provider (for backward compatibility)
provider: "gemini",
config: {
model: "gemini-2.0-flash-001",
temperature: 0.1
}
}
}
const memory = new Memory(config);
const messages = [
{ role: "user", content: "I'm planning to watch a movie tonight. Any recommendations?" },
{ role: "assistant", content: "How about thriller movies? They can be quite engaging." },
{ role: "user", content: "Im not a big fan of thrillers, but I love sci-fi movies." },
{ role: "assistant", content: "Got it! I'll avoid thrillers and suggest sci-fi movies instead." }
]
await memory.add(messages, { userId: "alice", metadata: { category: "movies" } });
```
</CodeGroup>
## Config

View File

@@ -1,6 +1,6 @@
{
"name": "mem0ai",
"version": "2.1.33",
"version": "2.1.34",
"description": "The Memory Layer For Your AI Apps",
"main": "./dist/index.js",
"module": "./dist/index.mjs",

View File

@@ -40,6 +40,7 @@ export class EmbedderFactory {
case "ollama":
return new OllamaEmbedder(config);
case "google":
case "gemini":
return new GoogleEmbedder(config);
case "azure_openai":
return new AzureOpenAIEmbedder(config);
@@ -65,6 +66,7 @@ export class LLMFactory {
case "ollama":
return new OllamaLLM(config);
case "google":
case "gemini":
return new GoogleLLM(config);
case "azure_openai":
return new AzureOpenAILLM(config);

View File

@@ -4,7 +4,7 @@ import type {
TelemetryEventData,
} from "./telemetry.types";
let version = "2.1.26";
let version = "2.1.34";
// Safely check for process.env in different environments
let MEM0_TELEMETRY = true;