feat: add mistral AI as LLM provider (#2496)
Co-authored-by: Saket Aryan <94069182+whysosaket@users.noreply.github.com>
@@ -127,6 +127,14 @@ mode: "wide"
 <Tab title="TypeScript">
 
+<Update label="2025-04-11" description="v2.1.16">
+**New Features:**
+- **Mistral LLM:** Added Mistral LLM integration in OSS
+
+**Improvements:**
+- **Zod:** Updated Zod to 3.24.1 to avoid conflicts with other packages
+</Update>
+
 <Update label="2025-04-09" description="v2.1.15">
 **Improvements:**
 - **Client:** Added support for Mem0 to work with Chrome Extensions
 
@@ -2,11 +2,12 @@
 title: Mistral AI
 ---
 
-To use mistral's models, please Obtain the Mistral AI api key from their [console](https://console.mistral.ai/). Set the `MISTRAL_API_KEY` environment variable to use the model as given below in the example.
+To use mistral's models, please obtain the Mistral AI api key from their [console](https://console.mistral.ai/). Set the `MISTRAL_API_KEY` environment variable to use the model as given below in the example.
 
 ## Usage
 
-```python
+<CodeGroup>
+```python Python
 import os
 from mem0 import Memory
 
@@ -34,6 +35,32 @@ messages = [
 m.add(messages, user_id="alice", metadata={"category": "movies"})
 ```
 
+```typescript TypeScript
+import { Memory } from 'mem0ai/oss';
+
+const config = {
+  llm: {
+    provider: 'mistral',
+    config: {
+      apiKey: process.env.MISTRAL_API_KEY || '',
+      model: 'mistral-tiny-latest', // Or 'mistral-small-latest', 'mistral-medium-latest', etc.
+      temperature: 0.1,
+      maxTokens: 2000,
+    },
+  },
+};
+
+const memory = new Memory(config);
+const messages = [
+  {"role": "user", "content": "I'm planning to watch a movie tonight. Any recommendations?"},
+  {"role": "assistant", "content": "How about a thriller movies? They can be quite engaging."},
+  {"role": "user", "content": "I'm not a big fan of thriller movies but I love sci-fi movies."},
+  {"role": "assistant", "content": "Got it! I'll avoid thriller recommendations and suggest sci-fi movies in the future."}
+]
+await memory.add(messages, { userId: "alice", metadata: { category: "movies" } });
+```
+</CodeGroup>
+
 ## Config
 
 All available parameters for the `litellm` config are present in [Master List of All Params in Config](../config).
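The new TypeScript tab above only shows the `add` path. A quick way to sanity-check the Mistral wiring end to end is to read the stored memories back. A minimal sketch, assuming the `Memory.search(query, { userId })` API that `mem0ai/oss` exposes; the query text and logging are illustrative, not part of this diff:

```typescript
import { Memory } from "mem0ai/oss";

// Same provider config as the docs example above.
const memory = new Memory({
  llm: {
    provider: "mistral",
    config: {
      apiKey: process.env.MISTRAL_API_KEY || "",
      model: "mistral-tiny-latest",
    },
  },
});

// After memory.add(...), query what the Mistral LLM extracted.
const results = await memory.search("What movies does the user like?", {
  userId: "alice",
});
console.log(results); // Should surface the sci-fi preference, not thrillers
```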
@@ -1,6 +1,6 @@
 {
   "name": "mem0ai",
-  "version": "2.1.15",
+  "version": "2.1.16",
   "description": "The Memory Layer For Your AI Apps",
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",
@@ -93,13 +93,13 @@
   "dependencies": {
     "axios": "1.7.7",
     "openai": "4.28.0",
-    "redis": "^4.6.13",
     "uuid": "9.0.1",
-    "zod": "3.22.4"
+    "zod": "^3.24.1"
   },
   "peerDependencies": {
     "@anthropic-ai/sdk": "0.18.0",
     "@qdrant/js-client-rest": "1.13.0",
+    "@mistralai/mistralai": "^1.5.2",
     "@google/genai": "^0.7.0",
     "@supabase/supabase-js": "^2.49.1",
     "@types/jest": "29.5.14",
@@ -109,7 +109,7 @@
     "neo4j-driver": "^5.28.1",
     "ollama": "^0.5.14",
     "pg": "8.11.3",
-    "redis": "4.7.0",
+    "redis": "^4.6.13",
     "sqlite3": "5.1.7"
   },
   "engines": {
mem0-ts/pnpm-lock.yaml | 40 (generated)
@@ -13,6 +13,9 @@ importers:
       "@google/genai":
         specifier: ^0.7.0
         version: 0.7.0(encoding@0.1.13)
+      "@mistralai/mistralai":
+        specifier: ^1.5.2
+        version: 1.5.2(zod@3.24.2)
       "@qdrant/js-client-rest":
         specifier: 1.13.0
         version: 1.13.0(typescript@5.5.4)
@@ -47,7 +50,7 @@ importers:
         specifier: 8.11.3
         version: 8.11.3
       redis:
-        specifier: 4.7.0
+        specifier: ^4.6.13
         version: 4.7.0
       sqlite3:
         specifier: 5.1.7
@@ -56,8 +59,8 @@ importers:
         specifier: 9.0.1
         version: 9.0.1
       zod:
-        specifier: 3.22.4
-        version: 3.22.4
+        specifier: ^3.24.1
+        version: 3.24.2
   devDependencies:
     "@types/node":
       specifier: ^22.7.6
@@ -793,6 +796,14 @@ packages:
       integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==,
     }
 
+  "@mistralai/mistralai@1.5.2":
+    resolution:
+      {
+        integrity: sha512-mBTIDQmuAX9RowMYteZFHJIYlEwDcHzzaxgXzrFtlvH9CkKXK7R1VnZ1sZSe+uLMg0dIXUVdPRUh1SwyFeSqXw==,
+      }
+    peerDependencies:
+      zod: ">= 3"
+
   "@nodelib/fs.scandir@2.1.5":
     resolution:
       {
@@ -4717,10 +4728,18 @@ packages:
       }
     engines: { node: ">=10" }
 
-  zod@3.22.4:
-    resolution:
-      {
-        integrity: sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==,
-      }
+  zod-to-json-schema@3.24.5:
+    resolution:
+      {
+        integrity: sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==,
+      }
+    peerDependencies:
+      zod: ^3.24.1
+
+  zod@3.24.2:
+    resolution:
+      {
+        integrity: sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==,
+      }
 
 snapshots:
@@ -5227,6 +5246,11 @@ snapshots:
     "@jridgewell/resolve-uri": 3.1.2
     "@jridgewell/sourcemap-codec": 1.5.0
 
+  "@mistralai/mistralai@1.5.2(zod@3.24.2)":
+    dependencies:
+      zod: 3.24.2
+      zod-to-json-schema: 3.24.5(zod@3.24.2)
+
   "@nodelib/fs.scandir@2.1.5":
     dependencies:
       "@nodelib/fs.stat": 2.0.5
@@ -7748,4 +7772,8 @@ snapshots:
 
   yocto-queue@0.1.0: {}
 
-  zod@3.22.4: {}
+  zod-to-json-schema@3.24.5(zod@3.24.2):
+    dependencies:
+      zod: 3.24.2
+
+  zod@3.24.2: {}
mem0-ts/src/oss/examples/llms/mistral-example.ts | 78 (new file)
@@ -0,0 +1,78 @@
+import dotenv from "dotenv";
+import { MistralLLM } from "../../src/llms/mistral";
+
+// Load environment variables
+dotenv.config();
+
+async function testMistral() {
+  // Check for API key
+  if (!process.env.MISTRAL_API_KEY) {
+    console.error("MISTRAL_API_KEY environment variable is required");
+    process.exit(1);
+  }
+
+  console.log("Testing Mistral LLM implementation...");
+
+  // Initialize MistralLLM
+  const mistral = new MistralLLM({
+    apiKey: process.env.MISTRAL_API_KEY,
+    model: "mistral-tiny-latest", // You can change to other models like mistral-small-latest
+  });
+
+  try {
+    // Test simple chat completion
+    console.log("Testing simple chat completion:");
+    const chatResponse = await mistral.generateChat([
+      { role: "system", content: "You are a helpful assistant." },
+      { role: "user", content: "What is the capital of France?" },
+    ]);
+
+    console.log("Chat response:");
+    console.log(`Role: ${chatResponse.role}`);
+    console.log(`Content: ${chatResponse.content}\n`);
+
+    // Test with functions/tools
+    console.log("Testing tool calling:");
+    const tools = [
+      {
+        type: "function",
+        function: {
+          name: "get_weather",
+          description: "Get the current weather in a given location",
+          parameters: {
+            type: "object",
+            properties: {
+              location: {
+                type: "string",
+                description: "The city and state, e.g. San Francisco, CA",
+              },
+              unit: {
+                type: "string",
+                enum: ["celsius", "fahrenheit"],
+                description: "The unit of temperature",
+              },
+            },
+            required: ["location"],
+          },
+        },
+      },
+    ];
+
+    const toolResponse = await mistral.generateResponse(
+      [
+        { role: "system", content: "You are a helpful assistant." },
+        { role: "user", content: "What's the weather like in Paris, France?" },
+      ],
+      undefined,
+      tools,
+    );
+
+    console.log("Tool response:", toolResponse);
+
+    console.log("\n✅ All tests completed successfully");
+  } catch (error) {
+    console.error("Error testing Mistral LLM:", error);
+  }
+}
+
+testMistral().catch(console.error);
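One subtlety the example glosses over: `generateResponse` returns `string | LLMResponse` (see `mistral.ts` below), so `toolResponse` must be narrowed before tool calls can be dispatched. A hedged consumer-side sketch; `getWeather` is a hypothetical stub, and the response shapes come from this PR's `mistral.ts`, not from the Mistral SDK:

```typescript
// Sketch: narrowing the string | LLMResponse union returned by
// generateResponse. getWeather is a hypothetical stub for illustration.
type ToolCall = { name: string; arguments: string };
type LLMResponse = { content: string; role: string; toolCalls?: ToolCall[] };

async function getWeather(args: { location: string; unit?: string }) {
  return { location: args.location, tempC: 18, condition: "cloudy" }; // stub
}

async function dispatch(response: string | LLMResponse): Promise<void> {
  if (typeof response === "string") {
    console.log("Plain completion:", response);
    return;
  }
  for (const call of response.toolCalls ?? []) {
    if (call.name === "get_weather") {
      // mistral.ts JSON-stringifies non-string arguments, so parse here.
      console.log("get_weather ->", await getWeather(JSON.parse(call.arguments)));
    }
  }
}
```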
@@ -12,6 +12,7 @@ export * from "./llms/openai_structured";
 export * from "./llms/anthropic";
 export * from "./llms/groq";
 export * from "./llms/ollama";
+export * from "./llms/mistral";
 export * from "./vector_stores/base";
 export * from "./vector_stores/memory";
 export * from "./vector_stores/qdrant";
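With this re-export in place, downstream code can presumably import the class from the package entry point rather than a deep path. A sketch, assuming the published `mem0ai/oss` entry maps to this `index.ts` barrel file (that mapping is an assumption, not shown in the diff):

```typescript
// Assumes "mem0ai/oss" resolves to this index.ts barrel file.
import { MistralLLM } from "mem0ai/oss";

const llm = new MistralLLM({
  apiKey: process.env.MISTRAL_API_KEY || "",
  model: "mistral-small-latest",
});
```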
mem0-ts/src/oss/src/llms/mistral.ts | 112 (new file)
@@ -0,0 +1,112 @@
+import { Mistral } from "@mistralai/mistralai";
+import { LLM, LLMResponse } from "./base";
+import { LLMConfig, Message } from "../types";
+
+export class MistralLLM implements LLM {
+  private client: Mistral;
+  private model: string;
+
+  constructor(config: LLMConfig) {
+    if (!config.apiKey) {
+      throw new Error("Mistral API key is required");
+    }
+    this.client = new Mistral({
+      apiKey: config.apiKey,
+    });
+    this.model = config.model || "mistral-tiny-latest";
+  }
+
+  // Helper function to convert content to string
+  private contentToString(content: any): string {
+    if (typeof content === "string") {
+      return content;
+    }
+    if (Array.isArray(content)) {
+      // Handle ContentChunk array - extract text content
+      return content
+        .map((chunk) => {
+          if (chunk.type === "text") {
+            return chunk.text;
+          } else {
+            return JSON.stringify(chunk);
+          }
+        })
+        .join("");
+    }
+    return String(content || "");
+  }
+
+  async generateResponse(
+    messages: Message[],
+    responseFormat?: { type: string },
+    tools?: any[],
+  ): Promise<string | LLMResponse> {
+    const response = await this.client.chat.complete({
+      model: this.model,
+      messages: messages.map((msg) => ({
+        role: msg.role as "system" | "user" | "assistant",
+        content:
+          typeof msg.content === "string"
+            ? msg.content
+            : JSON.stringify(msg.content),
+      })),
+      ...(tools && { tools }),
+      ...(responseFormat && { response_format: responseFormat }),
+    });
+
+    if (!response || !response.choices || response.choices.length === 0) {
+      return "";
+    }
+
+    const message = response.choices[0].message;
+
+    if (!message) {
+      return "";
+    }
+
+    if (message.toolCalls && message.toolCalls.length > 0) {
+      return {
+        content: this.contentToString(message.content),
+        role: message.role || "assistant",
+        toolCalls: message.toolCalls.map((call) => ({
+          name: call.function.name,
+          arguments:
+            typeof call.function.arguments === "string"
+              ? call.function.arguments
+              : JSON.stringify(call.function.arguments),
+        })),
+      };
+    }
+
+    return this.contentToString(message.content);
+  }
+
+  async generateChat(messages: Message[]): Promise<LLMResponse> {
+    const formattedMessages = messages.map((msg) => ({
+      role: msg.role as "system" | "user" | "assistant",
+      content:
+        typeof msg.content === "string"
+          ? msg.content
+          : JSON.stringify(msg.content),
+    }));
+
+    const response = await this.client.chat.complete({
+      model: this.model,
+      messages: formattedMessages,
+    });
+
+    if (!response || !response.choices || response.choices.length === 0) {
+      return {
+        content: "",
+        role: "assistant",
+      };
+    }
+
+    const message = response.choices[0].message;
+
+    return {
+      content: this.contentToString(message.content),
+      role: message.role || "assistant",
+    };
+  }
+}
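Besides `tools`, `generateResponse` forwards an optional `responseFormat`, spread into the request as `response_format`. A hedged sketch of asking for JSON-mode output; whether a given model honors `{ type: "json_object" }`, and whether the Mistral SDK expects this exact key, depends on `@mistralai/mistralai` rather than on this diff:

```typescript
import { MistralLLM } from "./mistral";

const llm = new MistralLLM({
  apiKey: process.env.MISTRAL_API_KEY || "",
  model: "mistral-small-latest",
});

// responseFormat is passed through as response_format on the request body.
const raw = await llm.generateResponse(
  [
    { role: "system", content: "Reply with a JSON object only." },
    { role: "user", content: 'Extract {"city": ...} from: "I live in Paris."' },
  ],
  { type: "json_object" },
);
// No tools were passed, so the string branch of the union is expected here.
if (typeof raw === "string") console.log(JSON.parse(raw));
```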
@@ -4,6 +4,7 @@ import { OpenAILLM } from "../llms/openai";
 import { OpenAIStructuredLLM } from "../llms/openai_structured";
 import { AnthropicLLM } from "../llms/anthropic";
 import { GroqLLM } from "../llms/groq";
+import { MistralLLM } from "../llms/mistral";
 import { MemoryVectorStore } from "../vector_stores/memory";
 import {
   EmbeddingConfig,
@@ -55,6 +56,8 @@ export class LLMFactory {
         return new OllamaLLM(config);
       case "google":
         return new GoogleLLM(config);
+      case "mistral":
+        return new MistralLLM(config);
       default:
         throw new Error(`Unsupported LLM provider: ${provider}`);
     }
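The new `case` is what lets the `provider: 'mistral'` string in user config resolve to the concrete class, so the `Memory` constructor path shown in the docs works without a direct `MistralLLM` import. A sketch of the dispatch this hunk enables; the static `create(provider, config)` signature and the import path are assumptions, since only the `switch` body appears in the diff:

```typescript
import { LLMFactory } from "../utils/factory"; // path assumed for the sketch

const llm = LLMFactory.create("mistral", {
  apiKey: process.env.MISTRAL_API_KEY || "",
  model: "mistral-tiny-latest",
});
// llm is a MistralLLM; an unknown provider string still falls through to
// the default case and throws `Unsupported LLM provider: ...`.
```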