diff --git a/docs/changelog/overview.mdx b/docs/changelog/overview.mdx
index 465e24d6..9fca91c9 100644
--- a/docs/changelog/overview.mdx
+++ b/docs/changelog/overview.mdx
@@ -127,6 +127,14 @@ mode: "wide"
+
+**New Features:**
+- **Mistral LLM:** Added Mistral LLM integration in OSS
+
+**Improvements:**
+- **Zod:** Updated Zod to 3.24.1 to avoid conflicts with other packages
+
+
 **Improvements:**
 - **Client:** Added support for Mem0 to work with Chrome Extensions
diff --git a/docs/components/llms/models/mistral_AI.mdx b/docs/components/llms/models/mistral_AI.mdx
index fdecd625..632d4877 100644
--- a/docs/components/llms/models/mistral_AI.mdx
+++ b/docs/components/llms/models/mistral_AI.mdx
@@ -2,11 +2,12 @@
 title: Mistral AI
 ---
 
-To use mistral's models, please Obtain the Mistral AI api key from their [console](https://console.mistral.ai/). Set the `MISTRAL_API_KEY` environment variable to use the model as given below in the example.
+To use Mistral's models, obtain a Mistral AI API key from their [console](https://console.mistral.ai/), then set the `MISTRAL_API_KEY` environment variable as shown in the examples below.
 
 ## Usage
 
-```python
+
+```python Python
 import os
 from mem0 import Memory
@@ -34,6 +35,32 @@ messages = [
 m.add(messages, user_id="alice", metadata={"category": "movies"})
 ```
 
+```typescript TypeScript
+import { Memory } from 'mem0ai/oss';
+
+const config = {
+  llm: {
+    provider: 'mistral',
+    config: {
+      apiKey: process.env.MISTRAL_API_KEY || '',
+      model: 'mistral-tiny-latest', // Or 'mistral-small-latest', 'mistral-medium-latest', etc.
+      temperature: 0.1,
+      maxTokens: 2000,
+    },
+  },
+};
+
+const memory = new Memory(config);
+const messages = [
+  { role: "user", content: "I'm planning to watch a movie tonight. Any recommendations?" },
+  { role: "assistant", content: "How about thriller movies? They can be quite engaging." },
+  { role: "user", content: "I'm not a big fan of thriller movies but I love sci-fi movies." },
+  { role: "assistant", content: "Got it! I'll avoid thriller recommendations and suggest sci-fi movies in the future." },
+];
+await memory.add(messages, { userId: "alice", metadata: { category: "movies" } });
+```
+
+
 ## Config
 
 All available parameters for the `mistral` config are present in [Master List of All Params in Config](../config).
\ No newline at end of file
diff --git a/mem0-ts/package.json b/mem0-ts/package.json
index d634ccde..96fedd42 100644
--- a/mem0-ts/package.json
+++ b/mem0-ts/package.json
@@ -1,6 +1,6 @@
 {
   "name": "mem0ai",
-  "version": "2.1.15",
+  "version": "2.1.16",
   "description": "The Memory Layer For Your AI Apps",
   "main": "./dist/index.js",
   "module": "./dist/index.mjs",
@@ -93,13 +93,13 @@
   "dependencies": {
     "axios": "1.7.7",
     "openai": "4.28.0",
-    "redis": "^4.6.13",
     "uuid": "9.0.1",
-    "zod": "3.22.4"
+    "zod": "^3.24.1"
   },
   "peerDependencies": {
     "@anthropic-ai/sdk": "0.18.0",
     "@qdrant/js-client-rest": "1.13.0",
+    "@mistralai/mistralai": "^1.5.2",
     "@google/genai": "^0.7.0",
     "@supabase/supabase-js": "^2.49.1",
     "@types/jest": "29.5.14",
@@ -109,7 +109,7 @@
     "neo4j-driver": "^5.28.1",
     "ollama": "^0.5.14",
     "pg": "8.11.3",
-    "redis": "4.7.0",
+    "redis": "^4.6.13",
     "sqlite3": "5.1.7"
   },
   "engines": {
diff --git a/mem0-ts/pnpm-lock.yaml b/mem0-ts/pnpm-lock.yaml
index c8f1427e..a2e5b057 100644
--- a/mem0-ts/pnpm-lock.yaml
+++ b/mem0-ts/pnpm-lock.yaml
@@ -13,6 +13,9 @@ importers:
       "@google/genai":
         specifier: ^0.7.0
         version: 0.7.0(encoding@0.1.13)
+      "@mistralai/mistralai":
+        specifier: ^1.5.2
+        version: 1.5.2(zod@3.24.2)
       "@qdrant/js-client-rest":
         specifier: 1.13.0
         version: 1.13.0(typescript@5.5.4)
@@ -47,7 +50,7 @@ importers:
       pg:
         specifier: 8.11.3
         version: 8.11.3
       redis:
-        specifier: 4.7.0
+        specifier: ^4.6.13
         version: 4.7.0
       sqlite3:
         specifier: 5.1.7
@@ -56,8 +59,8 @@ importers:
       uuid:
         specifier: 9.0.1
         version: 9.0.1
       zod:
-        specifier: 3.22.4
-        version: 3.22.4
+        specifier: ^3.24.1
+        version: 3.24.2
     devDependencies:
       "@types/node":
         specifier: ^22.7.6
@@ -793,6 +796,14 @@ packages:
       integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==,
     }

+  "@mistralai/mistralai@1.5.2":
+    resolution:
+      {
+        integrity: sha512-mBTIDQmuAX9RowMYteZFHJIYlEwDcHzzaxgXzrFtlvH9CkKXK7R1VnZ1sZSe+uLMg0dIXUVdPRUh1SwyFeSqXw==,
+      }
+    peerDependencies:
+      zod: ">= 3"
+
   "@nodelib/fs.scandir@2.1.5":
     resolution:
       {
@@ -4717,10 +4728,18 @@ packages:
       }
     engines: { node: ">=10" }

-  zod@3.22.4:
+  zod-to-json-schema@3.24.5:
     resolution:
       {
-        integrity: sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==,
+        integrity: sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==,
+      }
+    peerDependencies:
+      zod: ^3.24.1
+
+  zod@3.24.2:
+    resolution:
+      {
+        integrity: sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==,
       }

 snapshots:
@@ -5227,6 +5246,11 @@ snapshots:
     "@jridgewell/resolve-uri": 3.1.2
     "@jridgewell/sourcemap-codec": 1.5.0

+  "@mistralai/mistralai@1.5.2(zod@3.24.2)":
+    dependencies:
+      zod: 3.24.2
+      zod-to-json-schema: 3.24.5(zod@3.24.2)
+
   "@nodelib/fs.scandir@2.1.5":
     dependencies:
       "@nodelib/fs.stat": 2.0.5
@@ -7748,4 +7772,8 @@ snapshots:

   yocto-queue@0.1.0: {}

-  zod@3.22.4: {}
+  zod-to-json-schema@3.24.5(zod@3.24.2):
+    dependencies:
+      zod: 3.24.2
+
+  zod@3.24.2: {}
diff --git a/mem0-ts/src/oss/examples/llms/mistral-example.ts b/mem0-ts/src/oss/examples/llms/mistral-example.ts
new file mode 100644
index 00000000..e39d602a
--- /dev/null
+++ b/mem0-ts/src/oss/examples/llms/mistral-example.ts
@@ -0,0 +1,78 @@
+import dotenv from "dotenv";
+import { MistralLLM } from "../../src/llms/mistral";
+
+// Load environment variables
+dotenv.config();
+
+async function testMistral() {
+  // Check for API key
+  if (!process.env.MISTRAL_API_KEY) {
+    console.error("MISTRAL_API_KEY environment variable is required");
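+    // Exit with a non-zero status so shell scripts and CI runs detect the missing key.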
+    process.exit(1);
+  }
+
+  console.log("Testing Mistral LLM implementation...");
+
+  // Initialize MistralLLM
+  const mistral = new MistralLLM({
+    apiKey: process.env.MISTRAL_API_KEY,
+    model: "mistral-tiny-latest", // You can change to other models like mistral-small-latest
+  });
+
+  try {
+    // Test simple chat completion
+    console.log("Testing simple chat completion:");
+    const chatResponse = await mistral.generateChat([
+      { role: "system", content: "You are a helpful assistant." },
+      { role: "user", content: "What is the capital of France?" },
+    ]);
+
+    console.log("Chat response:");
+    console.log(`Role: ${chatResponse.role}`);
+    console.log(`Content: ${chatResponse.content}\n`);
+
+    // Test with functions/tools
+    console.log("Testing tool calling:");
+    const tools = [
+      {
+        type: "function",
+        function: {
+          name: "get_weather",
+          description: "Get the current weather in a given location",
+          parameters: {
+            type: "object",
+            properties: {
+              location: {
+                type: "string",
+                description: "The city and state, e.g. San Francisco, CA",
+              },
+              unit: {
+                type: "string",
+                enum: ["celsius", "fahrenheit"],
+                description: "The unit of temperature",
+              },
+            },
+            required: ["location"],
+          },
+        },
+      },
+    ];
+
+    const toolResponse = await mistral.generateResponse(
+      [
+        { role: "system", content: "You are a helpful assistant." },
+        { role: "user", content: "What's the weather like in Paris, France?" },
+      ],
+      undefined,
+      tools,
+    );
+
+    console.log("Tool response:", toolResponse);
+
+    console.log("\n✅ All tests completed successfully");
+  } catch (error) {
+    console.error("Error testing Mistral LLM:", error);
+  }
+}
+
+testMistral().catch(console.error);
diff --git a/mem0-ts/src/oss/src/index.ts b/mem0-ts/src/oss/src/index.ts
index 195d9738..13508000 100644
--- a/mem0-ts/src/oss/src/index.ts
+++ b/mem0-ts/src/oss/src/index.ts
@@ -12,6 +12,7 @@ export * from "./llms/openai_structured";
 export * from "./llms/anthropic";
 export * from "./llms/groq";
 export * from "./llms/ollama";
+export * from "./llms/mistral";
 export * from "./vector_stores/base";
 export * from "./vector_stores/memory";
 export * from "./vector_stores/qdrant";
diff --git a/mem0-ts/src/oss/src/llms/mistral.ts b/mem0-ts/src/oss/src/llms/mistral.ts
new file mode 100644
index 00000000..a80972b6
--- /dev/null
+++ b/mem0-ts/src/oss/src/llms/mistral.ts
@@ -0,0 +1,112 @@
+import { Mistral } from "@mistralai/mistralai";
+import { LLM, LLMResponse } from "./base";
+import { LLMConfig, Message } from "../types";
+
+export class MistralLLM implements LLM {
+  private client: Mistral;
+  private model: string;
+
+  constructor(config: LLMConfig) {
+    if (!config.apiKey) {
+      throw new Error("Mistral API key is required");
+    }
+    this.client = new Mistral({
+      apiKey: config.apiKey,
+    });
+    this.model = config.model || "mistral-tiny-latest";
+  }
+
+  // Helper function to convert content to string
+  private contentToString(content: any): string {
+    if (typeof content === "string") {
+      return content;
+    }
+    if (Array.isArray(content)) {
+      // Handle ContentChunk array - extract text content
+      return content
+        .map((chunk) => {
+          if (chunk.type === "text") {
+            return chunk.text;
+          } else {
+            return JSON.stringify(chunk);
+          }
+        })
+        .join("");
+    }
+    return String(content || "");
+  }
+
+  async generateResponse(
+    messages: Message[],
+    responseFormat?: { type: string },
+    tools?: any[],
+  ): Promise<string | LLMResponse> {
+    const response = await this.client.chat.complete({
+      model: this.model,
+      messages: messages.map((msg) => ({
+        role: msg.role as "system" | "user" | "assistant",
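+        // Non-string content is JSON-serialized below; the SDK also accepts
+        // content chunks, but a plain string keeps this adapter simple.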
+        content:
+          typeof msg.content === "string"
+            ? msg.content
+            : JSON.stringify(msg.content),
+      })),
+      ...(tools && { tools }),
+      // The v1 SDK expects camelCase `responseFormat`; a snake_case key would be silently ignored.
+      ...(responseFormat && { responseFormat: responseFormat as any }),
+    });
+
+    if (!response || !response.choices || response.choices.length === 0) {
+      return "";
+    }
+
+    const message = response.choices[0].message;
+
+    if (!message) {
+      return "";
+    }
+
+    if (message.toolCalls && message.toolCalls.length > 0) {
+      return {
+        content: this.contentToString(message.content),
+        role: message.role || "assistant",
+        toolCalls: message.toolCalls.map((call) => ({
+          name: call.function.name,
+          arguments:
+            typeof call.function.arguments === "string"
+              ? call.function.arguments
+              : JSON.stringify(call.function.arguments),
+        })),
+      };
+    }
+
+    return this.contentToString(message.content);
+  }
+
+  async generateChat(messages: Message[]): Promise<LLMResponse> {
+    const formattedMessages = messages.map((msg) => ({
+      role: msg.role as "system" | "user" | "assistant",
+      content:
+        typeof msg.content === "string"
+          ? msg.content
+          : JSON.stringify(msg.content),
+    }));
+
+    const response = await this.client.chat.complete({
+      model: this.model,
+      messages: formattedMessages,
+    });
+
+    if (!response || !response.choices || response.choices.length === 0) {
+      return {
+        content: "",
+        role: "assistant",
+      };
+    }
+
+    const message = response.choices[0].message;
+
+    return {
+      content: this.contentToString(message.content),
+      role: message.role || "assistant",
+    };
+  }
+}
diff --git a/mem0-ts/src/oss/src/utils/factory.ts b/mem0-ts/src/oss/src/utils/factory.ts
index 77834417..85ba7f83 100644
--- a/mem0-ts/src/oss/src/utils/factory.ts
+++ b/mem0-ts/src/oss/src/utils/factory.ts
@@ -4,6 +4,7 @@ import { OpenAILLM } from "../llms/openai";
 import { OpenAIStructuredLLM } from "../llms/openai_structured";
 import { AnthropicLLM } from "../llms/anthropic";
 import { GroqLLM } from "../llms/groq";
+import { MistralLLM } from "../llms/mistral";
 import { MemoryVectorStore } from "../vector_stores/memory";
 import {
   EmbeddingConfig,
@@ -55,6 +56,8 @@ export class LLMFactory {
         return new OllamaLLM(config);
       case "google":
         return new GoogleLLM(config);
+      case "mistral":
+        return new MistralLLM(config);
       default:
         throw new Error(`Unsupported LLM provider: ${provider}`);
     }
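Taken together, the changes above register a new provider in the factory. For reference, a minimal usage sketch of the new path; it assumes `LLMFactory.create(provider, config)` is a static method and that the factory is re-exported from the package entry point, both inferred from the diff rather than confirmed by it:

```typescript
// Hypothetical end-to-end check of the new "mistral" factory branch.
// Assumptions: LLMFactory.create is static and exported from "mem0ai/oss".
import { LLMFactory } from "mem0ai/oss";

async function main() {
  // Dispatches to the MistralLLM class added in this diff.
  const llm = LLMFactory.create("mistral", {
    apiKey: process.env.MISTRAL_API_KEY || "",
    model: "mistral-tiny-latest",
  });

  // generateChat resolves to an LLMResponse: { content, role }.
  const reply = await llm.generateChat([
    { role: "user", content: "Reply with one word: ready?" },
  ]);
  console.log(reply.content);
}

main().catch(console.error);
```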