Vercel AI SDK / Graph Memory (#2601)
Co-authored-by: Deshraj Yadav <deshraj@gatech.edu>
This commit is contained in:
@@ -462,5 +462,15 @@ mode: "wide"
|
||||
</Update>
|
||||
|
||||
</Tab>
|
||||
|
||||
<Tab title="Vercel AI SDK">
|
||||
|
||||
<Update label="2025-05-01" description="v1.0.1">
|
||||
**New Features:**
|
||||
- **Vercel AI SDK:** Added support for graph memories
|
||||
</Update>
|
||||
|
||||
</Tab>
|
||||
|
||||
</Tabs>
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ title: Vercel AI SDK
|
||||
The [**Mem0 AI SDK Provider**](https://www.npmjs.com/package/@mem0/vercel-ai-provider) is a library developed by **Mem0** to integrate with the Vercel AI SDK. This library brings enhanced AI interaction capabilities to your applications by introducing persistent memory functionality.
|
||||
|
||||
<Note type="info">
|
||||
🎉 Exciting news! Mem0 AI SDK now supports <strong>Tools Call</strong>.
|
||||
🎉 Exciting news! Mem0 AI SDK now supports <strong>Graph Memory</strong>.
|
||||
</Note>
|
||||
|
||||
## Overview
|
||||
@@ -81,6 +81,8 @@ npm install @mem0/vercel-ai-provider
|
||||
|
||||
> `getMemories` will return raw memories in the form of an array of objects, while `retrieveMemories` will return a string containing a system prompt with the retrieved memories embedded in it.
|
||||
|
||||
> `getMemories` returns an object with two keys: `results` and `relations`, if `enable_graph` is enabled. Otherwise, it returns an array of objects.
|
||||
|
||||
### 1. Basic Text Generation with Memory Context
|
||||
|
||||
```typescript
|
||||
@@ -205,6 +207,24 @@ console.log(sources);
|
||||
|
||||
The same can be done for `streamText` as well.
|
||||
|
||||
## Graph Memory
|
||||
|
||||
Mem0 AI SDK now supports Graph Memory. You can enable it by setting `enable_graph` to `true` in the `mem0Config` object.
|
||||
|
||||
```typescript
|
||||
const mem0 = createMem0({
|
||||
mem0Config: { enable_graph: true },
|
||||
});
|
||||
```
|
||||
|
||||
You can also pass `enable_graph` in the standalone functions. This includes `getMemories`, `retrieveMemories`, and `addMemories`.
|
||||
|
||||
```typescript
|
||||
const memories = await getMemories(prompt, { user_id: "borat", mem0ApiKey: "m0-xxx", enable_graph: true });
|
||||
```
|
||||
|
||||
The `getMemories` function will return an object with two keys: `results` and `relations`, if `enable_graph` is set to `true`. Otherwise, it will return an array of objects.
|
||||
|
||||
|
||||
## Key Features
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mem0/vercel-ai-provider",
|
||||
"version": "1.0.0",
|
||||
"version": "1.0.1",
|
||||
"description": "Vercel AI Provider for providing memory to LLMs",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.mjs",
|
||||
|
||||
@@ -37,21 +37,39 @@ export class Mem0GenericLanguageModel implements LanguageModelV1 {
|
||||
});
|
||||
|
||||
// Get Memories
|
||||
const memories = await getMemories(messagesPrompts, mem0Config);
|
||||
let memories = await getMemories(messagesPrompts, mem0Config);
|
||||
|
||||
const mySystemPrompt = "These are the memories I have stored. Give more weightage to the question by users and try to answer that first. You have to modify your answer based on the memories I have provided. If the memories are irrelevant you can ignore them. Also don't reply to this section of the prompt, or the memories, they are only for your reference. The System prompt starts after text System Message: \n\n";
|
||||
|
||||
const isGraphEnabled = mem0Config.enable_graph;
|
||||
|
||||
let memoriesText = "";
|
||||
let memoriesText2 = "";
|
||||
try {
|
||||
// @ts-ignore
|
||||
memoriesText = memories.map((memory: any) => {
|
||||
return `Memory: ${memory.memory}\n\n`;
|
||||
}).join("\n\n");
|
||||
if (isGraphEnabled) {
|
||||
memoriesText = memories.results.map((memory: any) => {
|
||||
return `Memory: ${memory.memory}\n\n`;
|
||||
}).join("\n\n");
|
||||
|
||||
memoriesText2 = memories.relations.map((memory: any) => {
|
||||
return `Relation: ${memory.source} -> ${memory.relationship} -> ${memory.target} \n\n`;
|
||||
}).join("\n\n");
|
||||
} else {
|
||||
memoriesText = memories.map((memory: any) => {
|
||||
return `Memory: ${memory.memory}\n\n`;
|
||||
}).join("\n\n");
|
||||
}
|
||||
} catch(e) {
|
||||
console.error("Error while parsing memories");
|
||||
}
|
||||
|
||||
const memoriesPrompt = `System Message: ${mySystemPrompt} ${memoriesText}`;
|
||||
let graphPrompt = "";
|
||||
if (isGraphEnabled) {
|
||||
graphPrompt = `HERE ARE THE GRAPHS RELATIONS FOR THE PREFERENCES OF THE USER:\n\n ${memoriesText2}`;
|
||||
}
|
||||
|
||||
const memoriesPrompt = `System Message: ${mySystemPrompt} ${memoriesText} ${graphPrompt} `;
|
||||
|
||||
// System Prompt - The memories go as a system prompt
|
||||
const systemPrompt: LanguageModelV1Message = {
|
||||
@@ -64,6 +82,10 @@ export class Mem0GenericLanguageModel implements LanguageModelV1 {
|
||||
messagesPrompts.unshift(systemPrompt);
|
||||
}
|
||||
|
||||
if (isGraphEnabled) {
|
||||
memories = memories.results;
|
||||
}
|
||||
|
||||
return { memories, messagesPrompts };
|
||||
}
|
||||
|
||||
|
||||
@@ -28,6 +28,7 @@ export interface Mem0ConfigSettings {
|
||||
top_k?: number;
|
||||
threshold?: number;
|
||||
rerank?: boolean;
|
||||
enable_graph?: boolean;
|
||||
}
|
||||
|
||||
export interface Mem0ChatConfig extends Mem0ConfigSettings, Mem0ProviderSettings {}
|
||||
|
||||
@@ -71,7 +71,7 @@ const searchInternalMemories = async (query: string, config?: Mem0ConfigSettings
|
||||
environmentVariableName: "MEM0_API_KEY",
|
||||
description: "Mem0",
|
||||
})}`, 'Content-Type': 'application/json'},
|
||||
body: JSON.stringify({query, filters, ...config, top_k: config&&config.top_k || top_k, version: "v2", ...org_project_filters}),
|
||||
body: JSON.stringify({query, filters, ...config, top_k: config&&config.top_k || top_k, version: "v2", output_format: "v1.1", ...org_project_filters}),
|
||||
};
|
||||
const response = await fetch('https://api.mem0.ai/v2/memories/search/', options);
|
||||
const data = await response.json();
|
||||
@@ -109,12 +109,24 @@ const retrieveMemories = async (prompt: LanguageModelV1Prompt | string, config?:
|
||||
const message = typeof prompt === 'string' ? prompt : flattenPrompt(prompt);
|
||||
const systemPrompt = "These are the memories I have stored. Give more weightage to the question by users and try to answer that first. You have to modify your answer based on the memories I have provided. If the memories are irrelevant you can ignore them. Also don't reply to this section of the prompt, or the memories, they are only for your reference. The System prompt starts after text System Message: \n\n";
|
||||
const memories = await searchInternalMemories(message, config);
|
||||
let memoriesText = "";
|
||||
let memoriesText1 = "";
|
||||
let memoriesText2 = "";
|
||||
let graphPrompt = "";
|
||||
try{
|
||||
// @ts-ignore
|
||||
memoriesText = memories.map((memory: any)=>{
|
||||
memoriesText1 = memories.results.map((memory: any)=>{
|
||||
return `Memory: ${memory.memory}\n\n`;
|
||||
}).join("\n\n");
|
||||
|
||||
if (config?.enable_graph) {
|
||||
memoriesText2 = memories.relations.map((memory: any)=>{
|
||||
return `Relation: ${memory.source} -> ${memory.relationship} -> ${memory.target} \n\n`;
|
||||
}).join("\n\n");
|
||||
}
|
||||
|
||||
if (config?.enable_graph) {
|
||||
graphPrompt = `HERE ARE THE GRAPHS RELATIONS FOR THE PREFERENCES OF THE USER:\n\n ${memoriesText2}`;
|
||||
}
|
||||
}catch(e){
|
||||
console.error("Error while parsing memories");
|
||||
// console.log(e);
|
||||
@@ -122,7 +134,7 @@ const retrieveMemories = async (prompt: LanguageModelV1Prompt | string, config?:
|
||||
if(memories.length === 0){
|
||||
return "";
|
||||
}
|
||||
return `System Message: ${systemPrompt} ${memoriesText}`;
|
||||
return `System Message: ${systemPrompt} ${memoriesText1} ${graphPrompt}`;
|
||||
}
|
||||
|
||||
const getMemories = async (prompt: LanguageModelV1Prompt | string, config?: Mem0ConfigSettings)=>{
|
||||
@@ -131,6 +143,9 @@ const getMemories = async (prompt: LanguageModelV1Prompt | string, config?: Mem0
|
||||
try{
|
||||
// @ts-ignore
|
||||
memories = await searchInternalMemories(message, config);
|
||||
if (!config?.enable_graph) {
|
||||
memories = memories.results;
|
||||
}
|
||||
}
|
||||
catch(e){
|
||||
console.error("Error while searching memories");
|
||||
|
||||
Reference in New Issue
Block a user