diff --git a/docs/integrations/vercel-ai-sdk.mdx b/docs/integrations/vercel-ai-sdk.mdx index 810ff337..e49cf8a4 100644 --- a/docs/integrations/vercel-ai-sdk.mdx +++ b/docs/integrations/vercel-ai-sdk.mdx @@ -225,6 +225,18 @@ const memories = await getMemories(prompt, { user_id: "borat", mem0ApiKey: "m0-x The `getMemories` function will return an object with two keys: `results` and `relations`, if `enable_graph` is set to `true`. Otherwise, it will return an array of objects. +## Supported LLM Providers + +| Provider | Configuration Value | +|----------|-------------------| +| OpenAI | openai | +| Anthropic | anthropic | +| Gemini | gemini | +| Google | google | +| Mistral | mistral | +| Groq | groq | + +> **Note**: You can use `google` as the provider for Gemini (Google) models. They are the same and internally use the `@ai-sdk/google` package. ## Key Features diff --git a/vercel-ai-sdk/package.json b/vercel-ai-sdk/package.json index a298dc93..acbdfdb3 100644 --- a/vercel-ai-sdk/package.json +++ b/vercel-ai-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@mem0/vercel-ai-provider", - "version": "1.0.6", + "version": "1.0.7", "description": "Vercel AI Provider for providing memory to LLMs", "main": "./dist/index.js", "module": "./dist/index.mjs", diff --git a/vercel-ai-sdk/src/provider-response-provider.ts b/vercel-ai-sdk/src/provider-response-provider.ts index 651dfc4a..c9c2fdf0 100644 --- a/vercel-ai-sdk/src/provider-response-provider.ts +++ b/vercel-ai-sdk/src/provider-response-provider.ts @@ -57,6 +57,7 @@ class Mem0AITextGenerator implements LanguageModelV1 { })(modelId); break; case "google": + case "gemini": this.languageModel = createGoogleGenerativeAI({ apiKey: config?.apiKey, ...provider_config as GoogleGenerativeAIProviderSettings,