Add Gemini Model Support to Vercel AI SDK Provider (#3094)
This commit is contained in:
@@ -225,6 +225,18 @@ const memories = await getMemories(prompt, { user_id: "borat", mem0ApiKey: "m0-x
|
||||
|
||||
The `getMemories` function will return an object with two keys: `results` and `relations`, if `enable_graph` is set to `true`. Otherwise, it will return an array of objects.
|
||||
|
||||
## Supported LLM Providers
|
||||
|
||||
| Provider | Configuration Value |
|-----------|---------------------|
| OpenAI | openai |
| Anthropic | anthropic |
| Gemini | gemini |
| Google | google |
| Mistral | mistral |
| Groq | groq |
|
||||
> **Note**: You can use `google` as the provider for Gemini (Google) models. The `gemini` and `google` values are equivalent; both internally use the `@ai-sdk/google` package.
|
||||
|
||||
## Key Features
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@mem0/vercel-ai-provider",
|
||||
"version": "1.0.6",
|
||||
"version": "1.0.7",
|
||||
"description": "Vercel AI Provider for providing memory to LLMs",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.mjs",
|
||||
|
||||
@@ -57,6 +57,7 @@ class Mem0AITextGenerator implements LanguageModelV1 {
|
||||
})(modelId);
|
||||
break;
|
||||
case "google":
|
||||
case "gemini":
|
||||
this.languageModel = createGoogleGenerativeAI({
|
||||
apiKey: config?.apiKey,
|
||||
...provider_config as GoogleGenerativeAIProviderSettings,
|
||||
|
||||
Reference in New Issue
Block a user