(Feature) Vercel AI SDK (#2024)
examples/vercel-ai-sdk-chat-app/.gitattributes (vendored, new file)
@@ -0,0 +1,2 @@
# Auto detect text files and perform LF normalization
* text=auto

examples/vercel-ai-sdk-chat-app/.gitignore (vendored, new file)
@@ -0,0 +1,29 @@
**/.env
**/node_modules
**/dist
**/.DS_Store

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

node_modules
dist
dist-ssr
*.local

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

examples/vercel-ai-sdk-chat-app/components.json (new file)
@@ -0,0 +1,20 @@
{
  "$schema": "https://ui.shadcn.com/schema.json",
  "style": "new-york",
  "rsc": false,
  "tsx": true,
  "tailwind": {
    "config": "tailwind.config.js",
    "css": "src/index.css",
    "baseColor": "zinc",
    "cssVariables": true,
    "prefix": ""
  },
  "aliases": {
    "components": "@/components",
    "utils": "@/lib/utils",
    "ui": "@/components/ui",
    "lib": "@/lib",
    "hooks": "@/hooks"
  }
}

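Every UI component in this example imports a `cn` helper through the `utils` alias defined above (`@/lib/utils`), but `src/lib/utils.ts` itself is not part of this capture. A minimal sketch of the conventional shadcn-style implementation it presumably matches, built on the `clsx` and `tailwind-merge` packages declared in `package.json`:

// src/lib/utils.ts — assumed implementation; the actual file is not shown in this diff
import { clsx, type ClassValue } from "clsx"
import { twMerge } from "tailwind-merge"

// Merge conditional class names, letting later Tailwind classes win conflicts.
export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs))
}
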
examples/vercel-ai-sdk-chat-app/eslint.config.js (new file)
@@ -0,0 +1,28 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'

export default tseslint.config(
  { ignores: ['dist'] },
  {
    extends: [js.configs.recommended, ...tseslint.configs.recommended],
    files: ['**/*.{ts,tsx}'],
    languageOptions: {
      ecmaVersion: 2020,
      globals: globals.browser,
    },
    plugins: {
      'react-hooks': reactHooks,
      'react-refresh': reactRefresh,
    },
    rules: {
      ...reactHooks.configs.recommended.rules,
      'react-refresh/only-export-components': [
        'warn',
        { allowConstantExport: true },
      ],
    },
  },
)

examples/vercel-ai-sdk-chat-app/index.html (new file)
@@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <link rel="icon" type="image/svg+xml" href="/mem0_logo.jpeg" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>JustChat | Chat with AI</title>
  </head>
  <body>
    <div id="root"></div>
    <script type="module" src="/src/main.tsx"></script>
  </body>
</html>

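`index.html` loads `/src/main.tsx`, which is not included in this capture. A plausible sketch of the conventional Vite entry point it points at (whether the app wraps the tree in the `GlobalState` provider here or further down is a guess, and the `./index.css` import is assumed from the Tailwind config):

// src/main.tsx — hypothetical reconstruction, not part of the diff shown
import { StrictMode } from "react"
import { createRoot } from "react-dom/client"
import App from "./App"
import "./index.css"

createRoot(document.getElementById("root")!).render(
  <StrictMode>
    <App />
  </StrictMode>,
)
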
examples/vercel-ai-sdk-chat-app/package.json (new file)
@@ -0,0 +1,51 @@
{
  "name": "mem0-sdk-chat-bot",
  "private": true,
  "version": "0.0.0",
  "type": "module",
  "scripts": {
    "dev": "vite",
    "build": "tsc -b && vite build",
    "lint": "eslint .",
    "preview": "vite preview"
  },
  "dependencies": {
    "@mem0/vercel-ai-provider": "^0.0.7",
    "@radix-ui/react-avatar": "^1.1.1",
    "@radix-ui/react-dialog": "^1.1.2",
    "@radix-ui/react-icons": "^1.3.1",
    "@radix-ui/react-label": "^2.1.0",
    "@radix-ui/react-scroll-area": "^1.2.0",
    "@radix-ui/react-select": "^2.1.2",
    "@radix-ui/react-slot": "^1.1.0",
    "ai": "^3.4.31",
    "buffer": "^6.0.3",
    "class-variance-authority": "^0.7.0",
    "clsx": "^2.1.1",
    "framer-motion": "^11.11.11",
    "lucide-react": "^0.454.0",
    "react": "^18.3.1",
    "react-dom": "^18.3.1",
    "react-markdown": "^9.0.1",
    "tailwind-merge": "^2.5.4",
    "tailwindcss-animate": "^1.0.7",
    "zod": "^3.23.8"
  },
  "devDependencies": {
    "@eslint/js": "^9.13.0",
    "@types/node": "^22.8.6",
    "@types/react": "^18.3.12",
    "@types/react-dom": "^18.3.1",
    "@vitejs/plugin-react": "^4.3.3",
    "autoprefixer": "^10.4.20",
    "eslint": "^9.13.0",
    "eslint-plugin-react-hooks": "^5.0.0",
    "eslint-plugin-react-refresh": "^0.4.14",
    "globals": "^15.11.0",
    "postcss": "^8.4.47",
    "tailwindcss": "^3.4.14",
    "typescript": "~5.6.2",
    "typescript-eslint": "^8.11.0",
    "vite": "^5.4.10"
  }
}

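One dependency worth a note: `buffer` is the browser polyfill for Node's `Buffer`, which `GlobalContext.tsx` below imports from `'buffer'` for its audio-upload path. Vite does not provide a `Buffer` global, so apps that need one typically wire it up near the entry point; a hedged sketch of that conventional shim (this app may handle it differently, and no shim appears in the captured diff):

// Hypothetical polyfill wiring — the diff imports Buffer but no shim is shown.
import { Buffer } from "buffer"

// Expose a Node-style Buffer global to browser code that expects one.
;(globalThis as any).Buffer = (globalThis as any).Buffer ?? Buffer
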
examples/vercel-ai-sdk-chat-app/postcss.config.js (new file)
@@ -0,0 +1,6 @@
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

examples/vercel-ai-sdk-chat-app/public/mem0_logo.jpeg (binary, new file, 8.4 KiB — not shown)

examples/vercel-ai-sdk-chat-app/src/App.tsx (new file)
@@ -0,0 +1,13 @@
import Home from "./page"

function App() {
  return (
    <>
      <Home />
    </>
  )
}

export default App

examples/vercel-ai-sdk-chat-app/src/assets/mem0_logo.jpeg (binary, new file, 8.4 KiB — not shown)

examples/vercel-ai-sdk-chat-app/src/assets/react.svg (new file, 4.0 KiB)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>

examples/vercel-ai-sdk-chat-app/src/assets/user.jpg (binary, new file, 15 KiB — not shown)

@@ -0,0 +1,91 @@
import { Dispatch, SetStateAction, useContext, useEffect, useState } from 'react'
import { Button } from "@/components/ui/button"
import { Input } from "@/components/ui/input"
import { Label } from "@/components/ui/label"
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"
import { Dialog, DialogContent, DialogHeader, DialogTitle, DialogFooter } from "@/components/ui/dialog"
import GlobalContext from '@/contexts/GlobalContext'

export default function ApiSettingsPopup(props: { isOpen: boolean, setIsOpen: Dispatch<SetStateAction<boolean>> }) {
  const { isOpen, setIsOpen } = props
  const [mem0ApiKey, setMem0ApiKey] = useState('')
  const [providerApiKey, setProviderApiKey] = useState('')
  const [provider, setProvider] = useState('openai') // lowercase, matching the SelectItem values below
  const { selectorHandler, selectedOpenAIKey, selectedMem0Key, selectedProvider } = useContext(GlobalContext);

  const handleSave = () => {
    // Here you would typically save the settings to your backend or local storage
    selectorHandler(mem0ApiKey, providerApiKey, provider);
    setIsOpen(false)
  }

  useEffect(() => {
    if (selectedOpenAIKey) {
      setProviderApiKey(selectedOpenAIKey);
    }
    if (selectedMem0Key) {
      setMem0ApiKey(selectedMem0Key);
    }
    if (selectedProvider) {
      setProvider(selectedProvider);
    }
  }, [selectedOpenAIKey, selectedMem0Key, selectedProvider]);

  return (
    <>
      <Dialog open={isOpen} onOpenChange={setIsOpen}>
        <DialogContent className="sm:max-w-[425px]">
          <DialogHeader>
            <DialogTitle>API Configuration Settings</DialogTitle>
          </DialogHeader>
          <div className="grid gap-4 py-4">
            <div className="grid grid-cols-4 items-center gap-4">
              <Label htmlFor="mem0-api-key" className="text-right">
                Mem0 API Key
              </Label>
              <Input
                id="mem0-api-key"
                value={mem0ApiKey}
                onChange={(e) => setMem0ApiKey(e.target.value)}
                className="col-span-3 rounded-3xl"
              />
            </div>
            <div className="grid grid-cols-4 items-center gap-4">
              <Label htmlFor="provider-api-key" className="text-right">
                Provider API Key
              </Label>
              <Input
                id="provider-api-key"
                value={providerApiKey}
                onChange={(e) => setProviderApiKey(e.target.value)}
                className="col-span-3 rounded-3xl"
              />
            </div>
            <div className="grid grid-cols-4 items-center gap-4">
              <Label htmlFor="provider" className="text-right">
                Provider
              </Label>
              <Select value={provider} onValueChange={setProvider}>
                <SelectTrigger className="col-span-3 rounded-3xl">
                  <SelectValue placeholder="Select provider" />
                </SelectTrigger>
                <SelectContent className='rounded-3xl'>
                  <SelectItem value="openai" className='rounded-3xl'>OpenAI</SelectItem>
                  <SelectItem value="anthropic" className='rounded-3xl'>Anthropic</SelectItem>
                  <SelectItem value="cohere" className='rounded-3xl'>Cohere</SelectItem>
                  <SelectItem value="groq" className='rounded-3xl'>Groq</SelectItem>
                </SelectContent>
              </Select>
            </div>
          </div>
          <DialogFooter>
            <Button className='rounded-3xl' variant="outline" onClick={() => setIsOpen(false)}>Cancel</Button>
            <Button className='rounded-3xl' onClick={handleSave}>Save</Button>
          </DialogFooter>
        </DialogContent>
      </Dialog>
    </>
  )
}

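For orientation, the popup is presumably mounted from the page component (not included in this capture): the parent owns the `isOpen` flag, and the dialog writes the keys back through `selectorHandler`. A hypothetical caller:

// Illustrative only — the real page component and import path are not shown here.
import { useState } from "react"
import ApiSettingsPopup from "@/components/api-settings-popup" // path assumed

function SettingsButtonExample() {
  const [isOpen, setIsOpen] = useState(false)
  return (
    <>
      <button onClick={() => setIsOpen(true)}>Open API settings</button>
      <ApiSettingsPopup isOpen={isOpen} setIsOpen={setIsOpen} />
    </>
  )
}
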
@@ -0,0 +1,35 @@
import { Button } from "@/components/ui/button";
import { ChevronLeft, ChevronRight } from "lucide-react";
import React from "react";

const ChevronToggle = (props: {
  isMemoriesExpanded: boolean;
  setIsMemoriesExpanded: React.Dispatch<React.SetStateAction<boolean>>;
}) => {
  const { isMemoriesExpanded, setIsMemoriesExpanded } = props;
  return (
    <>
      <div className="relative">
        <div className="flex items-center absolute top-1/2 z-10">
          <Button
            variant="ghost"
            size="icon"
            className="h-8 w-8 border-y border rounded-lg relative right-10"
            onClick={() => setIsMemoriesExpanded(!isMemoriesExpanded)}
            aria-label={
              isMemoriesExpanded ? "Collapse memories" : "Expand memories"
            }
          >
            {isMemoriesExpanded ? (
              <ChevronRight className="h-4 w-4" />
            ) : (
              <ChevronLeft className="h-4 w-4" />
            )}
          </Button>
        </div>
      </div>
    </>
  );
};

export default ChevronToggle;

examples/vercel-ai-sdk-chat-app/src/components/header.tsx (new file)
@@ -0,0 +1,81 @@
import { Button } from "@/components/ui/button";
import { ChevronRight, X, RefreshCcw, Settings } from "lucide-react";
import { Dispatch, SetStateAction, useContext, useEffect, useState } from "react";
import GlobalContext from "../contexts/GlobalContext";
import { Input } from "./ui/input";

const Header = (props: {
  setIsSettingsOpen: Dispatch<SetStateAction<boolean>>;
}) => {
  const { setIsSettingsOpen } = props;
  const { selectUserHandler, clearUserHandler, selectedUser, clearConfiguration } = useContext(GlobalContext);
  const [userId, setUserId] = useState<string>("");

  const handleSelectUser = (e: React.ChangeEvent<HTMLInputElement>) => {
    setUserId(e.target.value);
  };

  const handleClearUser = () => {
    clearUserHandler();
    setUserId("");
  };

  const handleSubmit = () => {
    selectUserHandler(userId);
  };

  // New function to handle key down events
  const handleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
    if (e.key === 'Enter') {
      e.preventDefault(); // Prevent form submission if it's in a form
      handleSubmit();
    }
  };

  useEffect(() => {
    if (selectedUser) {
      setUserId(selectedUser);
    }
  }, [selectedUser]);

  return (
    <>
      <header className="border-b p-4 flex items-center justify-between">
        <div className="flex items-center space-x-2">
          <span className="text-xl font-semibold">Mem0 Assistant</span>
        </div>
        <div className="flex items-center space-x-2 text-sm">
          <div className="flex">
            <Input
              placeholder="UserId"
              className="w-full rounded-3xl pr-6 pl-4"
              value={userId}
              onChange={handleSelectUser}
              onKeyDown={handleKeyDown} // Attach the key down handler here
            />
            <Button variant="ghost" size="icon" onClick={handleClearUser} className="relative hover:bg-transparent hover:text-neutral-400 right-8">
              <X className="h-4 w-4" />
            </Button>
            <Button variant="ghost" size="icon" onClick={handleSubmit} className="relative right-6">
              <ChevronRight className="h-4 w-4" />
            </Button>
          </div>
          <div className="flex items-center space-x-2">
            <Button variant="ghost" size="icon" onClick={clearConfiguration}>
              <RefreshCcw className="h-4 w-4" />
            </Button>
            <Button
              variant="ghost"
              size="icon"
              onClick={() => setIsSettingsOpen(true)}
            >
              <Settings className="h-4 w-4" />
            </Button>
          </div>
        </div>
      </header>
    </>
  );
};

export default Header;

examples/vercel-ai-sdk-chat-app/src/components/input-area.tsx (new file)
@@ -0,0 +1,107 @@
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import GlobalContext from "@/contexts/GlobalContext";
import { FileInfo } from "@/types";
import { Images, Send, X } from "lucide-react";
import { useContext, useRef, useState } from "react";

const InputArea = () => {
  const [inputValue, setInputValue] = useState("");
  const { handleSend, selectedFile, setSelectedFile, setFile } = useContext(GlobalContext);
  const [loading, setLoading] = useState(false);

  const ref = useRef<HTMLInputElement>(null);
  const fileInputRef = useRef<HTMLInputElement>(null)

  const handleFileChange = (event: React.ChangeEvent<HTMLInputElement>) => {
    const file = event.target.files?.[0]
    if (file) {
      setSelectedFile({
        name: file.name,
        type: file.type,
        size: file.size
      })
      setFile(file)
    }
  }

  const handleSendController = async () => {
    setLoading(true);
    setInputValue("");
    await handleSend(inputValue);
    setLoading(false);

    // focus on input
    setTimeout(() => {
      ref.current?.focus();
    }, 0);
  };

  const handleClosePopup = () => {
    setSelectedFile(null)
    if (fileInputRef.current) {
      fileInputRef.current.value = ''
    }
  }

  return (
    <>
      <div className="border-t p-4">
        <div className="flex items-center space-x-2">
          <div className="relative bottom-3 left-5">
            <div className="absolute">
              <Input
                type="file"
                accept="image/*"
                onChange={handleFileChange}
                ref={fileInputRef}
                className="sr-only"
                id="file-upload"
              />
              <label
                htmlFor="file-upload"
                className="flex items-center justify-center w-6 h-6 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-200 cursor-pointer"
              >
                <Images className="h-4 w-4" />
              </label>
              {selectedFile && <FileInfoPopup file={selectedFile} onClose={handleClosePopup} />}
            </div>
          </div>
          <Input
            value={inputValue}
            onChange={(e) => setInputValue(e.target.value)}
            onKeyDown={(e) => e.key === "Enter" && handleSendController()}
            placeholder="Type a message..."
            className="flex-1 pl-10 rounded-3xl"
            disabled={loading}
            ref={ref}
          />
          <div className="relative right-14 bottom-5 flex">
            <Button className="absolute rounded-full w-10 h-10 bg-transparent hover:bg-transparent cursor-pointer z-20 text-primary" onClick={handleSendController} disabled={!inputValue.trim() || loading}>
              <Send className="h-8 w-8" size={50} />
            </Button>
          </div>
        </div>
      </div>
    </>
  );
};

const FileInfoPopup = ({ file, onClose }: { file: FileInfo, onClose: () => void }) => {
  return (
    <div className="relative bottom-36">
      <div className="absolute top-full left-0 mt-1 bg-white dark:bg-gray-800 p-2 rounded-md shadow-md border border-gray-200 dark:border-gray-700 z-10 w-48">
        <div className="flex justify-between items-center">
          <h3 className="font-semibold text-sm truncate">{file.name}</h3>
          <Button variant="ghost" size="sm" onClick={onClose} className="h-5 w-5 p-0">
            <X className="h-3 w-3" />
          </Button>
        </div>
        <p className="text-xs text-gray-500 dark:text-gray-400 truncate">Type: {file.type}</p>
        <p className="text-xs text-gray-500 dark:text-gray-400">Size: {(file.size / 1024).toFixed(2)} KB</p>
      </div>
    </div>
  )
}

export default InputArea;

examples/vercel-ai-sdk-chat-app/src/components/memories.tsx (new file)
@@ -0,0 +1,93 @@
import { Badge } from "@/components/ui/badge";
import { Card } from "@/components/ui/card";
import { ScrollArea } from "@radix-ui/react-scroll-area";
import { Memory } from "../types";
import GlobalContext from "@/contexts/GlobalContext";
import { useContext, useEffect, useState } from "react";
import { AnimatePresence, motion } from "framer-motion";

// eslint-disable-next-line @typescript-eslint/no-unused-vars
const MemoryItem = ({ memory, index }: { memory: Memory; index: number }) => {
  return (
    <motion.div
      layout
      initial={{ opacity: 0, y: 20 }}
      animate={{ opacity: 1, y: 0 }}
      exit={{ opacity: 0, y: -20 }}
      transition={{ duration: 0.3 }}
      key={memory.id}
      className="space-y-2"
    >
      <div className="flex items-start justify-between">
        <p className="text-sm font-medium">{memory.content}</p>
      </div>
      <div className="flex items-center space-x-2 text-xs text-muted-foreground">
        <span>{new Date(memory.timestamp).toLocaleString()}</span>
      </div>
      <div className="flex flex-wrap gap-1">
        {memory.tags.map((tag) => (
          <Badge key={tag} variant="secondary" className="text-xs">
            {tag}
          </Badge>
        ))}
      </div>
    </motion.div>
  );
};

const Memories = (props: { isMemoriesExpanded: boolean }) => {
  const { isMemoriesExpanded } = props;
  const { memories } = useContext(GlobalContext);

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  const [prevMemories, setPrevMemories] = useState<Memory[]>([]);

  // Track memory positions for animation
  useEffect(() => {
    setPrevMemories(memories);
  }, [memories]);

  return (
    <Card
      className={`border-l rounded-none flex flex-col transition-all duration-300 ${
        isMemoriesExpanded ? "w-80" : "w-0 overflow-hidden"
      }`}
    >
      <div className="px-4 py-[22px] border-b">
        <span className="font-semibold">
          Relevant Memories ({memories.length})
        </span>
      </div>
      {memories.length === 0 && (
        <motion.div
          initial={{ opacity: 0 }}
          animate={{ opacity: 1 }}
          className="p-4 text-center"
        >
          <span className="font-semibold">No relevant memories found.</span>
          <br />
          Only the relevant memories will be displayed here.
        </motion.div>
      )}
      <ScrollArea className="flex-1 p-4">
        <motion.div
          className="space-y-4"
          layout
        >
          <AnimatePresence mode="popLayout">
            {memories.map((memory: Memory, index: number) => (
              <MemoryItem
                key={memory.id}
                memory={memory}
                index={index}
              />
            ))}
          </AnimatePresence>
        </motion.div>
      </ScrollArea>
    </Card>
  );
};

export default Memories;

examples/vercel-ai-sdk-chat-app/src/components/messages.tsx (new file)
@@ -0,0 +1,102 @@
import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
import { ScrollArea } from "@/components/ui/scroll-area";
import { Message } from "../types";
import { useContext, useEffect, useRef } from "react";
import GlobalContext from "@/contexts/GlobalContext";
import Markdown from "react-markdown";
import Mem00Logo from "../assets/mem0_logo.jpeg";
import UserLogo from "../assets/user.jpg";

const Messages = () => {
  const { messages, thinking } = useContext(GlobalContext);
  const scrollAreaRef = useRef<HTMLDivElement>(null);

  // scroll to bottom
  useEffect(() => {
    if (scrollAreaRef.current) {
      scrollAreaRef.current.scrollTop += 40; // Scroll down by 40 pixels
    }
  }, [messages, thinking]);

  return (
    <>
      <ScrollArea ref={scrollAreaRef} className="flex-1 p-4 pr-10">
        <div className="space-y-4">
          {messages.map((message: Message) => (
            <div
              key={message.id}
              className={`flex ${
                message.sender === "user" ? "justify-end" : "justify-start"
              }`}
            >
              <div
                className={`flex items-start space-x-2 max-w-[80%] ${
                  message.sender === "user"
                    ? "flex-row-reverse space-x-reverse"
                    : "flex-row"
                }`}
              >
                <div className="h-full flex flex-col items-center justify-end">
                  <Avatar className="h-8 w-8">
                    <AvatarImage
                      src={
                        message.sender === "assistant" ? Mem00Logo : UserLogo
                      }
                    />
                    <AvatarFallback>
                      {message.sender === "assistant" ? "AI" : "U"}
                    </AvatarFallback>
                  </Avatar>
                </div>
                <div
                  className={`rounded-xl px-3 py-2 ${
                    message.sender === "user"
                      ? "bg-blue-500 text-white rounded-br-none"
                      : "bg-muted text-muted-foreground rounded-bl-none"
                  }`}
                >
                  {message.image && (
                    <div className="w-44 flex items-center justify-center overflow-hidden rounded-lg">
                      <img
                        src={message.image}
                        alt="Message attachment"
                        className="my-2 rounded-lg max-w-full h-auto w-44 mx-auto"
                      />
                    </div>
                  )}
                  <Markdown>{message.content}</Markdown>
                  <span className="text-xs opacity-50 mt-1 block text-end relative bottom-1 -mb-2">
                    {message.timestamp}
                  </span>
                </div>
              </div>
            </div>
          ))}
          {thinking && (
            <div className={`flex justify-start`}>
              <div
                className={`flex items-start space-x-2 max-w-[80%] flex-row`}
              >
                <Avatar className="h-8 w-8">
                  <AvatarImage src={Mem00Logo} />
                  <AvatarFallback>{"AI"}</AvatarFallback>
                </Avatar>
                <div
                  className={`rounded-lg p-3 bg-muted text-muted-foreground`}
                >
                  <div className="loader">
                    <div className="ball"></div>
                    <div className="ball"></div>
                    <div className="ball"></div>
                  </div>
                </div>
              </div>
            </div>
          )}
        </div>
      </ScrollArea>
    </>
  );
};

export default Messages;

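The `Message`, `Memory`, and `FileInfo` shapes imported from `@/types` are not included in this capture. Reconstructed from how the fields are used in the components above and in `GlobalContext.tsx` below (a best-effort sketch, not the committed file; the optionality of `image`/`audio` is an inference):

// src/types.ts — inferred from usage, not shown in this diff
export interface Message {
  id: string
  content: string
  sender: "user" | "assistant"
  timestamp: string
  image?: string // Base64 data URL for image attachments
  audio?: ArrayBuffer // raw bytes for audio attachments (exact type assumed)
}

export interface Memory {
  id: string
  content: string
  timestamp: string
  tags: string[]
}

export interface FileInfo {
  name: string
  type: string
  size: number
}
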
examples/vercel-ai-sdk-chat-app/src/components/ui/avatar.tsx (new file)
@@ -0,0 +1,50 @@
"use client"

import * as React from "react"
import * as AvatarPrimitive from "@radix-ui/react-avatar"

import { cn } from "@/lib/utils"

const Avatar = React.forwardRef<
  React.ElementRef<typeof AvatarPrimitive.Root>,
  React.ComponentPropsWithoutRef<typeof AvatarPrimitive.Root>
>(({ className, ...props }, ref) => (
  <AvatarPrimitive.Root
    ref={ref}
    className={cn(
      "relative flex h-10 w-10 shrink-0 overflow-hidden rounded-full",
      className
    )}
    {...props}
  />
))
Avatar.displayName = AvatarPrimitive.Root.displayName

const AvatarImage = React.forwardRef<
  React.ElementRef<typeof AvatarPrimitive.Image>,
  React.ComponentPropsWithoutRef<typeof AvatarPrimitive.Image>
>(({ className, ...props }, ref) => (
  <AvatarPrimitive.Image
    ref={ref}
    className={cn("aspect-square h-full w-full", className)}
    {...props}
  />
))
AvatarImage.displayName = AvatarPrimitive.Image.displayName

const AvatarFallback = React.forwardRef<
  React.ElementRef<typeof AvatarPrimitive.Fallback>,
  React.ComponentPropsWithoutRef<typeof AvatarPrimitive.Fallback>
>(({ className, ...props }, ref) => (
  <AvatarPrimitive.Fallback
    ref={ref}
    className={cn(
      "flex h-full w-full items-center justify-center rounded-full bg-muted",
      className
    )}
    {...props}
  />
))
AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName

export { Avatar, AvatarImage, AvatarFallback }

examples/vercel-ai-sdk-chat-app/src/components/ui/badge.tsx (new file)
@@ -0,0 +1,36 @@
import * as React from "react"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const badgeVariants = cva(
  "inline-flex items-center rounded-md border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2",
  {
    variants: {
      variant: {
        default:
          "border-transparent bg-primary text-primary-foreground shadow hover:bg-primary/80",
        secondary:
          "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80",
        destructive:
          "border-transparent bg-destructive text-destructive-foreground shadow hover:bg-destructive/80",
        outline: "text-foreground",
      },
    },
    defaultVariants: {
      variant: "default",
    },
  }
)

export interface BadgeProps
  extends React.HTMLAttributes<HTMLDivElement>,
    VariantProps<typeof badgeVariants> {}

function Badge({ className, variant, ...props }: BadgeProps) {
  return (
    <div className={cn(badgeVariants({ variant }), className)} {...props} />
  )
}

export { Badge, badgeVariants }

examples/vercel-ai-sdk-chat-app/src/components/ui/button.tsx (new file)
@@ -0,0 +1,57 @@
import * as React from "react"
import { Slot } from "@radix-ui/react-slot"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const buttonVariants = cva(
  "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0",
  {
    variants: {
      variant: {
        default:
          "bg-primary text-primary-foreground shadow hover:bg-primary/90",
        destructive:
          "bg-destructive text-destructive-foreground shadow-sm hover:bg-destructive/90",
        outline:
          "border border-input bg-background shadow-sm hover:bg-accent hover:text-accent-foreground",
        secondary:
          "bg-secondary text-secondary-foreground shadow-sm hover:bg-secondary/80",
        ghost: "hover:bg-accent hover:text-accent-foreground",
        link: "text-primary underline-offset-4 hover:underline",
      },
      size: {
        default: "h-9 px-4 py-2",
        sm: "h-8 rounded-md px-3 text-xs",
        lg: "h-10 rounded-md px-8",
        icon: "h-9 w-9",
      },
    },
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  }
)

export interface ButtonProps
  extends React.ButtonHTMLAttributes<HTMLButtonElement>,
    VariantProps<typeof buttonVariants> {
  asChild?: boolean
}

const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
  ({ className, variant, size, asChild = false, ...props }, ref) => {
    const Comp = asChild ? Slot : "button"
    return (
      <Comp
        className={cn(buttonVariants({ variant, size, className }))}
        ref={ref}
        {...props}
      />
    )
  }
)
Button.displayName = "Button"

export { Button, buttonVariants }

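Usage note: `variant` and `size` select the `cva` recipes defined above, and `asChild` renders the child element itself (via Radix `Slot`) with button styling. For example:

// Illustrative usage of the Button component defined above.
import { Button } from "@/components/ui/button"

export function ButtonDemo() {
  return (
    <div className="flex gap-2">
      <Button>Default</Button>
      <Button variant="outline" size="sm">Outline, small</Button>
      {/* asChild styles the anchor itself instead of nesting it in a <button> */}
      <Button asChild>
        <a href="https://mem0.ai">Link styled as a button</a>
      </Button>
    </div>
  )
}
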
examples/vercel-ai-sdk-chat-app/src/components/ui/card.tsx (new file)
@@ -0,0 +1,76 @@
import * as React from "react"

import { cn } from "@/lib/utils"

const Card = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn(
      "rounded-xl border bg-card text-card-foreground shadow",
      className
    )}
    {...props}
  />
))
Card.displayName = "Card"

const CardHeader = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn("flex flex-col space-y-1.5 p-6", className)}
    {...props}
  />
))
CardHeader.displayName = "CardHeader"

const CardTitle = React.forwardRef<
  HTMLParagraphElement,
  React.HTMLAttributes<HTMLHeadingElement>
>(({ className, ...props }, ref) => (
  <h3
    ref={ref}
    className={cn("font-semibold leading-none tracking-tight", className)}
    {...props}
  />
))
CardTitle.displayName = "CardTitle"

const CardDescription = React.forwardRef<
  HTMLParagraphElement,
  React.HTMLAttributes<HTMLParagraphElement>
>(({ className, ...props }, ref) => (
  <p
    ref={ref}
    className={cn("text-sm text-muted-foreground", className)}
    {...props}
  />
))
CardDescription.displayName = "CardDescription"

const CardContent = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div ref={ref} className={cn("p-6 pt-0", className)} {...props} />
))
CardContent.displayName = "CardContent"

const CardFooter = React.forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    className={cn("flex items-center p-6 pt-0", className)}
    {...props}
  />
))
CardFooter.displayName = "CardFooter"

export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent }

examples/vercel-ai-sdk-chat-app/src/components/ui/dialog.tsx (new file)
@@ -0,0 +1,120 @@
import * as React from "react"
import * as DialogPrimitive from "@radix-ui/react-dialog"
import { Cross2Icon } from "@radix-ui/react-icons"

import { cn } from "@/lib/utils"

const Dialog = DialogPrimitive.Root

const DialogTrigger = DialogPrimitive.Trigger

const DialogPortal = DialogPrimitive.Portal

const DialogClose = DialogPrimitive.Close

const DialogOverlay = React.forwardRef<
  React.ElementRef<typeof DialogPrimitive.Overlay>,
  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Overlay>
>(({ className, ...props }, ref) => (
  <DialogPrimitive.Overlay
    ref={ref}
    className={cn(
      "fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
      className
    )}
    {...props}
  />
))
DialogOverlay.displayName = DialogPrimitive.Overlay.displayName

const DialogContent = React.forwardRef<
  React.ElementRef<typeof DialogPrimitive.Content>,
  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Content>
>(({ className, children, ...props }, ref) => (
  <DialogPortal>
    <DialogOverlay />
    <DialogPrimitive.Content
      ref={ref}
      className={cn(
        "fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border bg-background p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] sm:rounded-lg",
        className
      )}
      {...props}
    >
      {children}
      <DialogPrimitive.Close className="absolute right-4 top-4 rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 disabled:pointer-events-none data-[state=open]:bg-accent data-[state=open]:text-muted-foreground">
        <Cross2Icon className="h-4 w-4" />
        <span className="sr-only">Close</span>
      </DialogPrimitive.Close>
    </DialogPrimitive.Content>
  </DialogPortal>
))
DialogContent.displayName = DialogPrimitive.Content.displayName

const DialogHeader = ({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
  <div
    className={cn(
      "flex flex-col space-y-1.5 text-center sm:text-left",
      className
    )}
    {...props}
  />
)
DialogHeader.displayName = "DialogHeader"

const DialogFooter = ({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
  <div
    className={cn(
      "flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2",
      className
    )}
    {...props}
  />
)
DialogFooter.displayName = "DialogFooter"

const DialogTitle = React.forwardRef<
  React.ElementRef<typeof DialogPrimitive.Title>,
  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Title>
>(({ className, ...props }, ref) => (
  <DialogPrimitive.Title
    ref={ref}
    className={cn(
      "text-lg font-semibold leading-none tracking-tight",
      className
    )}
    {...props}
  />
))
DialogTitle.displayName = DialogPrimitive.Title.displayName

const DialogDescription = React.forwardRef<
  React.ElementRef<typeof DialogPrimitive.Description>,
  React.ComponentPropsWithoutRef<typeof DialogPrimitive.Description>
>(({ className, ...props }, ref) => (
  <DialogPrimitive.Description
    ref={ref}
    className={cn("text-sm text-muted-foreground", className)}
    {...props}
  />
))
DialogDescription.displayName = DialogPrimitive.Description.displayName

export {
  Dialog,
  DialogPortal,
  DialogOverlay,
  DialogTrigger,
  DialogClose,
  DialogContent,
  DialogHeader,
  DialogFooter,
  DialogTitle,
  DialogDescription,
}

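These exports compose exactly as in `ApiSettingsPopup` above; a minimal controlled example for reference:

// Minimal controlled composition of the Dialog exports above.
import { useState } from "react"
import {
  Dialog,
  DialogContent,
  DialogHeader,
  DialogTitle,
  DialogDescription,
  DialogFooter,
} from "@/components/ui/dialog"
import { Button } from "@/components/ui/button"

export function ConfirmDialogDemo() {
  const [open, setOpen] = useState(false)
  return (
    <Dialog open={open} onOpenChange={setOpen}>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>Are you sure?</DialogTitle>
          <DialogDescription>This action cannot be undone.</DialogDescription>
        </DialogHeader>
        <DialogFooter>
          <Button variant="outline" onClick={() => setOpen(false)}>Cancel</Button>
          <Button onClick={() => setOpen(false)}>Confirm</Button>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  )
}
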
examples/vercel-ai-sdk-chat-app/src/components/ui/input.tsx (new file)
@@ -0,0 +1,25 @@
import * as React from "react"

import { cn } from "@/lib/utils"

export interface InputProps
  extends React.InputHTMLAttributes<HTMLInputElement> {}

const Input = React.forwardRef<HTMLInputElement, InputProps>(
  ({ className, type, ...props }, ref) => {
    return (
      <input
        type={type}
        className={cn(
          "flex h-9 w-full rounded-md border border-input bg-transparent px-3 py-1 text-sm shadow-sm transition-colors file:border-0 file:bg-transparent file:text-sm file:font-medium file:text-foreground placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-ring disabled:cursor-not-allowed disabled:opacity-50",
          className
        )}
        ref={ref}
        {...props}
      />
    )
  }
)
Input.displayName = "Input"

export { Input }

examples/vercel-ai-sdk-chat-app/src/components/ui/label.tsx (new file)
@@ -0,0 +1,24 @@
import * as React from "react"
import * as LabelPrimitive from "@radix-ui/react-label"
import { cva, type VariantProps } from "class-variance-authority"

import { cn } from "@/lib/utils"

const labelVariants = cva(
  "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70"
)

const Label = React.forwardRef<
  React.ElementRef<typeof LabelPrimitive.Root>,
  React.ComponentPropsWithoutRef<typeof LabelPrimitive.Root> &
    VariantProps<typeof labelVariants>
>(({ className, ...props }, ref) => (
  <LabelPrimitive.Root
    ref={ref}
    className={cn(labelVariants(), className)}
    {...props}
  />
))
Label.displayName = LabelPrimitive.Root.displayName

export { Label }

@@ -0,0 +1,46 @@
import * as React from "react"
import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area"

import { cn } from "@/lib/utils"

const ScrollArea = React.forwardRef<
  React.ElementRef<typeof ScrollAreaPrimitive.Root>,
  React.ComponentPropsWithoutRef<typeof ScrollAreaPrimitive.Root>
>(({ className, children, ...props }, ref) => (
  <ScrollAreaPrimitive.Root
    ref={ref}
    className={cn("relative overflow-hidden", className)}
    {...props}
  >
    <ScrollAreaPrimitive.Viewport className="h-full w-full rounded-[inherit]">
      {children}
    </ScrollAreaPrimitive.Viewport>
    <ScrollBar />
    <ScrollAreaPrimitive.Corner />
  </ScrollAreaPrimitive.Root>
))
ScrollArea.displayName = ScrollAreaPrimitive.Root.displayName

const ScrollBar = React.forwardRef<
  React.ElementRef<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>,
  React.ComponentPropsWithoutRef<typeof ScrollAreaPrimitive.ScrollAreaScrollbar>
>(({ className, orientation = "vertical", ...props }, ref) => (
  <ScrollAreaPrimitive.ScrollAreaScrollbar
    ref={ref}
    orientation={orientation}
    className={cn(
      "flex touch-none select-none transition-colors",
      orientation === "vertical" &&
        "h-full w-2.5 border-l border-l-transparent p-[1px]",
      orientation === "horizontal" &&
        "h-2.5 flex-col border-t border-t-transparent p-[1px]",
      className
    )}
    {...props}
  >
    <ScrollAreaPrimitive.ScrollAreaThumb className="relative flex-1 rounded-full bg-border" />
  </ScrollAreaPrimitive.ScrollAreaScrollbar>
))
ScrollBar.displayName = ScrollAreaPrimitive.ScrollAreaScrollbar.displayName

export { ScrollArea, ScrollBar }

examples/vercel-ai-sdk-chat-app/src/components/ui/select.tsx (new file)
@@ -0,0 +1,164 @@
"use client"

import * as React from "react"
import {
  CaretSortIcon,
  CheckIcon,
  ChevronDownIcon,
  ChevronUpIcon,
} from "@radix-ui/react-icons"
import * as SelectPrimitive from "@radix-ui/react-select"

import { cn } from "@/lib/utils"

const Select = SelectPrimitive.Root

const SelectGroup = SelectPrimitive.Group

const SelectValue = SelectPrimitive.Value

const SelectTrigger = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.Trigger>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Trigger>
>(({ className, children, ...props }, ref) => (
  <SelectPrimitive.Trigger
    ref={ref}
    className={cn(
      "flex h-9 w-full items-center justify-between whitespace-nowrap rounded-md border border-input bg-transparent px-3 py-2 text-sm shadow-sm ring-offset-background placeholder:text-muted-foreground focus:outline-none focus:ring-1 focus:ring-ring disabled:cursor-not-allowed disabled:opacity-50 [&>span]:line-clamp-1",
      className
    )}
    {...props}
  >
    {children}
    <SelectPrimitive.Icon asChild>
      <CaretSortIcon className="h-4 w-4 opacity-50" />
    </SelectPrimitive.Icon>
  </SelectPrimitive.Trigger>
))
SelectTrigger.displayName = SelectPrimitive.Trigger.displayName

const SelectScrollUpButton = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.ScrollUpButton>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.ScrollUpButton>
>(({ className, ...props }, ref) => (
  <SelectPrimitive.ScrollUpButton
    ref={ref}
    className={cn(
      "flex cursor-default items-center justify-center py-1",
      className
    )}
    {...props}
  >
    <ChevronUpIcon />
  </SelectPrimitive.ScrollUpButton>
))
SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName

const SelectScrollDownButton = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.ScrollDownButton>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.ScrollDownButton>
>(({ className, ...props }, ref) => (
  <SelectPrimitive.ScrollDownButton
    ref={ref}
    className={cn(
      "flex cursor-default items-center justify-center py-1",
      className
    )}
    {...props}
  >
    <ChevronDownIcon />
  </SelectPrimitive.ScrollDownButton>
))
SelectScrollDownButton.displayName =
  SelectPrimitive.ScrollDownButton.displayName

const SelectContent = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.Content>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Content>
>(({ className, children, position = "popper", ...props }, ref) => (
  <SelectPrimitive.Portal>
    <SelectPrimitive.Content
      ref={ref}
      className={cn(
        "relative z-50 max-h-96 min-w-[8rem] overflow-hidden rounded-md border bg-popover text-popover-foreground shadow-md data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
        position === "popper" &&
          "data-[side=bottom]:translate-y-1 data-[side=left]:-translate-x-1 data-[side=right]:translate-x-1 data-[side=top]:-translate-y-1",
        className
      )}
      position={position}
      {...props}
    >
      <SelectScrollUpButton />
      <SelectPrimitive.Viewport
        className={cn(
          "p-1",
          position === "popper" &&
            "h-[var(--radix-select-trigger-height)] w-full min-w-[var(--radix-select-trigger-width)]"
        )}
      >
        {children}
      </SelectPrimitive.Viewport>
      <SelectScrollDownButton />
    </SelectPrimitive.Content>
  </SelectPrimitive.Portal>
))
SelectContent.displayName = SelectPrimitive.Content.displayName

const SelectLabel = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.Label>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Label>
>(({ className, ...props }, ref) => (
  <SelectPrimitive.Label
    ref={ref}
    className={cn("px-2 py-1.5 text-sm font-semibold", className)}
    {...props}
  />
))
SelectLabel.displayName = SelectPrimitive.Label.displayName

const SelectItem = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.Item>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Item>
>(({ className, children, ...props }, ref) => (
  <SelectPrimitive.Item
    ref={ref}
    className={cn(
      "relative flex w-full cursor-default select-none items-center rounded-sm py-1.5 pl-2 pr-8 text-sm outline-none focus:bg-accent focus:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50",
      className
    )}
    {...props}
  >
    <span className="absolute right-2 flex h-3.5 w-3.5 items-center justify-center">
      <SelectPrimitive.ItemIndicator>
        <CheckIcon className="h-4 w-4" />
      </SelectPrimitive.ItemIndicator>
    </span>
    <SelectPrimitive.ItemText>{children}</SelectPrimitive.ItemText>
  </SelectPrimitive.Item>
))
SelectItem.displayName = SelectPrimitive.Item.displayName

const SelectSeparator = React.forwardRef<
  React.ElementRef<typeof SelectPrimitive.Separator>,
  React.ComponentPropsWithoutRef<typeof SelectPrimitive.Separator>
>(({ className, ...props }, ref) => (
  <SelectPrimitive.Separator
    ref={ref}
    className={cn("-mx-1 my-1 h-px bg-muted", className)}
    {...props}
  />
))
SelectSeparator.displayName = SelectPrimitive.Separator.displayName

export {
  Select,
  SelectGroup,
  SelectValue,
  SelectTrigger,
  SelectContent,
  SelectLabel,
  SelectItem,
  SelectSeparator,
  SelectScrollUpButton,
  SelectScrollDownButton,
}

examples/vercel-ai-sdk-chat-app/src/contexts/GlobalContext.tsx (new file)
@@ -0,0 +1,324 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { createContext, useEffect, useState } from "react";
import { createMem0, searchMemories } from "@mem0/vercel-ai-provider";
import { LanguageModelV1Prompt, streamText } from "ai";
import { Message, Memory, FileInfo } from "@/types";
import { Buffer } from 'buffer';

const GlobalContext = createContext<any>({});

const WelcomeMessage: Message = {
  id: "1",
  content:
    "👋 Hi there! I'm your personal assistant. How can I help you today? 😊",
  sender: "assistant",
  timestamp: new Date().toLocaleTimeString(),
};

const InvalidConfigMessage: Message = {
  id: "2",
  content:
    "Invalid configuration. Please check your API keys, and add a user and try again.",
  sender: "assistant",
  timestamp: new Date().toLocaleTimeString(),
};

const SomethingWentWrongMessage: Message = {
  id: "3",
  content: "Something went wrong. Please try again.",
  sender: "assistant",
  timestamp: new Date().toLocaleTimeString(),
};

// Default model for each supported provider.
const models = {
  "openai": "gpt-4o",
  "anthropic": "claude-3-haiku-20240307",
  "cohere": "command-r-plus",
  "groq": "gemma2-9b-it"
}

const getModel = (provider: string) => {
  switch (provider) {
    case "openai":
      return models.openai;
    case "anthropic":
      return models.anthropic;
    case "cohere":
      return models.cohere;
    case "groq":
      return models.groq;
    default:
      return models.openai;
  }
}

const GlobalState = (props: any) => {
  const [memories, setMemories] = useState<Memory[]>([]);
  const [messages, setMessages] = useState<Message[]>([]);
  const [selectedUser, setSelectedUser] = useState<string>("");
  const [thinking, setThinking] = useState<boolean>(false);
  const [selectedOpenAIKey, setSelectedOpenAIKey] = useState<string>("");
  const [selectedMem0Key, setSelectedMem0Key] = useState<string>("");
  const [selectedProvider, setSelectedProvider] = useState<string>("openai");
  const [selectedFile, setSelectedFile] = useState<FileInfo | null>(null)
  const [file, setFile] = useState<any>(null)

  // Mem0 provider instance, configured from the keys entered in the settings popup.
  const mem0 = createMem0({
    provider: selectedProvider,
    mem0ApiKey: selectedMem0Key,
    apiKey: selectedOpenAIKey,
  });

  const clearConfiguration = () => {
    localStorage.removeItem("mem0ApiKey");
    localStorage.removeItem("openaiApiKey");
    localStorage.removeItem("provider");
    setSelectedMem0Key("");
    setSelectedOpenAIKey("");
    setSelectedProvider("openai");
    setSelectedUser("");
    setMessages([WelcomeMessage]);
    setMemories([]);
    setFile(null);
  };

  const selectorHandler = (mem0: string, openai: string, provider: string) => {
    setSelectedMem0Key(mem0);
    setSelectedOpenAIKey(openai);
    setSelectedProvider(provider);
    localStorage.setItem("mem0ApiKey", mem0);
    localStorage.setItem("openaiApiKey", openai);
    localStorage.setItem("provider", provider);
  };

  // Restore configuration and user from localStorage on first mount.
  useEffect(() => {
    const mem0 = localStorage.getItem("mem0ApiKey");
    const openai = localStorage.getItem("openaiApiKey");
    const provider = localStorage.getItem("provider");
    const user = localStorage.getItem("user");
    if (mem0 && openai && provider) {
      selectorHandler(mem0, openai, provider);
    }
    if (user) {
      setSelectedUser(user);
    }
  }, []);

  const selectUserHandler = (user: string) => {
    setSelectedUser(user);
    localStorage.setItem("user", user);
  };

  const clearUserHandler = () => {
    setSelectedUser("");
    setMemories([]);
  };

  // Fetch the memories relevant to the current prompt for the selected user.
  const getMemories = async (messages: LanguageModelV1Prompt) => {
    try {
      const smemories = await searchMemories(messages, {
        user_id: selectedUser || "",
        mem0ApiKey: import.meta.env.VITE_MEM0_API_KEY,
      });

      const newMemories = smemories.map((memory: any) => ({
        id: memory.id,
        content: memory.memory,
        timestamp: memory.updated_at,
        tags: memory.categories,
      }));
      setMemories(newMemories);
    } catch (error) {
      console.error("Error in getMemories:", error);
    }
  };

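  // NOTE: `convertToBase64` and `getFileBuffer` are called in handleSend below,
  // but their definitions fall outside the captured portion of this file.
  // Plausible implementations, shown as comments for reference only:
  //
  //   const convertToBase64 = (file: File): Promise<string> =>
  //     new Promise((resolve, reject) => {
  //       const reader = new FileReader();
  //       reader.onload = () => resolve(reader.result as string); // data URL
  //       reader.onerror = reject;
  //       reader.readAsDataURL(file);
  //     });
  //
  //   const getFileBuffer = (file: File): Promise<ArrayBuffer> => file.arrayBuffer();
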
  const handleSend = async (inputValue: string) => {
    if (!inputValue.trim() && !file) return;
    if (!selectedUser) {
      const newMessage: Message = {
        id: Date.now().toString(),
        content: inputValue,
        sender: "user",
        timestamp: new Date().toLocaleTimeString(),
      };
      setMessages((prev) => [...prev, newMessage, InvalidConfigMessage]);
      return;
    }

    const userMessage: Message = {
      id: Date.now().toString(),
      content: inputValue,
      sender: "user",
      timestamp: new Date().toLocaleTimeString(),
    };

    let fileData;
    if (file) {
      if (file.type.startsWith("image/")) {
        // Convert image to Base64
        fileData = await convertToBase64(file);
        userMessage.image = fileData;
      } else if (file.type.startsWith("audio/")) {
        // Convert audio to ArrayBuffer
        fileData = await getFileBuffer(file);
        userMessage.audio = fileData;
      }
    }

    // Update the state with the new user message
    setMessages((prev) => [...prev, userMessage]);
    setThinking(true);

    // Transform messages into the required format
    const messagesForPrompt: LanguageModelV1Prompt = [];
    messages.map((message) => {
      const messageContent: any = {
        role: message.sender,
        content: [
          {
            type: "text",
            text: message.content,
          },
        ],
      };
      if (message.image) {
        messageContent.content.push({
          type: "image",
          image: message.image,
        });
      }
      if (message.audio) {
        messageContent.content.push({
          type: 'file',
          mimeType: 'audio/mpeg',
          data: message.audio,
        });
      }
      if (!message.audio) messagesForPrompt.push(messageContent);
    });

    const newMessage: any = {
      role: "user",
      content: [
        {
          type: "text",
          text: inputValue,
        },
      ],
    };
    if (file) {
      if (file.type.startsWith("image/")) {
        newMessage.content.push({
          type: "image",
          image: userMessage.image,
        });
      } else if (file.type.startsWith("audio/")) {
        newMessage.content.push({
          type: 'file',
          mimeType: 'audio/mpeg',
          data: userMessage.audio,
        });
      }
    }

    messagesForPrompt.push(newMessage);
    getMemories(messagesForPrompt);

    setFile(null);
    setSelectedFile(null);

    try {
      // Stream the completion through the Mem0 provider for the selected user.
      const { textStream } = await streamText({
        model: mem0(getModel(selectedProvider), {
          user_id: selectedUser || "",
        }),
        messages: messagesForPrompt,
      });

      const assistantMessageId = Date.now() + 1;
      const assistantMessage: Message = {
        id: assistantMessageId.toString(),
        content: "",
        sender: "assistant",
        timestamp: new Date().toLocaleTimeString(),
      };

      setMessages((prev) => [...prev, assistantMessage]);

      // Stream the text part by part
      for await (const textPart of textStream) {
        assistantMessage.content += textPart;
        setThinking(false);
        setFile(null);
        setSelectedFile(null);

        setMessages((prev) =>
          prev.map((msg) =>
            msg.id === assistantMessageId.toString()
              ? { ...msg, content: assistantMessage.content }
              : msg
          )
        );
      }

      setThinking(false);
    } catch (error) {
      console.error("Error in handleSend:", error);
      setMessages((prev) => [...prev, SomethingWentWrongMessage]);
      setThinking(false);
      setFile(null);
      setSelectedFile(null);
    }
  };

  useEffect(() => {
    setMessages([WelcomeMessage]);
  }, []);

  return (
    <GlobalContext.Provider
      value={{
        selectedUser,
        selectUserHandler,
        clearUserHandler,
        messages,
        memories,
|
||||
handleSend,
|
||||
thinking,
|
||||
selectedMem0Key,
|
||||
selectedOpenAIKey,
|
||||
selectedProvider,
|
||||
selectorHandler,
|
||||
clearConfiguration,
|
||||
selectedFile,
|
||||
setSelectedFile,
|
||||
file,
|
||||
setFile
|
||||
}}
|
||||
>
|
||||
{props.children}
|
||||
</GlobalContext.Provider>
|
||||
);
|
||||
};
|
||||
|
||||
export default GlobalContext;
|
||||
export { GlobalState };
|
||||
|
||||
|
||||
const convertToBase64 = (file: File): Promise<string> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
reader.readAsDataURL(file);
|
||||
reader.onload = () => resolve(reader.result as string); // Resolve with Base64 string
|
||||
reader.onerror = error => reject(error); // Reject on error
|
||||
});
|
||||
};
|
||||
|
||||
async function getFileBuffer(file: any) {
|
||||
const response = await fetch(file);
|
||||
const arrayBuffer = await response.arrayBuffer();
|
||||
const buffer = Buffer.from(arrayBuffer);
|
||||
return buffer;
|
||||
}
|
||||
97
examples/vercel-ai-sdk-chat-app/src/index.css
Normal file
@@ -0,0 +1,97 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

@layer base {
  :root {
    --background: 0 0% 100%;
    --foreground: 240 10% 3.9%;
    --card: 0 0% 100%;
    --card-foreground: 240 10% 3.9%;
    --popover: 0 0% 100%;
    --popover-foreground: 240 10% 3.9%;
    --primary: 240 5.9% 10%;
    --primary-foreground: 0 0% 98%;
    --secondary: 240 4.8% 95.9%;
    --secondary-foreground: 240 5.9% 10%;
    --muted: 240 4.8% 95.9%;
    --muted-foreground: 240 3.8% 46.1%;
    --accent: 240 4.8% 95.9%;
    --accent-foreground: 240 5.9% 10%;
    --destructive: 0 84.2% 60.2%;
    --destructive-foreground: 0 0% 98%;
    --border: 240 5.9% 90%;
    --input: 240 5.9% 90%;
    --ring: 240 10% 3.9%;
    --chart-1: 12 76% 61%;
    --chart-2: 173 58% 39%;
    --chart-3: 197 37% 24%;
    --chart-4: 43 74% 66%;
    --chart-5: 27 87% 67%;
    --radius: 0.5rem;
  }
  .dark {
    --background: 240 10% 3.9%;
    --foreground: 0 0% 98%;
    --card: 240 10% 3.9%;
    --card-foreground: 0 0% 98%;
    --popover: 240 10% 3.9%;
    --popover-foreground: 0 0% 98%;
    --primary: 0 0% 98%;
    --primary-foreground: 240 5.9% 10%;
    --secondary: 240 3.7% 15.9%;
    --secondary-foreground: 0 0% 98%;
    --muted: 240 3.7% 15.9%;
    --muted-foreground: 240 5% 64.9%;
    --accent: 240 3.7% 15.9%;
    --accent-foreground: 0 0% 98%;
    --destructive: 0 62.8% 30.6%;
    --destructive-foreground: 0 0% 98%;
    --border: 240 3.7% 15.9%;
    --input: 240 3.7% 15.9%;
    --ring: 240 4.9% 83.9%;
    --chart-1: 220 70% 50%;
    --chart-2: 160 60% 45%;
    --chart-3: 30 80% 55%;
    --chart-4: 280 65% 60%;
    --chart-5: 340 75% 55%;
  }
}
@layer base {
  * {
    @apply border-border;
  }
  body {
    @apply bg-background text-foreground;
  }
}

.loader {
  display: flex;
  align-items: flex-end;
  gap: 5px;
}

.ball {
  width: 6px;
  height: 6px;
  background-color: #4e4e4e;
  border-radius: 50%;
  animation: bounce 0.6s infinite alternate;
}

.ball:nth-child(2) {
  animation-delay: 0.2s;
}

.ball:nth-child(3) {
  animation-delay: 0.4s;
}

@keyframes bounce {
  from {
    transform: translateY(0);
  }
  to {
    transform: translateY(-4px);
  }
}
10
examples/vercel-ai-sdk-chat-app/src/main.tsx
Normal file
@@ -0,0 +1,10 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.tsx'

createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <App />
  </StrictMode>,
)
14
examples/vercel-ai-sdk-chat-app/src/page.tsx
Normal file
@@ -0,0 +1,14 @@
"use client";
import { GlobalState } from "./contexts/GlobalContext";
import Component from "./pages/home";

export default function Home() {
  return (
    <div>
      <GlobalState>
        <Component />
      </GlobalState>
    </div>
  );
}
41
examples/vercel-ai-sdk-chat-app/src/pages/home.tsx
Normal file
@@ -0,0 +1,41 @@
import { useState } from "react";
import ApiSettingsPopup from "../components/api-settings-popup";
import Memories from "../components/memories";
import Header from "../components/header";
import Messages from "../components/messages";
import InputArea from "../components/input-area";
import ChevronToggle from "../components/chevron-toggle";

export default function Home() {
  const [isMemoriesExpanded, setIsMemoriesExpanded] = useState(true);
  const [isSettingsOpen, setIsSettingsOpen] = useState(false);

  return (
    <>
      <ApiSettingsPopup isOpen={isSettingsOpen} setIsOpen={setIsSettingsOpen} />
      <div className="flex h-screen bg-background">
        {/* Main Chat Area */}
        <div className="flex-1 flex flex-col">
          {/* Header */}
          <Header setIsSettingsOpen={setIsSettingsOpen} />

          {/* Messages */}
          <Messages />

          {/* Input Area */}
          <InputArea />
        </div>

        {/* Chevron Toggle */}
        <ChevronToggle
          isMemoriesExpanded={isMemoriesExpanded}
          setIsMemoriesExpanded={setIsMemoriesExpanded}
        />

        {/* Memories Sidebar */}
        <Memories isMemoriesExpanded={isMemoriesExpanded} />
      </div>
    </>
  );
}
22
examples/vercel-ai-sdk-chat-app/src/types.ts
Normal file
@@ -0,0 +1,22 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
export interface Memory {
  id: string;
  content: string;
  timestamp: string;
  tags: string[];
}

export interface Message {
  id: string;
  content: string;
  sender: "user" | "assistant";
  timestamp: string;
  image?: string;
  audio?: any;
}

export interface FileInfo {
  name: string;
  type: string;
  size: number;
}
1
examples/vercel-ai-sdk-chat-app/src/vite-env.d.ts
vendored
Normal file
@@ -0,0 +1 @@
/// <reference types="vite/client" />
62
examples/vercel-ai-sdk-chat-app/tailwind.config.js
Normal file
@@ -0,0 +1,62 @@
// tailwind.config.js
/* eslint-env node */

/** @type {import('tailwindcss').Config} */
import tailwindcssAnimate from 'tailwindcss-animate';

export default {
  darkMode: ["class"],
  content: ["./index.html", "./src/**/*.{ts,tsx,js,jsx}"],
  theme: {
    extend: {
      borderRadius: {
        lg: 'var(--radius)',
        md: 'calc(var(--radius) - 2px)',
        sm: 'calc(var(--radius) - 4px)',
      },
      colors: {
        background: 'hsl(var(--background))',
        foreground: 'hsl(var(--foreground))',
        card: {
          DEFAULT: 'hsl(var(--card))',
          foreground: 'hsl(var(--card-foreground))',
        },
        popover: {
          DEFAULT: 'hsl(var(--popover))',
          foreground: 'hsl(var(--popover-foreground))',
        },
        primary: {
          DEFAULT: 'hsl(var(--primary))',
          foreground: 'hsl(var(--primary-foreground))',
        },
        secondary: {
          DEFAULT: 'hsl(var(--secondary))',
          foreground: 'hsl(var(--secondary-foreground))',
        },
        muted: {
          DEFAULT: 'hsl(var(--muted))',
          foreground: 'hsl(var(--muted-foreground))',
        },
        accent: {
          DEFAULT: 'hsl(var(--accent))',
          foreground: 'hsl(var(--accent-foreground))',
        },
        destructive: {
          DEFAULT: 'hsl(var(--destructive))',
          foreground: 'hsl(var(--destructive-foreground))',
        },
        border: 'hsl(var(--border))',
        input: 'hsl(var(--input))',
        ring: 'hsl(var(--ring))',
        chart: {
          '1': 'hsl(var(--chart-1))',
          '2': 'hsl(var(--chart-2))',
          '3': 'hsl(var(--chart-3))',
          '4': 'hsl(var(--chart-4))',
          '5': 'hsl(var(--chart-5))',
        },
      },
    },
  },
  plugins: [tailwindcssAnimate],
};
32
examples/vercel-ai-sdk-chat-app/tsconfig.app.json
Normal file
@@ -0,0 +1,32 @@
{
  "compilerOptions": {
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
    "target": "ES2020",
    "useDefineForClassFields": true,
    "lib": ["ES2020", "DOM", "DOM.Iterable"],
    "module": "ESNext",
    "skipLibCheck": true,
    "baseUrl": ".",
    "paths": {
      "@/*": ["./src/*"]
    },

    /* Bundler mode */
    "moduleResolution": "Bundler",
    "allowImportingTsExtensions": true,
    "isolatedModules": true,
    "moduleDetection": "force",
    "noEmit": true,
    "jsx": "react-jsx",

    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["src"]
}
13
examples/vercel-ai-sdk-chat-app/tsconfig.json
Normal file
@@ -0,0 +1,13 @@
{
  "files": [],
  "references": [
    { "path": "./tsconfig.app.json" },
    { "path": "./tsconfig.node.json" }
  ],
  "compilerOptions": {
    "baseUrl": ".",
    "paths": {
      "@/*": ["./src/*"]
    }
  }
}
24
examples/vercel-ai-sdk-chat-app/tsconfig.node.json
Normal file
@@ -0,0 +1,24 @@
{
  "compilerOptions": {
    "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
    "target": "ES2022",
    "lib": ["ES2023"],
    "module": "ESNext",
    "skipLibCheck": true,

    /* Bundler mode */
    "moduleResolution": "Bundler",
    "allowImportingTsExtensions": true,
    "isolatedModules": true,
    "moduleDetection": "force",
    "noEmit": true,

    /* Linting */
    "strict": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noFallthroughCasesInSwitch": true,
    "noUncheckedSideEffectImports": true
  },
  "include": ["vite.config.ts"]
}
13
examples/vercel-ai-sdk-chat-app/vite.config.ts
Normal file
@@ -0,0 +1,13 @@
import path from "path"
import react from "@vitejs/plugin-react"
import { defineConfig } from "vite"

export default defineConfig({
  plugins: [react()],
  resolve: {
    alias: {
      "@": path.resolve(__dirname, "./src"),
      buffer: 'buffer'
    },
  },
})
2
vercel-ai-sdk/.gitattributes
vendored
Normal file
@@ -0,0 +1,2 @@
# Auto detect text files and perform LF normalization
* text=auto
10
vercel-ai-sdk/.gitignore
vendored
Normal file
@@ -0,0 +1,10 @@
**/.env
**/node_modules
**/.DS_Store

# Ignore test-related files
**/coverage.data
**/coverage/

# Build files
**/dist
228
vercel-ai-sdk/README.md
Normal file
@@ -0,0 +1,228 @@
# Mem0 AI SDK Provider

The **Mem0 AI SDK Provider** is a community-maintained library developed by [Mem0](https://mem0.ai/) to integrate with the Vercel AI SDK. This library brings enhanced AI interaction capabilities to your applications by introducing persistent memory functionality. With Mem0, language model conversations gain memory, enabling more contextualized and personalized responses based on past interactions.

Discover more of **Mem0** on [GitHub](https://github.com/mem0ai).
Explore the [Mem0 Documentation](https://docs.mem0.ai/overview) to gain deeper control and flexibility in managing your memories.

For detailed information on using the Vercel AI SDK, refer to Vercel's [API Reference](https://sdk.vercel.ai/docs/reference) and [Documentation](https://sdk.vercel.ai/docs).

## Features

- 🧠 Persistent memory storage for AI conversations
- 🔄 Seamless integration with the Vercel AI SDK
- 🚀 Support for multiple LLM providers
- 📝 Rich message format support
- ⚡ Streaming capabilities
- 🔍 Context-aware responses

## Installation

```bash
npm install @mem0/vercel-ai-provider
```

## Before We Begin

### Setting Up Mem0

1. Obtain your [Mem0 API Key](https://app.mem0.ai/dashboard/api-keys) from the Mem0 dashboard.

2. Initialize the Mem0 Client:

```typescript
import { createMem0 } from "@mem0/vercel-ai-provider";

const mem0 = createMem0({
  provider: "openai",
  mem0ApiKey: "m0-xxx",
  apiKey: "openai-api-key",
  config: {
    compatibility: "strict",
    // Additional model-specific configuration options can be added here.
  },
});
```

### Note
By default, the `openai` provider is used, so specifying it is optional:
```typescript
const mem0 = createMem0();
```
For better security, consider setting `MEM0_API_KEY` and `OPENAI_API_KEY` as environment variables.

3. Add Memories to Enhance Context:

```typescript
import { LanguageModelV1Prompt } from "ai";
import { addMemories } from "@mem0/vercel-ai-provider";

const messages: LanguageModelV1Prompt = [
  {
    role: "user",
    content: [
      { type: "text", text: "I love red cars." },
      { type: "text", text: "I like Toyota Cars." },
      { type: "text", text: "I prefer SUVs." },
    ],
  },
];

await addMemories(messages, { user_id: "borat" });
```

These memories are now stored in your profile. You can view and manage them on the [Mem0 Dashboard](https://app.mem0.ai/dashboard/users).

### Note

For standalone features such as `addMemories` and `retrieveMemories`, you must either set `MEM0_API_KEY` as an environment variable or pass it directly in the function call.

Example:

```typescript
await addMemories(messages, { user_id: "borat", mem0ApiKey: "m0-xxx" });
await retrieveMemories(prompt, { user_id: "borat", mem0ApiKey: "m0-xxx" });
```

## Usage Examples

### 1. Basic Text Generation with Memory Context

```typescript
import { generateText } from "ai";
import { createMem0 } from "@mem0/vercel-ai-provider";

const mem0 = createMem0();

const { text } = await generateText({
  model: mem0("gpt-4-turbo", {
    user_id: "borat",
  }),
  prompt: "Suggest me a good car to buy!",
});
```

### 2. Combining OpenAI Provider with Memory Utils

```typescript
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
import { retrieveMemories } from "@mem0/vercel-ai-provider";

const prompt = "Suggest me a good car to buy.";
const memories = await retrieveMemories(prompt, { user_id: "borat" });

const { text } = await generateText({
  model: openai("gpt-4-turbo"),
  prompt: prompt,
  system: memories,
});
```

### 3. Structured Message Format with Memory

```typescript
import { generateText } from "ai";
import { createMem0 } from "@mem0/vercel-ai-provider";

const mem0 = createMem0();

const { text } = await generateText({
  model: mem0("gpt-4-turbo", {
    user_id: "borat",
  }),
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "Suggest me a good car to buy." },
        { type: "text", text: "Why is it better than the other cars for me?" },
        { type: "text", text: "Give options for every price range." },
      ],
    },
  ],
});
```

### 4. Advanced Memory Integration with OpenAI

```typescript
import { generateText, LanguageModelV1Prompt } from "ai";
import { openai } from "@ai-sdk/openai";
import { retrieveMemories } from "@mem0/vercel-ai-provider";

// New format using system parameter for memory context
const messages: LanguageModelV1Prompt = [
  {
    role: "user",
    content: [
      { type: "text", text: "Suggest me a good car to buy." },
      { type: "text", text: "Why is it better than the other cars for me?" },
      { type: "text", text: "Give options for every price range." },
    ],
  },
];

const memories = await retrieveMemories(messages, { user_id: "borat" });

const { text } = await generateText({
  model: openai("gpt-4-turbo"),
  messages: messages,
  system: memories,
});
```

### 5. Streaming Responses with Memory Context

```typescript
import { streamText } from "ai";
import { createMem0 } from "@mem0/vercel-ai-provider";

const mem0 = createMem0();

const { textStream } = await streamText({
  model: mem0("gpt-4-turbo", {
    user_id: "borat",
  }),
  prompt:
    "Suggest me a good car to buy! Why is it better than the other cars for me? Give options for every price range.",
});

for await (const textPart of textStream) {
  process.stdout.write(textPart);
}
```

## Core Functions

- `createMem0()`: Initializes a new mem0 provider instance with optional configuration
- `retrieveMemories()`: Enriches prompts with relevant memories
- `addMemories()`: Adds memories to your profile

Taken together, the three cover the full write/read cycle, as shown in the sketch below.
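A minimal sketch combining them (assuming `MEM0_API_KEY` and `OPENAI_API_KEY` are set as environment variables; the `user_id` value is illustrative):

```typescript
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";
import { createMem0, addMemories, retrieveMemories } from "@mem0/vercel-ai-provider";

const mem0 = createMem0();

// 1. Store a preference for this user.
await addMemories(
  [{ role: "user", content: [{ type: "text", text: "I prefer concise answers." }] }],
  { user_id: "borat" }
);

// 2. Let the provider inject memories automatically...
const { text } = await generateText({
  model: mem0("gpt-4-turbo", { user_id: "borat" }),
  prompt: "What kind of answers do I prefer?",
});

// 3. ...or retrieve them yourself and pass them as a system prompt.
const prompt = "What kind of answers do I prefer?";
const memories = await retrieveMemories(prompt, { user_id: "borat" });
const { text: manual } = await generateText({
  model: openai("gpt-4-turbo"),
  prompt,
  system: memories,
});
```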
## Configuration Options

```typescript
const mem0 = createMem0({
  config: {
    ...
    // Additional model-specific configuration options can be added here.
  },
});
```

## Best Practices

1. **User Identification**: Always provide a unique `user_id` identifier for consistent memory retrieval
2. **Context Management**: Use appropriate context window sizes to balance performance and memory
3. **Error Handling**: Implement proper error handling for memory operations
4. **Memory Cleanup**: Regularly clean up unused memory contexts to optimize performance

We also support `agent_id`, `app_id`, and `run_id`; refer to the [Docs](https://docs.mem0.ai/api-reference/memory/add-memories). A short sketch follows.
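A minimal sketch of scoping memories with these identifiers (the identifier values are illustrative; the fields mirror the settings accepted by the model factory):

```typescript
import { generateText } from "ai";
import { createMem0 } from "@mem0/vercel-ai-provider";

const mem0 = createMem0();

// Memories read and written during this call are scoped to this
// user, agent, and run rather than to the user alone.
const { text } = await generateText({
  model: mem0("gpt-4-turbo", {
    user_id: "borat",
    agent_id: "travel-planner", // illustrative value
    run_id: "session-42",       // illustrative value
  }),
  prompt: "Plan a weekend trip based on what you know about me.",
});
```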
## Notes

- Requires proper API key configuration for underlying providers (e.g., OpenAI)
- Memory features depend on proper user identification via `user_id`
- Supports both streaming and non-streaming responses
- Compatible with all Vercel AI SDK features and patterns
105
vercel-ai-sdk/config/test-config.ts
Normal file
@@ -0,0 +1,105 @@
import dotenv from "dotenv";
import { createMem0 } from "../src";

dotenv.config();

export interface Provider {
  name: string;
  activeModel: string;
  apiKey: string | undefined;
}

export const testConfig = {
  apiKey: process.env.MEM0_API_KEY,
  userId: "mem0-ai-sdk-test-user-1134774",
  deleteId: "",
  providers: [
    {
      name: "openai",
      activeModel: "gpt-4-turbo",
      apiKey: process.env.OPENAI_API_KEY,
    },
    {
      name: "anthropic",
      activeModel: "claude-3-5-sonnet-20240620",
      apiKey: process.env.ANTHROPIC_API_KEY,
    },
    // {
    //   name: "groq",
    //   activeModel: "gemma2-9b-it",
    //   apiKey: process.env.GROQ_API_KEY,
    // },
    {
      name: "cohere",
      activeModel: "command-r-plus",
      apiKey: process.env.COHERE_API_KEY,
    },
  ],
  models: {
    openai: "gpt-4-turbo",
    anthropic: "claude-3-haiku-20240307",
    groq: "gemma2-9b-it",
    cohere: "command-r-plus",
  },
  apiKeys: {
    openai: process.env.OPENAI_API_KEY,
    anthropic: process.env.ANTHROPIC_API_KEY,
    groq: process.env.GROQ_API_KEY,
    cohere: process.env.COHERE_API_KEY,
  },

  createTestClient: (provider: Provider) => {
    return createMem0({
      provider: provider.name,
      mem0ApiKey: process.env.MEM0_API_KEY,
      apiKey: provider.apiKey,
    });
  },
  fetchDeleteId: async function () {
    const options = {
      method: 'GET',
      headers: {
        Authorization: `Token ${this.apiKey}`,
      },
    };

    try {
      const response = await fetch('https://api.mem0.ai/v1/entities/', options);
      const data = await response.json();
      const entity = data.results.find((item: any) => item.name === this.userId);
      if (entity) {
        this.deleteId = entity.id;
      } else {
        console.error("No matching entity found for userId:", this.userId);
      }
    } catch (error) {
      console.error("Error fetching deleteId:", error);
      throw error;
    }
  },
  deleteUser: async function () {
    if (!this.deleteId) {
      console.error("deleteId is not set. Ensure fetchDeleteId is called first.");
      return;
    }

    const options = {
      method: 'DELETE',
      headers: {
        Authorization: `Token ${this.apiKey}`,
      },
    };

    try {
      const response = await fetch(`https://api.mem0.ai/v1/entities/user/${this.deleteId}/`, options);
      if (!response.ok) {
        throw new Error(`Failed to delete user: ${response.statusText}`);
      }
      await response.json();
    } catch (error) {
      console.error("Error deleting user:", error);
      throw error;
    }
  },
};
6
vercel-ai-sdk/jest.config.js
Normal file
@@ -0,0 +1,6 @@
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  globalTeardown: './teardown.ts',
};
5
vercel-ai-sdk/nodemon.json
Normal file
@@ -0,0 +1,5 @@
{
  "watch": ["src"],
  "ext": ".ts,.js",
  "exec": "ts-node ./example/index.ts"
}
69
vercel-ai-sdk/package.json
Normal file
@@ -0,0 +1,69 @@
{
  "name": "@mem0/vercel-ai-provider",
  "version": "0.0.7",
  "description": "Vercel AI Provider for providing memory to LLMs",
  "main": "./dist/index.js",
  "module": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
  "files": [
    "dist/**/*"
  ],
  "scripts": {
    "build": "tsup",
    "clean": "rm -rf dist",
    "dev": "nodemon",
    "lint": "eslint \"./**/*.ts*\"",
    "type-check": "tsc --noEmit",
    "prettier-check": "prettier --check \"./**/*.ts*\"",
    "test": "jest",
    "test:edge": "vitest --config vitest.edge.config.js --run",
    "test:node": "vitest --config vitest.node.config.js --run"
  },
  "keywords": [
    "ai",
    "vercel-ai"
  ],
  "author": "Saket Aryan <saketaryan2002@gmail.com>",
  "license": "Apache-2.0",
  "dependencies": {
    "@ai-sdk/anthropic": "^0.0.54",
    "@ai-sdk/cohere": "^0.0.28",
    "@ai-sdk/groq": "^0.0.3",
    "@ai-sdk/openai": "^0.0.71",
    "@ai-sdk/provider": "^0.0.26",
    "@ai-sdk/provider-utils": "^1.0.22",
    "ai": "^3.4.31",
    "dotenv": "^16.4.5",
    "partial-json": "0.1.7",
    "ts-node": "^10.9.2",
    "zod": "^3.0.0"
  },
  "devDependencies": {
    "@edge-runtime/vm": "^3.2.0",
    "@types/jest": "^29.5.14",
    "@types/node": "^18.19.46",
    "jest": "^29.7.0",
    "nodemon": "^3.1.7",
    "ts-jest": "^29.2.5",
    "tsup": "^8.3.0",
    "typescript": "5.5.4"
  },
  "peerDependencies": {
    "zod": "^3.0.0"
  },
  "peerDependenciesMeta": {
    "zod": {
      "optional": true
    }
  },
  "engines": {
    "node": ">=18"
  },
  "publishConfig": {
    "access": "public"
  },
  "directories": {
    "example": "example",
    "test": "tests"
  }
}
4
vercel-ai-sdk/src/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from './mem0-facade'
export type { Mem0Provider, Mem0ProviderSettings } from './mem0-provider'
export { createMem0, mem0 } from './mem0-provider'
export { addMemories, retrieveMemories, searchMemories } from './mem0-utils'
150
vercel-ai-sdk/src/mem0-chat-language-model.ts
Normal file
@@ -0,0 +1,150 @@
/* eslint-disable camelcase */
import {
  LanguageModelV1,
  LanguageModelV1CallOptions,
  LanguageModelV1CallWarning,
  LanguageModelV1FinishReason,
  LanguageModelV1FunctionToolCall,
  LanguageModelV1LogProbs,
  LanguageModelV1ProviderMetadata,
  LanguageModelV1StreamPart,
} from "@ai-sdk/provider";

import { Mem0ChatModelId, Mem0ChatSettings, Mem0Config } from "./mem0-chat-settings";
import { Mem0ClassSelector } from "./mem0-provider-selector";
import { filterStream } from "./stream-utils";
import { OpenAIProviderSettings } from "@ai-sdk/openai";
import { Mem0ProviderSettings } from "./mem0-provider";

interface Mem0ChatConfig {
  baseURL: string;
  fetch?: typeof fetch;
  headers: () => Record<string, string | undefined>;
  provider: string;
  organization?: string;
  project?: string;
  name?: string;
  apiKey?: string;
  mem0_api_key?: string;
}

export class Mem0ChatLanguageModel implements LanguageModelV1 {
  readonly specificationVersion = "v1";
  readonly defaultObjectGenerationMode = "json";
  readonly supportsImageUrls = false;

  constructor(
    public readonly modelId: Mem0ChatModelId,
    public readonly settings: Mem0ChatSettings,
    public readonly config: Mem0ChatConfig,
    public readonly provider_config?: OpenAIProviderSettings
  ) {
    this.provider = config.provider;
  }

  provider: string;
  supportsStructuredOutputs?: boolean | undefined;

  async doGenerate(options: LanguageModelV1CallOptions): Promise<{
    text?: string;
    toolCalls?: Array<LanguageModelV1FunctionToolCall>;
    finishReason: LanguageModelV1FinishReason;
    usage: { promptTokens: number; completionTokens: number };
    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
    rawResponse?: { headers?: Record<string, string> };
    response?: { id?: string; timestamp?: Date; modelId?: string };
    warnings?: LanguageModelV1CallWarning[];
    providerMetadata?: LanguageModelV1ProviderMetadata;
    logprobs?: LanguageModelV1LogProbs;
  }> {
    try {
      const provider = this.config.provider;
      const mem0_api_key = this.config.mem0_api_key;
      const settings: Mem0ProviderSettings = {
        provider: provider,
        mem0ApiKey: mem0_api_key,
        apiKey: this.config.apiKey,
        modelType: "chat",
      };
      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
      const messagesPrompts = options.prompt;
      const model = selector.createProvider();
      const user_id = this.settings.user_id;
      const app_id = this.settings.app_id;
      const agent_id = this.settings.agent_id;
      const run_id = this.settings.run_id;
      const org_name = this.settings.org_name;
      const project_name = this.settings.project_name;
      const apiKey = mem0_api_key;

      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey };

      const ans = await model.generateText(messagesPrompts, config);

      return {
        text: ans.text,
        finishReason: ans.finishReason,
        usage: ans.usage,
        rawCall: {
          rawPrompt: options.prompt,
          rawSettings: {},
        },
        response: ans.response,
        warnings: ans.warnings,
      };
    } catch (error) {
      console.error("Error in doGenerate:", error);
      throw new Error("Failed to generate response.");
    }
  }

  async doStream(options: LanguageModelV1CallOptions): Promise<{
    stream: ReadableStream<LanguageModelV1StreamPart>;
    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
    rawResponse?: { headers?: Record<string, string> };
    warnings?: LanguageModelV1CallWarning[];
  }> {
    try {
      const provider = this.config.provider;
      const mem0_api_key = this.config.mem0_api_key;
      const settings: Mem0ProviderSettings = {
        provider: provider,
        mem0ApiKey: mem0_api_key,
        apiKey: this.config.apiKey,
        modelType: "chat",
      };
      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
      const messagesPrompts = options.prompt;
      const model = selector.createProvider();
      const user_id = this.settings.user_id;
      const app_id = this.settings.app_id;
      const agent_id = this.settings.agent_id;
      const run_id = this.settings.run_id;
      const org_name = this.settings.org_name;
      const project_name = this.settings.project_name;

      const apiKey = mem0_api_key;

      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey };
      const response = await model.streamText(messagesPrompts, config);
      // @ts-ignore
      const filteredStream = await filterStream(response.originalStream);
      return {
        // @ts-ignore
        stream: filteredStream,
        rawCall: {
          rawPrompt: options.prompt,
          rawSettings: {},
        },
        ...response,
      };
    } catch (error) {
      console.error("Error in doStream:", error);
      throw new Error("Streaming failed or method not implemented.");
    }
  }
}
36
vercel-ai-sdk/src/mem0-chat-settings.ts
Normal file
@@ -0,0 +1,36 @@
import { OpenAIChatSettings } from "@ai-sdk/openai/internal";

export type Mem0ChatModelId =
  | "o1-preview"
  | "o1-mini"
  | "gpt-4o"
  | "gpt-4o-2024-05-13"
  | "gpt-4o-2024-08-06"
  | "gpt-4o-audio-preview"
  | "gpt-4o-audio-preview-2024-10-01"
  | "gpt-4o-mini"
  | "gpt-4o-mini-2024-07-18"
  | "gpt-4-turbo"
  | "gpt-4-turbo-2024-04-09"
  | "gpt-4-turbo-preview"
  | "gpt-4-0125-preview"
  | "gpt-4-1106-preview"
  | "gpt-4"
  | "gpt-4-0613"
  | "gpt-3.5-turbo-0125"
  | "gpt-3.5-turbo"
  | "gpt-3.5-turbo-1106"
  | (string & NonNullable<unknown>);

export interface Mem0ChatSettings extends OpenAIChatSettings {
  user_id?: string;
  app_id?: string;
  agent_id?: string;
  run_id?: string;
  org_name?: string;
  project_name?: string;
  mem0ApiKey?: string;
  structuredOutputs?: boolean;
}

export interface Mem0Config extends Mem0ChatSettings {}
150
vercel-ai-sdk/src/mem0-completion-language-model.ts
Normal file
@@ -0,0 +1,150 @@
/* eslint-disable camelcase */
import {
  LanguageModelV1,
  LanguageModelV1CallOptions,
  LanguageModelV1CallWarning,
  LanguageModelV1FinishReason,
  LanguageModelV1FunctionToolCall,
  LanguageModelV1LogProbs,
  LanguageModelV1ProviderMetadata,
  LanguageModelV1StreamPart,
} from "@ai-sdk/provider";

import { Mem0ChatModelId, Mem0ChatSettings } from "./mem0-chat-settings";
import { Mem0ClassSelector } from "./mem0-provider-selector";
import { filterStream } from "./stream-utils";
import { Mem0Config } from "./mem0-completion-settings";
import { OpenAIProviderSettings } from "@ai-sdk/openai";
import { Mem0ProviderSettings } from "./mem0-provider";

interface Mem0CompletionConfig {
  baseURL: string;
  fetch?: typeof fetch;
  headers: () => Record<string, string | undefined>;
  provider: string;
  organization?: string;
  project?: string;
  name?: string;
  apiKey?: string;
  mem0_api_key?: string;
}

export class Mem0CompletionLanguageModel implements LanguageModelV1 {
  readonly specificationVersion = "v1";
  readonly defaultObjectGenerationMode = "json";
  readonly supportsImageUrls = false;

  constructor(
    public readonly modelId: Mem0ChatModelId,
    public readonly settings: Mem0ChatSettings,
    public readonly config: Mem0CompletionConfig,
    public readonly provider_config?: OpenAIProviderSettings
  ) {
    this.provider = config.provider;
  }

  provider: string;
  supportsStructuredOutputs?: boolean | undefined;

  async doGenerate(options: LanguageModelV1CallOptions): Promise<{
    text?: string;
    toolCalls?: Array<LanguageModelV1FunctionToolCall>;
    finishReason: LanguageModelV1FinishReason;
    usage: { promptTokens: number; completionTokens: number };
    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
    rawResponse?: { headers?: Record<string, string> };
    response?: { id?: string; timestamp?: Date; modelId?: string };
    warnings?: LanguageModelV1CallWarning[];
    providerMetadata?: LanguageModelV1ProviderMetadata;
    logprobs?: LanguageModelV1LogProbs;
  }> {
    try {
      const provider = this.config.provider;
      const mem0_api_key = this.config.mem0_api_key;
      const settings: Mem0ProviderSettings = {
        provider: provider,
        mem0ApiKey: mem0_api_key,
        apiKey: this.config.apiKey,
        modelType: "completion",
      };
      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
      const messagesPrompts = options.prompt;
      const model = selector.createProvider();
      const user_id = this.settings.user_id;
      const app_id = this.settings.app_id;
      const agent_id = this.settings.agent_id;
      const run_id = this.settings.run_id;
      const org_name = this.settings.org_name;
      const project_name = this.settings.project_name;
      const apiKey = mem0_api_key;

      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey, modelType: "completion" };

      const ans = await model.generateText(messagesPrompts, config);

      return {
        text: ans.text,
        finishReason: ans.finishReason,
        usage: ans.usage,
        rawCall: {
          rawPrompt: options.prompt,
          rawSettings: {},
        },
        response: ans.response,
        warnings: ans.warnings,
      };
    } catch (error) {
      console.error("Error in doGenerate:", error);
      throw new Error("Failed to generate response.");
    }
  }

  async doStream(options: LanguageModelV1CallOptions): Promise<{
    stream: ReadableStream<LanguageModelV1StreamPart>;
    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
    rawResponse?: { headers?: Record<string, string> };
    warnings?: LanguageModelV1CallWarning[];
  }> {
    try {
      const provider = this.config.provider;
      const mem0_api_key = this.config.mem0_api_key;
      const settings: Mem0ProviderSettings = {
        provider: provider,
        mem0ApiKey: mem0_api_key,
        apiKey: this.config.apiKey,
        modelType: "completion",
      };
      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
      const messagesPrompts = options.prompt;
      const model = selector.createProvider();
      const user_id = this.settings.user_id;
      const app_id = this.settings.app_id;
      const agent_id = this.settings.agent_id;
      const run_id = this.settings.run_id;
      const org_name = this.settings.org_name;
      const project_name = this.settings.project_name;

      const apiKey = mem0_api_key;

      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey, modelType: "completion" };
      const response = await model.streamText(messagesPrompts, config);
      // @ts-ignore
      const filteredStream = await filterStream(response.originalStream);
      return {
        // @ts-ignore
        stream: filteredStream,
        rawCall: {
          rawPrompt: options.prompt,
          rawSettings: {},
        },
        ...response,
      };
    } catch (error) {
      console.error("Error in doStream:", error);
      throw new Error("Streaming failed or method not implemented.");
    }
  }
}
19
vercel-ai-sdk/src/mem0-completion-settings.ts
Normal file
@@ -0,0 +1,19 @@
import { OpenAICompletionSettings } from "@ai-sdk/openai/internal";

export type Mem0CompletionModelId =
  | "gpt-3.5-turbo"
  | (string & NonNullable<unknown>);

export interface Mem0CompletionSettings extends OpenAICompletionSettings {
  user_id?: string;
  app_id?: string;
  agent_id?: string;
  run_id?: string;
  org_name?: string;
  project_name?: string;
  mem0ApiKey?: string;
  structuredOutputs?: boolean;
  modelType?: string;
}

export interface Mem0Config extends Mem0CompletionSettings {}
36
vercel-ai-sdk/src/mem0-facade.ts
Normal file
@@ -0,0 +1,36 @@
import { withoutTrailingSlash } from '@ai-sdk/provider-utils'

import { Mem0ChatLanguageModel } from './mem0-chat-language-model'
import { Mem0ChatModelId, Mem0ChatSettings } from './mem0-chat-settings'
import { Mem0ProviderSettings } from './mem0-provider'

export class Mem0 {
  readonly baseURL: string

  readonly headers?: Record<string, string>

  constructor(options: Mem0ProviderSettings = {
    provider: 'openai',
  }) {
    this.baseURL =
      withoutTrailingSlash(options.baseURL) ?? 'http://127.0.0.1:11434/api'

    this.headers = options.headers
  }

  private get baseConfig() {
    return {
      baseURL: this.baseURL,
      headers: () => ({
        ...this.headers,
      }),
    }
  }

  chat(modelId: Mem0ChatModelId, settings: Mem0ChatSettings = {}) {
    return new Mem0ChatLanguageModel(modelId, settings, {
      provider: 'openai',
      ...this.baseConfig,
    })
  }
}
148
vercel-ai-sdk/src/mem0-generic-language-model.ts
Normal file
@@ -0,0 +1,148 @@
/* eslint-disable camelcase */
import {
  LanguageModelV1,
  LanguageModelV1CallOptions,
  LanguageModelV1CallWarning,
  LanguageModelV1FinishReason,
  LanguageModelV1FunctionToolCall,
  LanguageModelV1LogProbs,
  LanguageModelV1ProviderMetadata,
  LanguageModelV1StreamPart,
} from "@ai-sdk/provider";

import { Mem0ChatModelId, Mem0ChatSettings, Mem0Config } from "./mem0-chat-settings";
import { Mem0ClassSelector } from "./mem0-provider-selector";
import { filterStream } from "./stream-utils";
import { OpenAIProviderSettings } from "@ai-sdk/openai";
import { Mem0ProviderSettings } from "./mem0-provider";

interface Mem0ChatConfig {
  baseURL: string;
  fetch?: typeof fetch;
  headers: () => Record<string, string | undefined>;
  provider: string;
  organization?: string;
  project?: string;
  name?: string;
  apiKey?: string;
  mem0_api_key?: string;
}

export class Mem0GenericLanguageModel implements LanguageModelV1 {
  readonly specificationVersion = "v1";
  readonly defaultObjectGenerationMode = "json";
  readonly supportsImageUrls = false;

  constructor(
    public readonly modelId: Mem0ChatModelId,
    public readonly settings: Mem0ChatSettings,
    public readonly config: Mem0ChatConfig,
    public readonly provider_config?: OpenAIProviderSettings
  ) {
    this.provider = config.provider;
  }

  provider: string;
  supportsStructuredOutputs?: boolean | undefined;

  async doGenerate(options: LanguageModelV1CallOptions): Promise<{
    text?: string;
    toolCalls?: Array<LanguageModelV1FunctionToolCall>;
    finishReason: LanguageModelV1FinishReason;
    usage: { promptTokens: number; completionTokens: number };
    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
    rawResponse?: { headers?: Record<string, string> };
    response?: { id?: string; timestamp?: Date; modelId?: string };
    warnings?: LanguageModelV1CallWarning[];
    providerMetadata?: LanguageModelV1ProviderMetadata;
    logprobs?: LanguageModelV1LogProbs;
  }> {
    try {
      const provider = this.config.provider;
      const mem0_api_key = this.config.mem0_api_key;
      const settings: Mem0ProviderSettings = {
        provider: provider,
        mem0ApiKey: mem0_api_key,
        apiKey: this.config.apiKey,
      };
      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
      const messagesPrompts = options.prompt;
      const model = selector.createProvider();
      const user_id = this.settings.user_id;
      const app_id = this.settings.app_id;
      const agent_id = this.settings.agent_id;
      const run_id = this.settings.run_id;
      const org_name = this.settings.org_name;
      const project_name = this.settings.project_name;
      const apiKey = mem0_api_key;

      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey };

      const ans = await model.generateText(messagesPrompts, config);

      return {
        text: ans.text,
        finishReason: ans.finishReason,
        usage: ans.usage,
        rawCall: {
          rawPrompt: options.prompt,
          rawSettings: {},
        },
        response: ans.response,
        warnings: ans.warnings,
      };
    } catch (error) {
      console.error("Error in doGenerate:", error);
      throw new Error("Failed to generate response.");
    }
  }

  async doStream(options: LanguageModelV1CallOptions): Promise<{
    stream: ReadableStream<LanguageModelV1StreamPart>;
    rawCall: { rawPrompt: unknown; rawSettings: Record<string, unknown> };
    rawResponse?: { headers?: Record<string, string> };
    warnings?: LanguageModelV1CallWarning[];
  }> {
    try {
      const provider = this.config.provider;
      const mem0_api_key = this.config.mem0_api_key;
      const settings: Mem0ProviderSettings = {
        provider: provider,
        mem0ApiKey: mem0_api_key,
        apiKey: this.config.apiKey,
      };
      const selector = new Mem0ClassSelector(this.modelId, settings, this.provider_config);
      const messagesPrompts = options.prompt;
      const model = selector.createProvider();
      const user_id = this.settings.user_id;
      const app_id = this.settings.app_id;
      const agent_id = this.settings.agent_id;
      const run_id = this.settings.run_id;
      const org_name = this.settings.org_name;
      const project_name = this.settings.project_name;

      const apiKey = mem0_api_key;

      const config: Mem0Config = { user_id, app_id, agent_id, run_id, org_name, project_name, mem0ApiKey: apiKey };
      const response = await model.streamText(messagesPrompts, config);
      // @ts-ignore
      const filteredStream = await filterStream(response.originalStream);
      return {
        // @ts-ignore
        stream: filteredStream,
        rawCall: {
          rawPrompt: options.prompt,
          rawSettings: {},
        },
        ...response,
      };
    } catch (error) {
      console.error("Error in doStream:", error);
      throw new Error("Streaming failed or method not implemented.");
    }
  }
}
34
vercel-ai-sdk/src/mem0-provider-selector.ts
Normal file
@@ -0,0 +1,34 @@
import { Mem0ProviderSettings } from "./mem0-provider";
import Mem0AITextGenerator, { ProviderSettings } from "./provider-response-provider";

class Mem0ClassSelector {
  modelId: string;
  provider_wrapper: string;
  model: string;
  config: Mem0ProviderSettings;
  provider_config?: ProviderSettings;
  static supportedProviders = ["openai", "anthropic", "cohere", "groq"];

  constructor(modelId: string, config: Mem0ProviderSettings, provider_config?: ProviderSettings) {
    this.modelId = modelId;
    this.provider_wrapper = config.provider || "openai";
    this.model = this.modelId;
    this.provider_config = provider_config;
    if (config) this.config = config;
    else this.config = {
      provider: this.provider_wrapper,
    };

    // Check if provider_wrapper is supported
    if (!Mem0ClassSelector.supportedProviders.includes(this.provider_wrapper)) {
      throw new Error(`Model not supported: ${this.provider_wrapper}`);
    }
  }

  createProvider() {
    return new Mem0AITextGenerator(this.provider_wrapper, this.model, this.config, this.provider_config || {});
  }
}

export { Mem0ClassSelector };
145
vercel-ai-sdk/src/mem0-provider.ts
Normal file
@@ -0,0 +1,145 @@
import { LanguageModelV1, ProviderV1 } from '@ai-sdk/provider'
import { withoutTrailingSlash } from '@ai-sdk/provider-utils'

import { Mem0ChatLanguageModel } from './mem0-chat-language-model'
import { Mem0ChatModelId, Mem0ChatSettings } from './mem0-chat-settings'
import { OpenAIProviderSettings } from '@ai-sdk/openai'
import { Mem0CompletionModelId, Mem0CompletionSettings } from './mem0-completion-settings'
import { Mem0GenericLanguageModel } from './mem0-generic-language-model'
import { Mem0CompletionLanguageModel } from './mem0-completion-language-model'

export interface Mem0Provider extends ProviderV1 {
  (modelId: Mem0ChatModelId, settings?: Mem0ChatSettings): LanguageModelV1

  chat(
    modelId: Mem0ChatModelId,
    settings?: Mem0ChatSettings,
  ): LanguageModelV1

  languageModel(
    modelId: Mem0ChatModelId,
    settings?: Mem0ChatSettings,
  ): LanguageModelV1

  completion(
    modelId: Mem0CompletionModelId,
    settings?: Mem0CompletionSettings,
  ): LanguageModelV1
}

export interface Mem0ProviderSettings extends OpenAIProviderSettings {
  baseURL?: string
  /**
   * Custom fetch implementation. You can use it as a middleware to intercept
   * requests or to provide a custom fetch implementation for e.g. testing.
   */
  fetch?: typeof fetch
  /**
   * @internal
   */
  generateId?: () => string
  /**
   * Custom headers to include in the requests.
   */
  headers?: Record<string, string>
  organization?: string;
  project?: string;
  name?: string;
  mem0ApiKey?: string;
  apiKey?: string;
  provider?: string;
  config?: OpenAIProviderSettings;
  modelType?: "completion" | "chat";
}

export function createMem0(
  options: Mem0ProviderSettings = {
    provider: "openai",
  },
): Mem0Provider {
  const baseURL =
    withoutTrailingSlash(options.baseURL) ?? 'http://127.0.0.1:11434/api'

  const getHeaders = () => ({
    ...options.headers,
  })

  const createGenericModel = (
    modelId: Mem0ChatModelId,
    settings: Mem0ChatSettings = {},
  ) =>
    new Mem0GenericLanguageModel(modelId, settings, {
      baseURL,
      fetch: options.fetch,
      headers: getHeaders,
      provider: options.provider || "openai",
      organization: options.organization,
      project: options.project,
      name: options.name,
      mem0_api_key: options.mem0ApiKey,
      apiKey: options.apiKey,
    }, options.config)

  const createChatModel = (
    modelId: Mem0ChatModelId,
    settings: Mem0ChatSettings = {},
  ) =>
    new Mem0ChatLanguageModel(modelId, settings, {
      baseURL,
      fetch: options.fetch,
      headers: getHeaders,
      provider: options.provider || "openai",
      organization: options.organization,
      project: options.project,
      name: options.name,
      mem0_api_key: options.mem0ApiKey,
      apiKey: options.apiKey,
    }, options.config)

  const createCompletionModel = (
    modelId: Mem0CompletionModelId,
    settings: Mem0CompletionSettings = {}
  ) =>
    new Mem0CompletionLanguageModel(
      modelId,
      settings,
      {
        baseURL,
        fetch: options.fetch,
        headers: getHeaders,
        provider: options.provider || "openai",
        organization: options.organization,
        project: options.project,
        name: options.name,
        mem0_api_key: options.mem0ApiKey,
        apiKey: options.apiKey
      },
      options.config
    );

  const provider = function (
    modelId: Mem0ChatModelId,
    settings?: Mem0ChatSettings,
  ) {
    if (new.target) {
      throw new Error(
        'The Mem0 model function cannot be called with the new keyword.',
      )
    }

    return createGenericModel(modelId, settings)
  }

  provider.chat = createChatModel
  provider.completion = createCompletionModel
  provider.languageModel = createChatModel

  return provider as unknown as Mem0Provider
}

export const mem0 = createMem0()
114
vercel-ai-sdk/src/mem0-utils.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { LanguageModelV1Prompt } from 'ai';
|
||||
import { Mem0Config } from './mem0-chat-settings';
|
||||
if (typeof process !== 'undefined' && process.env && process.env.NODE_ENV !== 'production') {
|
||||
// Dynamically import dotenv only in non-production environments
|
||||
import('dotenv').then((dotenv) => dotenv.config());
|
||||
}
|
||||
|
||||
const tokenIsPresent = (config?: Mem0Config)=>{
|
||||
if(!config && !config!.mem0ApiKey && (typeof process !== 'undefined' && process.env && !process.env.MEM0_API_KEY)){
|
||||
throw Error("MEM0_API_KEY is not present. Please set env MEM0_API_KEY as the value of your API KEY.");
|
||||
}
|
||||
}
|
||||
|
||||
interface Message {
  role: string;
  content: string | Array<{type: string, text: string}>;
}

const flattenPrompt = (prompt: LanguageModelV1Prompt) => {
  return prompt.map((part) => {
    if (part.role === "user") {
      return part.content
        .filter((obj) => obj.type === 'text')
        .map((obj) => obj.text)
        .join(" ");
    }
    return "";
  }).join(" ");
}

const searchInternalMemories = async (query: string, config?: Mem0Config, top_k: number = 5) => {
  tokenIsPresent(config);
  const filters = {
    OR: [
      { user_id: config?.user_id },
      { app_id: config?.app_id },
      { agent_id: config?.agent_id },
      { run_id: config?.run_id },
    ],
  };
  const options = {
    method: 'POST',
    headers: {
      Authorization: `Token ${config?.mem0ApiKey || (typeof process !== 'undefined' && process.env && process.env.MEM0_API_KEY) || ""}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ query, filters, top_k, version: "v2", org_name: config?.org_name, project_name: config?.project_name }),
  };
  const response = await fetch('https://api.mem0.ai/v2/memories/search/', options);
  const data = await response.json();
  return data;
}

const addMemories = async (messages: LanguageModelV1Prompt, config?: Mem0Config) => {
  tokenIsPresent(config);
  const message = flattenPrompt(messages);
  const response = await updateMemories([
    { role: "user", content: message },
    { role: "assistant", content: "Thank You!" },
  ], config);
  return response;
}

const updateMemories = async (messages: Array<Message>, config?: Mem0Config) => {
  tokenIsPresent(config);
  const options = {
    method: 'POST',
    headers: {
      Authorization: `Token ${config?.mem0ApiKey || (typeof process !== 'undefined' && process.env && process.env.MEM0_API_KEY) || ""}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ messages, ...config }),
  };

  const response = await fetch('https://api.mem0.ai/v1/memories/', options);
  const data = await response.json();
  return data;
}

const retrieveMemories = async (prompt: LanguageModelV1Prompt | string, config?: Mem0Config) => {
  tokenIsPresent(config);
  const message = typeof prompt === 'string' ? prompt : flattenPrompt(prompt);
  const systemPrompt = "These are the memories I have stored. Give more weightage to the question by users and try to answer that first. You have to modify your answer based on the memories I have provided. If the memories are irrelevant you can ignore them. Also don't reply to this section of the prompt, or the memories, they are only for your reference. The System prompt starts after text System Message: \n\n";
  const memories = await searchInternalMemories(message, config);
  let memoriesText = "";
  try {
    // @ts-ignore
    memoriesText = memories.map((memory: any) => {
      return `Memory: ${memory.memory}\n\n`;
    }).join("\n\n");
  } catch (e) {
    console.error("Error while parsing memories");
  }
  return `System Message: ${systemPrompt} ${memoriesText}`;
}

const searchMemories = async (prompt: LanguageModelV1Prompt | string, config?: Mem0Config) => {
  tokenIsPresent(config);
  const message = typeof prompt === 'string' ? prompt : flattenPrompt(prompt);
  let memories = [];
  try {
    // @ts-ignore
    memories = await searchInternalMemories(message, config);
  } catch (e) {
    console.error("Error while searching memories");
  }
  return memories;
}

export { addMemories, updateMemories, retrieveMemories, flattenPrompt, searchMemories };
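These helpers are also exercised by the tests later in this diff; a short standalone sketch (the user id is an illustrative assumption):

import { addMemories, retrieveMemories, searchMemories } from "./mem0-utils";
import { LanguageModelV1Prompt } from "ai";

const messages: LanguageModelV1Prompt = [
  { role: "user", content: [{ type: "text", text: "I love red cars." }] },
];

await addMemories(messages, { user_id: "alice" });  // store preferences
const system = await retrieveMemories("Which car would I prefer?", { user_id: "alice" });  // formatted system prompt
const hits = await searchMemories("cars", { user_id: "alice" });  // raw search results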
113
vercel-ai-sdk/src/provider-response-provider.ts
Normal file
@@ -0,0 +1,113 @@
import { createOpenAI, OpenAIProviderSettings } from "@ai-sdk/openai";
import { generateText as aiGenerateText, streamText as aiStreamText, LanguageModelV1Prompt } from "ai";
import { updateMemories, retrieveMemories, flattenPrompt } from "./mem0-utils";
import { Mem0Config } from "./mem0-chat-settings";
import { Mem0ProviderSettings } from "./mem0-provider";
import { CohereProviderSettings, createCohere } from "@ai-sdk/cohere";
import { AnthropicProviderSettings, createAnthropic } from "@ai-sdk/anthropic";
import { createGroq, GroqProviderSettings } from "@ai-sdk/groq";

export type Provider = ReturnType<typeof createOpenAI> | ReturnType<typeof createCohere> | ReturnType<typeof createAnthropic> | ReturnType<typeof createGroq> | any;
export type ProviderSettings = OpenAIProviderSettings | CohereProviderSettings | AnthropicProviderSettings | GroqProviderSettings;

class Mem0AITextGenerator {
  provider: Provider;
  model: string;
  provider_config?: ProviderSettings;
  config: Mem0ProviderSettings;

  constructor(provider: string, model: string, config: Mem0ProviderSettings, provider_config: ProviderSettings) {
    switch (provider) {
      case "openai":
        this.provider = createOpenAI({
          apiKey: config?.apiKey,
          ...provider_config,
        });
        // For OpenAI, optionally narrow to the completion or chat sub-provider.
        if (config?.modelType === "completion") {
          this.provider = createOpenAI({
            apiKey: config?.apiKey,
            ...provider_config,
          }).completion;
        } else if (config?.modelType === "chat") {
          this.provider = createOpenAI({
            apiKey: config?.apiKey,
            ...provider_config,
          }).chat;
        }
        break;
      case "cohere":
        this.provider = createCohere({
          apiKey: config?.apiKey,
          ...provider_config,
        });
        break;
      case "anthropic":
        this.provider = createAnthropic({
          apiKey: config?.apiKey,
          ...provider_config,
        });
        break;
      case "groq":
        this.provider = createGroq({
          apiKey: config?.apiKey,
          ...provider_config,
        });
        break;
      default:
        throw new Error("Invalid provider");
    }
    this.model = model;
    this.provider_config = provider_config;
    this.config = config!;
  }

  async generateText(prompt: LanguageModelV1Prompt, config: Mem0Config) {
    try {
      const flattenPromptResponse = flattenPrompt(prompt);
      const newPrompt = await retrieveMemories(prompt, config);
      const response = await aiGenerateText({
        // @ts-ignore
        model: this.provider(this.model),
        messages: prompt,
        system: newPrompt
      });

      await updateMemories([
        { role: "user", content: flattenPromptResponse },
        { role: "assistant", content: response.text },
      ], config);

      return response;
    } catch (error) {
      console.error("Error generating text:", error);
      throw error;
    }
  }

  async streamText(prompt: LanguageModelV1Prompt, config: Mem0Config) {
    try {
      const flattenPromptResponse = flattenPrompt(prompt);
      const newPrompt = await retrieveMemories(prompt, config);

      await updateMemories([
        { role: "user", content: flattenPromptResponse },
        { role: "assistant", content: "Thank You!" },
      ], config);

      const response = await aiStreamText({
        // @ts-ignore
        model: this.provider(this.model),
        messages: prompt,
        system: newPrompt
      });

      return response;
    } catch (error) {
      console.error("Error streaming text:", error);
      throw error;
    }
  }
}

export default Mem0AITextGenerator;
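A hedged sketch of constructing this class directly; the provider name, model id, and config values are illustrative, and this is one possible entry point rather than the only intended one:

const generator = new Mem0AITextGenerator(
  "openai",                  // one of: openai | cohere | anthropic | groq
  "gpt-4-turbo",             // assumption: any model id the chosen provider accepts
  { provider: "openai", apiKey: process.env.OPENAI_API_KEY },
  {},                        // provider-specific settings passthrough
);

const result = await generator.generateText(
  [{ role: "user", content: [{ type: "text", text: "Suggest me a good car to buy." }] }],
  { user_id: "alice" },      // scopes memory retrieval and updates
);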
28
vercel-ai-sdk/src/stream-utils.ts
Normal file
@@ -0,0 +1,28 @@
async function filterStream(originalStream: ReadableStream) {
  const reader = originalStream.getReader();
  const filteredStream = new ReadableStream({
    async start(controller) {
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          controller.close();
          break;
        }
        try {
          // Chunks may arrive as JSON strings; drop internal "step-finish" parts.
          const chunk = JSON.parse(value);
          if (chunk.type !== "step-finish") {
            controller.enqueue(value);
          }
        } catch (error) {
          // Chunks that are already objects (not JSON strings) land here.
          if (!(value.type === 'step-finish')) {
            controller.enqueue(value);
          }
        }
      }
    }
  });

  return filteredStream;
}

export { filterStream };
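A small self-contained sketch of the filter in action. The chunk shapes are assumptions based on the two branches above (JSON strings hit the try branch, plain objects the catch branch):

const source = new ReadableStream({
  start(controller) {
    controller.enqueue({ type: "text-delta", textDelta: "Hello" });
    controller.enqueue({ type: "step-finish" });  // will be dropped
    controller.enqueue(JSON.stringify({ type: "text-delta", textDelta: " world" }));
    controller.close();
  },
});

const filtered = await filterStream(source);
// Reading `filtered` yields only the two text-delta chunks.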
12
vercel-ai-sdk/teardown.ts
Normal file
@@ -0,0 +1,12 @@
import { testConfig } from './config/test-config';

export default async function () {
  console.log("Running global teardown...");
  try {
    await testConfig.fetchDeleteId();
    await testConfig.deleteUser();
    console.log("User deleted successfully after all tests.");
  } catch (error) {
    console.error("Failed to delete user after all tests:", error);
  }
}
110
vercel-ai-sdk/tests/anthropic-structured-ouput.test.ts
Normal file
@@ -0,0 +1,110 @@
import dotenv from "dotenv";
dotenv.config();

import { generateObject } from "ai";
import { testConfig } from "../config/test-config";
import { z } from "zod";

interface Provider {
  name: string;
  activeModel: string;
  apiKey: string | undefined;
}

const provider: Provider = {
  name: "anthropic",
  activeModel: "claude-3-5-sonnet-20240620",
  apiKey: process.env.ANTHROPIC_API_KEY,
}

describe("ANTHROPIC Structured Outputs", () => {
  const { userId } = testConfig;
  let mem0: ReturnType<typeof testConfig.createTestClient>;
  jest.setTimeout(30000);

  beforeEach(() => {
    mem0 = testConfig.createTestClient(provider);
  });

  describe("ANTHROPIC Object Generation Tests", () => {
    // Test 1: Generate a car preference object
    it("should generate a car preference object with name and steps", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        schema: z.object({
          car: z.object({
            name: z.string(),
            steps: z.array(z.string()),
          }),
        }),
        prompt: "Which car would I like?",
      });

      expect(object.car).toBeDefined();
      expect(typeof object.car.name).toBe("string");
      expect(Array.isArray(object.car.steps)).toBe(true);
      expect(object.car.steps.every((step) => typeof step === "string")).toBe(true);
    });

    // Test 2: Generate an array of car objects
    it("should generate an array of three car objects with name, class, and description", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        output: "array",
        schema: z.object({
          name: z.string(),
          class: z.string(),
          description: z.string(),
        }),
        prompt: "Write name of three cars that I would like.",
      });

      expect(Array.isArray(object)).toBe(true);
      expect(object.length).toBe(3);
      object.forEach((car) => {
        expect(car).toHaveProperty("name");
        expect(typeof car.name).toBe("string");
        expect(car).toHaveProperty("class");
        expect(typeof car.class).toBe("string");
        expect(car).toHaveProperty("description");
        expect(typeof car.description).toBe("string");
      });
    });

    // Test 3: Generate an enum for movie genre classification
    it("should classify the genre of a movie plot", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        output: "enum",
        enum: ["action", "comedy", "drama", "horror", "sci-fi"],
        prompt: 'Classify the genre of this movie plot: "A group of astronauts travel through a wormhole in search of a new habitable planet for humanity."',
      });

      expect(object).toBeDefined();
      expect(object).toBe("sci-fi");
    });

    // Test 4: Generate an object of car names without schema
    it("should generate an object with car names", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        output: "no-schema",
        prompt: "Write name of 3 cars that I would like.",
      });

      const carObject = object as { cars: string[] };

      expect(carObject).toBeDefined();
      expect(Array.isArray(carObject.cars)).toBe(true);
      expect(carObject.cars.length).toBe(3);
      expect(carObject.cars.every((car) => typeof car === "string")).toBe(true);
    });
  });
});
61
vercel-ai-sdk/tests/anthropic.test.ts
Normal file
@@ -0,0 +1,61 @@
import dotenv from "dotenv";
dotenv.config();

import { retrieveMemories } from "../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../config/test-config";
import { createAnthropic } from "@ai-sdk/anthropic";

describe("ANTHROPIC Functions", () => {
  const { userId } = testConfig;
  jest.setTimeout(30000);

  let anthropic: any;

  beforeEach(() => {
    anthropic = createAnthropic({
      apiKey: process.env.ANTHROPIC_API_KEY,
    });
  });

  it("should retrieve memories and generate text using ANTHROPIC provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore
      model: anthropic("claude-3-haiku-20240307"),
      messages: messages,
      system: memories,
    });

    // Expect text to be a string
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using ANTHROPIC provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore
      model: anthropic("claude-3-haiku-20240307"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});
60
vercel-ai-sdk/tests/cohere.test.ts
Normal file
@@ -0,0 +1,60 @@
import dotenv from "dotenv";
dotenv.config();

import { retrieveMemories } from "../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../config/test-config";
import { createCohere } from "@ai-sdk/cohere";

describe("COHERE Functions", () => {
  const { userId } = testConfig;
  jest.setTimeout(30000);
  let cohere: any;

  beforeEach(() => {
    cohere = createCohere({
      apiKey: process.env.COHERE_API_KEY,
    });
  });

  it("should retrieve memories and generate text using COHERE provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore
      model: cohere("command-r-plus"),
      messages: messages,
      system: memories,
    });

    // Expect text to be a string
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using COHERE provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore
      model: cohere("command-r-plus"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});
86
vercel-ai-sdk/tests/generate-output.test.ts
Normal file
@@ -0,0 +1,86 @@
import { generateText, LanguageModelV1Prompt, streamText } from "ai";
import { addMemories } from "../src";
import { testConfig } from "../config/test-config";

interface Provider {
  name: string;
  activeModel: string;
  apiKey: string | undefined;
}

describe.each(testConfig.providers)('TESTS: Generate/Stream Text with model %s', (provider: Provider) => {
  const { userId } = testConfig;
  let mem0: ReturnType<typeof testConfig.createTestClient>;
  jest.setTimeout(50000);

  beforeEach(() => {
    mem0 = testConfig.createTestClient(provider);
  });

  beforeAll(async () => {
    // Add some test memories before all tests
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "I love red cars." },
          { type: "text", text: "I like Toyota Cars." },
          { type: "text", text: "I prefer SUVs." },
        ],
      }
    ];
    await addMemories(messages, { user_id: userId });
  });

  it("should generate text using mem0 model", async () => {
    const { text } = await generateText({
      model: mem0(provider.activeModel, {
        user_id: userId,
      }),
      prompt: "Suggest me a good car to buy!",
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using provider with memories", async () => {
    const { text } = await generateText({
      model: mem0(provider.activeModel, {
        user_id: userId,
      }),
      messages: [
        {
          role: "user",
          content: [
            { type: "text", text: "Suggest me a good car to buy." },
            { type: "text", text: "Write only the car name and it's color." },
          ],
        }
      ],
    });
    // Expect text to be a string
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should stream text using Mem0 provider", async () => {
    const { textStream } = await streamText({
      model: mem0(provider.activeModel, {
        user_id: userId, // Use the uniform userId
      }),
      prompt: "Suggest me a good car to buy! Write only the car name and it's color.",
    });

    // Collect streamed text parts
    let streamedText = '';
    for await (const textPart of textStream) {
      streamedText += textPart;
    }

    // Ensure the streamed text is a string
    expect(typeof streamedText).toBe('string');
    expect(streamedText.length).toBeGreaterThan(0);
  });
});
61
vercel-ai-sdk/tests/groq.test.ts
Normal file
@@ -0,0 +1,61 @@
import dotenv from "dotenv";
dotenv.config();

import { retrieveMemories } from "../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../config/test-config";
import { createGroq } from "@ai-sdk/groq";

describe("GROQ Functions", () => {
  const { userId } = testConfig;
  jest.setTimeout(30000);

  let groq: any;

  beforeEach(() => {
    groq = createGroq({
      apiKey: process.env.GROQ_API_KEY,
    });
  });

  it("should retrieve memories and generate text using GROQ provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore
      model: groq("gemma2-9b-it"),
      messages: messages,
      system: memories,
    });

    // Expect text to be a string
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using GROQ provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore
      model: groq("gemma2-9b-it"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});
75
vercel-ai-sdk/tests/memory-core.test.ts
Normal file
@@ -0,0 +1,75 @@
import { addMemories, retrieveMemories } from "../src";
import { LanguageModelV1Prompt } from "ai";
import { testConfig } from "../config/test-config";

describe("Memory Core Functions", () => {
  const { userId } = testConfig;
  jest.setTimeout(10000);

  describe("addMemories", () => {
    it("should successfully add memories and return correct format", async () => {
      const messages: LanguageModelV1Prompt = [
        {
          role: "user",
          content: [
            { type: "text", text: "I love red cars." },
            { type: "text", text: "I like Toyota Cars." },
            { type: "text", text: "I prefer SUVs." },
          ],
        }
      ];

      const response = await addMemories(messages, { user_id: userId });

      expect(Array.isArray(response)).toBe(true);
      response.forEach((memory: { event: any; }) => {
        expect(memory).toHaveProperty('id');
        expect(memory).toHaveProperty('data');
        expect(memory).toHaveProperty('event');
        expect(memory.event).toBe('ADD');
      });
    });
  });

  describe("retrieveMemories", () => {
    beforeEach(async () => {
      // Add some test memories before each retrieval test
      const messages: LanguageModelV1Prompt = [
        {
          role: "user",
          content: [
            { type: "text", text: "I love red cars." },
            { type: "text", text: "I like Toyota Cars." },
            { type: "text", text: "I prefer SUVs." },
          ],
        }
      ];
      await addMemories(messages, { user_id: userId });
    });

    it("should retrieve memories with string prompt", async () => {
      const prompt = "Which car would I prefer?";
      const response = await retrieveMemories(prompt, { user_id: userId });

      expect(typeof response).toBe('string');
      expect(response.match(/Memory:/g)?.length).toBeGreaterThan(2);
    });

    it("should retrieve memories with array of prompts", async () => {
      const messages: LanguageModelV1Prompt = [
        {
          role: "user",
          content: [
            { type: "text", text: "Which car would I prefer?" },
            { type: "text", text: "Suggest me some cars" },
          ],
        }
      ];

      const response = await retrieveMemories(messages, { user_id: userId });

      expect(typeof response).toBe('string');
      expect(response.match(/Memory:/g)?.length).toBeGreaterThan(2);
    });
  });
});
110
vercel-ai-sdk/tests/openai-structured-ouput.test.ts
Normal file
@@ -0,0 +1,110 @@
import dotenv from "dotenv";
dotenv.config();

import { generateObject } from "ai";
import { testConfig } from "../config/test-config";
import { z } from "zod";

interface Provider {
  name: string;
  activeModel: string;
  apiKey: string | undefined;
}

const provider: Provider = {
  name: "openai",
  activeModel: "gpt-4-turbo",
  apiKey: process.env.OPENAI_API_KEY,
}

describe("OPENAI Structured Outputs", () => {
  const { userId } = testConfig;
  let mem0: ReturnType<typeof testConfig.createTestClient>;
  jest.setTimeout(30000);

  beforeEach(() => {
    mem0 = testConfig.createTestClient(provider);
  });

  describe("openai Object Generation Tests", () => {
    // Test 1: Generate a car preference object
    it("should generate a car preference object with name and steps", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        schema: z.object({
          car: z.object({
            name: z.string(),
            steps: z.array(z.string()),
          }),
        }),
        prompt: "Which car would I like?",
      });

      expect(object.car).toBeDefined();
      expect(typeof object.car.name).toBe("string");
      expect(Array.isArray(object.car.steps)).toBe(true);
      expect(object.car.steps.every((step) => typeof step === "string")).toBe(true);
    });

    // Test 2: Generate an array of car objects
    it("should generate an array of three car objects with name, class, and description", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        output: "array",
        schema: z.object({
          name: z.string(),
          class: z.string().describe('Cars should be "SUV", "Sedan", or "Hatchback"'),
          description: z.string(),
        }),
        prompt: "Write name of three cars that I would like.",
      });

      expect(Array.isArray(object)).toBe(true);
      expect(object.length).toBe(3);
      object.forEach((car) => {
        expect(car).toHaveProperty("name");
        expect(typeof car.name).toBe("string");
        expect(car).toHaveProperty("class");
        expect(typeof car.class).toBe("string");
        expect(car).toHaveProperty("description");
        expect(typeof car.description).toBe("string");
      });
    });

    // Test 3: Generate an enum for movie genre classification
    it("should classify the genre of a movie plot", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        output: "enum",
        enum: ["action", "comedy", "drama", "horror", "sci-fi"],
        prompt: 'Classify the genre of this movie plot: "A group of astronauts travel through a wormhole in search of a new habitable planet for humanity."',
      });

      expect(object).toBeDefined();
      expect(object).toBe("sci-fi");
    });

    // Test 4: Generate an object of car names without schema
    it("should generate an object with car names", async () => {
      const { object } = await generateObject({
        model: mem0(provider.activeModel, {
          user_id: userId,
        }),
        output: "no-schema",
        prompt: "Write name of 3 cars that I would like.",
      });

      const carObject = object as { cars: string[] };

      expect(carObject).toBeDefined();
      expect(Array.isArray(carObject.cars)).toBe(true);
      expect(carObject.cars.length).toBe(3);
      expect(carObject.cars.every((car) => typeof car === "string")).toBe(true);
    });
  });
});
58
vercel-ai-sdk/tests/openai.test.ts
Normal file
@@ -0,0 +1,58 @@
import dotenv from "dotenv";
dotenv.config();

import { retrieveMemories } from "../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../config/test-config";
import { createOpenAI } from "@ai-sdk/openai";

describe("OPENAI Functions", () => {
  const { userId } = testConfig;
  jest.setTimeout(30000);
  let openai: any;

  beforeEach(() => {
    openai = createOpenAI({
      apiKey: process.env.OPENAI_API_KEY,
    });
  });

  it("should retrieve memories and generate text using OpenAI provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      model: openai("gpt-4-turbo"),
      messages: messages,
      system: memories,
    });

    // Expect text to be a string
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using openai provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      model: openai("gpt-4-turbo"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});
77
vercel-ai-sdk/tests/text-properties.test.ts
Normal file
@@ -0,0 +1,77 @@
import { generateText, streamText } from "ai";
import { testConfig } from "../config/test-config";

interface Provider {
  name: string;
  activeModel: string;
  apiKey: string | undefined;
}

describe.each(testConfig.providers)('TEXT/STREAM PROPERTIES: Tests with model %s', (provider: Provider) => {
  const { userId } = testConfig;
  let mem0: ReturnType<typeof testConfig.createTestClient>;
  jest.setTimeout(50000);

  beforeEach(() => {
    mem0 = testConfig.createTestClient(provider);
  });

  it("should stream text with onChunk handler", async () => {
    const chunkTexts: string[] = [];
    const { textStream } = await streamText({
      model: mem0(provider.activeModel, {
        user_id: userId, // Use the uniform userId
      }),
      prompt: "Write only the name of the car I prefer and its color.",
      onChunk({ chunk }) {
        if (chunk.type === "text-delta") {
          // Store chunk text for assertions
          chunkTexts.push(chunk.textDelta);
        }
      },
    });

    // Wait for the stream to complete
    for await (const _ of textStream) {
    }

    // Ensure chunks are collected
    expect(chunkTexts.length).toBeGreaterThan(0);
    expect(chunkTexts.every((text) => typeof text === "string")).toBe(true);
  });

  it("should call onFinish handler without throwing an error", async () => {
    await streamText({
      model: mem0(provider.activeModel, {
        user_id: userId, // Use the uniform userId
      }),
      prompt: "Write only the name of the car I prefer and its color.",
      onFinish({ text, finishReason, usage }) {
      },
    });
  });

  it("should generate fullStream with expected usage", async () => {
    const {
      text, // combined text
      usage, // combined usage of all steps
    } = await generateText({
      model: mem0(provider.activeModel), // Ensure the model name is correct
      maxSteps: 5, // Enable multi-step calls
      experimental_continueSteps: true,
      prompt:
        "Suggest me some good cars to buy. Each response MUST HAVE at least 200 words.",
    });

    // Ensure text is a string
    expect(typeof text).toBe("string");

    // Check usage: token counts vary per run, so assert broad ranges rather than exact values
    expect(usage.promptTokens).toBeGreaterThanOrEqual(100);
    expect(usage.promptTokens).toBeLessThanOrEqual(500);
    expect(usage.completionTokens).toBeGreaterThanOrEqual(250); // Check completion tokens are above 250
    expect(usage.totalTokens).toBeGreaterThan(400); // Check total tokens are above 400
  });
});
29
vercel-ai-sdk/tsconfig.json
Normal file
@@ -0,0 +1,29 @@
{
  "$schema": "https://json.schemastore.org/tsconfig",
  "compilerOptions": {
    "composite": false,
    "declaration": true,
    "declarationMap": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "inlineSources": false,
    "isolatedModules": true,
    "moduleResolution": "node",
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "preserveWatchOutput": true,
    "skipLibCheck": true,
    "strict": true,
    "types": ["@types/node", "jest"],
    "jsx": "react-jsx",
    "lib": ["dom", "ES2021"],
    "module": "ESNext",
    "target": "ES2018",
    "stripInternal": true,
    "paths": {
      "@/*": ["./src/*"]
    }
  },
  "include": ["."],
  "exclude": ["dist", "build", "node_modules"]
}
10
vercel-ai-sdk/tsup.config.ts
Normal file
@@ -0,0 +1,10 @@
import { defineConfig } from 'tsup'

export default defineConfig([
  {
    dts: true,
    entry: ['src/index.ts'],
    format: ['cjs', 'esm'],
    sourcemap: true,
  },
])