Update multimodal example (#2335)
@@ -51,4 +51,4 @@
     "vite": "^6.2.1"
   },
   "packageManager": "pnpm@10.5.2+sha512.da9dc28cd3ff40d0592188235ab25d3202add8a207afbedc682220e4a0029ffbff4562102b9e6e46b4e3f9e8bd53e6d05de48544b0c57d4b0179e22c76d1199b"
 }
@@ -1,8 +1,8 @@
 import { useState } from 'react';
-import { MemoryClient } from 'saket-test';
+import { MemoryClient, Memory as Mem0Memory } from 'mem0ai';
 import { OpenAI } from 'openai';
 import { Message, Memory } from '@/types';
-import { WELCOME_MESSAGE, INVALID_CONFIG_MESSAGE, ERROR_MESSAGE, AI_MODELS, Provider } from '@/constants/messages';
+import { WELCOME_MESSAGE, INVALID_CONFIG_MESSAGE, ERROR_MESSAGE, Provider } from '@/constants/messages';
 
 interface UseChatProps {
   user: string;
@@ -30,32 +30,28 @@ interface PromptMessage {
   content: MessageContent;
 }
 
-export const useChat = ({ user, mem0ApiKey, openaiApiKey, provider }: UseChatProps): UseChatReturn => {
+export const useChat = ({ user, mem0ApiKey, openaiApiKey }: UseChatProps): UseChatReturn => {
   const [messages, setMessages] = useState<Message[]>([WELCOME_MESSAGE]);
-  const [memories, setMemories] = useState<Memory[]>([]);
+  const [memories, setMemories] = useState<Memory[]>();
   const [thinking, setThinking] = useState(false);
 
-  const openai = new OpenAI({ apiKey: openaiApiKey});
-  const memoryClient = new MemoryClient({ apiKey: mem0ApiKey });
+  const openai = new OpenAI({ apiKey: openaiApiKey, dangerouslyAllowBrowser: true});
 
   const updateMemories = async (messages: PromptMessage[]) => {
-    console.log(messages);
+    const memoryClient = new MemoryClient({ apiKey: mem0ApiKey || '' });
     try {
       await memoryClient.add(messages, {
         user_id: user,
-        output_format: "v1.1",
       });
 
       const response = await memoryClient.getAll({
         user_id: user,
-        page: 1,
-        page_size: 50,
       });
 
-      const newMemories = response.results.map((memory: any) => ({
-        id: memory.id,
-        content: memory.memory,
-        timestamp: memory.updated_at,
+      const newMemories = response.map((memory: Mem0Memory) => ({
+        id: memory.id || '',
+        content: memory.memory || '',
+        timestamp: String(memory.updated_at) || '',
         tags: memory.categories || [],
       }));
       setMemories(newMemories);
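
The reworked `updateMemories` above maps the `mem0ai` `getAll()` result (a plain array in this usage) straight into the app's `Memory` shape. A minimal standalone sketch of that mapping, assuming the field names shown in the diff's object literal; the helper names here are hypothetical, not part of the commit:

```ts
import { MemoryClient, Memory as Mem0Memory } from 'mem0ai';
import { Memory } from '@/types';

// Sketch of the new mapping, assuming the app's Memory type carries
// { id, content, timestamp, tags } as the diff's object literal suggests.
const toAppMemory = (memory: Mem0Memory): Memory => ({
  id: memory.id || '',
  content: memory.memory || '',
  // Note: String(undefined) is "undefined" (truthy), so the || '' fallback
  // only guards an empty-string result, mirroring the diff as written.
  timestamp: String(memory.updated_at) || '',
  tags: memory.categories || [],
});

// Hypothetical helper: fetch and map every memory for a user.
async function fetchMemories(apiKey: string, userId: string): Promise<Memory[]> {
  const client = new MemoryClient({ apiKey });
  const results = await client.getAll({ user_id: userId });
  return results.map(toAppMemory);
}
```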
@@ -88,6 +84,8 @@ export const useChat = ({ user, mem0ApiKey, openaiApiKey, provider }: UseChatPro
   const sendMessage = async (content: string, fileData?: { type: string; data: string | Buffer }) => {
     if (!content.trim() && !fileData) return;
 
+    const memoryClient = new MemoryClient({ apiKey: mem0ApiKey || '' });
+
     if (!user) {
       const newMessage: Message = {
         id: Date.now().toString(),
@@ -127,37 +125,36 @@ export const useChat = ({ user, mem0ApiKey, openaiApiKey, provider }: UseChatPro
     // Check if any message has image content
     const hasImage = messagesForLLM.some(msg => {
       if (typeof msg.content === 'object' && msg.content !== null) {
-        const content = msg.content as any;
-        return content.type === 'image_url';
+        const content = msg.content as MessageContent;
+        return typeof content === 'object' && content !== null && 'type' in content && content.type === 'image_url';
       }
       return false;
     });
 
     // For image messages, only use the text content
     if (hasImage) {
-      messagesForLLM = [{
-        role: 'user',
-        content: userMessage.content
-      }];
+      messagesForLLM = [
+        ...messagesForLLM,
+        {
+          role: 'user',
+          content: userMessage.content
+        }
+      ];
     }
 
     // Fetch relevant memories if there's an image
     let relevantMemories = '';
-    if (hasImage) {
     try {
       const searchResponse = await memoryClient.getAll({
-        user_id: user,
-        page: 1,
-        page_size: 10,
+        user_id: user
       });
 
-      relevantMemories = searchResponse.results
-        .map((memory: any) => `Previous context: ${memory.memory}`)
+      relevantMemories = searchResponse
+        .map((memory: Mem0Memory) => `Previous context: ${memory.memory}`)
         .join('\n');
     } catch (error) {
       console.error('Error fetching memories:', error);
     }
-    }
 
     // Add a system message with memories context if there are memories and image
     if (relevantMemories.length > 0 && hasImage) {
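
The rewritten `hasImage` guard above narrows the message content toward OpenAI's `image_url` content-part format without an `any` cast. The app's actual `MessageContent` union is defined elsewhere in the example, so the following is only an assumed sketch of its rough shape:

```ts
// Assumed shape of the app's MessageContent union (an illustration, not the
// committed definition); the image_url variant mirrors OpenAI's multimodal
// content-part format.
type MessageContent =
  | string
  | { type: 'text'; text: string }
  | { type: 'image_url'; image_url: { url: string } };

// The guard in the diff then narrows like so:
const isImagePart = (content: MessageContent): boolean =>
  typeof content === 'object' && content !== null &&
  'type' in content && content.type === 'image_url';
```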
@@ -170,12 +167,18 @@ export const useChat = ({ user, mem0ApiKey, openaiApiKey, provider }: UseChatPro
       ];
     }
 
-    console.log('Messages for LLM:', messagesForLLM);
+    const generateRandomId = () => {
+      return Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
+    }
+
     const completion = await openai.chat.completions.create({
-      model: "gpt-4",
+      model: "gpt-4o-mini",
+      // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+      // @ts-expect-error
       messages: messagesForLLM.map(msg => ({
-        role: msg.role,
-        content: msg.content
+        role: msg.role === 'user' ? 'user' : 'assistant',
+        content: typeof msg.content === 'object' && msg.content !== null ? [msg.content] : msg.content,
+        name: generateRandomId(),
       })),
       stream: true,
     });
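
Because the request is created with `stream: true`, the openai v4 SDK returns an async iterable of chunks rather than a single response. A self-contained sketch of how such a stream is typically drained; the helper name and the structural chunk type (standing in for the SDK's `ChatCompletionChunk`) are illustrative:

```ts
// Accumulate streamed assistant text from a chat completion created with
// stream: true; each chunk carries an incremental delta.
async function readStream(
  completion: AsyncIterable<{ choices: { delta?: { content?: string | null } }[] }>
): Promise<string> {
  let assistantText = '';
  for await (const chunk of completion) {
    assistantText += chunk.choices[0]?.delta?.content ?? '';
  }
  return assistantText;
}
```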
@@ -213,7 +216,7 @@ export const useChat = ({ user, mem0ApiKey, openaiApiKey, provider }: UseChatPro
 
   return {
     messages,
-    memories,
+    memories: memories || [],
     thinking,
     sendMessage,
   };
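
Taken together, a consumer of the updated hook might look like the following sketch; everything not shown in the diff (the import path, the component, its markup, and the prop types) is hypothetical:

```tsx
import { useChat } from './useChat'; // hypothetical path

function ChatPanel({ user, mem0ApiKey, openaiApiKey }: {
  user: string;
  mem0ApiKey: string;
  openaiApiKey: string;
}) {
  // `provider` is no longer part of UseChatProps, and `memories` is always an
  // array thanks to the `memories: memories || []` fallback at the return site.
  const { messages, memories, thinking, sendMessage } = useChat({ user, mem0ApiKey, openaiApiKey });

  return (
    <div>
      <p>{messages.length} messages, {memories.length} memories</p>
      {thinking && <p>Thinking…</p>}
      <button onClick={() => sendMessage('Hello!')}>Send</button>
    </div>
  );
}
```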