From 44f2490667c1afa412878dc4f87eb452b0254a80 Mon Sep 17 00:00:00 2001
From: Dev Khant
Date: Thu, 10 Apr 2025 10:24:10 +0530
Subject: [PATCH] Doc: modify 2 examples to show OpenAIResponses API (#2525)

---
 docs/examples/personal-ai-tutor.mdx         | 22 ++++++++++----------
 docs/examples/personal-travel-assistant.mdx | 23 +++++++++++++--------
 2 files changed, 25 insertions(+), 20 deletions(-)

diff --git a/docs/examples/personal-ai-tutor.mdx b/docs/examples/personal-ai-tutor.mdx
index 71ebae1d..220577aa 100644
--- a/docs/examples/personal-ai-tutor.mdx
+++ b/docs/examples/personal-ai-tutor.mdx
@@ -20,6 +20,7 @@ pip install openai mem0ai
 Below is the complete code to create and interact with a Personalized AI Tutor using Mem0:
 
 ```python
+import os
 from openai import OpenAI
 from mem0 import Memory
 
@@ -54,22 +55,21 @@ class PersonalAITutor:
         :param question: The question to ask the AI.
         :param user_id: Optional user ID to associate with the memory.
         """
-        # Start a streaming chat completion request to the AI
-        stream = self.client.chat.completions.create(
-            model="gpt-4",
-            stream=True,
-            messages=[
-                {"role": "system", "content": "You are a personal AI Tutor."},
-                {"role": "user", "content": question}
-            ]
+        # Start a streaming response request to the AI
+        response = self.client.responses.create(
+            model="gpt-4o",
+            instructions="You are a personal AI Tutor.",
+            input=question,
+            stream=True
         )
+
         # Store the question in memory
         self.memory.add(question, user_id=user_id, metadata={"app_id": self.app_id})
 
         # Print the response from the AI in real-time
-        for chunk in stream:
-            if chunk.choices[0].delta.content is not None:
-                print(chunk.choices[0].delta.content, end="")
+        for event in response:
+            if event.type == "response.output_text.delta":
+                print(event.delta, end="")
 
     def get_memories(self, user_id=None):
         """
diff --git a/docs/examples/personal-travel-assistant.mdx b/docs/examples/personal-travel-assistant.mdx
index 6187725d..8894f143 100644
--- a/docs/examples/personal-travel-assistant.mdx
+++ b/docs/examples/personal-travel-assistant.mdx
@@ -63,18 +63,23 @@ class PersonalTravelAssistant:
     def ask_question(self, question, user_id):
         # Fetch previous related memories
         previous_memories = self.search_memories(question, user_id=user_id)
-        prompt = question
-        if previous_memories:
-            prompt = f"User input: {question}\n Previous memories: {previous_memories}"
-        self.messages.append({"role": "user", "content": prompt})
 
-        # Generate response using GPT-4o
-        response = self.client.chat.completions.create(
+        # Build the prompt
+        system_message = "You are a personal AI Assistant."
+
+        if previous_memories:
+            prompt = f"{system_message}\n\nUser input: {question}\nPrevious memories: {', '.join(previous_memories)}"
+        else:
+            prompt = f"{system_message}\n\nUser input: {question}"
+
+        # Generate response using Responses API
+        response = self.client.responses.create(
             model="gpt-4o",
-            messages=self.messages
+            input=prompt
         )
-        answer = response.choices[0].message.content
-        self.messages.append({"role": "assistant", "content": answer})
+
+        # Extract answer from the response
+        answer = response.output[0].content[0].text
 
         # Store the question in memory
         self.memory.add(question, user_id=user_id)
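For readers who want to try the updated call shapes outside the docs, here is a minimal, self-contained sketch (not part of the patch) of the two Responses API patterns the diff introduces: a streaming request, as in the tutor example, and a blocking request whose text is read back from the response object, as in the travel-assistant example. It assumes a recent `openai` SDK that includes the Responses API and an `OPENAI_API_KEY` set in the environment; the prompts and model name are placeholders.

```python
# Illustrative sketch, not part of the patch: the two Responses API shapes used above.
# Assumes a recent `openai` SDK with the Responses API and OPENAI_API_KEY in the
# environment; prompts and model name are placeholders.
from openai import OpenAI

client = OpenAI()

# Streaming, as in the tutor example: print text deltas as they arrive.
stream = client.responses.create(
    model="gpt-4o",
    instructions="You are a personal AI Tutor.",
    input="Explain Python list comprehensions.",
    stream=True,
)
for event in stream:
    if event.type == "response.output_text.delta":
        print(event.delta, end="")
print()

# Blocking, as in the travel-assistant example: read the text off the completed
# response object once the call returns.
response = client.responses.create(
    model="gpt-4o",
    input="Suggest three things to do in Kyoto.",
)
print(response.output[0].content[0].text)
```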