Adding v1.1 code snippet for personal-travel-assistant (#1785)
@@ -19,7 +19,99 @@ pip install openai mem0ai
Here's the complete code to create and interact with a Personalized AI Travel Assistant using Mem0:

<CodeGroup>

```python After v1.1
import os
from openai import OpenAI
from mem0 import Memory

# Set the OpenAI API key
os.environ['OPENAI_API_KEY'] = "sk-xxx"

config = {
    "llm": {
        "provider": "openai",
        "config": {
            "model": "gpt-4o",
            "temperature": 0.1,
            "max_tokens": 2000,
        }
    },
    "embedder": {
        "provider": "openai",
        "config": {
            "model": "text-embedding-3-large"
        }
    },
    "vector_store": {
        "provider": "qdrant",
        "config": {
            "collection_name": "test",
            "embedding_model_dims": 3072,
        }
    },
    "version": "v1.1",
}

class PersonalTravelAssistant:
    def __init__(self):
        self.client = OpenAI()
        self.memory = Memory.from_config(config)
        self.messages = [{"role": "system", "content": "You are a personal AI Assistant."}]

    def ask_question(self, question, user_id):
        # Fetch previous related memories
        previous_memories = self.search_memories(question, user_id=user_id)
        prompt = question
        if previous_memories:
            prompt = f"User input: {question}\n Previous memories: {previous_memories}"
        self.messages.append({"role": "user", "content": prompt})

        # Generate response using GPT-4o
        response = self.client.chat.completions.create(
            model="gpt-4o",
            messages=self.messages
        )
        answer = response.choices[0].message.content
        self.messages.append({"role": "assistant", "content": answer})

        # Store the question in memory
        self.memory.add(question, user_id=user_id)
        return answer

    def get_memories(self, user_id):
        memories = self.memory.get_all(user_id=user_id)
        return [m['memory'] for m in memories['memories']]

    def search_memories(self, query, user_id):
        memories = self.memory.search(query, user_id=user_id)
        return [m['memory'] for m in memories['memories']]

# Usage example
user_id = "traveler_123"
ai_assistant = PersonalTravelAssistant()

def main():
    while True:
        question = input("Question: ")
        if question.lower() in ['q', 'exit']:
            print("Exiting...")
            break

        answer = ai_assistant.ask_question(question, user_id=user_id)
        print(f"Answer: {answer}")
        memories = ai_assistant.get_memories(user_id=user_id)
        print("Memories:")
        for memory in memories:
            print(f"- {memory}")
        print("-----")

if __name__ == "__main__":
    main()
```

```python Before v1.1
import os
from openai import OpenAI
from mem0 import Memory
@@ -55,11 +147,11 @@ class PersonalTravelAssistant:
    def get_memories(self, user_id):
        memories = self.memory.get_all(user_id=user_id)
-        return [m['text'] for m in memories]
+        return [m['memory'] for m in memories['memories']]

    def search_memories(self, query, user_id):
        memories = self.memory.search(query, user_id=user_id)
-        return [m['text'] for m in memories]
+        return [m['memory'] for m in memories['memories']]

# Usage example
user_id = "traveler_123"
@@ -83,6 +175,8 @@ def main():
if __name__ == "__main__":
    main()
```

</CodeGroup>
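The functional change shown in the hunks above is the shape of the data returned by `Memory.get_all()` and `Memory.search()`: the v1.1 snippet reads `m['memory']` out of `memories['memories']`, while the pre-v1.1 snippet read `m['text']` from a plain list. As a rough sketch only (the helper name and sample strings are invented for illustration, and the two shapes are taken solely from the snippets above):

```python
# Illustrative helper (not part of the commit): normalises the two response
# shapes shown in the Before/After snippets above.
def extract_memory_texts(result):
    if isinstance(result, dict):
        # v1.1 shape shown above: {"memories": [{"memory": ...}, ...]}
        return [m["memory"] for m in result.get("memories", [])]
    # pre-v1.1 shape shown above: [{"text": ...}, ...]
    return [m["text"] for m in result]

# Mocked payloads mirroring the two snippets, purely for demonstration
print(extract_memory_texts({"memories": [{"memory": "Prefers window seats"}]}))
print(extract_memory_texts([{"text": "Prefers window seats"}]))
```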
## Key Components