Fix LLM config and Doc update for anthropic (#1983)
cookbooks/customer-support-chatbot.ipynb | 261 lines added (new file)
@@ -0,0 +1,261 @@
{
"cells": [
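{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Customer Support Chatbot with Mem0 and Anthropic Claude\n",
"\n",
"This cookbook builds a small customer support chatbot that stores and retrieves past interactions with Mem0 and generates replies with Anthropic's Claude. An OpenAI API key is also required because Mem0's default embedding model is OpenAI-based."
]
},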
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from typing import List, Dict\n",
"from mem0 import Memory\n",
"from datetime import datetime\n",
"import anthropic\n",
"\n",
"# Set up environment variables\n",
"os.environ[\"OPENAI_API_KEY\"] = \"your_openai_api_key\" # needed for embedding model\n",
"os.environ[\"ANTHROPIC_API_KEY\"] = \"your_anthropic_api_key\""
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"class SupportChatbot:\n",
"    def __init__(self):\n",
"        # Initialize Mem0 with Anthropic's Claude\n",
"        self.config = {\n",
"            \"llm\": {\n",
"                \"provider\": \"anthropic\",\n",
"                \"config\": {\n",
"                    \"model\": \"claude-3-5-sonnet-latest\",\n",
"                    \"temperature\": 0.1,\n",
"                    \"max_tokens\": 2000,\n",
"                }\n",
"            }\n",
"        }\n",
"        self.client = anthropic.Client(api_key=os.environ[\"ANTHROPIC_API_KEY\"])\n",
"        self.memory = Memory.from_config(self.config)\n",
"\n",
"        # Define support context\n",
"        self.system_context = \"\"\"\n",
"        You are a helpful customer support agent. Use the following guidelines:\n",
"        - Be polite and professional\n",
"        - Show empathy for customer issues\n",
"        - Reference past interactions when relevant\n",
"        - Maintain consistent information across conversations\n",
"        - If you're unsure about something, ask for clarification\n",
"        - Keep track of open issues and follow-ups\n",
"        \"\"\"\n",
"\n",
"    def store_customer_interaction(self,\n",
"                                   user_id: str,\n",
"                                   message: str,\n",
"                                   response: str,\n",
"                                   metadata: Dict = None):\n",
"        \"\"\"Store customer interaction in memory.\"\"\"\n",
"        if metadata is None:\n",
"            metadata = {}\n",
"\n",
"        # Add timestamp to metadata\n",
"        metadata[\"timestamp\"] = datetime.now().isoformat()\n",
"\n",
"        # Format conversation for storage\n",
"        conversation = [\n",
"            {\"role\": \"user\", \"content\": message},\n",
"            {\"role\": \"assistant\", \"content\": response}\n",
"        ]\n",
"\n",
"        # Store in Mem0\n",
"        self.memory.add(\n",
"            conversation,\n",
"            user_id=user_id,\n",
"            metadata=metadata\n",
"        )\n",
"\n",
"    def get_relevant_history(self, user_id: str, query: str) -> List[Dict]:\n",
"        \"\"\"Retrieve relevant past interactions.\"\"\"\n",
"        return self.memory.search(\n",
"            query=query,\n",
"            user_id=user_id,\n",
"            limit=5 # Adjust based on needs\n",
"        )\n",
"\n",
"    def handle_customer_query(self, user_id: str, query: str) -> str:\n",
"        \"\"\"Process customer query with context from past interactions.\"\"\"\n",
"\n",
"        # Get relevant past interactions\n",
"        relevant_history = self.get_relevant_history(user_id, query)\n",
"\n",
"        # Build context from relevant history\n",
"        context = \"Previous relevant interactions:\\n\"\n",
"        for memory in relevant_history:\n",
"            # Each search result is a single memory string, not a customer/support pair\n",
"            context += f\"- {memory['memory']}\\n\"\n",
"            context += \"---\\n\"\n",
"\n",
"        # Prepare prompt with context and current query\n",
"        prompt = f\"\"\"\n",
"        {self.system_context}\n",
"\n",
"        {context}\n",
"\n",
"        Current customer query: {query}\n",
"\n",
"        Provide a helpful response that takes into account any relevant past interactions.\n",
"        \"\"\"\n",
"\n",
"        # Generate response using Claude\n",
"        response = self.client.messages.create(\n",
"            model=\"claude-3-5-sonnet-latest\",\n",
"            messages=[{\"role\": \"user\", \"content\": prompt}],\n",
"            max_tokens=2000,\n",
"            temperature=0.1\n",
"        )\n",
"        response_text = response.content[0].text\n",
"\n",
"        # Store interaction (store the reply text, not the raw API response object)\n",
"        self.store_customer_interaction(\n",
"            user_id=user_id,\n",
"            message=query,\n",
"            response=response_text,\n",
"            metadata={\"type\": \"support_query\"}\n",
"        )\n",
"\n",
"        return response_text"
]
},
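{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before starting an interactive session, it can help to sanity-check the setup with a single query. The cell below is a minimal, optional sketch: it assumes the API keys set earlier are valid, and it uses `Memory.get_all` (mentioned in the deprecation warnings visible in the next cell's output) to peek at what Mem0 has stored for the user. The exact output format of `get_all` depends on the `api_version` in use."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional smoke test: run a single query end-to-end (requires valid API keys)\n",
"chatbot = SupportChatbot()\n",
"reply = chatbot.handle_customer_query(\"customer_bot\", \"My smartwatch keeps showing a connection error when pairing.\")\n",
"print(reply)\n",
"\n",
"# Peek at what Mem0 has stored for this user so far\n",
"print(chatbot.memory.get_all(user_id=\"customer_bot\"))"
]
},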
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Welcome to Customer Support! Type 'exit' to end the conversation.\n",
"Customer: Hi, I'm having trouble connecting my new smartwatch to the mobile app. It keeps showing a connection error.\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"/var/folders/5x/9kmqjfm947g5yh44m7fjk75r0000gn/T/ipykernel_91762/3000273332.py:55: DeprecationWarning: The current get_all API output format is deprecated. To use the latest format, set `api_version='v1.1'`. The current format will be removed in mem0ai 1.1.0 and later versions.\n",
"  return self.memory.search(\n",
"/var/folders/5x/9kmqjfm947g5yh44m7fjk75r0000gn/T/ipykernel_91762/3000273332.py:47: DeprecationWarning: The current add API output format is deprecated. To use the latest format, set `api_version='v1.1'`. The current format will be removed in mem0ai 1.1.0 and later versions.\n",
"  self.memory.add(\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Support: Hello! Thank you for reaching out about the connection issue with your smartwatch. I understand how frustrating it can be when a new device won't connect properly. I'll be happy to help you resolve this.\n",
"\n",
"To better assist you, could you please provide me with:\n",
"1. The model of your smartwatch\n",
"2. The type of phone you're using (iOS or Android)\n",
"3. Whether you've already installed the companion app and if it's the latest version\n",
"\n",
"In the meantime, here are some general troubleshooting steps you can try:\n",
"1. Ensure Bluetooth is enabled on your phone\n",
"2. Restart both your smartwatch and phone\n",
"3. Make sure your smartwatch is sufficiently charged\n",
"4. Check if your phone's operating system is up to date\n",
"\n",
"Once you provide those additional details, I can give you more specific guidance to resolve the connection error. Would you mind sharing that information? \n",
"\n",
"\n",
"Customer: The connection issue is still happening even after trying the steps you suggested.\n",
"Support: I apologize that you're still experiencing connection issues with your smartwatch even after trying the previous troubleshooting steps. I understand how frustrating this must be for you. Let's try some additional steps to resolve this persistent problem.\n",
"\n",
"Before we proceed, could you please confirm:\n",
"1. Which specific steps you've already attempted?\n",
"2. Are you seeing any particular error message on the screen?\n",
"3. What model of smartwatch and phone are you using?\n",
"\n",
"This information will help me provide more targeted solutions and avoid suggesting steps you've already tried. In the meantime, here are a couple of additional troubleshooting steps we can try:\n",
"\n",
"1. Completely reset the Bluetooth connection:\n",
"   - Turn off Bluetooth on your phone\n",
"   - Restart both your phone and smartwatch\n",
"   - Turn Bluetooth back on and attempt to pair again\n",
"\n",
"2. Check if your smartwatch firmware is up to date\n",
"\n",
"Would you be able to try these steps and let me know the results? I'll stay with you until we resolve this issue. \n",
"\n",
"\n",
"Customer: \n"
]
},
{
"ename": "BadRequestError",
"evalue": "Error code: 400 - {'error': {'message': \"'$.input' is invalid. Please check the API reference: https://platform.openai.com/docs/api-reference.\", 'type': 'invalid_request_error', 'param': None, 'code': None}}",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mBadRequestError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[3], line 16\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[1;32m 15\u001b[0m \u001b[38;5;66;03m# Handle the query and print the response\u001b[39;00m\n\u001b[0;32m---> 16\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[43mchatbot\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mhandle_customer_query\u001b[49m\u001b[43m(\u001b[49m\u001b[43muser_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 17\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mSupport:\u001b[39m\u001b[38;5;124m\"\u001b[39m, response, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n",
"Cell \u001b[0;32mIn[2], line 65\u001b[0m, in \u001b[0;36mSupportChatbot.handle_customer_query\u001b[0;34m(self, user_id, query)\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Process customer query with context from past interactions.\"\"\"\u001b[39;00m\n\u001b[1;32m 64\u001b[0m \u001b[38;5;66;03m# Get relevant past interactions\u001b[39;00m\n\u001b[0;32m---> 65\u001b[0m relevant_history \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_relevant_history\u001b[49m\u001b[43m(\u001b[49m\u001b[43muser_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 67\u001b[0m \u001b[38;5;66;03m# Build context from relevant history\u001b[39;00m\n\u001b[1;32m 68\u001b[0m context \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPrevious relevant interactions:\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n",
"Cell \u001b[0;32mIn[2], line 55\u001b[0m, in \u001b[0;36mSupportChatbot.get_relevant_history\u001b[0;34m(self, user_id, query)\u001b[0m\n\u001b[1;32m 53\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_relevant_history\u001b[39m(\u001b[38;5;28mself\u001b[39m, user_id: \u001b[38;5;28mstr\u001b[39m, query: \u001b[38;5;28mstr\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m List[Dict]:\n\u001b[1;32m 54\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Retrieve relevant past interactions.\"\"\"\u001b[39;00m\n\u001b[0;32m---> 55\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmemory\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msearch\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 56\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 57\u001b[0m \u001b[43m \u001b[49m\u001b[43muser_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muser_id\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 58\u001b[0m \u001b[43m \u001b[49m\u001b[43mlimit\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m5\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Adjust based on needs\u001b[39;49;00m\n\u001b[1;32m 59\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/mem0/memory/main.py:404\u001b[0m, in \u001b[0;36mMemory.search\u001b[0;34m(self, query, user_id, agent_id, run_id, limit, filters)\u001b[0m\n\u001b[1;32m 396\u001b[0m future_graph_entities \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 397\u001b[0m executor\u001b[38;5;241m.\u001b[39msubmit(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgraph\u001b[38;5;241m.\u001b[39msearch, query, filters, limit)\n\u001b[1;32m 398\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mversion \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mv1.1\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39menable_graph\n\u001b[1;32m 399\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 400\u001b[0m )\n\u001b[1;32m 402\u001b[0m concurrent\u001b[38;5;241m.\u001b[39mfutures\u001b[38;5;241m.\u001b[39mwait([future_memories, future_graph_entities] \u001b[38;5;28;01mif\u001b[39;00m future_graph_entities \u001b[38;5;28;01melse\u001b[39;00m [future_memories])\n\u001b[0;32m--> 404\u001b[0m original_memories \u001b[38;5;241m=\u001b[39m \u001b[43mfuture_memories\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mresult\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 405\u001b[0m graph_entities \u001b[38;5;241m=\u001b[39m future_graph_entities\u001b[38;5;241m.\u001b[39mresult() \u001b[38;5;28;01mif\u001b[39;00m future_graph_entities \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 407\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mversion \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mv1.1\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n",
"File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:449\u001b[0m, in \u001b[0;36mFuture.result\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 447\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m CancelledError()\n\u001b[1;32m 448\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_state \u001b[38;5;241m==\u001b[39m FINISHED:\n\u001b[0;32m--> 449\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__get_result\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 451\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_condition\u001b[38;5;241m.\u001b[39mwait(timeout)\n\u001b[1;32m 453\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_state \u001b[38;5;129;01min\u001b[39;00m [CANCELLED, CANCELLED_AND_NOTIFIED]:\n",
"File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/_base.py:401\u001b[0m, in \u001b[0;36mFuture.__get_result\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 399\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_exception:\n\u001b[1;32m 400\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 401\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_exception\n\u001b[1;32m 402\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 403\u001b[0m \u001b[38;5;66;03m# Break a reference cycle with the exception in self._exception\u001b[39;00m\n\u001b[1;32m 404\u001b[0m \u001b[38;5;28mself\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n",
"File \u001b[0;32m/Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/concurrent/futures/thread.py:58\u001b[0m, in \u001b[0;36m_WorkItem.run\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 55\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m\n\u001b[1;32m 57\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 58\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 59\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m 60\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfuture\u001b[38;5;241m.\u001b[39mset_exception(exc)\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/mem0/memory/main.py:423\u001b[0m, in \u001b[0;36mMemory._search_vector_store\u001b[0;34m(self, query, filters, limit)\u001b[0m\n\u001b[1;32m 422\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_search_vector_store\u001b[39m(\u001b[38;5;28mself\u001b[39m, query, filters, limit):\n\u001b[0;32m--> 423\u001b[0m embeddings \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membedding_model\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membed\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 424\u001b[0m memories \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mvector_store\u001b[38;5;241m.\u001b[39msearch(query\u001b[38;5;241m=\u001b[39membeddings, limit\u001b[38;5;241m=\u001b[39mlimit, filters\u001b[38;5;241m=\u001b[39mfilters)\n\u001b[1;32m 426\u001b[0m excluded_keys \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 427\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muser_id\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 428\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124magent_id\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 433\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mupdated_at\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m 434\u001b[0m }\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/mem0/embeddings/openai.py:32\u001b[0m, in \u001b[0;36mOpenAIEmbedding.embed\u001b[0;34m(self, text)\u001b[0m\n\u001b[1;32m 22\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 23\u001b[0m \u001b[38;5;124;03mGet the embedding for the given text using OpenAI.\u001b[39;00m\n\u001b[1;32m 24\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 29\u001b[0m \u001b[38;5;124;03m list: The embedding vector.\u001b[39;00m\n\u001b[1;32m 30\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 31\u001b[0m text \u001b[38;5;241m=\u001b[39m text\u001b[38;5;241m.\u001b[39mreplace(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m \u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m---> 32\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mclient\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43membeddings\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\u001b[43mtext\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodel\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241m.\u001b[39mdata[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39membedding\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/openai/resources/embeddings.py:124\u001b[0m, in \u001b[0;36mEmbeddings.create\u001b[0;34m(self, input, model, dimensions, encoding_format, user, extra_headers, extra_query, extra_body, timeout)\u001b[0m\n\u001b[1;32m 118\u001b[0m embedding\u001b[38;5;241m.\u001b[39membedding \u001b[38;5;241m=\u001b[39m np\u001b[38;5;241m.\u001b[39mfrombuffer( \u001b[38;5;66;03m# type: ignore[no-untyped-call]\u001b[39;00m\n\u001b[1;32m 119\u001b[0m base64\u001b[38;5;241m.\u001b[39mb64decode(data), dtype\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mfloat32\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 120\u001b[0m )\u001b[38;5;241m.\u001b[39mtolist()\n\u001b[1;32m 122\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m obj\n\u001b[0;32m--> 124\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_post\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 125\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m/embeddings\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 126\u001b[0m \u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmaybe_transform\u001b[49m\u001b[43m(\u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43membedding_create_params\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mEmbeddingCreateParams\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 127\u001b[0m \u001b[43m \u001b[49m\u001b[43moptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmake_request_options\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 128\u001b[0m \u001b[43m \u001b[49m\u001b[43mextra_headers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_headers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 129\u001b[0m \u001b[43m \u001b[49m\u001b[43mextra_query\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_query\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 130\u001b[0m \u001b[43m \u001b[49m\u001b[43mextra_body\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_body\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 131\u001b[0m \u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 132\u001b[0m \u001b[43m \u001b[49m\u001b[43mpost_parser\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mparser\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 133\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 134\u001b[0m \u001b[43m \u001b[49m\u001b[43mcast_to\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mCreateEmbeddingResponse\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 135\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/openai/_base_client.py:1270\u001b[0m, in \u001b[0;36mSyncAPIClient.post\u001b[0;34m(self, path, cast_to, body, options, files, stream, stream_cls)\u001b[0m\n\u001b[1;32m 1256\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mpost\u001b[39m(\n\u001b[1;32m 1257\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 1258\u001b[0m path: \u001b[38;5;28mstr\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1265\u001b[0m stream_cls: \u001b[38;5;28mtype\u001b[39m[_StreamT] \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 1266\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ResponseT \u001b[38;5;241m|\u001b[39m _StreamT:\n\u001b[1;32m 1267\u001b[0m opts \u001b[38;5;241m=\u001b[39m FinalRequestOptions\u001b[38;5;241m.\u001b[39mconstruct(\n\u001b[1;32m 1268\u001b[0m method\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpost\u001b[39m\u001b[38;5;124m\"\u001b[39m, url\u001b[38;5;241m=\u001b[39mpath, json_data\u001b[38;5;241m=\u001b[39mbody, files\u001b[38;5;241m=\u001b[39mto_httpx_files(files), \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39moptions\n\u001b[1;32m 1269\u001b[0m )\n\u001b[0;32m-> 1270\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m cast(ResponseT, \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcast_to\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mopts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream_cls\u001b[49m\u001b[43m)\u001b[49m)\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/openai/_base_client.py:947\u001b[0m, in \u001b[0;36mSyncAPIClient.request\u001b[0;34m(self, cast_to, options, remaining_retries, stream, stream_cls)\u001b[0m\n\u001b[1;32m 944\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 945\u001b[0m retries_taken \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[0;32m--> 947\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 948\u001b[0m \u001b[43m \u001b[49m\u001b[43mcast_to\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcast_to\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 949\u001b[0m \u001b[43m \u001b[49m\u001b[43moptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43moptions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 950\u001b[0m \u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 951\u001b[0m \u001b[43m \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream_cls\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 952\u001b[0m \u001b[43m \u001b[49m\u001b[43mretries_taken\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mretries_taken\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 953\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n",
"File \u001b[0;32m~/Desktop/mem0/.venv/lib/python3.12/site-packages/openai/_base_client.py:1051\u001b[0m, in \u001b[0;36mSyncAPIClient._request\u001b[0;34m(self, cast_to, options, retries_taken, stream, stream_cls)\u001b[0m\n\u001b[1;32m 1048\u001b[0m err\u001b[38;5;241m.\u001b[39mresponse\u001b[38;5;241m.\u001b[39mread()\n\u001b[1;32m 1050\u001b[0m log\u001b[38;5;241m.\u001b[39mdebug(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mRe-raising status error\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m-> 1051\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_make_status_error_from_response(err\u001b[38;5;241m.\u001b[39mresponse) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1053\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_process_response(\n\u001b[1;32m 1054\u001b[0m cast_to\u001b[38;5;241m=\u001b[39mcast_to,\n\u001b[1;32m 1055\u001b[0m options\u001b[38;5;241m=\u001b[39moptions,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 1059\u001b[0m retries_taken\u001b[38;5;241m=\u001b[39mretries_taken,\n\u001b[1;32m 1060\u001b[0m )\n",
"\u001b[0;31mBadRequestError\u001b[0m: Error code: 400 - {'error': {'message': \"'$.input' is invalid. Please check the API reference: https://platform.openai.com/docs/api-reference.\", 'type': 'invalid_request_error', 'param': None, 'code': None}}"
]
}
],
"source": [
"chatbot = SupportChatbot()\n",
"user_id = \"customer_bot\"\n",
"print(\"Welcome to Customer Support! Type 'exit' to end the conversation.\")\n",
"\n",
"while True:\n",
"    # Get user input\n",
"    query = input()\n",
"    print(\"Customer:\", query)\n",
"\n",
"    # Check if user wants to exit\n",
"    if query.lower() == 'exit':\n",
"        print(\"Thank you for using our support service. Goodbye!\")\n",
"        break\n",
"\n",
"    # Skip empty input: embedding an empty query triggers the 400 error shown above\n",
"    if not query.strip():\n",
"        continue\n",
"\n",
"    # Handle the query and print the response\n",
"    response = chatbot.handle_customer_query(user_id, query)\n",
"    print(\"Support:\", response, \"\\n\\n\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}