Formatting (#2750)
@@ -13,7 +13,7 @@
 "import anthropic\n",
 "\n",
 "# Set up environment variables\n",
-"os.environ[\"OPENAI_API_KEY\"] = \"your_openai_api_key\" # needed for embedding model\n",
+"os.environ[\"OPENAI_API_KEY\"] = \"your_openai_api_key\"  # needed for embedding model\n",
 "os.environ[\"ANTHROPIC_API_KEY\"] = \"your_anthropic_api_key\""
 ]
 },
@@ -33,7 +33,7 @@
 " \"model\": \"claude-3-5-sonnet-latest\",\n",
 " \"temperature\": 0.1,\n",
 " \"max_tokens\": 2000,\n",
-" }\n",
+" },\n",
 " }\n",
 " }\n",
 " self.client = anthropic.Client(api_key=os.environ[\"ANTHROPIC_API_KEY\"])\n",
@@ -50,11 +50,7 @@
 " - Keep track of open issues and follow-ups\n",
 " \"\"\"\n",
 "\n",
-" def store_customer_interaction(self,\n",
-" user_id: str,\n",
-" message: str,\n",
-" response: str,\n",
-" metadata: Dict = None):\n",
+" def store_customer_interaction(self, user_id: str, message: str, response: str, metadata: Dict = None):\n",
 " \"\"\"Store customer interaction in memory.\"\"\"\n",
 " if metadata is None:\n",
 " metadata = {}\n",
@@ -63,24 +59,17 @@
 " metadata[\"timestamp\"] = datetime.now().isoformat()\n",
 "\n",
 " # Format conversation for storage\n",
-" conversation = [\n",
-" {\"role\": \"user\", \"content\": message},\n",
-" {\"role\": \"assistant\", \"content\": response}\n",
-" ]\n",
+" conversation = [{\"role\": \"user\", \"content\": message}, {\"role\": \"assistant\", \"content\": response}]\n",
 "\n",
 " # Store in Mem0\n",
-" self.memory.add(\n",
-" conversation,\n",
-" user_id=user_id,\n",
-" metadata=metadata\n",
-" )\n",
+" self.memory.add(conversation, user_id=user_id, metadata=metadata)\n",
 "\n",
 " def get_relevant_history(self, user_id: str, query: str) -> List[Dict]:\n",
 " \"\"\"Retrieve relevant past interactions.\"\"\"\n",
 " return self.memory.search(\n",
 " query=query,\n",
 " user_id=user_id,\n",
-" limit=5 # Adjust based on needs\n",
+" limit=5,  # Adjust based on needs\n",
 " )\n",
 "\n",
 " def handle_customer_query(self, user_id: str, query: str) -> str:\n",
@@ -112,15 +101,12 @@
 " model=\"claude-3-5-sonnet-latest\",\n",
 " messages=[{\"role\": \"user\", \"content\": prompt}],\n",
 " max_tokens=2000,\n",
-" temperature=0.1\n",
+" temperature=0.1,\n",
 " )\n",
 "\n",
 " # Store interaction\n",
 " self.store_customer_interaction(\n",
-" user_id=user_id,\n",
-" message=query,\n",
-" response=response,\n",
-" metadata={\"type\": \"support_query\"}\n",
+" user_id=user_id, message=query, response=response, metadata={\"type\": \"support_query\"}\n",
 " )\n",
 "\n",
 " return response.content[0].text"
@@ -203,12 +189,12 @@
 " # Get user input\n",
 " query = input()\n",
 " print(\"Customer:\", query)\n",
-" \n",
+"\n",
 " # Check if user wants to exit\n",
-" if query.lower() == 'exit':\n",
+" if query.lower() == \"exit\":\n",
 " print(\"Thank you for using our support service. Goodbye!\")\n",
 " break\n",
-" \n",
+"\n",
 " # Handle the query and print the response\n",
 " response = chatbot.handle_customer_query(user_id, query)\n",
 " print(\"Support:\", response, \"\\n\\n\")"