diff --git a/.gitignore b/.gitignore index 2dfa42cd..77254bff 100644 --- a/.gitignore +++ b/.gitignore @@ -182,3 +182,4 @@ notebooks/*.yaml # local directories for testing eval/ +qdrant_storage/ \ No newline at end of file diff --git a/docs/_snippets/get-help.mdx b/docs/_snippets/get-help.mdx new file mode 100644 index 00000000..0e62d3f8 --- /dev/null +++ b/docs/_snippets/get-help.mdx @@ -0,0 +1,11 @@ + + + Talk to founders + + + Join our slack community + + + Join our discord community + + diff --git a/docs/examples/customer-support-agent.mdx b/docs/examples/customer-support-agent.mdx new file mode 100644 index 00000000..58ecf33a --- /dev/null +++ b/docs/examples/customer-support-agent.mdx @@ -0,0 +1,106 @@ +--- +title: Customer Support AI Agent +--- + +You can create a personalized Customer Support AI Agent using Mem0. This guide will walk you through the necessary steps and provide the complete code to get you started. + +## Overview + +The Customer Support AI Agent leverages Mem0 to retain information across interactions, enabling a personalized and efficient support experience. + +## Setup + +Install the necessary packages using pip: + +```bash +pip install openai mem0ai +``` + +## Full Code Example + +Below is the simplified code to create and interact with a Customer Support AI Agent using Mem0: + +```python +from openai import OpenAI +from mem0 import Memory + +class CustomerSupportAIAgent: + def __init__(self): + """ + Initialize the CustomerSupportAIAgent with memory configuration and OpenAI client. + """ + config = { + "vector_store": { + "provider": "qdrant", + "config": { + "host": "localhost", + "port": 6333, + } + }, + } + self.memory = Memory.from_config(config) + self.client = OpenAI() + self.app_id = "customer-support" + + def handle_query(self, query, user_id=None): + """ + Handle a customer query and store the relevant information in memory. + + :param query: The customer query to handle. + :param user_id: Optional user ID to associate with the memory. + """ + # Start a streaming chat completion request to the AI + stream = self.client.chat.completions.create( + model="gpt-4", + stream=True, + messages=[ + {"role": "system", "content": "You are a customer support AI agent."}, + {"role": "user", "content": query} + ] + ) + # Store the query in memory + self.memory.add(query, user_id=user_id, metadata={"app_id": self.app_id}) + + # Print the response from the AI in real-time + for chunk in stream: + if chunk.choices[0].delta.content is not None: + print(chunk.choices[0].delta.content, end="") + + def get_memories(self, user_id=None): + """ + Retrieve all memories associated with the given customer ID. + + :param user_id: Optional user ID to filter memories. + :return: List of memories. + """ + return self.memory.get_all(user_id=user_id) + +# Instantiate the CustomerSupportAIAgent +support_agent = CustomerSupportAIAgent() + +# Define a customer ID +customer_id = "jane_doe" + +# Handle a customer query +support_agent.handle_query("I need help with my recent order. It hasn't arrived yet.", user_id=customer_id) +``` + +### Fetching Memories + +You can fetch all the memories at any point in time using the following code: + +```python +memories = support_agent.get_memories(user_id=customer_id) +for m in memories: + print(m['text']) +``` + +### Key Points + +- **Initialization**: The CustomerSupportAIAgent class is initialized with the necessary memory configuration and OpenAI client setup. 
+- **Handling Queries**: The handle_query method sends a query to the AI and stores the relevant information in memory. +- **Retrieving Memories**: The get_memories method fetches all stored memories associated with a customer. + +### Conclusion + +As the conversation progresses, Mem0's memory automatically updates based on the interactions, providing a continuously improving personalized support experience. \ No newline at end of file diff --git a/docs/examples/overview.mdx b/docs/examples/overview.mdx new file mode 100644 index 00000000..934720d3 --- /dev/null +++ b/docs/examples/overview.mdx @@ -0,0 +1,28 @@ +--- +title: Overview +description: How to use mem0 in your existing applications? +--- + + +With Mem0, you can create stateful LLM-based applications such as chatbots, virtual assistants, or AI agents. Mem0 enhances your applications by providing a memory layer that makes responses: + +- More personalized +- More reliable +- Cost-effective by reducing the number of LLM interactions +- More engaging +- Enables long-term memory + +Here are some examples of how Mem0 can be integrated into various applications: + +## Example Use Cases + + + + + Build a Personalized AI Tutor that adapts to student progress and learning preferences. This tutor can offer tailored lessons, remember past interactions, and provide a more effective and engaging educational experience. + + + + Develop a Personal AI Assistant that can remember user preferences, past interactions, and context to provide personalized and efficient assistance. This assistant can manage tasks, provide reminders, and adapt to individual user needs, enhancing productivity and user experience. + + \ No newline at end of file diff --git a/docs/examples/personal-ai-tutor.mdx b/docs/examples/personal-ai-tutor.mdx new file mode 100644 index 00000000..3ccb4dab --- /dev/null +++ b/docs/examples/personal-ai-tutor.mdx @@ -0,0 +1,108 @@ +--- +title: Personalized AI Tutor +--- + +You can create a personalized AI Tutor using Mem0. This guide will walk you through the necessary steps and provide the complete code to get you started. + +## Overview + +The Personalized AI Tutor leverages Mem0 to retain information across interactions, enabling a tailored learning experience. By integrating with OpenAI's GPT-4 model, the tutor can provide detailed and context-aware responses to user queries. + +## Setup +Before you begin, ensure you have the required dependencies installed. You can install the necessary packages using pip: + +```bash +pip install openai mem0ai +``` + +## Full Code Example + +Below is the complete code to create and interact with a Personalized AI Tutor using Mem0: + +```python +from openai import OpenAI +from mem0 import Memory + +# Initialize the OpenAI client +client = OpenAI() + +class PersonalAITutor: + def __init__(self): + """ + Initialize the PersonalAITutor with memory configuration and OpenAI client. + """ + config = { + "vector_store": { + "provider": "qdrant", + "config": { + "host": "localhost", + "port": 6333, + } + }, + } + self.memory = Memory.from_config(config) + self.client = client + self.app_id = "app-1" + + def ask(self, question, user_id=None): + """ + Ask a question to the AI and store the relevant facts in memory + + :param question: The question to ask the AI. + :param user_id: Optional user ID to associate with the memory. 
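+        :return: None. The streamed response is printed to stdout; the question itself is also stored in memory for future context.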
+ """ + # Start a streaming chat completion request to the AI + stream = self.client.chat.completions.create( + model="gpt-4", + stream=True, + messages=[ + {"role": "system", "content": "You are a personal AI Tutor."}, + {"role": "user", "content": question} + ] + ) + # Store the question in memory + self.memory.add(question, user_id=user_id, metadata={"app_id": self.app_id}) + + # Print the response from the AI in real-time + for chunk in stream: + if chunk.choices[0].delta.content is not None: + print(chunk.choices[0].delta.content, end="") + + def get_memories(self, user_id=None): + """ + Retrieve all memories associated with the given user ID. + + :param user_id: Optional user ID to filter memories. + :return: List of memories. + """ + return self.memory.get_all(user_id=user_id) + +# Instantiate the PersonalAITutor +ai_tutor = PersonalAITutor() + +# Define a user ID +user_id = "john_doe" + +# Ask a question +ai_tutor.ask("I am learning introduction to CS. What is queue? Briefly explain.", user_id=user_id) +``` + +### Fetching Memories + +You can fetch all the memories at any point in time using the following code: + +```python +memories = ai_tutor.get_memories(user_id=user_id) +for m in memories: + print(m['text']) +``` + +### Key Points + +- **Initialization**: The PersonalAITutor class is initialized with the necessary memory configuration and OpenAI client setup. +- **Asking Questions**: The ask method sends a question to the AI and stores the relevant information in memory. +- **Retrieving Memories**: The get_memories method fetches all stored memories associated with a user. + +### Conclusion + +As the conversation progresses, Mem0's memory automatically updates based on the interactions, providing a continuously improving personalized learning experience. This setup ensures that the AI Tutor can offer contextually relevant and accurate responses, enhancing the overall educational process. diff --git a/docs/images/ai-assistant.png b/docs/images/ai-assistant.png new file mode 100644 index 00000000..b116a440 Binary files /dev/null and b/docs/images/ai-assistant.png differ diff --git a/docs/images/ai-tutor.png b/docs/images/ai-tutor.png new file mode 100644 index 00000000..abd4a9b8 Binary files /dev/null and b/docs/images/ai-tutor.png differ diff --git a/docs/images/customer-support-agent.png b/docs/images/customer-support-agent.png new file mode 100644 index 00000000..5ae06b36 Binary files /dev/null and b/docs/images/customer-support-agent.png differ diff --git a/docs/introduction.mdx b/docs/introduction.mdx deleted file mode 100644 index 3ad95bf1..00000000 --- a/docs/introduction.mdx +++ /dev/null @@ -1,191 +0,0 @@ ---- -title: Introduction -description: 'Welcome to the Mem0 documentation' ---- - -Mem0 is the long-term memory for AI Agents. - -## Installation - -```bash -pip install mem0ai -``` - -## Usage - -### Instantiate - -```python -from mem0 import Memory -m = Memory() -``` - -Mem0 uses Qdrant by default for storing the semantic memories. If you want to use Qdrant in server mode, use the following method to instantiate. 
- -Run qdrant first: - -```bash -docker pull qdrant/qdrant - -docker run -p 6333:6333 -p 6334:6334 \ - -v $(pwd)/qdrant_storage:/qdrant/storage:z \ - qdrant/qdrant -``` - -Then, instantiate memory with qdrant server: - -```python -from mem0 import Memory - -config = { - "vector_store": { - "provider": "qdrant", - "config": { - "host": "localhost", - "port": 6333, - } - }, -} - -m = Memory.from_config(config) -``` - -### Store a Memory - -```python -m.add("Likes to play cricket over weekend", user_id="deshraj", metadata={"foo": "bar"}) -# Output: -# [ -# { -# 'id': 'm1', -# 'event': 'add', -# 'data': 'Likes to play cricket over weekend' -# } -# ] - -# Similarly, you can store a memory for an agent -m.add("Agent X is best travel agent in Paris", agent_id="agent-x", metadata={"type": "long-term"}) -``` - -### Retrieve all memories - -#### 1. Get all memories -```python -m.get_all() -# Output: -# [ -# { -# 'id': 'm1', -# 'text': 'Likes to play cricket over weekend', -# 'metadata': { -# 'data': 'Likes to play cricket over weekend' -# } -# }, -# { -# 'id': 'm2', -# 'text': 'Agent X is best travel agent in Paris', -# 'metadata': { -# 'data': 'Agent X is best travel agent in Paris' -# } -# } -# ] - -``` -#### 2. Get memories for specific user - -```python -m.get_all(user_id="deshraj") -``` - -#### 3. Get memories for specific agent - -```python -m.get_all(agent_id="agent-x") -``` - -#### 4. Get memories for a user during an agent run - -```python -m.get_all(agent_id="agent-x", user_id="deshraj") -``` - -### Retrieve a Memory - -```python -memory_id = "m1" -m.get(memory_id) -# Output: -# { -# 'id': '1', -# 'text': 'Likes to play cricket over weekend', -# 'metadata': { -# 'data': 'Likes to play cricket over weekend' -# } -# } -``` - -### Search for related memories - -```python -m.search(query="What is my name", user_id="deshraj") -``` - -### Update a Memory - -```python -m.update(memory_id="m1", data="Likes to play tennis") -``` - -### Get history of a Memory - -```python -m.history(memory_id="m1") -# Output: -# [ -# { -# 'id': 'h1', -# 'memory_id': 'm1', -# 'prev_value': None, -# 'new_value': 'Likes to play cricket over weekend', -# 'event': 'add', -# 'timestamp': '2024-06-12 21:00:54.466687', -# 'is_deleted': 0 -# }, -# { -# 'id': 'h2', -# 'memory_id': 'm1', -# 'prev_value': 'Likes to play cricket over weekend', -# 'new_value': 'Likes to play tennis', -# 'event': 'update', -# 'timestamp': '2024-06-12 21:01:17.230943', -# 'is_deleted': 0 -# } -# ] -``` - -### Delete a Memory - -```python -m.delete(memory_id="m1") -``` - -### Delete memories of a user or agent - -```python -m.delete_all(user_id="deshraj") -m.delete_all(agent_id="agent-x") -``` - -### Delete all Memories - -```python -m.reset() -``` - -## Contributing - -Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change. Please make sure to update tests as appropriate. 
- -## License - -[Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) diff --git a/docs/logo/favicon.png b/docs/logo/favicon.png new file mode 100644 index 00000000..683d39cb Binary files /dev/null and b/docs/logo/favicon.png differ diff --git a/docs/mint.json b/docs/mint.json index f63ab237..98b18475 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -1,7 +1,7 @@ { "$schema": "https://mintlify.com/schema.json", "name": "Mem0.ai", - "favicon": "/logo/light.svg", + "favicon": "/logo/favicon.png", "colors": { "primary": "#3B2FC9", "light": "#6673FF", @@ -16,6 +16,12 @@ "light": "/logo/light.svg", "href": "https://github.com/embedchain/embedchain" }, + "tabs": [ + { + "name": "💡 Examples", + "url": "examples" + } + ], "topbarLinks": [ { "name": "Support", @@ -32,13 +38,27 @@ "name": "Discord", "icon": "discord", "url": "https://mem0.ai/discord/" + }, + { + "name": "Talk to founders", + "icon": "calendar", + "url": "https://cal.com/taranjeetio/meet" } ], "navigation": [ { "group": "Get Started", "pages": [ - "introduction" + "overview", + "quickstart" + ] + }, + { + "group": "💡 Examples", + "pages": [ + "examples/overview", + "examples/personal-ai-tutor", + "examples/customer-support-agent" ] } ], diff --git a/docs/overview.mdx b/docs/overview.mdx new file mode 100644 index 00000000..83fc107e --- /dev/null +++ b/docs/overview.mdx @@ -0,0 +1,59 @@ +--- +title: 📚 Overview +description: 'Welcome to the Mem0 docs!' +--- + +> Mem0 provides a smart, self-improving memory layer for Large Language Models, enabling personalized AI experiences across applications. + +## Core features + +- **User, Session, and AI Agent Memory**: Retains information across user sessions, interactions, and AI agents, ensuring continuity and context. +- **Adaptive Personalization**: Continuously improves personalization based on user interactions and feedback. +- **Developer-Friendly API**: Offers a straightforward API for seamless integration into various applications. +- **Platform Consistency**: Ensures consistent behavior and data across different platforms and devices. +- **Managed Service**: Provides a hosted solution for easy deployment and maintenance. + +If you are looking to quick start, jump to one of the following links: + + + + Jump to quickstart section to get started + + + Checkout curated examples + + + +## Common Use Cases + +- **Personalized Learning Assistants**: Long-term memory allows learning assistants to remember user preferences, past interactions, and progress, providing a more tailored and effective learning experience. + +- **Customer Support AI Agents**: By retaining information from previous interactions, customer support bots can offer more accurate and context-aware assistance, improving customer satisfaction and reducing resolution times. + +- **Healthcare Assistants**: Long-term memory enables healthcare assistants to keep track of patient history, medication schedules, and treatment plans, ensuring personalized and consistent care. + +- **Virtual Companions**: Virtual companions can use long-term memory to build deeper relationships with users by remembering personal details, preferences, and past conversations, making interactions more meaningful. + +- **Productivity Tools**: Long-term memory helps productivity tools remember user habits, frequently used documents, and task history, streamlining workflows and enhancing efficiency. 
+ +- **Gaming AI**: In gaming, AI with long-term memory can create more immersive experiences by remembering player choices, strategies, and progress, adapting the game environment accordingly. + +## How is Mem0 different from RAG? + +Mem0's memory implementation for Large Language Models (LLMs) offers several advantages over Retrieval-Augmented Generation (RAG): + +- **Entity Relationships**: Mem0 can understand and relate entities across different interactions, unlike RAG which retrieves information from static documents. This leads to a deeper understanding of context and relationships. + +- **Recency, Relevancy, and Decay**: Mem0 prioritizes recent interactions and gradually forgets outdated information, ensuring the memory remains relevant and up-to-date for more accurate responses. + +- **Contextual Continuity**: Mem0 retains information across sessions, maintaining continuity in conversations and interactions, which is essential for long-term engagement applications like virtual companions or personalized learning assistants. + +- **Adaptive Learning**: Mem0 improves its personalization based on user interactions and feedback, making the memory more accurate and tailored to individual users over time. + +- **Dynamic Updates**: Mem0 can dynamically update its memory with new information and interactions, unlike RAG which relies on static data. This allows for real-time adjustments and improvements, enhancing the user experience. + +These advanced memory capabilities make Mem0 a powerful tool for developers aiming to create personalized and context-aware AI applications. + +If you have any questions, please feel free to reach out to us using one of the following methods: + + \ No newline at end of file diff --git a/docs/quickstart.mdx b/docs/quickstart.mdx new file mode 100644 index 00000000..a6e55de2 --- /dev/null +++ b/docs/quickstart.mdx @@ -0,0 +1,195 @@ +--- +title: 🚀 Quickstart +description: 'Get started with Mem0 quickly!' +--- + +> Welcome to the Mem0 quickstart guide. This guide will help you get up and running with Mem0 in no time. + +## Installation + +To install Mem0, you can use pip. Run the following command in your terminal: + +```bash +pip install mem0ai +``` + +## Basic Usage + +### Initialize Mem0 + + + +```python +from mem0 import Memory +m = Memory() +``` + + +If you want to run Mem0 in production, initialize using the following method: + +Run Qdrant first: + +```bash +docker pull qdrant/qdrant + +docker run -p 6333:6333 -p 6334:6334 \ + -v $(pwd)/qdrant_storage:/qdrant/storage:z \ + qdrant/qdrant +``` + +Then, instantiate memory with qdrant server: + +```python +from mem0 import Memory + +config = { + "vector_store": { + "provider": "qdrant", + "config": { + "host": "localhost", + "port": 6333, + } + }, +} + +m = Memory.from_config(config) +``` + + + + +### Store a Memory + +```python +# For a user +result = m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"}) +print(result) +``` + +Output: +```python +[ + { + 'id': 'm1', + 'event': 'add', + 'data': 'Likes to play cricket on weekends' + } +] +``` + +### Retrieve Memories + +```python +# Get all memories +all_memories = m.get_all() +print(all_memories) +``` + +Output: + +```python +[ + { + 'id': 'm1', + 'text': 'Likes to play cricket on weekends', + 'metadata': { + 'data': 'Likes to play cricket on weekends', + 'category': 'hobbies' + } + }, + # ... other memories ... 
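+    # get_all() can also be filtered, e.g. m.get_all(user_id="alice")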
+] +``` + +```python +# Get a single memory by ID +specific_memory = m.get("m1") +print(specific_memory) +``` + +Output: +```python +{ + 'id': 'm1', + 'text': 'Likes to play cricket on weekends', + 'metadata': { + 'data': 'Likes to play cricket on weekends', + 'category': 'hobbies' + } +} +``` + +### Search Memories + +```python +related_memories = m.search(query="What are Alice's hobbies?", user_id="alice") +print(related_memories) +``` + +Output: + +```python +[ + { + 'id': 'm1', + 'text': 'Likes to play cricket on weekends', + 'metadata': { + 'data': 'Likes to play cricket on weekends', + 'category': 'hobbies' + }, + 'score': 0.85 # Similarity score + }, + # ... other related memories ... +] +``` + +### Update a Memory + +```python +result = m.update(memory_id="m1", data="Likes to play tennis on weekends") +print(result) +``` + +Output: + +```python +{ + 'id': 'm1', + 'event': 'update', + 'data': 'Likes to play tennis on weekends' +} +``` + +### Memory History + +```python +history = m.history(memory_id="m1") +print(history) +``` +Output: +```python +[ + { + 'id': 'h1', + 'memory_id': 'm1', + 'prev_value': None, + 'new_value': 'Likes to play cricket on weekends', + 'event': 'add', + 'timestamp': '2024-07-14 10:00:54.466687', + 'is_deleted': 0 + }, + { + 'id': 'h2', + 'memory_id': 'm1', + 'prev_value': 'Likes to play cricket on weekends', + 'new_value': 'Likes to play tennis on weekends', + 'event': 'update', + 'timestamp': '2024-07-14 10:15:17.230943', + 'is_deleted': 0 + } +] +``` + +If you have any questions, please feel free to reach out to us using one of the following methods: + + \ No newline at end of file diff --git a/embedchain/docs/mint.json b/embedchain/docs/mint.json index 0f70941a..5a0be0bd 100644 --- a/embedchain/docs/mint.json +++ b/embedchain/docs/mint.json @@ -52,7 +52,7 @@ "url": "https://embedchain.ai/slack" }, "primaryTab": { - "name": "Documentation" + "name": "📘 Documentation" }, "navigation": [ { diff --git a/embedchain/model_prices_and_context_window.json b/embedchain/model_prices_and_context_window.json deleted file mode 100644 index 1a9b6e4d..00000000 --- a/embedchain/model_prices_and_context_window.json +++ /dev/null @@ -1,803 +0,0 @@ -{ - "openai/gpt-4": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00003, - "output_cost_per_token": 0.00006 - }, - "openai/gpt-4o": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000005, - "output_cost_per_token": 0.000015 - }, - "openai/gpt-4o-2024-05-13": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000005, - "output_cost_per_token": 0.000015 - }, - "openai/gpt-4-turbo-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "openai/gpt-4-0314": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00003, - "output_cost_per_token": 0.00006 - }, - "openai/gpt-4-0613": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00003, - "output_cost_per_token": 0.00006 - }, - "openai/gpt-4-32k": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00006, - "output_cost_per_token": 0.00012 - }, - "openai/gpt-4-32k-0314": { - "max_tokens": 4096, - "max_input_tokens": 
32768, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00006, - "output_cost_per_token": 0.00012 - }, - "openai/gpt-4-32k-0613": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00006, - "output_cost_per_token": 0.00012 - }, - "openai/gpt-4-turbo": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "openai/gpt-4-turbo-2024-04-09": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "openai/gpt-4-1106-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "openai/gpt-4-0125-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "openai/gpt-3.5-turbo": { - "max_tokens": 4097, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "openai/gpt-3.5-turbo-0301": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "openai/gpt-3.5-turbo-0613": { - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "openai/gpt-3.5-turbo-1106": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000010, - "output_cost_per_token": 0.0000020 - }, - "openai/gpt-3.5-turbo-0125": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000005, - "output_cost_per_token": 0.0000015 - }, - "openai/gpt-3.5-turbo-16k": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000004 - }, - "openai/gpt-3.5-turbo-16k-0613": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000004 - }, - "openai/text-embedding-3-large": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "output_vector_size": 3072, - "input_cost_per_token": 0.00000013, - "output_cost_per_token": 0.000000 - }, - "openai/text-embedding-3-small": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "output_vector_size": 1536, - "input_cost_per_token": 0.00000002, - "output_cost_per_token": 0.000000 - }, - "openai/text-embedding-ada-002": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "output_vector_size": 1536, - "input_cost_per_token": 0.0000001, - "output_cost_per_token": 0.000000 - }, - "openai/text-embedding-ada-002-v2": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 0.0000001, - "output_cost_per_token": 0.000000 - }, - "openai/babbage-002": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000004, - "output_cost_per_token": 0.0000004 - }, - "openai/davinci-002": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000002, - "output_cost_per_token": 0.000002 - }, - "openai/gpt-3.5-turbo-instruct": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - 
"input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "openai/gpt-3.5-turbo-instruct-0914": { - "max_tokens": 4097, - "max_input_tokens": 8192, - "max_output_tokens": 4097, - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "azure/gpt-4o": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000005, - "output_cost_per_token": 0.000015 - }, - "azure/gpt-4-turbo-2024-04-09": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "azure/gpt-4-0125-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "azure/gpt-4-1106-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "azure/gpt-4-0613": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00003, - "output_cost_per_token": 0.00006 - }, - "azure/gpt-4-32k-0613": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00006, - "output_cost_per_token": 0.00012 - }, - "azure/gpt-4-32k": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00006, - "output_cost_per_token": 0.00012 - }, - "azure/gpt-4": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00003, - "output_cost_per_token": 0.00006 - }, - "azure/gpt-4-turbo": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "azure/gpt-4-turbo-vision-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00001, - "output_cost_per_token": 0.00003 - }, - "azure/gpt-3.5-turbo-16k-0613": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000004 - }, - "azure/gpt-3.5-turbo-1106": { - "max_tokens": 4096, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "azure/gpt-3.5-turbo-0125": { - "max_tokens": 4096, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000005, - "output_cost_per_token": 0.0000015 - }, - "azure/gpt-3.5-turbo-16k": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000004 - }, - "azure/gpt-3.5-turbo": { - "max_tokens": 4096, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0000005, - "output_cost_per_token": 0.0000015 - }, - "azure/gpt-3.5-turbo-instruct-0914": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "azure/gpt-3.5-turbo-instruct": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "input_cost_per_token": 0.0000015, - "output_cost_per_token": 0.000002 - }, - "azure/text-embedding-ada-002": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 0.0000001, - "output_cost_per_token": 0.000000 - }, 
- "azure/text-embedding-3-large": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 0.00000013, - "output_cost_per_token": 0.000000 - }, - "azure/text-embedding-3-small": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 0.00000002, - "output_cost_per_token": 0.000000 - }, - "mistralai/mistral-tiny": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.00000025 - }, - "mistralai/mistral-small": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000003 - }, - "mistralai/mistral-small-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000003 - }, - "mistralai/mistral-medium": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0000027, - "output_cost_per_token": 0.0000081 - }, - "mistralai/mistral-medium-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0000027, - "output_cost_per_token": 0.0000081 - }, - "mistralai/mistral-medium-2312": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0000027, - "output_cost_per_token": 0.0000081 - }, - "mistralai/mistral-large-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000004, - "output_cost_per_token": 0.000012 - }, - "mistralai/mistral-large-2402": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000004, - "output_cost_per_token": 0.000012 - }, - "mistralai/open-mistral-7b": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.00000025 - }, - "mistralai/open-mixtral-8x7b": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0000007, - "output_cost_per_token": 0.0000007 - }, - "mistralai/open-mixtral-8x22b": { - "max_tokens": 8191, - "max_input_tokens": 64000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000002, - "output_cost_per_token": 0.000006 - }, - "mistralai/codestral-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000003 - }, - "mistralai/codestral-2405": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000003 - }, - "mistralai/mistral-embed": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "input_cost_per_token": 0.0000001, - "output_cost_per_token": 0.0 - }, - "groq/llama2-70b-4096": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00000070, - "output_cost_per_token": 0.00000080 - }, - "groq/llama3-8b-8192": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000010, - "output_cost_per_token": 0.00000010 - }, - "groq/llama3-70b-8192": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000064, - "output_cost_per_token": 0.00000080 - }, 
- "groq/mixtral-8x7b-32768": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 0.00000027, - "output_cost_per_token": 0.00000027 - }, - "groq/gemma-7b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000010, - "output_cost_per_token": 0.00000010 - }, - "anthropic/claude-instant-1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.00000163, - "output_cost_per_token": 0.00000551 - }, - "anthropic/claude-instant-1.2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000000163, - "output_cost_per_token": 0.000000551 - }, - "anthropic/claude-2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000008, - "output_cost_per_token": 0.000024 - }, - "anthropic/claude-2.1": { - "max_tokens": 8191, - "max_input_tokens": 200000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.000008, - "output_cost_per_token": 0.000024 - }, - "anthropic/claude-3-haiku-20240307": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.00000125 - }, - "anthropic/claude-3-opus-20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000075 - }, - "anthropic/claude-3-sonnet-20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000015 - }, - "vertexai/chat-bison": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/chat-bison@001": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/chat-bison@002": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/chat-bison-32k": { - "max_tokens": 8192, - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/code-bison": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/code-bison@001": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/code-gecko@001": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/code-gecko@002": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/code-gecko": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/codechat-bison": { - "max_tokens": 1024, - "max_input_tokens": 6144, - 
"max_output_tokens": 1024, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/codechat-bison@001": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/codechat-bison-32k": { - "max_tokens": 8192, - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000125, - "output_cost_per_token": 0.000000125 - }, - "vertexai/gemini-pro": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/gemini-1.0-pro": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/gemini-1.0-pro-001": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/gemini-1.0-pro-002": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/gemini-1.5-pro": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000625, - "output_cost_per_token": 0.000001875 - }, - "vertexai/gemini-1.5-flash-001": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0 - }, - "vertexai/gemini-1.5-flash-preview-0514": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0 - }, - "vertexai/gemini-1.5-pro-001": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000625, - "output_cost_per_token": 0.000001875 - }, - "vertexai/gemini-1.5-pro-preview-0514": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000625, - "output_cost_per_token": 0.000001875 - }, - "vertexai/gemini-1.5-pro-preview-0215": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000625, - "output_cost_per_token": 0.000001875 - }, - "vertexai/gemini-1.5-pro-preview-0409": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0.000000625, - "output_cost_per_token": 0.000001875 - }, - "vertexai/gemini-experimental": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0 - }, - "vertexai/gemini-pro-vision": { - "max_tokens": 2048, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_images_per_prompt": 16, - "max_videos_per_prompt": 1, - "max_video_length": 2, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/gemini-1.0-pro-vision": { - "max_tokens": 2048, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_images_per_prompt": 16, - "max_videos_per_prompt": 1, - "max_video_length": 2, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/gemini-1.0-pro-vision-001": { - "max_tokens": 2048, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - 
"max_images_per_prompt": 16, - "max_videos_per_prompt": 1, - "max_video_length": 2, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005 - }, - "vertexai/claude-3-sonnet@20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000015 - }, - "vertexai/claude-3-haiku@20240307": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.00000125 - }, - "vertexai/claude-3-opus@20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000075 - }, - "cohere/command-r": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00000050, - "output_cost_per_token": 0.0000015 - }, - "cohere/command-light": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000015 - }, - "cohere/command-r-plus": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000015 - }, - "cohere/command-nightly": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000015 - }, - "cohere/command": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000015 - }, - "cohere/command-medium-beta": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000015 - }, - "cohere/command-xlarge-beta": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.000015, - "output_cost_per_token": 0.000015 - }, - "together/together-ai-up-to-3b": { - "input_cost_per_token": 0.0000001, - "output_cost_per_token": 0.0000001 - }, - "together/together-ai-3.1b-7b": { - "input_cost_per_token": 0.0000002, - "output_cost_per_token": 0.0000002 - }, - "together/together-ai-7.1b-20b": { - "max_tokens": 1000, - "input_cost_per_token": 0.0000004, - "output_cost_per_token": 0.0000004 - }, - "together/together-ai-20.1b-40b": { - "input_cost_per_token": 0.0000008, - "output_cost_per_token": 0.0000008 - }, - "together/together-ai-40.1b-70b": { - "input_cost_per_token": 0.0000009, - "output_cost_per_token": 0.0000009 - }, - "together/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "input_cost_per_token": 0.0000006, - "output_cost_per_token": 0.0000006 - } -} \ No newline at end of file diff --git a/mem0/__init__.py b/mem0/__init__.py index 10133d3f..dd185ecf 100644 --- a/mem0/__init__.py +++ b/mem0/__init__.py @@ -1,5 +1,5 @@ import importlib.metadata -__version__ = importlib.metadata.version(__package__ or __name__) +__version__ = importlib.metadata.version("mem0ai") from mem0.memory.main import Memory # noqa diff --git a/mem0/configs/prompts.py b/mem0/configs/prompts.py index 972d27e5..bb596900 100644 --- a/mem0/configs/prompts.py +++ b/mem0/configs/prompts.py @@ -1,5 +1,5 @@ UPDATE_MEMORY_PROMPT = """ -You are an expert at merging, updating, and organizing user memories. 
When provided with existing memories and new information, your task is to merge and update the memory list to reflect the most accurate and current information. You are also provided with the matching score for each existing memory to the new information. Make sure to leverage this information to make informed decisions about which memories to update or merge. +You are an expert at merging, updating, and organizing memories. When provided with existing memories and new information, your task is to merge and update the memory list to reflect the most accurate and current information. You are also provided with the matching score for each existing memory to the new information. Make sure to leverage this information to make informed decisions about which memories to update or merge. Guidelines: - Eliminate duplicate memories and merge related memories to ensure a concise and updated list. @@ -15,3 +15,17 @@ Here are the details of the task: - New Memory: {memory} """ + +MEMORY_DEDUCTION_PROMPT = """ +Deduce the facts, preferences, and memories from the provided text. +Just return the facts, preferences, and memories in bullet points: +Natural language text: {user_input} +User/Agent details: {metadata} + +Constraint for deducing facts, preferences, and memories: +- The facts, preferences, and memories should be concise and informative. +- Don't start by "The person likes Pizza". Instead, start with "Likes Pizza". +- Don't remember the user/agent details provided. Only remember the facts, preferences, and memories. + +Deduced facts, preferences, and memories: +""" diff --git a/mem0/llms/openai.py b/mem0/llms/openai.py index bf9a38cd..2f614c34 100644 --- a/mem0/llms/openai.py +++ b/mem0/llms/openai.py @@ -38,4 +38,3 @@ class OpenAILLM(LLMBase): response = self.client.chat.completions.create(**params) return response - # return response.choices[0].message["content"] diff --git a/mem0/memory/main.py b/mem0/memory/main.py index 631b55ff..0bb89c64 100644 --- a/mem0/memory/main.py +++ b/mem0/memory/main.py @@ -14,6 +14,7 @@ from mem0.llms.utils.tools import ( DELETE_MEMORY_TOOL, UPDATE_MEMORY_TOOL, ) +from mem0.configs.prompts import MEMORY_DEDUCTION_PROMPT from mem0.memory.base import MemoryBase from mem0.memory.setup import mem0_dir, setup_config from mem0.memory.storage import SQLiteManager @@ -100,6 +101,7 @@ class Memory(MemoryBase): run_id=None, metadata=None, filters=None, + prompt=None, ): """ Create a new memory. @@ -116,7 +118,6 @@ class Memory(MemoryBase): str: ID of the created memory. """ if metadata is None: - logging.warn("Metadata not provided. 
Using empty metadata.") metadata = {} embeddings = self.embedding_model.embed(data) @@ -128,6 +129,18 @@ class Memory(MemoryBase): if run_id: filters["run_id"] = metadata["run_id"] = run_id + if not prompt: + prompt = MEMORY_DEDUCTION_PROMPT.format(user_input=data, metadata=metadata) + extracted_memories = self.llm.generate_response( + messages=[ + { + "role": "system", + "content": "You are an expert at deducing facts, preferences and memories from unstructured text.", + }, + {"role": "user", "content": prompt}, + ] + ) + extracted_memories = extracted_memories.choices[0].message.content existing_memories = self.vector_store.search( name=self.collection_name, query=embeddings, @@ -148,7 +161,9 @@ class Memory(MemoryBase): for item in existing_memories ] logging.info(f"Total existing memories: {len(existing_memories)}") - messages = get_update_memory_messages(serialized_existing_memories, data) + messages = get_update_memory_messages( + serialized_existing_memories, extracted_memories + ) # Add tools for noop, add, update, delete memory. tools = [ADD_MEMORY_TOOL, UPDATE_MEMORY_TOOL, DELETE_MEMORY_TOOL] response = self.llm.generate_response(messages=messages, tools=tools) diff --git a/poetry.lock b/poetry.lock index a3652965..568429da 100644 --- a/poetry.lock +++ b/poetry.lock @@ -36,6 +36,46 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + [[package]] name = "backoff" version = "2.2.1" @@ -58,6 +98,70 @@ files = [ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -168,6 +272,65 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "debugpy" +version = "1.8.2" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"}, + {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"}, + {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"}, + {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"}, + {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"}, + {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"}, + {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"}, + {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"}, + {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"}, + {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"}, + 
{file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"}, + {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"}, + {file = "debugpy-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d"}, + {file = "debugpy-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02"}, + {file = "debugpy-1.8.2-cp38-cp38-win32.whl", hash = "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031"}, + {file = "debugpy-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210"}, + {file = "debugpy-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9"}, + {file = "debugpy-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1"}, + {file = "debugpy-1.8.2-cp39-cp39-win32.whl", hash = "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326"}, + {file = "debugpy-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755"}, + {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"}, + {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "distro" version = "1.9.0" @@ -181,18 +344,32 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "grpcio" version = "1.64.1" @@ -416,6 +593,25 @@ files = [ 
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "importlib-metadata" +version = "8.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -427,6 +623,78 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "ipykernel" +version = "6.29.5" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.12.3" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", 
"pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + [[package]] name = "isort" version = "5.13.2" @@ -441,6 +709,82 @@ files = [ [package.extras] colors = ["colorama (>=0.4.6)"] +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jupyter-client" +version = "8.6.2" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.2-py3-none-any.whl", hash = "sha256:50cbc5c66fd1b8f65ecb66bc490ab73217993632809b6e505687de18e9dea39f"}, + {file = "jupyter_client-8.6.2.tar.gz", hash = "sha256:2bda14d55ee5ba58552a8c53ae43d215ad9868853489213f37da060ced54d8df"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + [[package]] name = "monotonic" version = "1.6" @@ -452,6 +796,17 @@ files = [ {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, ] +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "numpy" version = "1.24.4" @@ -591,6 +946,62 @@ files = [ {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + [[package]] name = "pluggy" version = "1.5.0" @@ -608,13 +1019,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "portalocker" -version = "2.10.0" +version = "2.10.1" description = "Wraps the portalocker recipe for easy usage" optional = false python-versions = ">=3.8" files = [ - {file = "portalocker-2.10.0-py3-none-any.whl", hash = "sha256:48944147b2cd42520549bc1bb8fe44e220296e56f7c3d551bc6ecce69d9b0de1"}, - {file = "portalocker-2.10.0.tar.gz", hash = "sha256:49de8bc0a2f68ca98bf9e219c81a3e6b27097c7bf505a87c5a112ce1aaeb9b81"}, + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, ] [package.dependencies] @@ -648,6 +1059,20 @@ dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] sentry = ["django", "sentry-sdk"] test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + [[package]] name = "protobuf" version = "5.27.2" @@ -668,6 +1093,71 @@ files = [ {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, ] +[[package]] +name = "psutil" +version = "6.0.0" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, + {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, + {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, + {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + 
+[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.8.2" @@ -791,6 +1281,20 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + [[package]] name = "pytest" version = "8.2.2" @@ -850,6 +1354,106 @@ files = [ {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, ] +[[package]] +name = "pyzmq" +version = "26.0.3" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, + {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, + {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, + {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, + {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, + {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, + {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, + {file = 
"pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, + {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, + {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, + {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, + {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, + {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, + {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = "sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, + {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, + {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, + {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, + {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, + {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, + {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, + {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, + {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, + {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, + {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, + {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, + {file = 
"pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, + {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, + {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, + {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, + {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + [[package]] name = "qdrant-client" version = "1.10.1" @@ -961,6 +1565,25 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + [[package]] name = "tomli" version = "2.0.1" @@ -972,6 +1595,26 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + [[package]] name = "tqdm" version = "4.66.4" @@ -992,6 +1635,21 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -1020,7 +1678,33 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + 
+[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "645ed126f59268fc6cc8cae8e4ae1599fa65b462d8629c97ab45b6e14bdc1bc2" +content-hash = "5138c101a58db8dbddcb640545a5b2b4fc482f9e555008d117e315ae292d7697" diff --git a/pyproject.toml b/pyproject.toml index 6b2b5c0a..8ad51594 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "mem0ai" -version = "0.0.2" +version = "0.0.3" description = "Long-term memory for AI Agents" authors = ["Deshraj Yadav ", "Taranjeet Singh "] exclude = [ @@ -29,6 +29,7 @@ pytest = "^8.2.2" ruff = "^0.4.8" isort = "^5.13.2" pytest = "^8.2.2" +ipykernel = "^6.29.5" [tool.poetry.group.optional.dependencies]