diff --git a/Makefile b/Makefile
index 9460b0a9..f07c17d6 100644
--- a/Makefile
+++ b/Makefile
@@ -14,7 +14,7 @@ install_all:
poetry install
poetry run pip install groq together boto3 litellm ollama chromadb weaviate weaviate-client sentence_transformers vertexai \
google-generativeai elasticsearch opensearch-py vecs pinecone pinecone-text faiss-cpu langchain-community \
- upstash-vector azure-search-documents
+ upstash-vector azure-search-documents langchain-memgraph
# Format code with ruff
format:
diff --git a/docs/open-source/graph_memory/overview.mdx b/docs/open-source/graph_memory/overview.mdx
index 1867539b..d59f1b36 100644
--- a/docs/open-source/graph_memory/overview.mdx
+++ b/docs/open-source/graph_memory/overview.mdx
@@ -47,8 +47,14 @@ allowfullscreen
## Initialize Graph Memory
-To initialize Graph Memory you'll need to set up your configuration with graph store providers.
-Currently, we support Neo4j as a graph store provider. You can setup [Neo4j](https://neo4j.com/) locally or use the hosted [Neo4j AuraDB](https://neo4j.com/product/auradb/).
+To initialize Graph Memory, you'll need to set up your configuration with a
+graph store provider. Currently, we support [Neo4j](#initialize-neo4j) and
+[Memgraph](#initialize-memgraph) as graph store providers.
+
+
+### Initialize Neo4j
+
+You can set up [Neo4j](https://neo4j.com/) locally or use the hosted [Neo4j AuraDB](https://neo4j.com/product/auradb/).
If you are using Neo4j locally, then you need to install [APOC plugins](https://neo4j.com/labs/apoc/4.1/installation/).
@@ -163,6 +169,67 @@ const memory = new Memory(config);
If you are using NodeSDK, you need to pass `enableGraph` as `true` in the `config` object.
+### Initialize Memgraph
+
+Run Memgraph with Docker:
+
+```bash
+docker run -p 7687:7687 memgraph/memgraph-mage:latest --schema-info-enabled=True
+```
+
+The `--schema-info-enabled` flag is set to `True` for more performant schema
+generation.
+
+Additional information can be found in the [Memgraph
+documentation](https://memgraph.com/docs).
+
+Users can also customize the LLM for Graph Memory from the [Supported LLM list](https://docs.mem0.ai/components/llms/overview) with three levels of configuration:
+
+1. **Main Configuration**: If `llm` is set in the main config, it will be used for all graph operations.
+2. **Graph Store Configuration**: If `llm` is set in the graph_store config, it will override the main config `llm` and be used specifically for graph operations (see the override example after the configurations below).
+3. **Default Configuration**: If no custom LLM is set, the default LLM (`gpt-4o-2024-08-06`) will be used for all graph operations.
+
+Here's how you can do it:
+
+
+
+```python Python
+from mem0 import Memory
+
+config = {
+ "graph_store": {
+ "provider": "memgraph",
+ "config": {
+ "url": "bolt://localhost:7687",
+ "username": "memgraph",
+ "password": "xxx",
+ },
+ },
+}
+
+m = Memory.from_config(config_dict=config)
+```
+
+```python Python (Advanced)
+config = {
+ "embedder": {
+ "provider": "openai",
+ "config": {"model": "text-embedding-3-large", "embedding_dims": 1536},
+ },
+ "graph_store": {
+ "provider": "memgraph",
+ "config": {
+ "url": "bolt://localhost:7687",
+ "username": "memgraph",
+ "password": "xxx"
+ }
+ }
+}
+
+m = Memory.from_config(config_dict=config)
+```
+
+
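+To use a custom LLM only for graph operations (the graph store level described
+above), set `llm` inside the `graph_store` config. Below is a minimal sketch;
+the model names are placeholders, so substitute any model from the supported
+LLM list:
+
+```python Python (Graph LLM override)
+from mem0 import Memory
+
+config = {
+    "llm": {
+        # Level 1: main LLM config (placeholder model)
+        "provider": "openai",
+        "config": {"model": "gpt-4o"},
+    },
+    "graph_store": {
+        "provider": "memgraph",
+        "config": {
+            "url": "bolt://localhost:7687",
+            "username": "memgraph",
+            "password": "xxx",
+        },
+        "llm": {
+            # Level 2: graph-specific LLM override (placeholder model)
+            "provider": "openai",
+            "config": {"model": "gpt-4o-mini"},
+        },
+    },
+}
+
+m = Memory.from_config(config_dict=config)
+```
+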
## Graph Operations
The Mem0's graph supports the following operations:
diff --git a/examples/graph-db-demo/alice-memories.png b/examples/graph-db-demo/alice-memories.png
new file mode 100644
index 00000000..c1fe6d19
Binary files /dev/null and b/examples/graph-db-demo/alice-memories.png differ
diff --git a/examples/graph-db-demo/memgraph-example.ipynb b/examples/graph-db-demo/memgraph-example.ipynb
new file mode 100644
index 00000000..b559b6e2
--- /dev/null
+++ b/examples/graph-db-demo/memgraph-example.ipynb
@@ -0,0 +1,230 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Memgraph as Graph Memory"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Prerequisites\n",
+ "\n",
+ "### 1. Install Mem0 with Graph Memory support \n",
+ "\n",
+ "To use Mem0 with Graph Memory support, install it using pip:\n",
+ "\n",
+ "```bash\n",
+ "pip install \"mem0ai[graph]\"\n",
+ "```\n",
+ "\n",
+ "This command installs Mem0 along with the necessary dependencies for graph functionality.\n",
+ "\n",
+ "### 2. Install Memgraph\n",
+ "\n",
+ "To utilize Memgraph as Graph Memory, run it with Docker:\n",
+ "\n",
+ "```bash\n",
+ "docker run -p 7687:7687 memgraph/memgraph-mage:latest --schema-info-enabled=True\n",
+ "```\n",
+ "\n",
+ "The `--schema-info-enabled` flag is set to `True` for more performant schema\n",
+ "generation.\n",
+ "\n",
+ "Additional information can be found on [Memgraph documentation](https://memgraph.com/docs). "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Configuration\n",
+ "\n",
+ "Do all the imports and configure OpenAI (enter your OpenAI API key):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from mem0 import Memory\n",
+ "\n",
+ "import os\n",
+ "\n",
+ "os.environ[\"OPENAI_API_KEY\"] = (\n",
+ " \"\"\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Set up configuration to use the embedder model and Memgraph as a graph store:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "config = {\n",
+ " \"embedder\": {\n",
+ " \"provider\": \"openai\",\n",
+ " \"config\": {\"model\": \"text-embedding-3-large\", \"embedding_dims\": 1536},\n",
+ " },\n",
+ " \"graph_store\": {\n",
+ " \"provider\": \"memgraph\",\n",
+ " \"config\": {\n",
+ " \"url\": \"bolt://localhost:7687\",\n",
+ " \"username\": \"memgraph\",\n",
+ " \"password\": \"mem0graph\",\n",
+ " },\n",
+ " },\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Graph Memory initializiation \n",
+ "\n",
+ "Initialize Memgraph as a Graph Memory store: "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/Users/katelatte/repos/forks/mem0/.venv/lib/python3.13/site-packages/neo4j/_sync/driver.py:547: DeprecationWarning: Relying on Driver's destructor to close the session is deprecated. Please make sure to close the session. Use it as a context (`with` statement) or make sure to call `.close()` explicitly. Future versions of the driver will not close drivers automatically.\n",
+ " _deprecation_warn(\n"
+ ]
+ }
+ ],
+ "source": [
+ "m = Memory.from_config(config_dict=config)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Store memories \n",
+ "\n",
+ "Create memories:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "messages = [\n",
+ " {\n",
+ " \"role\": \"user\",\n",
+ " \"content\": \"I'm planning to watch a movie tonight. Any recommendations?\",\n",
+ " },\n",
+ " {\n",
+ " \"role\": \"assistant\",\n",
+    "        \"content\": \"How about thriller movies? They can be quite engaging.\",\n",
+ " },\n",
+ " {\n",
+ " \"role\": \"user\",\n",
+ " \"content\": \"I'm not a big fan of thriller movies but I love sci-fi movies.\",\n",
+ " },\n",
+ " {\n",
+ " \"role\": \"assistant\",\n",
+ " \"content\": \"Got it! I'll avoid thriller recommendations and suggest sci-fi movies in the future.\",\n",
+ " },\n",
+ "]\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Store memories in Memgraph:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Store inferred memories (default behavior)\n",
+ "result = m.add(\n",
+ " messages, user_id=\"alice\", metadata={\"category\": \"movie_recommendations\"}\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+    "![Alice's memories graph](alice-memories.png)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Search memories"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Loves sci-fi movies 0.31536642873408993\n",
+ "Planning to watch a movie tonight 0.09684523796547778\n",
+ "Not a big fan of thriller movies 0.09468540071789475\n"
+ ]
+ }
+ ],
+ "source": [
+ "for result in m.search(\"what does alice love?\", user_id=\"alice\")[\"results\"]:\n",
+ " print(result[\"memory\"], result[\"score\"])"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.13.2"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/mem0/graphs/configs.py b/mem0/graphs/configs.py
index c14249ad..e0e98274 100644
--- a/mem0/graphs/configs.py
+++ b/mem0/graphs/configs.py
@@ -20,6 +20,22 @@ class Neo4jConfig(BaseModel):
if not url or not username or not password:
raise ValueError("Please provide 'url', 'username' and 'password'.")
return values
+
+class MemgraphConfig(BaseModel):
+ url: Optional[str] = Field(None, description="Host address for the graph database")
+ username: Optional[str] = Field(None, description="Username for the graph database")
+ password: Optional[str] = Field(None, description="Password for the graph database")
+
+ @model_validator(mode="before")
+ def check_host_port_or_path(cls, values):
+ url, username, password = (
+ values.get("url"),
+ values.get("username"),
+ values.get("password"),
+ )
+ if not url or not username or not password:
+ raise ValueError("Please provide 'url', 'username' and 'password'.")
+ return values
class GraphStoreConfig(BaseModel):
@@ -35,5 +51,7 @@ class GraphStoreConfig(BaseModel):
provider = values.data.get("provider")
if provider == "neo4j":
return Neo4jConfig(**v.model_dump())
+ elif provider == "memgraph":
+ return MemgraphConfig(**v.model_dump())
else:
raise ValueError(f"Unsupported graph store provider: {provider}")
diff --git a/mem0/memory/main.py b/mem0/memory/main.py
index 50dfe8ab..5b35a40f 100644
--- a/mem0/memory/main.py
+++ b/mem0/memory/main.py
@@ -59,7 +59,10 @@ class Memory(MemoryBase):
self.enable_graph = False
if self.config.graph_store.config:
- from mem0.memory.graph_memory import MemoryGraph
+ if self.config.graph_store.provider == "memgraph":
+ from mem0.memory.memgraph_memory import MemoryGraph
+ else:
+ from mem0.memory.graph_memory import MemoryGraph
self.graph = MemoryGraph(self.config)
self.enable_graph = True
diff --git a/mem0/memory/memgraph_memory.py b/mem0/memory/memgraph_memory.py
new file mode 100644
index 00000000..e425ba55
--- /dev/null
+++ b/mem0/memory/memgraph_memory.py
@@ -0,0 +1,516 @@
+import logging
+
+from mem0.memory.utils import format_entities
+
+try:
+ from langchain_memgraph import Memgraph
+except ImportError:
+ raise ImportError(
+ "langchain_memgraph is not installed. Please install it using pip install langchain-memgraph"
+ )
+
+try:
+ from rank_bm25 import BM25Okapi
+except ImportError:
+ raise ImportError(
+ "rank_bm25 is not installed. Please install it using pip install rank-bm25"
+ )
+
+from mem0.graphs.tools import (
+ DELETE_MEMORY_STRUCT_TOOL_GRAPH,
+ DELETE_MEMORY_TOOL_GRAPH,
+ EXTRACT_ENTITIES_STRUCT_TOOL,
+ EXTRACT_ENTITIES_TOOL,
+ RELATIONS_STRUCT_TOOL,
+ RELATIONS_TOOL,
+)
+from mem0.graphs.utils import EXTRACT_RELATIONS_PROMPT, get_delete_messages
+from mem0.utils.factory import EmbedderFactory, LlmFactory
+
+logger = logging.getLogger(__name__)
+
+
+class MemoryGraph:
+ def __init__(self, config):
+ self.config = config
+ self.graph = Memgraph(
+ self.config.graph_store.config.url,
+ self.config.graph_store.config.username,
+ self.config.graph_store.config.password,
+ )
+ self.embedding_model = EmbedderFactory.create(
+ self.config.embedder.provider,
+ self.config.embedder.config,
+ {"enable_embeddings": True},
+ )
+
+ self.llm_provider = "openai_structured"
+ if self.config.llm.provider:
+ self.llm_provider = self.config.llm.provider
+ if self.config.graph_store.llm:
+ self.llm_provider = self.config.graph_store.llm.provider
+
+ self.llm = LlmFactory.create(self.llm_provider, self.config.llm.config)
+ self.user_id = None
+ self.threshold = 0.7
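+        # Similarity threshold used when matching nodes in _search_graph_db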
+
+ # Setup Memgraph:
+        # 1. Create vector index (the Entity label is added to all nodes so the index applies)
+ # 2. Create label property index for performance optimizations
+ embedding_dims = self.config.embedder.config["embedding_dims"]
+ create_vector_index_query = f"CREATE VECTOR INDEX memzero ON :Entity(embedding) WITH CONFIG {{'dimension': {embedding_dims}, 'capacity': 1000, 'metric': 'cos'}};"
+ self.graph.query(create_vector_index_query, params={})
+ create_label_prop_index_query = f"CREATE INDEX ON :Entity(user_id);"
+ self.graph.query(create_label_prop_index_query, params={})
+ create_label_index_query = f"CREATE INDEX ON :Entity;"
+ self.graph.query(create_label_index_query, params={})
+
+ def add(self, data, filters):
+ """
+ Adds data to the graph.
+
+ Args:
+ data (str): The data to add to the graph.
+ filters (dict): A dictionary containing filters to be applied during the addition.
+ """
+ entity_type_map = self._retrieve_nodes_from_data(data, filters)
+ to_be_added = self._establish_nodes_relations_from_data(
+ data, filters, entity_type_map
+ )
+ search_output = self._search_graph_db(
+ node_list=list(entity_type_map.keys()), filters=filters
+ )
+ to_be_deleted = self._get_delete_entities_from_search_output(
+ search_output, data, filters
+ )
+
+ # TODO: Batch queries with APOC plugin
+ # TODO: Add more filter support
+ deleted_entities = self._delete_entities(to_be_deleted, filters["user_id"])
+ added_entities = self._add_entities(
+ to_be_added, filters["user_id"], entity_type_map
+ )
+
+ return {"deleted_entities": deleted_entities, "added_entities": added_entities}
+
+ def search(self, query, filters, limit=100):
+ """
+ Search for memories and related graph data.
+
+ Args:
+ query (str): Query to search for.
+ filters (dict): A dictionary containing filters to be applied during the search.
+ limit (int): The maximum number of nodes and relationships to retrieve. Defaults to 100.
+
+ Returns:
+            list: A list of dictionaries, each containing "source",
+                "relationship", and "destination" keys for the reranked
+                graph relations (empty if nothing matches).
+ """
+ entity_type_map = self._retrieve_nodes_from_data(query, filters)
+ search_output = self._search_graph_db(
+ node_list=list(entity_type_map.keys()), filters=filters
+ )
+
+ if not search_output:
+ return []
+
+ search_outputs_sequence = [
+ [item["source"], item["relationship"], item["destination"]]
+ for item in search_output
+ ]
+ bm25 = BM25Okapi(search_outputs_sequence)
+
+ tokenized_query = query.split(" ")
+ reranked_results = bm25.get_top_n(tokenized_query, search_outputs_sequence, n=5)
+
+ search_results = []
+ for item in reranked_results:
+ search_results.append(
+ {"source": item[0], "relationship": item[1], "destination": item[2]}
+ )
+
+ logger.info(f"Returned {len(search_results)} search results")
+
+ return search_results
+
+ def delete_all(self, filters):
+ cypher = """
+ MATCH (n {user_id: $user_id})
+ DETACH DELETE n
+ """
+ params = {"user_id": filters["user_id"]}
+ self.graph.query(cypher, params=params)
+
+ def get_all(self, filters, limit=100):
+ """
+ Retrieves all nodes and relationships from the graph database based on optional filtering criteria.
+
+ Args:
+ filters (dict): A dictionary containing filters to be applied during the retrieval.
+ limit (int): The maximum number of nodes and relationships to retrieve. Defaults to 100.
+ Returns:
+            list: A list of dictionaries, each containing 'source',
+                'relationship', and 'target' keys for the stored relations.
+ """
+
+ # return all nodes and relationships
+ query = """
+ MATCH (n:Entity {user_id: $user_id})-[r]->(m:Entity {user_id: $user_id})
+ RETURN n.name AS source, type(r) AS relationship, m.name AS target
+ LIMIT $limit
+ """
+ results = self.graph.query(
+ query, params={"user_id": filters["user_id"], "limit": limit}
+ )
+
+ final_results = []
+ for result in results:
+ final_results.append(
+ {
+ "source": result["source"],
+ "relationship": result["relationship"],
+ "target": result["target"],
+ }
+ )
+
+ logger.info(f"Retrieved {len(final_results)} relationships")
+
+ return final_results
+
+ def _retrieve_nodes_from_data(self, data, filters):
+ """Extracts all the entities mentioned in the query."""
+ _tools = [EXTRACT_ENTITIES_TOOL]
+ if self.llm_provider in ["azure_openai_structured", "openai_structured"]:
+ _tools = [EXTRACT_ENTITIES_STRUCT_TOOL]
+ search_results = self.llm.generate_response(
+ messages=[
+ {
+ "role": "system",
+ "content": f"You are a smart assistant who understands entities and their types in a given text. If user message contains self reference such as 'I', 'me', 'my' etc. then use {filters['user_id']} as the source entity. Extract all the entities from the text. ***DO NOT*** answer the question itself if the given text is a question.",
+ },
+ {"role": "user", "content": data},
+ ],
+ tools=_tools,
+ )
+
+ entity_type_map = {}
+
+ try:
+ for tool_call in search_results["tool_calls"]:
+ if tool_call["name"] != "extract_entities":
+ continue
+ for item in tool_call["arguments"]["entities"]:
+ entity_type_map[item["entity"]] = item["entity_type"]
+ except Exception as e:
+ logger.exception(
+ f"Error in search tool: {e}, llm_provider={self.llm_provider}, search_results={search_results}"
+ )
+
+ entity_type_map = {
+ k.lower().replace(" ", "_"): v.lower().replace(" ", "_")
+ for k, v in entity_type_map.items()
+ }
+ logger.debug(
+ f"Entity type map: {entity_type_map}\n search_results={search_results}"
+ )
+ return entity_type_map
+
+ def _establish_nodes_relations_from_data(self, data, filters, entity_type_map):
+ """Eshtablish relations among the extracted nodes."""
+ if self.config.graph_store.custom_prompt:
+ messages = [
+ {
+ "role": "system",
+ "content": EXTRACT_RELATIONS_PROMPT.replace(
+ "USER_ID", filters["user_id"]
+ ).replace(
+ "CUSTOM_PROMPT", f"4. {self.config.graph_store.custom_prompt}"
+ ),
+ },
+ {"role": "user", "content": data},
+ ]
+ else:
+ messages = [
+ {
+ "role": "system",
+ "content": EXTRACT_RELATIONS_PROMPT.replace(
+ "USER_ID", filters["user_id"]
+ ),
+ },
+ {
+ "role": "user",
+ "content": f"List of entities: {list(entity_type_map.keys())}. \n\nText: {data}",
+ },
+ ]
+
+ _tools = [RELATIONS_TOOL]
+ if self.llm_provider in ["azure_openai_structured", "openai_structured"]:
+ _tools = [RELATIONS_STRUCT_TOOL]
+
+ extracted_entities = self.llm.generate_response(
+ messages=messages,
+ tools=_tools,
+ )
+
+ entities = []
+ if extracted_entities["tool_calls"]:
+ entities = extracted_entities["tool_calls"][0]["arguments"]["entities"]
+
+ entities = self._remove_spaces_from_entities(entities)
+ logger.debug(f"Extracted entities: {entities}")
+ return entities
+
+ def _search_graph_db(self, node_list, filters, limit=100):
+ """Search similar nodes among and their respective incoming and outgoing relations."""
+ result_relations = []
+
+ for node in node_list:
+ n_embedding = self.embedding_model.embed(node)
+
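+            # Compare embeddings of the user's nodes against their neighbors pairwise;
+            # the UNION covers both outgoing and incoming relations.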
+ cypher_query = f"""
+ MATCH (n:Entity {{user_id: $user_id}})-[r]->(m:Entity)
+ WHERE n.embedding IS NOT NULL
+ WITH collect(n) AS nodes1, collect(m) AS nodes2, r
+ CALL node_similarity.cosine_pairwise("embedding", nodes1, nodes2)
+ YIELD node1, node2, similarity
+ WITH node1, node2, similarity, r
+ WHERE similarity >= $threshold
+            RETURN node1.name AS source, id(node1) AS source_id, type(r) AS relationship, id(r) AS relation_id, node2.name AS destination, id(node2) AS destination_id, similarity
+ UNION
+ MATCH (n:Entity {{user_id: $user_id}})<-[r]-(m:Entity)
+ WHERE n.embedding IS NOT NULL
+ WITH collect(n) AS nodes1, collect(m) AS nodes2, r
+ CALL node_similarity.cosine_pairwise("embedding", nodes1, nodes2)
+ YIELD node1, node2, similarity
+ WITH node1, node2, similarity, r
+ WHERE similarity >= $threshold
+ RETURN node2.name AS source, id(node2) AS source_id, type(r) AS relationship, id(r) AS relation_id, node1.name AS destination, id(node1) AS destination_id, similarity
+ ORDER BY similarity DESC
+ LIMIT $limit;
+ """
+ params = {
+ "n_embedding": n_embedding,
+ "threshold": self.threshold,
+ "user_id": filters["user_id"],
+ "limit": limit,
+ }
+ ans = self.graph.query(cypher_query, params=params)
+ result_relations.extend(ans)
+
+ return result_relations
+
+ def _get_delete_entities_from_search_output(self, search_output, data, filters):
+ """Get the entities to be deleted from the search output."""
+ search_output_string = format_entities(search_output)
+ system_prompt, user_prompt = get_delete_messages(
+ search_output_string, data, filters["user_id"]
+ )
+
+ _tools = [DELETE_MEMORY_TOOL_GRAPH]
+ if self.llm_provider in ["azure_openai_structured", "openai_structured"]:
+ _tools = [
+ DELETE_MEMORY_STRUCT_TOOL_GRAPH,
+ ]
+
+ memory_updates = self.llm.generate_response(
+ messages=[
+ {"role": "system", "content": system_prompt},
+ {"role": "user", "content": user_prompt},
+ ],
+ tools=_tools,
+ )
+ to_be_deleted = []
+ for item in memory_updates["tool_calls"]:
+ if item["name"] == "delete_graph_memory":
+ to_be_deleted.append(item["arguments"])
+        # in case it is not in the correct format
+ to_be_deleted = self._remove_spaces_from_entities(to_be_deleted)
+ logger.debug(f"Deleted relationships: {to_be_deleted}")
+ return to_be_deleted
+
+ def _delete_entities(self, to_be_deleted, user_id):
+ """Delete the entities from the graph."""
+ results = []
+ for item in to_be_deleted:
+ source = item["source"]
+ destination = item["destination"]
+ relationship = item["relationship"]
+
+ # Delete the specific relationship between nodes
+ cypher = f"""
+ MATCH (n:Entity {{name: $source_name, user_id: $user_id}})
+ -[r:{relationship}]->
+ (m {{name: $dest_name, user_id: $user_id}})
+ DELETE r
+ RETURN
+ n.name AS source,
+ m.name AS target,
+ type(r) AS relationship
+ """
+ params = {
+ "source_name": source,
+ "dest_name": destination,
+ "user_id": user_id,
+ }
+ result = self.graph.query(cypher, params=params)
+ results.append(result)
+ return results
+
+ # added Entity label to all nodes for vector search to work
+ def _add_entities(self, to_be_added, user_id, entity_type_map):
+ """Add the new entities to the graph. Merge the nodes if they already exist."""
+ results = []
+ for item in to_be_added:
+ # entities
+ source = item["source"]
+ destination = item["destination"]
+ relationship = item["relationship"]
+
+ # types
+ source_type = entity_type_map.get(source, "unknown")
+ destination_type = entity_type_map.get(destination, "unknown")
+
+ # embeddings
+ source_embedding = self.embedding_model.embed(source)
+ dest_embedding = self.embedding_model.embed(destination)
+
+ # search for the nodes with the closest embeddings; this is basically
+ # comparison of one embedding to all embeddings in a graph -> vector
+ # search with cosine similarity metric
+ source_node_search_result = self._search_source_node(
+ source_embedding, user_id, threshold=0.9
+ )
+ destination_node_search_result = self._search_destination_node(
+ dest_embedding, user_id, threshold=0.9
+ )
+
+ # TODO: Create a cypher query and common params for all the cases
+ if not destination_node_search_result and source_node_search_result:
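+                # Only the source matched an existing node: create the destination and link it.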
+ cypher = f"""
+ MATCH (source:Entity)
+ WHERE id(source) = $source_id
+ MERGE (destination:{destination_type}:Entity {{name: $destination_name, user_id: $user_id}})
+ ON CREATE SET
+ destination.created = timestamp(),
+ destination.embedding = $destination_embedding,
+ destination:Entity
+ MERGE (source)-[r:{relationship}]->(destination)
+ ON CREATE SET
+ r.created = timestamp()
+ RETURN source.name AS source, type(r) AS relationship, destination.name AS target
+ """
+
+ params = {
+ "source_id": source_node_search_result[0]["id(source_candidate)"],
+ "destination_name": destination,
+ "destination_embedding": dest_embedding,
+ "user_id": user_id,
+ }
+ elif destination_node_search_result and not source_node_search_result:
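+                # Only the destination matched an existing node: create the source and link it.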
+ cypher = f"""
+ MATCH (destination:Entity)
+ WHERE id(destination) = $destination_id
+ MERGE (source:{source_type}:Entity {{name: $source_name, user_id: $user_id}})
+ ON CREATE SET
+ source.created = timestamp(),
+ source.embedding = $source_embedding,
+ source:Entity
+ MERGE (source)-[r:{relationship}]->(destination)
+ ON CREATE SET
+ r.created = timestamp()
+ RETURN source.name AS source, type(r) AS relationship, destination.name AS target
+ """
+
+ params = {
+ "destination_id": destination_node_search_result[0][
+ "id(destination_candidate)"
+ ],
+ "source_name": source,
+ "source_embedding": source_embedding,
+ "user_id": user_id,
+ }
+ elif source_node_search_result and destination_node_search_result:
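+                # Both endpoints matched existing nodes: just merge the relationship between them.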
+ cypher = f"""
+ MATCH (source:Entity)
+ WHERE id(source) = $source_id
+ MATCH (destination:Entity)
+ WHERE id(destination) = $destination_id
+ MERGE (source)-[r:{relationship}]->(destination)
+ ON CREATE SET
+ r.created_at = timestamp(),
+ r.updated_at = timestamp()
+ RETURN source.name AS source, type(r) AS relationship, destination.name AS target
+ """
+ params = {
+ "source_id": source_node_search_result[0]["id(source_candidate)"],
+ "destination_id": destination_node_search_result[0][
+ "id(destination_candidate)"
+ ],
+ "user_id": user_id,
+ }
+ else:
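+                # Neither endpoint matched: merge both nodes by name and user_id, then the relationship.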
+ cypher = f"""
+ MERGE (n:{source_type}:Entity {{name: $source_name, user_id: $user_id}})
+ ON CREATE SET n.created = timestamp(), n.embedding = $source_embedding, n:Entity
+ ON MATCH SET n.embedding = $source_embedding
+ MERGE (m:{destination_type}:Entity {{name: $dest_name, user_id: $user_id}})
+ ON CREATE SET m.created = timestamp(), m.embedding = $dest_embedding, m:Entity
+ ON MATCH SET m.embedding = $dest_embedding
+ MERGE (n)-[rel:{relationship}]->(m)
+ ON CREATE SET rel.created = timestamp()
+ RETURN n.name AS source, type(rel) AS relationship, m.name AS target
+ """
+ params = {
+ "source_name": source,
+ "dest_name": destination,
+ "source_embedding": source_embedding,
+ "dest_embedding": dest_embedding,
+ "user_id": user_id,
+ }
+ result = self.graph.query(cypher, params=params)
+ results.append(result)
+ return results
+
+ def _remove_spaces_from_entities(self, entity_list):
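+        """Normalize entity and relationship names to lowercase snake_case."""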
+ for item in entity_list:
+ item["source"] = item["source"].lower().replace(" ", "_")
+ item["relationship"] = item["relationship"].lower().replace(" ", "_")
+ item["destination"] = item["destination"].lower().replace(" ", "_")
+ return entity_list
+
+ def _search_source_node(self, source_embedding, user_id, threshold=0.9):
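+        """Return the id of the existing node most similar to the source embedding, if it passes the threshold."""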
+ cypher = f"""
+ CALL vector_search.search("memzero", 1, $source_embedding)
+ YIELD distance, node, similarity
+ WITH node AS source_candidate, similarity
+ WHERE source_candidate.user_id = $user_id AND similarity >= $threshold
+ RETURN id(source_candidate);
+ """
+
+ params = {
+ "source_embedding": source_embedding,
+ "user_id": user_id,
+ "threshold": threshold,
+ }
+
+ result = self.graph.query(cypher, params=params)
+ return result
+
+ def _search_destination_node(self, destination_embedding, user_id, threshold=0.9):
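+        """Return the id of the existing node most similar to the destination embedding, if it passes the threshold."""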
+ cypher = f"""
+ CALL vector_search.search("memzero", 1, $destination_embedding)
+ YIELD distance, node, similarity
+ WITH node AS destination_candidate, similarity
+ WHERE node.user_id = $user_id AND similarity >= $threshold
+ RETURN id(destination_candidate);
+ """
+ params = {
+ "destination_embedding": destination_embedding,
+ "user_id": user_id,
+ "threshold": threshold,
+ }
+
+ result = self.graph.query(cypher, params=params)
+ return result
diff --git a/poetry.lock b/poetry.lock
index 831ca4e9..ceacb6f4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand.
[[package]]
name = "annotated-types"
@@ -7,6 +7,7 @@ description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -19,6 +20,7 @@ description = "High level compatibility layer for multiple asynchronous event lo
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"},
{file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"},
@@ -32,7 +34,7 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
[package.extras]
doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
-test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""]
+test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
trio = ["trio (>=0.26.1)"]
[[package]]
@@ -55,6 +57,7 @@ description = "Function decoration for backoff and retry"
optional = false
python-versions = ">=3.7,<4.0"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
@@ -67,6 +70,7 @@ description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
{file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
@@ -79,7 +83,7 @@ description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\" and platform_python_implementation == \"PyPy\""
+markers = "platform_python_implementation == \"PyPy\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
{file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
@@ -160,6 +164,7 @@ description = "The Real First Universal Charset Detector. Open, modern and activ
optional = false
python-versions = ">=3.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"},
{file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"},
@@ -266,7 +271,7 @@ files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
-markers = {main = "platform_system == \"Windows\"", dev = "sys_platform == \"win32\"", test = "sys_platform == \"win32\""}
+markers = {main = "platform_system == \"Windows\" and (python_version <= \"3.12\" or python_version >= \"3.13\")", dev = "sys_platform == \"win32\" and (python_version <= \"3.12\" or python_version >= \"3.13\")", test = "sys_platform == \"win32\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"}
[[package]]
name = "distro"
@@ -275,6 +280,7 @@ description = "Distro - an OS platform information API"
optional = false
python-versions = ">=3.6"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
@@ -303,7 +309,7 @@ description = "File-system specification"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "fsspec-2024.12.0-py3-none-any.whl", hash = "sha256:b520aed47ad9804237ff878b504267a3b0b441e97508bd6d2d8774e3db85cee2"},
{file = "fsspec-2024.12.0.tar.gz", hash = "sha256:670700c977ed2fb51e0d9f9253177ed20cbde4a3e5c0283cc5385b5870c8533f"},
@@ -344,7 +350,7 @@ description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "(platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and python_version < \"3.14\""
+markers = "(platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") and python_version < \"3.14\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
@@ -432,6 +438,7 @@ description = "HTTP/2-based RPC framework"
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"},
{file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"},
@@ -496,6 +503,7 @@ description = "Protobuf code generator for gRPC"
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "grpcio_tools-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:f4ad7f0d756546902597053d70b3af2606fbd70d7972876cd75c1e241d22ae00"},
{file = "grpcio_tools-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:64bdb291df61cf570b5256777ad5fe2b1db6d67bc46e55dc56a0a862722ae329"},
@@ -562,6 +570,7 @@ description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@@ -574,6 +583,7 @@ description = "Pure-Python HTTP/2 protocol implementation"
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"},
{file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"},
@@ -590,6 +600,7 @@ description = "Pure-Python HPACK header encoding"
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"},
{file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"},
@@ -602,6 +613,7 @@ description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
{file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
@@ -624,6 +636,7 @@ description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
{file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
@@ -637,7 +650,7 @@ httpcore = "==1.*"
idna = "*"
[package.extras]
-brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+brotli = ["brotli", "brotlicffi"]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
@@ -650,6 +663,7 @@ description = "Pure-Python HTTP/2 framing"
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"},
{file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"},
@@ -662,6 +676,7 @@ description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -677,6 +692,7 @@ description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.8"
groups = ["dev", "test"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"},
{file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"},
@@ -689,6 +705,7 @@ description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.8.0"
groups = ["dev"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
{file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
@@ -704,6 +721,7 @@ description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "jiter-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:816ec9b60fdfd1fec87da1d7ed46c66c44ffec37ab2ef7de5b147b2fce3fd5ad"},
{file = "jiter-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b1d3086f8a3ee0194ecf2008cf81286a5c3e540d977fa038ff23576c023c0ea"},
@@ -790,7 +808,7 @@ description = "A package to repair broken json strings"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "json_repair-0.39.1-py3-none-any.whl", hash = "sha256:3001409a2f319249f13e13d6c622117a5b70ea7e0c6f43864a0233cdffc3a599"},
{file = "json_repair-0.39.1.tar.gz", hash = "sha256:e90a489f247e1a8fc86612a5c719872a3dbf9cbaffd6d55f238ec571a77740fa"},
@@ -803,7 +821,7 @@ description = "Apply JSON-Patches (RFC 6902)"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"},
{file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"},
@@ -819,7 +837,7 @@ description = "Identify specific nodes in a JSON document (RFC 6901)"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
{file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
@@ -832,7 +850,7 @@ description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "langchain-0.3.21-py3-none-any.whl", hash = "sha256:c8bd2372440cc5d48cb50b2d532c2e24036124f1c467002ceb15bc7b86c92579"},
{file = "langchain-0.3.21.tar.gz", hash = "sha256:a10c81f8c450158af90bf37190298d996208cfd15dd3accc1c585f068473d619"},
@@ -873,7 +891,7 @@ description = "Building applications with LLMs through composability"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "langchain_core-0.3.48-py3-none-any.whl", hash = "sha256:21e4fe84262b9c7ad8aefe7816439ede130893f8a64b8c965cd9695c2be91c73"},
{file = "langchain_core-0.3.48.tar.gz", hash = "sha256:be4b2fe36d8a11fb4b6b13e0808b12aea9f25e345624ffafe1d606afb6059f21"},
@@ -891,6 +909,23 @@ PyYAML = ">=5.3"
tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0"
typing-extensions = ">=4.7"
+[[package]]
+name = "langchain-memgraph"
+version = "0.1.1"
+description = "An integration package connecting Memgraph and LangChain"
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
+files = [
+ {file = "langchain_memgraph-0.1.1-py3-none-any.whl", hash = "sha256:656e272a317d596c01016210fe5adb7ca5a9485cf733bdfe65e23cb80c360b52"},
+ {file = "langchain_memgraph-0.1.1.tar.gz", hash = "sha256:64e8560720a4382db230bcbc45d9e5dcbd329da4ea1c192ab867dc0157724554"},
+]
+
+[package.dependencies]
+langchain-core = ">=0.3.15,<0.4.0"
+neo4j = ">=5.28.1,<6.0.0"
+
[[package]]
name = "langchain-neo4j"
version = "0.4.0"
@@ -898,7 +933,7 @@ description = "An integration package connecting Neo4j and LangChain"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "langchain_neo4j-0.4.0-py3-none-any.whl", hash = "sha256:2760b5757e7a402884cf3419830217651df97fe4f44b3fec6c96b14b6d7fd18e"},
{file = "langchain_neo4j-0.4.0.tar.gz", hash = "sha256:3f059a66411cec1062a2b8c44953a70d0fff9e123e9fb1d6b3f17a0bef6d6114"},
@@ -917,7 +952,7 @@ description = "LangChain text splitting utilities"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "langchain_text_splitters-0.3.7-py3-none-any.whl", hash = "sha256:31ba826013e3f563359d7c7f1e99b1cdb94897f665675ee505718c116e7e20ad"},
{file = "langchain_text_splitters-0.3.7.tar.gz", hash = "sha256:7dbf0fb98e10bb91792a1d33f540e2287f9cc1dc30ade45b7aedd2d5cd3dc70b"},
@@ -933,7 +968,7 @@ description = "Client library to connect to the LangSmith LLM Tracing and Evalua
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "langsmith-0.3.18-py3-none-any.whl", hash = "sha256:7ad65ec26084312a039885ef625ae72a69ad089818b64bacf7ce6daff672353a"},
{file = "langsmith-0.3.18.tar.gz", hash = "sha256:18ff2d8f2e77b375485e4fb3d0dbf7b30fabbd438c7347c3534470e9b7d187b8"},
@@ -964,6 +999,7 @@ description = "An implementation of time.monotonic() for Python 2 & < 3.3"
optional = false
python-versions = "*"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
{file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
@@ -976,7 +1012,7 @@ description = "Neo4j Bolt driver for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "neo4j-5.28.1-py3-none-any.whl", hash = "sha256:6755ef9e5f4e14b403aef1138fb6315b120631a0075c138b5ddb2a06b87b09fd"},
{file = "neo4j-5.28.1.tar.gz", hash = "sha256:ae8e37a1d895099062c75bc359b2cce62099baac7be768d0eba7180c1298e214"},
@@ -997,7 +1033,7 @@ description = "Python package to allow easy integration to Neo4j's GraphRAG feat
optional = false
python-versions = "<4.0.0,>=3.9.0"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "neo4j_graphrag-1.6.0-py3-none-any.whl", hash = "sha256:e5f3ee7eae2fa48bf9627498274d2fb5e7a29445e51845d97614bf0b2d8dca55"},
{file = "neo4j_graphrag-1.6.0.tar.gz", hash = "sha256:21e5c5171293e00233be81631778bace2a3f6c0d063d2712415e54cdc37d17ff"},
@@ -1015,9 +1051,9 @@ types-pyyaml = ">=6.0.12.20240917,<7.0.0.0"
[package.extras]
anthropic = ["anthropic (>=0.49.0,<0.50.0)"]
cohere = ["cohere (>=5.9.0,<6.0.0)"]
-experimental = ["langchain-text-splitters (>=0.3.0,<0.4.0)", "llama-index (>=0.12.0,<0.13.0)", "pygraphviz (>=1.0.0,<2.0.0) ; python_version < \"3.10\"", "pygraphviz (>=1.13.0,<2.0.0) ; python_version >= \"3.10\" and python_full_version < \"4.0.0\""]
+experimental = ["langchain-text-splitters (>=0.3.0,<0.4.0)", "llama-index (>=0.12.0,<0.13.0)", "pygraphviz (>=1.0.0,<2.0.0)", "pygraphviz (>=1.13.0,<2.0.0)"]
google = ["google-cloud-aiplatform (>=1.66.0,<2.0.0)"]
-kg-creation-tools = ["pygraphviz (>=1.0.0,<2.0.0) ; python_version < \"3.10\"", "pygraphviz (>=1.13.0,<2.0.0) ; python_version >= \"3.10\" and python_full_version < \"4.0.0\""]
+kg-creation-tools = ["pygraphviz (>=1.0.0,<2.0.0)", "pygraphviz (>=1.13.0,<2.0.0)"]
mistralai = ["mistralai (>=1.0.3,<2.0.0)"]
ollama = ["ollama (>=0.4.4,<0.5.0)"]
openai = ["openai (>=1.51.1,<2.0.0)"]
@@ -1089,7 +1125,7 @@ description = "Fundamental package for array computing in Python"
optional = false
python-versions = ">=3.10"
groups = ["main"]
-markers = "python_version >= \"3.10\""
+markers = "python_version >= \"3.10\" and python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9"},
{file = "numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae"},
@@ -1155,6 +1191,7 @@ description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "openai-1.68.2-py3-none-any.whl", hash = "sha256:24484cb5c9a33b58576fdc5acf0e5f92603024a4e39d0b99793dfa1eb14c2b36"},
{file = "openai-1.68.2.tar.gz", hash = "sha256:b720f0a95a1dbe1429c0d9bb62096a0d98057bcda82516f6e8af10284bdd5b19"},
@@ -1182,7 +1219,7 @@ description = "Fast, correct Python JSON library supporting dataclasses, datetim
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "extra == \"graph\" and platform_python_implementation != \"PyPy\""
+markers = "platform_python_implementation != \"PyPy\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "orjson-3.10.16-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4cb473b8e79154fa778fb56d2d73763d977be3dcc140587e07dbc545bbfc38f8"},
{file = "orjson-3.10.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:622a8e85eeec1948690409a19ca1c7d9fd8ff116f4861d261e6ae2094fe59a00"},
@@ -1261,11 +1298,11 @@ description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev", "test"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
]
-markers = {main = "extra == \"graph\""}
[[package]]
name = "pluggy"
@@ -1274,6 +1311,7 @@ description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["dev", "test"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
@@ -1290,6 +1328,7 @@ description = "Wraps the portalocker recipe for easy usage"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"},
{file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"},
@@ -1310,6 +1349,7 @@ description = "Integrate PostHog into any python application."
optional = false
python-versions = "*"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "posthog-3.21.0-py2.py3-none-any.whl", hash = "sha256:1e07626bb5219369dd36826881fa61711713e8175d3557db4657e64ecb351467"},
{file = "posthog-3.21.0.tar.gz", hash = "sha256:62e339789f6f018b6a892357f5703d1f1e63c97aee75061b3dc97c5e5c6a5304"},
@@ -1336,6 +1376,7 @@ description = ""
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"},
{file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"},
@@ -1357,6 +1398,7 @@ description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
@@ -1435,7 +1477,7 @@ description = "C parser in Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\" and platform_python_implementation == \"PyPy\""
+markers = "platform_python_implementation == \"PyPy\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
@@ -1448,6 +1490,7 @@ description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
@@ -1460,7 +1503,7 @@ typing-extensions = ">=4.12.2"
[package.extras]
email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
+timezone = ["tzdata"]
[[package]]
name = "pydantic-core"
@@ -1469,6 +1512,7 @@ description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
@@ -1582,7 +1626,7 @@ description = "A pure-python PDF library capable of splitting, merging, cropping
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "pypdf-5.4.0-py3-none-any.whl", hash = "sha256:db994ab47cadc81057ea1591b90e5b543e2b7ef2d0e31ef41a9bfe763c119dab"},
{file = "pypdf-5.4.0.tar.gz", hash = "sha256:9af476a9dc30fcb137659b0dec747ea94aa954933c52cf02ee33e39a16fe9175"},
@@ -1606,6 +1650,7 @@ description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["dev", "test"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
{file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
@@ -1629,6 +1674,7 @@ description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -1644,6 +1690,7 @@ description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
{file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
@@ -1656,7 +1703,7 @@ description = "Python for Window Extensions"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "platform_system == \"Windows\""
+markers = "platform_system == \"Windows\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"},
{file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"},
@@ -1683,7 +1730,7 @@ description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
@@ -1747,6 +1794,7 @@ description = "Client library for the Qdrant vector search engine"
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "qdrant_client-1.13.3-py3-none-any.whl", hash = "sha256:f52cacbb936e547d3fceb1aaed3e3c56be0ebfd48e8ea495ea3dbc89c671d1d2"},
{file = "qdrant_client-1.13.3.tar.gz", hash = "sha256:61ca09e07c6d7ac0dfbdeb13dca4fe5f3e08fa430cb0d74d66ef5d023a70adfc"},
@@ -1777,7 +1825,7 @@ description = "Various BM25 algorithms for document ranking"
optional = false
python-versions = "*"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "rank_bm25-0.2.2-py3-none-any.whl", hash = "sha256:7bd4a95571adadfc271746fa146a4bcfd89c0cf731e49c3d1ad863290adbe8ae"},
{file = "rank_bm25-0.2.2.tar.gz", hash = "sha256:096ccef76f8188563419aaf384a02f0ea459503fdf77901378d4fd9d87e5e51d"},
@@ -1796,6 +1844,7 @@ description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@@ -1818,7 +1867,7 @@ description = "A utility belt for advanced users of python-requests"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
{file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
@@ -1834,6 +1883,7 @@ description = "An extremely fast Python linter and code formatter, written in Ru
optional = false
python-versions = ">=3.7"
groups = ["dev"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"},
{file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"},
@@ -1862,19 +1912,20 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "setuptools-78.1.0-py3-none-any.whl", hash = "sha256:3e386e96793c8702ae83d17b853fb93d3e09ef82ec62722e61da5cd22376dcd8"},
{file = "setuptools-78.1.0.tar.gz", hash = "sha256:18fd474d4a82a5f83dac888df697af65afa82dec7323d09c3e37d1f14288da54"},
]
[package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
-core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
+core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
[[package]]
name = "six"
@@ -1883,6 +1934,7 @@ description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
@@ -1895,6 +1947,7 @@ description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@@ -1907,6 +1960,7 @@ description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "SQLAlchemy-2.0.39-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:66a40003bc244e4ad86b72abb9965d304726d05a939e8c09ce844d27af9e6d37"},
{file = "SQLAlchemy-2.0.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67de057fbcb04a066171bd9ee6bcb58738d89378ee3cabff0bffbf343ae1c787"},
@@ -2003,7 +2057,7 @@ description = "Retry code until it succeeds"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
{file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
@@ -2063,6 +2117,7 @@ description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
{file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
@@ -2085,7 +2140,7 @@ description = "Typing stubs for PyYAML"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "extra == \"graph\" and (python_version <= \"3.12\" or python_version >= \"3.13\")"
files = [
{file = "types_pyyaml-6.0.12.20250326-py3-none-any.whl", hash = "sha256:961871cfbdc1ad8ae3cb6ae3f13007262bcfc168adc513119755a6e4d5d7ed65"},
{file = "types_pyyaml-6.0.12.20250326.tar.gz", hash = "sha256:5e2d86d8706697803f361ba0b8188eef2999e1c372cd4faee4ebb0844b8a4190"},
@@ -2098,6 +2153,7 @@ description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "typing_extensions-4.13.0-py3-none-any.whl", hash = "sha256:c8dd92cc0d6425a97c18fbb9d1954e5ff92c1ca881a309c45f06ebc0b79058e5"},
{file = "typing_extensions-4.13.0.tar.gz", hash = "sha256:0a4ac55a5820789d87e297727d229866c9650f6521b64206413c4fbada24d95b"},
@@ -2110,13 +2166,14 @@ description = "HTTP library with thread-safe connection pooling, file post, and
optional = false
python-versions = ">=3.9"
groups = ["main"]
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},
{file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},
]
[package.extras]
-brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@@ -2128,7 +2185,7 @@ description = "Zstandard bindings for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"graph\""
+markers = "python_version <= \"3.12\" or python_version >= \"3.13\""
files = [
{file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"},
{file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"},
@@ -2236,9 +2293,9 @@ cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\
cffi = ["cffi (>=1.11)"]
[extras]
-graph = ["langchain-neo4j", "neo4j", "rank-bm25"]
+graph = ["langchain-memgraph", "langchain-neo4j", "neo4j", "rank-bm25"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.9,<4.0"
-content-hash = "5848e23bdd7b453f938c9b5f6171866faa01bdcc2651bedb83ee9f4fe90e8bc8"
+content-hash = "2f2496320b637ae8b74ee70707a8ea3431b3cd0ed045af847bb9b660bca334ac"
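The lock-file hunks above (the new environment markers, the expanded `graph` extras list, and the updated `content-hash`) are not hand-edited; they are what Poetry writes out when the lock is regenerated after the dependency change in `pyproject.toml` below. A minimal sketch of reproducing that step from a repository checkout, assuming a recent Poetry release:

```bash
# Regenerate poetry.lock after editing pyproject.toml; Poetry recomputes
# the dependency markers and the content-hash shown in the hunks above.
poetry lock
```
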
diff --git a/pyproject.toml b/pyproject.toml
index a7e0564d..778e70af 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,9 +31,10 @@ langchain-neo4j = "^0.4.0"
neo4j = "^5.23.1"
rank-bm25 = "^0.2.2"
psycopg2-binary = "^2.9.10"
+langchain-memgraph = "^0.1.1"
[tool.poetry.extras]
-graph = ["langchain-neo4j", "neo4j", "rank-bm25"]
+graph = ["langchain-neo4j", "neo4j", "rank-bm25", "langchain-memgraph"]
[tool.poetry.group.test.dependencies]
pytest = "^8.2.2"
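With `langchain-memgraph` declared as an optional dependency and added to the `graph` extra, it is installed the same way as the existing Neo4j graph dependencies. A hedged sketch for a local checkout managed with Poetry (the published-package equivalent would be something like `pip install "mem0ai[graph]"`, assuming `mem0ai` is the PyPI distribution name):

```bash
# Install the project together with the graph extra, which now includes
# langchain-memgraph alongside langchain-neo4j, neo4j, and rank-bm25.
poetry install --extras graph
```
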
diff --git a/server/main.py b/server/main.py
index 165c1af2..0e1723f2 100644
--- a/server/main.py
+++ b/server/main.py
@@ -25,6 +25,10 @@ NEO4J_URI = os.environ.get("NEO4J_URI", "bolt://neo4j:7687")
NEO4J_USERNAME = os.environ.get("NEO4J_USERNAME", "neo4j")
NEO4J_PASSWORD = os.environ.get("NEO4J_PASSWORD", "mem0graph")
+MEMGRAPH_URI = os.environ.get("MEMGRAPH_URI", "bolt://localhost:7687")
+MEMGRAPH_USERNAME = os.environ.get("MEMGRAPH_USERNAME", "memgraph")
+MEMGRAPH_PASSWORD = os.environ.get("MEMGRAPH_PASSWORD", "mem0graph")
+
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
HISTORY_DB_PATH = os.environ.get("HISTORY_DB_PATH", "/app/history/history.db")