[Mem0] Update dependencies and make the package lighter (#1708)
Co-authored-by: Dev-Khant <devkhant24@gmail.com>
@@ -7,14 +7,16 @@ try:
import chromadb
from chromadb.config import Settings
except ImportError:
raise ImportError("Chromadb requires extra dependencies. Install with `pip install chromadb`") from None
raise ImportError(
"Chromadb requires extra dependencies. Install with `pip install chromadb`"
) from None

from mem0.vector_stores.base import VectorStoreBase


class OutputData(BaseModel):
id: Optional[str] # memory id
score: Optional[float] # distance
score: Optional[float] # distance
payload: Optional[Dict] # metadata


@@ -25,7 +27,7 @@ class ChromaDB(VectorStoreBase):
client: Optional[chromadb.Client] = None,
host: Optional[str] = None,
port: Optional[int] = None,
path: Optional[str] = None
path: Optional[str] = None,
):
"""
Initialize the Chromadb vector store.
@@ -68,7 +70,7 @@ class ChromaDB(VectorStoreBase):
Returns:
List[OutputData]: Parsed output data.
"""
keys = ['ids', 'distances', 'metadatas']
keys = ["ids", "distances", "metadatas"]
values = []

for key in keys:
@@ -78,14 +80,24 @@ class ChromaDB(VectorStoreBase):
values.append(value)

ids, distances, metadatas = values
max_length = max(len(v) for v in values if isinstance(v, list) and v is not None)
max_length = max(
len(v) for v in values if isinstance(v, list) and v is not None
)

result = []
for i in range(max_length):
entry = OutputData(
id=ids[i] if isinstance(ids, list) and ids and i < len(ids) else None,
score=distances[i] if isinstance(distances, list) and distances and i < len(distances) else None,
payload=metadatas[i] if isinstance(metadatas, list) and metadatas and i < len(metadatas) else None,
score=(
distances[i]
if isinstance(distances, list) and distances and i < len(distances)
else None
),
payload=(
metadatas[i]
if isinstance(metadatas, list) and metadatas and i < len(metadatas)
else None
),
)
result.append(entry)

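For context: chromadb's collection.query returns a dict keyed by "ids", "distances", and "metadatas", each holding one inner list per query embedding. A minimal sketch (not part of the commit, values made up) of the shape _parse_output consumes; the flattening of the nested lists happens in the lines elided from the hunk above.

# Illustrative values only -- not part of the commit.
raw = {
    "ids": [["mem-1", "mem-2"]],
    "distances": [[0.12, 0.34]],
    "metadatas": [[{"user_id": "alice"}, {"user_id": "bob"}]],
}
ids, distances, metadatas = raw["ids"][0], raw["distances"][0], raw["metadatas"][0]
parsed = [
    OutputData(id=ids[i], score=distances[i], payload=metadatas[i])
    for i in range(len(ids))
]
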
@@ -114,7 +126,12 @@ class ChromaDB(VectorStoreBase):
)
return collection

def insert(self, vectors: List[list], payloads: Optional[List[Dict]] = None, ids: Optional[List[str]] = None):
def insert(
self,
vectors: List[list],
payloads: Optional[List[Dict]] = None,
ids: Optional[List[str]] = None,
):
"""
Insert vectors into a collection.

@@ -125,7 +142,9 @@ class ChromaDB(VectorStoreBase):
"""
self.collection.add(ids=ids, embeddings=vectors, metadatas=payloads)

def search(self, query: List[list], limit: int = 5, filters: Optional[Dict] = None) -> List[OutputData]:
def search(
self, query: List[list], limit: int = 5, filters: Optional[Dict] = None
) -> List[OutputData]:
"""
Search for similar vectors.

@@ -137,7 +156,9 @@ class ChromaDB(VectorStoreBase):
Returns:
List[OutputData]: Search results.
"""
results = self.collection.query(query_embeddings=query, where=filters, n_results=limit)
results = self.collection.query(
query_embeddings=query, where=filters, n_results=limit
)
final_results = self._parse_output(results)
return final_results

@@ -150,7 +171,12 @@ class ChromaDB(VectorStoreBase):
"""
self.collection.delete(ids=vector_id)

def update(self, vector_id: str, vector: Optional[List[float]] = None, payload: Optional[Dict] = None):
def update(
self,
vector_id: str,
vector: Optional[List[float]] = None,
payload: Optional[Dict] = None,
):
"""
Update a vector and its payload.

@@ -184,8 +210,8 @@ class ChromaDB(VectorStoreBase):
return self.client.list_collections()

def delete_col(self):
"""
Delete a collection.
"""
Delete a collection.
"""
self.client.delete_collection(name=self.collection_name)

@@ -198,7 +224,9 @@ class ChromaDB(VectorStoreBase):
"""
return self.client.get_collection(name=self.collection_name)

def list(self, filters: Optional[Dict] = None, limit: int = 100) -> List[OutputData]:
def list(
self, filters: Optional[Dict] = None, limit: int = 100
) -> List[OutputData]:
"""
List all vectors in a collection.

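The reformatted insert/search signatures above keep their behaviour. A minimal usage sketch (not part of the commit), assuming an already constructed ChromaDB instance named store, toy three-dimensional embeddings, and made-up payload values; the nested query list follows the List[list] annotation shown above.

# Illustrative usage only -- not part of the commit.
store.insert(
    vectors=[[0.1, 0.2, 0.3]],
    payloads=[{"user_id": "alice", "data": "likes tea"}],
    ids=["mem-1"],
)
hits = store.search(query=[[0.1, 0.2, 0.3]], limit=5, filters={"user_id": "alice"})
for hit in hits:
    print(hit.id, hit.score, hit.payload)  # score is a distance, per OutputData above
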
@@ -1,31 +1,34 @@
from typing import Optional, Dict
from pydantic import BaseModel, Field, model_validator


class VectorStoreConfig(BaseModel):
provider: str = Field(
description="Provider of the vector store (e.g., 'qdrant', 'chroma')",
default="qdrant",
)
config: Optional[Dict] = Field(
description="Configuration for the specific vector store",
default=None
description="Configuration for the specific vector store", default=None
)

_provider_configs: Dict[str, str] = {
"qdrant": "QdrantConfig",
"chroma": "ChromaDbConfig",
"pgvector": "PGVectorConfig"
"pgvector": "PGVectorConfig",
}

@model_validator(mode="after")
def validate_and_create_config(self) -> 'VectorStoreConfig':
def validate_and_create_config(self) -> "VectorStoreConfig":
provider = self.provider
config = self.config

if provider not in self._provider_configs:
raise ValueError(f"Unsupported vector store provider: {provider}")

module = __import__(f"mem0.configs.vector_stores.{provider}", fromlist=[self._provider_configs[provider]])
module = __import__(
f"mem0.configs.vector_stores.{provider}",
fromlist=[self._provider_configs[provider]],
)
config_class = getattr(module, self._provider_configs[provider])

if config is None:
@@ -40,4 +43,4 @@ class VectorStoreConfig(BaseModel):
config["path"] = f"/tmp/{provider}"

self.config = config_class(**config)
return self
return self

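A minimal sketch (not part of the commit) of how the validator above resolves a provider-specific config class; the ChromaDbConfig field names used here are assumptions for illustration.

# Illustrative only -- not part of the commit; field names are assumptions.
cfg = VectorStoreConfig(
    provider="chroma",
    config={"collection_name": "mem0", "path": "/tmp/chroma"},
)
# validate_and_create_config imports mem0.configs.vector_stores.chroma, looks up
# ChromaDbConfig, and replaces the raw dict with a validated config instance:
assert type(cfg.config).__name__ == "ChromaDbConfig"
# An unknown provider fails fast:
# VectorStoreConfig(provider="weaviate") -> ValueError: Unsupported vector store provider: weaviate
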
@@ -1,16 +1,19 @@
import json
from typing import Optional, List, Dict, Any
from typing import Optional, List
from pydantic import BaseModel

try:
import psycopg2
from psycopg2.extras import execute_values
except ImportError:
raise ImportError("PGVector requires extra dependencies. Install with `pip install psycopg2`") from None
raise ImportError(
"PGVector requires extra dependencies. Install with `pip install psycopg2`"
) from None


from mem0.vector_stores.base import VectorStoreBase


class OutputData(BaseModel):
id: Optional[str]
score: Optional[float]
@@ -19,14 +22,7 @@ class OutputData(BaseModel):

class PGVector(VectorStoreBase):
def __init__(
self,
dbname,
collection_name,
embedding_model_dims,
user,
password,
host,
port
self, dbname, collection_name, embedding_model_dims, user, password, host, port
):
"""
Initialize the PGVector database.
@@ -43,18 +39,14 @@ class PGVector(VectorStoreBase):
self.collection_name = collection_name

self.conn = psycopg2.connect(
dbname=dbname,
user=user,
password=password,
host=host,
port=port
dbname=dbname, user=user, password=password, host=host, port=port
)
self.cur = self.conn.cursor()

collections = self.list_cols()
if collection_name not in collections:
self.create_col(embedding_model_dims)


def create_col(self, embedding_model_dims):
"""
Create a new collection (table in PostgreSQL).
@@ -63,16 +55,18 @@ class PGVector(VectorStoreBase):
name (str): Name of the collection.
embedding_model_dims (int, optional): Dimension of the embedding vector.
"""
self.cur.execute(f"""
self.cur.execute(
f"""
CREATE TABLE IF NOT EXISTS {self.collection_name} (
id UUID PRIMARY KEY,
vector vector({embedding_model_dims}),
payload JSONB
);
""")
"""
)
self.conn.commit()

def insert(self, vectors, payloads = None, ids = None):
def insert(self, vectors, payloads=None, ids=None):
"""
Insert vectors into a collection.

@@ -83,11 +77,18 @@ class PGVector(VectorStoreBase):
"""
json_payloads = [json.dumps(payload) for payload in payloads]

data = [(id, vector, payload) for id, vector, payload in zip(ids, vectors, json_payloads)]
execute_values(self.cur, f"INSERT INTO {self.collection_name} (id, vector, payload) VALUES %s", data)
data = [
(id, vector, payload)
for id, vector, payload in zip(ids, vectors, json_payloads)
]
execute_values(
self.cur,
f"INSERT INTO {self.collection_name} (id, vector, payload) VALUES %s",
data,
)
self.conn.commit()

def search(self, query, limit = 5, filters = None):
def search(self, query, limit=5, filters=None):
"""
Search for similar vectors.

@@ -104,21 +105,28 @@ class PGVector(VectorStoreBase):

if filters:
for k, v in filters.items():
filter_conditions.append(f"payload->>%s = %s")
filter_conditions.append("payload->>%s = %s")
filter_params.extend([k, str(v)])

filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
filter_clause = (
"WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
)

self.cur.execute(f"""
self.cur.execute(
f"""
SELECT id, vector <-> %s::vector AS distance, payload
FROM {self.collection_name}
{filter_clause}
ORDER BY distance
LIMIT %s
""", (query, *filter_params, limit))
""",
(query, *filter_params, limit),
)

results = self.cur.fetchall()
return [OutputData(id=str(r[0]), score=float(r[1]), payload=r[2]) for r in results]
return [
OutputData(id=str(r[0]), score=float(r[1]), payload=r[2]) for r in results
]

def delete(self, vector_id):
"""
@@ -127,10 +135,12 @@ class PGVector(VectorStoreBase):
Args:
vector_id (str): ID of the vector to delete.
"""
self.cur.execute(f"DELETE FROM {self.collection_name} WHERE id = %s", (vector_id,))
self.cur.execute(
f"DELETE FROM {self.collection_name} WHERE id = %s", (vector_id,)
)
self.conn.commit()

def update(self, vector_id, vector = None, payload = None):
def update(self, vector_id, vector=None, payload=None):
"""
Update a vector and its payload.

@@ -140,9 +150,15 @@ class PGVector(VectorStoreBase):
payload (Dict, optional): Updated payload.
"""
if vector:
self.cur.execute(f"UPDATE {self.collection_name} SET vector = %s WHERE id = %s", (vector, vector_id))
self.cur.execute(
f"UPDATE {self.collection_name} SET vector = %s WHERE id = %s",
(vector, vector_id),
)
if payload:
self.cur.execute(f"UPDATE {self.collection_name} SET payload = %s WHERE id = %s", (psycopg2.extras.Json(payload), vector_id))
self.cur.execute(
f"UPDATE {self.collection_name} SET payload = %s WHERE id = %s",
(psycopg2.extras.Json(payload), vector_id),
)
self.conn.commit()

def get(self, vector_id) -> OutputData:
@@ -155,7 +171,10 @@ class PGVector(VectorStoreBase):
Returns:
OutputData: Retrieved vector.
"""
self.cur.execute(f"SELECT id, vector, payload FROM {self.collection_name} WHERE id = %s", (vector_id,))
self.cur.execute(
f"SELECT id, vector, payload FROM {self.collection_name} WHERE id = %s",
(vector_id,),
)
result = self.cur.fetchone()
if not result:
return None
@@ -168,11 +187,13 @@ class PGVector(VectorStoreBase):
Returns:
List[str]: List of collection names.
"""
self.cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
self.cur.execute(
"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
)
return [row[0] for row in self.cur.fetchall()]

def delete_col(self):
""" Delete a collection. """
"""Delete a collection."""
self.cur.execute(f"DROP TABLE IF EXISTS {self.collection_name}")
self.conn.commit()

@@ -183,22 +204,21 @@ class PGVector(VectorStoreBase):
Returns:
Dict[str, Any]: Collection information.
"""
self.cur.execute(f"""
self.cur.execute(
f"""
SELECT
table_name,
(SELECT COUNT(*) FROM {self.collection_name}) as row_count,
(SELECT pg_size_pretty(pg_total_relation_size('{self.collection_name}'))) as total_size
FROM information_schema.tables
WHERE table_schema = 'public' AND table_name = %s
""", (self.collection_name,))
""",
(self.collection_name,),
)
result = self.cur.fetchone()
return {
"name": result[0],
"count": result[1],
"size": result[2]
}
return {"name": result[0], "count": result[1], "size": result[2]}

def list(self, filters = None, limit = 100):
def list(self, filters=None, limit=100):
"""
List all vectors in a collection.

@@ -214,10 +234,12 @@ class PGVector(VectorStoreBase):

if filters:
for k, v in filters.items():
filter_conditions.append(f"payload->>%s = %s")
filter_conditions.append("payload->>%s = %s")
filter_params.extend([k, str(v)])

filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
filter_clause = (
"WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
)

query = f"""
SELECT id, vector, payload
@@ -235,7 +257,7 @@ class PGVector(VectorStoreBase):
"""
Close the database connection when the object is deleted.
"""
if hasattr(self, 'cur'):
if hasattr(self, "cur"):
self.cur.close()
if hasattr(self, 'conn'):
self.conn.close()
if hasattr(self, "conn"):
self.conn.close()

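A minimal sketch (not part of the commit) of how a metadata filters dict is turned into the parameterized WHERE clause built in search() and list() above; the filter keys and values are made up.

# Illustrative only -- not part of the commit.
filters = {"user_id": "alice", "agent_id": "bot-1"}
filter_conditions, filter_params = [], []
for k, v in filters.items():
    filter_conditions.append("payload->>%s = %s")
    filter_params.extend([k, str(v)])
filter_clause = "WHERE " + " AND ".join(filter_conditions) if filter_conditions else ""
# filter_clause == "WHERE payload->>%s = %s AND payload->>%s = %s"
# filter_params == ["user_id", "alice", "agent_id", "bot-1"]
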
@@ -28,7 +28,7 @@ class Qdrant(VectorStoreBase):
path: str = None,
url: str = None,
api_key: str = None,
on_disk: bool = False
on_disk: bool = False,
):
"""
Initialize the Qdrant vector store.
@@ -60,13 +60,15 @@ class Qdrant(VectorStoreBase):
if not on_disk:
if os.path.exists(path) and os.path.isdir(path):
shutil.rmtree(path)

self.client = QdrantClient(**params)

self.collection_name = collection_name
self.create_col(embedding_model_dims, on_disk)

def create_col(self, vector_size: int, on_disk: bool, distance: Distance = Distance.COSINE):
def create_col(
self, vector_size: int, on_disk: bool, distance: Distance = Distance.COSINE
):
"""
Create a new collection.

@@ -79,12 +81,16 @@ class Qdrant(VectorStoreBase):
response = self.list_cols()
for collection in response.collections:
if collection.name == self.collection_name:
logging.debug(f"Collection {self.collection_name} already exists. Skipping creation.")
logging.debug(
f"Collection {self.collection_name} already exists. Skipping creation."
)
return

self.client.create_collection(
collection_name=self.collection_name,
vectors_config=VectorParams(size=vector_size, distance=distance, on_disk=on_disk),
vectors_config=VectorParams(
size=vector_size, distance=distance, on_disk=on_disk
),
)

def insert(self, vectors: list, payloads: list = None, ids: list = None):
@@ -202,7 +208,7 @@ class Qdrant(VectorStoreBase):
return self.client.get_collections()

def delete_col(self):
""" Delete a collection. """
"""Delete a collection."""
self.client.delete_collection(collection_name=self.collection_name)

def col_info(self) -> dict:

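The optional-dependency pattern in the chroma and pgvector hunks above is what makes the package lighter: the heavy client libraries are imported only when their backend is selected, and the ImportError carries the install hint from the error messages shown in the diff. A minimal sketch (not part of the commit), assuming the pgvector module lives at mem0.vector_stores.pgvector.

# Illustrative only -- not part of the commit; the module path is an assumption
# based on the imports shown above.
try:
    from mem0.vector_stores.pgvector import PGVector  # importing this pulls in psycopg2
except ImportError as err:
    # "PGVector requires extra dependencies. Install with `pip install psycopg2`"
    print(err)
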