Formatting and Client changes (#2247)
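Summary: Black-style reformatting across the vector stores and SDK, plus one behavioral change in the client and telemetry: the per-machine `Mem0-User-ID` header and `get_user_id()` lookup are removed, and client telemetry events are instead keyed by the account email returned while validating the API key.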
@@ -6,14 +6,10 @@ from typing import Any, Dict, List, Optional, Union
 
 import httpx
 
-from mem0.memory.setup import get_user_id, setup_config
 from mem0.memory.telemetry import capture_client_event
 
 logger = logging.getLogger(__name__)
 
-# Setup user config
-setup_config()
-
 warnings.filterwarnings("default", category=DeprecationWarning)
 
 
@@ -78,17 +74,16 @@ class MemoryClient:
         self.host = host or "https://api.mem0.ai"
         self.org_id = org_id
         self.project_id = project_id
-        self.user_id = get_user_id()
 
         if not self.api_key:
             raise ValueError("Mem0 API Key not provided. Please provide an API Key.")
 
         self.client = httpx.Client(
             base_url=self.host,
-            headers={"Authorization": f"Token {self.api_key}", "Mem0-User-ID": self.user_id},
+            headers={"Authorization": f"Token {self.api_key}"},
             timeout=300,
         )
-        self._validate_api_key()
+        self.user_email = self._validate_api_key()
         capture_client_event("client.init", self)
 
     def _validate_api_key(self):
@@ -104,6 +99,8 @@ class MemoryClient:
             self.org_id = data.get("org_id")
             self.project_id = data.get("project_id")
 
+            return data.get("user_email")
+
         except httpx.HTTPStatusError as e:
             try:
                 error_data = e.response.json()
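Net effect of the three client hunks above: `__init__` no longer sends a `Mem0-User-ID` header, and `_validate_api_key` now returns the account's email, which the constructor stores for telemetry. A condensed sketch of the resulting flow; the validation route itself is outside the diff, so `/v1/ping/` below is a placeholder:

```python
import httpx


class MemoryClientSketch:
    """Condensed illustration of the new client flow, not the full MemoryClient."""

    def __init__(self, api_key: str, host: str = "https://api.mem0.ai"):
        self.api_key = api_key
        self.client = httpx.Client(
            base_url=host,
            headers={"Authorization": f"Token {api_key}"},  # Mem0-User-ID header is gone
            timeout=300,
        )
        # Key validation now doubles as the source of the telemetry identity.
        self.user_email = self._validate_api_key()

    def _validate_api_key(self):
        response = self.client.get("/v1/ping/")  # placeholder route, not shown in the diff
        response.raise_for_status()
        data = response.json()
        return data.get("user_email")
```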
@@ -36,7 +36,6 @@ class OpenSearchConfig(BaseModel):
         extra_fields = input_fields - allowed_fields
         if extra_fields:
             raise ValueError(
-                f"Extra fields not allowed: {', '.join(extra_fields)}. "
-                f"Allowed fields: {', '.join(allowed_fields)}"
+                f"Extra fields not allowed: {', '.join(extra_fields)}. " f"Allowed fields: {', '.join(allowed_fields)}"
             )
         return values
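The joined message relies on Python's implicit concatenation of adjacent string literals, which also applies to f-strings, so the `ValueError` text is unchanged. A quick check:

```python
extra_fields = {"bad_field"}
allowed_fields = {"host", "port", "user"}

# Adjacent (f-)string literals are concatenated at compile time.
one_line = f"Extra fields not allowed: {', '.join(extra_fields)}. " f"Allowed fields: {', '.join(allowed_fields)}"
two_line = (
    f"Extra fields not allowed: {', '.join(extra_fields)}. "
    f"Allowed fields: {', '.join(allowed_fields)}"
)
assert one_line == two_line
```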
@@ -178,7 +178,7 @@ class Memory(MemoryBase):
             retrieved_old_memory.append({"id": mem.id, "text": mem.payload["data"]})
         unique_data = {}
         for item in retrieved_old_memory:
-            unique_data[item['id']] = item
+            unique_data[item["id"]] = item
         retrieved_old_memory = list(unique_data.values())
         logging.info(f"Total existing memories: {len(retrieved_old_memory)}")
 
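Only the quoting style changes here, but the surrounding idiom is worth spelling out: keying a dict by memory id deduplicates the list, keeping each id's first position and last-seen payload (dicts preserve insertion order in Python 3.7+). A self-contained example:

```python
retrieved_old_memory = [
    {"id": "m1", "text": "likes tea"},
    {"id": "m2", "text": "lives in Paris"},
    {"id": "m1", "text": "likes green tea"},  # duplicate id
]

unique_data = {}
for item in retrieved_old_memory:
    unique_data[item["id"]] = item
retrieved_old_memory = list(unique_data.values())

# m1 keeps its first position but carries the last-seen text.
assert retrieved_old_memory == [
    {"id": "m1", "text": "likes green tea"},
    {"id": "m2", "text": "lives in Paris"},
]
```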
@@ -29,7 +29,7 @@ class AnonymousTelemetry:
         if not MEM0_TELEMETRY:
             self.posthog.disabled = True
 
-    def capture_event(self, event_name, properties=None):
+    def capture_event(self, event_name, properties=None, user_email=None):
         if properties is None:
             properties = {}
         properties = {
@@ -43,7 +43,8 @@ class AnonymousTelemetry:
             "machine": platform.machine(),
             **properties,
         }
-        self.posthog.capture(distinct_id=self.user_id, event=event_name, properties=properties)
+        distinct_id = self.user_id if user_email is None else user_email
+        self.posthog.capture(distinct_id=distinct_id, event=event_name, properties=properties)
 
     def close(self):
         self.posthog.shutdown()
@@ -82,4 +83,4 @@ def capture_client_event(event_name, instance, additional_data=None):
    if additional_data:
        event_data.update(additional_data)

-    telemetry.capture_event(event_name, event_data)
+    telemetry.capture_event(event_name, event_data, instance.user_email)
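The telemetry side threads the email through: `capture_event` gains an optional `user_email` parameter, and `capture_client_event` always forwards `instance.user_email`. The fallback rule in isolation:

```python
from typing import Optional


def pick_distinct_id(user_id: str, user_email: Optional[str]) -> str:
    # Same rule as the diff: authenticated clients report under their account
    # email; everything else stays on the anonymous per-machine id.
    return user_id if user_email is None else user_email


assert pick_distinct_id("anon-machine-id", None) == "anon-machine-id"
assert pick_distinct_id("anon-machine-id", "dev@example.com") == "dev@example.com"
```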
@@ -55,8 +55,11 @@ def get_image_description(image_url):
            {
                "role": "user",
                "content": [
-                    {"type": "text", "text": "Provide a description of the image and do not include any additional text."},
-                    {"type": "image_url", "image_url": {"url": image_url}}
+                    {
+                        "type": "text",
+                        "text": "Provide a description of the image and do not include any additional text.",
+                    },
+                    {"type": "image_url", "image_url": {"url": image_url}},
                ],
            },
        ],
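The reshaped block is the standard OpenAI-style multimodal message: one user turn whose content is a list of typed parts. Lifted out of the surrounding call (which the hunk does not show), the value is:

```python
image_url = "https://example.com/photo.jpg"  # example URL

messages = [
    {
        "role": "user",
        "content": [
            {
                "type": "text",
                "text": "Provide a description of the image and do not include any additional text.",
            },
            {"type": "image_url", "image_url": {"url": image_url}},
        ],
    },
]
```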
@@ -68,7 +68,7 @@ class VectorStoreFactory:
        "azure_ai_search": "mem0.vector_stores.azure_ai_search.AzureAISearch",
        "redis": "mem0.vector_stores.redis.RedisDB",
        "elasticsearch": "mem0.vector_stores.elasticsearch.ElasticsearchDB",
-        "opensearch": "mem0.vector_stores.opensearch.OpenSearchDB"
+        "opensearch": "mem0.vector_stores.opensearch.OpenSearchDB",
    }

    @classmethod
@@ -118,8 +118,7 @@ class AzureAISearch(VectorStoreBase):
        logger.info(f"Inserting {len(vectors)} vectors into index {self.index_name}")

        documents = [
-            self._generate_document(vector, payload, id)
-            for id, vector, payload in zip(ids, vectors, payloads)
+            self._generate_document(vector, payload, id) for id, vector, payload in zip(ids, vectors, payloads)
        ]
        self.search_client.upload_documents(documents)

@@ -133,7 +132,7 @@ class AzureAISearch(VectorStoreBase):
            condition = f"{key} eq {value}"
            filter_conditions.append(condition)
        # Use 'and' to join multiple conditions
-        filter_expression = ' and '.join(filter_conditions)
+        filter_expression = " and ".join(filter_conditions)
        return filter_expression

    def search(self, query, limit=5, filters=None):
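For reference, `_build_filter_expression` produces an OData filter string: each key/value pair becomes an `eq` clause and the clauses are joined with `and`. Values are interpolated as-is, so string values need to arrive already quoted for OData. A standalone sketch with example filters:

```python
filters = {"user_id": "'alice'", "run_id": 42}  # example values

filter_conditions = [f"{key} eq {value}" for key, value in filters.items()]
filter_expression = " and ".join(filter_conditions)
print(filter_expression)  # user_id eq 'alice' and run_id eq 42
```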
@@ -152,14 +151,8 @@ class AzureAISearch(VectorStoreBase):
        if filters:
            filter_expression = self._build_filter_expression(filters)

-        vector_query = VectorizedQuery(
-            vector=query, k_nearest_neighbors=limit, fields="vector"
-        )
-        search_results = self.search_client.search(
-            vector_queries=[vector_query],
-            filter=filter_expression,
-            top=limit
-        )
+        vector_query = VectorizedQuery(vector=query, k_nearest_neighbors=limit, fields="vector")
+        search_results = self.search_client.search(vector_queries=[vector_query], filter=filter_expression, top=limit)

        results = []
        for result in search_results:
@@ -245,11 +238,7 @@ class AzureAISearch(VectorStoreBase):
        if filters:
            filter_expression = self._build_filter_expression(filters)

-        search_results = self.search_client.search(
-            search_text="*",
-            filter=filter_expression,
-            top=limit
-        )
+        search_results = self.search_client.search(search_text="*", filter=filter_expression, top=limit)
        results = []
        for result in search_results:
            payload = json.loads(result["payload"])
@@ -49,30 +49,14 @@ class ElasticsearchDB(VectorStoreBase):
    def create_index(self) -> None:
        """Create Elasticsearch index with proper mappings if it doesn't exist"""
        index_settings = {
-            "settings": {
-                "index": {
-                    "number_of_replicas": 1,
-                    "number_of_shards": 5,
-                    "refresh_interval": "1s"
-                }
-            },
+            "settings": {"index": {"number_of_replicas": 1, "number_of_shards": 5, "refresh_interval": "1s"}},
            "mappings": {
                "properties": {
                    "text": {"type": "text"},
-                    "vector": {
-                        "type": "dense_vector",
-                        "dims": self.vector_dim,
-                        "index": True,
-                        "similarity": "cosine"
-                    },
-                    "metadata": {
-                        "type": "object",
-                        "properties": {
-                            "user_id": {"type": "keyword"}
-                        }
-                    }
+                    "vector": {"type": "dense_vector", "dims": self.vector_dim, "index": True, "similarity": "cosine"},
+                    "metadata": {"type": "object", "properties": {"user_id": {"type": "keyword"}}},
                }
-            }
+            },
        }

        if not self.client.indices.exists(index=self.collection_name):
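The collapsed mapping is semantically identical to the old multi-line one: `vector` is a `dense_vector` with cosine similarity, and `metadata.user_id` is a `keyword` field, which is what the `term` filters later in this file match against. A minimal creation sketch, assuming the elasticsearch-py client and example endpoint, index name, and dimension (the actual create call sits outside this hunk):

```python
from elasticsearch import Elasticsearch

client = Elasticsearch("http://localhost:9200")  # example endpoint
collection_name = "mem0"  # example index name
vector_dim = 1536  # example dimension

index_settings = {
    "settings": {"index": {"number_of_replicas": 1, "number_of_shards": 5, "refresh_interval": "1s"}},
    "mappings": {
        "properties": {
            "text": {"type": "text"},
            "vector": {"type": "dense_vector", "dims": vector_dim, "index": True, "similarity": "cosine"},
            "metadata": {"type": "object", "properties": {"user_id": {"type": "keyword"}}},
        }
    },
}

if not client.indices.exists(index=collection_name):
    client.indices.create(
        index=collection_name,
        settings=index_settings["settings"],
        mappings=index_settings["mappings"],
    )
```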
@@ -114,8 +98,8 @@ class ElasticsearchDB(VectorStoreBase):
                "_id": id_,
                "_source": {
                    "vector": vec,
-                    "metadata": payloads[i]  # Store all metadata in the metadata field
-                }
+                    "metadata": payloads[i],  # Store all metadata in the metadata field
+                },
            }
            actions.append(action)

@@ -127,7 +111,7 @@ class ElasticsearchDB(VectorStoreBase):
                OutputData(
                    id=id_,
                    score=1.0,  # Default score for inserts
-                    payload=payloads[i]
+                    payload=payloads[i],
                )
            )
        return results
@@ -136,23 +120,12 @@ class ElasticsearchDB(VectorStoreBase):
        """Search for similar vectors using KNN search with pre-filtering."""
        if not filters:
            # If no filters, just do KNN search
-            search_query = {
-                "knn": {
-                    "field": "vector",
-                    "query_vector": query,
-                    "k": limit,
-                    "num_candidates": limit * 2
-                }
-            }
+            search_query = {"knn": {"field": "vector", "query_vector": query, "k": limit, "num_candidates": limit * 2}}
        else:
            # If filters exist, apply them with KNN search
            filter_conditions = []
            for key, value in filters.items():
-                filter_conditions.append({
-                    "term": {
-                        f"metadata.{key}": value
-                    }
-                })
+                filter_conditions.append({"term": {f"metadata.{key}": value}})

            search_query = {
                "knn": {
@@ -160,11 +133,7 @@ class ElasticsearchDB(VectorStoreBase):
                    "query_vector": query,
                    "k": limit,
                    "num_candidates": limit * 2,
-                    "filter": {
-                        "bool": {
-                            "must": filter_conditions
-                        }
-                    }
+                    "filter": {"bool": {"must": filter_conditions}},
                }
            }

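With the reflow done, the filtered search body this branch builds is easier to see whole. For an example embedding and filters, the final structure is:

```python
query = [0.1, 0.2, 0.3]  # example embedding
limit = 5
filters = {"user_id": "alice", "agent_id": "support-bot"}  # example filters

filter_conditions = [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]
search_query = {
    "knn": {
        "field": "vector",
        "query_vector": query,
        "k": limit,
        "num_candidates": limit * 2,
        "filter": {"bool": {"must": filter_conditions}},
    }
}
```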
@@ -173,11 +142,7 @@ class ElasticsearchDB(VectorStoreBase):
        results = []
        for hit in response["hits"]["hits"]:
            results.append(
-                OutputData(
-                    id=hit["_id"],
-                    score=hit["_score"],
-                    payload=hit.get("_source", {}).get("metadata", {})
-                )
+                OutputData(id=hit["_id"], score=hit["_score"], payload=hit.get("_source", {}).get("metadata", {}))
            )

        return results
@@ -203,7 +168,7 @@ class ElasticsearchDB(VectorStoreBase):
            return OutputData(
                id=response["_id"],
                score=1.0,  # Default score for direct get
-                payload=response["_source"].get("metadata", {})
+                payload=response["_source"].get("metadata", {}),
            )
        except KeyError as e:
            logger.warning(f"Missing key in Elasticsearch response: {e}")
@@ -234,16 +199,8 @@ class ElasticsearchDB(VectorStoreBase):
        if filters:
            filter_conditions = []
            for key, value in filters.items():
-                filter_conditions.append({
-                    "term": {
-                        f"metadata.{key}": value
-                    }
-                })
-            query["query"] = {
-                "bool": {
-                    "must": filter_conditions
-                }
-            }
+                filter_conditions.append({"term": {f"metadata.{key}": value}})
+            query["query"] = {"bool": {"must": filter_conditions}}

        if limit:
            query["size"] = limit
@@ -256,7 +213,7 @@ class ElasticsearchDB(VectorStoreBase):
                OutputData(
                    id=hit["_id"],
                    score=1.0,  # Default score for list operation
-                    payload=hit.get("_source", {}).get("metadata", {})
+                    payload=hit.get("_source", {}).get("metadata", {}),
                )
            )

@@ -50,10 +50,7 @@ class OpenSearchDB(VectorStoreBase):
            "mappings": {
                "properties": {
                    "text": {"type": "text"},
-                    "vector": {
-                        "type": "knn_vector",
-                        "dimension": self.vector_dim
-                    },
+                    "vector": {"type": "knn_vector", "dimension": self.vector_dim},
                    "metadata": {"type": "object", "properties": {"user_id": {"type": "keyword"}}},
                }
            },
@@ -73,7 +70,7 @@ class OpenSearchDB(VectorStoreBase):
                "vector": {
                    "type": "knn_vector",
                    "dimension": vector_size,
-                    "method": { "engine": "lucene", "name": "hnsw", "space_type": "cosinesimil"},
+                    "method": {"engine": "lucene", "name": "hnsw", "space_type": "cosinesimil"},
                },
                "payload": {"type": "object"},
                "id": {"type": "keyword"},
@@ -125,12 +122,12 @@ class OpenSearchDB(VectorStoreBase):
                        "k": limit,
                    }
                }
-            }
+            },
        }

        if filters:
            filter_conditions = [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]
-            search_query["query"]["knn"]["vector"]["filter"] = { "bool": {"filter": filter_conditions} }
+            search_query["query"]["knn"]["vector"]["filter"] = {"bool": {"filter": filter_conditions}}

        response = self.client.search(index=self.collection_name, body=search_query)

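Same reflow on the OpenSearch side, with one structural difference worth noting: here the filter is patched into the knn clause itself and uses `bool`/`filter`, whereas the Elasticsearch path above uses `bool`/`must`. A sketch of the assembled query; only the `["query"]["knn"]["vector"]` path and the `"k"` key appear in the hunk, so the rest of the base shape is assumed:

```python
query_vector = [0.1, 0.2, 0.3]  # example embedding
limit = 5
filters = {"user_id": "alice"}  # example filters

# Base knn shape assumed from the surrounding code.
search_query = {"query": {"knn": {"vector": {"vector": query_vector, "k": limit}}}}

if filters:
    filter_conditions = [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]
    search_query["query"]["knn"]["vector"]["filter"] = {"bool": {"filter": filter_conditions}}
```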
@@ -180,10 +177,17 @@ class OpenSearchDB(VectorStoreBase):
        query = {"query": {"match_all": {}}}

        if filters:
-            query["query"] = {"bool": {"must": [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]}}
+            query["query"] = {
+                "bool": {"must": [{"term": {f"metadata.{key}": value}} for key, value in filters.items()]}
+            }

        if limit:
            query["size"] = limit

        response = self.client.search(index=self.collection_name, body=query)
-        return [[OutputData(id=hit["_id"], score=1.0, payload=hit["_source"].get("metadata", {})) for hit in response["hits"]["hits"]]]
+        return [
+            [
+                OutputData(id=hit["_id"], score=1.0, payload=hit["_source"].get("metadata", {}))
+                for hit in response["hits"]["hits"]
+            ]
+        ]
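The expanded return keeps the original nested shape: a single-element outer list wrapping the list of hits, so callers still unwrap one level. A stub illustration, with plain dicts standing in for `OutputData`:

```python
response = {"hits": {"hits": [{"_id": "m1", "_source": {"metadata": {"user_id": "alice"}}}]}}

results = [
    [
        {"id": hit["_id"], "score": 1.0, "payload": hit["_source"].get("metadata", {})}
        for hit in response["hits"]["hits"]
    ]
]
assert results[0][0]["id"] == "m1"
```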