Feat/mem0 support es (#2125)

docs/components/vectordbs/dbs/elasticsearch.mdx (new file, 58 lines)
@@ -0,0 +1,58 @@
[Elasticsearch](https://www.elastic.co/) is a distributed, RESTful search and analytics engine that can efficiently store and search vector data using dense vectors and k-NN search.

### Installation

Elasticsearch support requires additional dependencies. Install them with:

```bash
pip install "elasticsearch>=8.0.0"
```

### Usage

```python
import os

from mem0 import Memory

os.environ["OPENAI_API_KEY"] = "sk-xx"

config = {
    "vector_store": {
        "provider": "elasticsearch",
        "config": {
            "collection_name": "mem0",
            "host": "localhost",
            "port": 9200,
            "embedding_model_dims": 1536,
            # The config validator requires either api_key or user/password;
            # the values below are placeholders.
            "user": "elastic",
            "password": "your-password"
        }
    }
}

m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
```
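Stored memories can then be retrieved with a semantic query; a minimal sketch, assuming mem0's standard `Memory.search` API:

```python
# Illustrative retrieval: returns memories relevant to the query for this user.
related = m.search("What does Alice do on weekends?", user_id="alice")
```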

### Config

Let's see the available parameters for the `elasticsearch` config:

| Parameter              | Description                                        | Default Value |
| ---------------------- | -------------------------------------------------- | ------------- |
| `collection_name`      | The name of the index to store the vectors         | `mem0`        |
| `embedding_model_dims` | Dimensions of the embedding model                  | `1536`        |
| `host`                 | The host where the Elasticsearch server is running | `localhost`   |
| `port`                 | The port where the Elasticsearch server is running | `9200`        |
| `cloud_id`             | Cloud ID for Elastic Cloud deployment              | `None`        |
| `api_key`              | API key for authentication                         | `None`        |
| `user`                 | Username for basic authentication                  | `None`        |
| `password`             | Password for basic authentication                  | `None`        |
| `verify_certs`         | Whether to verify SSL certificates                 | `True`        |
| `use_ssl`              | Whether to use SSL for the connection              | `True`        |
| `auto_create_index`    | Whether to automatically create the index          | `True`        |

Note that the config validator requires either `api_key` or both `user` and `password` to be set.

### Features

- Efficient vector search using Elasticsearch's native k-NN search
- Support for both local and cloud deployments (Elastic Cloud)
- Multiple authentication methods (Basic Auth, API Key)
- Automatic index creation with optimized mappings for vector search
- Memory isolation through payload filtering
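For Elastic Cloud deployments, `cloud_id` and `api_key` take the place of `host`/`port`. A sketch with placeholder credentials:

```python
config = {
    "vector_store": {
        "provider": "elasticsearch",
        "config": {
            "collection_name": "mem0",
            "cloud_id": "your-deployment:...",  # placeholder Elastic Cloud ID
            "api_key": "your-api-key",          # placeholder API key
            "embedding_model_dims": 1536
        }
    }
}
```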

mem0/configs/vector_stores/elasticsearch.py (new file, 43 lines)
@@ -0,0 +1,43 @@
from typing import Any, Dict, Optional

from pydantic import BaseModel, Field, model_validator


class ElasticsearchConfig(BaseModel):
    collection_name: str = Field("mem0", description="Name of the index")
    host: str = Field("localhost", description="Elasticsearch host")
    port: int = Field(9200, description="Elasticsearch port")
    user: Optional[str] = Field(None, description="Username for authentication")
    password: Optional[str] = Field(None, description="Password for authentication")
    cloud_id: Optional[str] = Field(None, description="Cloud ID for Elastic Cloud")
    api_key: Optional[str] = Field(None, description="API key for authentication")
    embedding_model_dims: int = Field(1536, description="Dimension of the embedding vector")
    verify_certs: bool = Field(True, description="Verify SSL certificates")
    use_ssl: bool = Field(True, description="Use SSL for connection")
    auto_create_index: bool = Field(True, description="Automatically create index during initialization")

    @model_validator(mode="before")
    @classmethod
    def validate_auth(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # Check that either cloud_id or host is provided
        if not values.get("cloud_id") and not values.get("host"):
            raise ValueError("Either cloud_id or host must be provided")

        # Check that some form of authentication is provided
        if not any([values.get("api_key"), (values.get("user") and values.get("password"))]):
            raise ValueError("Either api_key or user/password must be provided")

        return values

    @model_validator(mode="before")
    @classmethod
    def validate_extra_fields(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # Reject unknown keys so configuration typos fail fast
        allowed_fields = set(cls.model_fields.keys())
        input_fields = set(values.keys())
        extra_fields = input_fields - allowed_fields
        if extra_fields:
            raise ValueError(
                f"Extra fields not allowed: {', '.join(extra_fields)}. "
                f"Please input only the following fields: {', '.join(allowed_fields)}"
            )
        return values
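Because both validators run with `mode="before"`, misconfigurations are rejected on the raw input before the model is built. A quick illustration (credentials are placeholders):

```python
from mem0.configs.vector_stores.elasticsearch import ElasticsearchConfig

# Missing credentials: validate_auth raises before the model is constructed
# (pydantic surfaces it as a ValidationError, a ValueError subclass).
try:
    ElasticsearchConfig(host="localhost", port=9200)
except ValueError as e:
    print(e)  # mentions "Either api_key or user/password must be provided"

# Basic auth satisfies the validator.
cfg = ElasticsearchConfig(host="localhost", port=9200, user="elastic", password="changeme")
```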

@@ -66,6 +66,7 @@ class VectorStoreFactory:
         "milvus": "mem0.vector_stores.milvus.MilvusDB",
         "azure_ai_search": "mem0.vector_stores.azure_ai_search.AzureAISearch",
         "redis": "mem0.vector_stores.redis.RedisDB",
+        "elasticsearch": "mem0.vector_stores.elasticsearch.ElasticsearchDB",
     }

     @classmethod

@@ -17,6 +17,7 @@ class VectorStoreConfig(BaseModel):
         "milvus": "MilvusDBConfig",
         "azure_ai_search": "AzureAISearchConfig",
         "redis": "RedisDBConfig",
+        "elasticsearch": "ElasticsearchConfig",
     }

     @model_validator(mode="after")

mem0/vector_stores/elasticsearch.py (new file, 224 lines)
@@ -0,0 +1,224 @@
import logging
from typing import Any, Dict, List, Optional

try:
    from elasticsearch import Elasticsearch
    from elasticsearch.helpers import bulk
except ImportError:
    raise ImportError(
        "Elasticsearch requires extra dependencies. Install with `pip install elasticsearch`"
    ) from None

from pydantic import BaseModel

from mem0.configs.vector_stores.elasticsearch import ElasticsearchConfig
from mem0.vector_stores.base import VectorStoreBase

logger = logging.getLogger(__name__)


class OutputData(BaseModel):
    id: str
    score: float
    payload: Dict


class ElasticsearchDB(VectorStoreBase):
    def __init__(self, **kwargs):
        config = ElasticsearchConfig(**kwargs)

        # Initialize the Elasticsearch client (Elastic Cloud or self-hosted)
        if config.cloud_id:
            self.client = Elasticsearch(
                cloud_id=config.cloud_id,
                api_key=config.api_key,
                verify_certs=config.verify_certs,
            )
        else:
            self.client = Elasticsearch(
                hosts=[f"{config.host}" if config.port is None else f"{config.host}:{config.port}"],
                basic_auth=(config.user, config.password) if (config.user and config.password) else None,
                verify_certs=config.verify_certs,
            )

        self.collection_name = config.collection_name
        self.vector_dim = config.embedding_model_dims

        # Create the index only if auto_create_index is True
        if config.auto_create_index:
            self.create_index()

    def create_index(self) -> None:
        """Create Elasticsearch index with proper mappings if it doesn't exist."""
        index_settings = {
            "mappings": {
                "properties": {
                    "text": {"type": "text"},
                    "embedding": {
                        "type": "dense_vector",
                        "dims": self.vector_dim,
                        "index": True,
                        "similarity": "cosine",
                    },
                    "metadata": {"type": "object"},
                    "user_id": {"type": "keyword"},
                    "hash": {"type": "keyword"},
                }
            }
        }

        if not self.client.indices.exists(index=self.collection_name):
            self.client.indices.create(index=self.collection_name, body=index_settings)
            logger.info(f"Created index {self.collection_name}")
        else:
            logger.info(f"Index {self.collection_name} already exists")

    def create_col(self, name: str, vector_size: int, distance: str = "cosine") -> None:
        """Create a new collection (index in Elasticsearch)."""
        index_settings = {
            "mappings": {
                "properties": {
                    "vector": {"type": "dense_vector", "dims": vector_size, "index": True, "similarity": "cosine"},
                    "payload": {"type": "object"},
                    "id": {"type": "keyword"},
                }
            }
        }

        if not self.client.indices.exists(index=name):
            self.client.indices.create(index=name, body=index_settings)
            logger.info(f"Created index {name}")

    def insert(
        self, vectors: List[List[float]], payloads: Optional[List[Dict]] = None, ids: Optional[List[str]] = None
    ) -> List[OutputData]:
        """Insert vectors into the index."""
        if not ids:
            ids = [str(i) for i in range(len(vectors))]

        if payloads is None:
            payloads = [{} for _ in range(len(vectors))]

        actions = []
        for i, (vec, id_) in enumerate(zip(vectors, ids)):
            action = {"_index": self.collection_name, "_id": id_, "vector": vec, "payload": payloads[i]}
            actions.append(action)

        bulk(self.client, actions)

        # Return OutputData objects for the inserted documents
        results = []
        for i, id_ in enumerate(ids):
            results.append(
                OutputData(
                    id=id_,
                    score=1.0,  # Default score for inserts
                    payload=payloads[i],
                )
            )
        return results

    def search(self, query: List[float], limit: int = 5, filters: Optional[Dict] = None) -> List[OutputData]:
        """Search for similar vectors using KNN search with pre-filtering."""
        search_query = {
            "query": {
                "bool": {
                    "must": [
                        # Exact-match filters for memory isolation
                        *({"term": {f"payload.{k}": v}} for k, v in (filters or {}).items()),
                        # KNN vector search
                        {
                            "knn": {
                                "vector": {
                                    "vector": query,
                                    "k": limit,
                                }
                            }
                        },
                    ]
                }
            }
        }

        response = self.client.search(index=self.collection_name, body=search_query)

        results = []
        for hit in response["hits"]["hits"]:
            results.append(
                OutputData(
                    id=hit["_id"],
                    score=hit["_score"],
                    payload=hit["_source"].get("payload", {}),
                )
            )

        return results

    def delete(self, vector_id: str) -> None:
        """Delete a vector by ID."""
        self.client.delete(index=self.collection_name, id=vector_id)

    def update(self, vector_id: str, vector: Optional[List[float]] = None, payload: Optional[Dict] = None) -> None:
        """Update a vector and its payload."""
        doc = {}
        if vector is not None:
            doc["vector"] = vector
        if payload is not None:
            doc["payload"] = payload

        self.client.update(index=self.collection_name, id=vector_id, body={"doc": doc})

    def get(self, vector_id: str) -> Optional[OutputData]:
        """Retrieve a vector by ID."""
        try:
            response = self.client.get(index=self.collection_name, id=vector_id)
            return OutputData(
                id=response["_id"],
                score=1.0,  # Default score for direct get
                payload=response["_source"].get("payload", {}),
            )
        except KeyError as e:
            logger.warning(f"Missing key in Elasticsearch response: {e}")
            return None
        except TypeError as e:
            logger.warning(f"Invalid response type from Elasticsearch: {e}")
            return None
        except Exception as e:
            logger.error(f"Unexpected error while parsing Elasticsearch response: {e}")
            return None

    def list_cols(self) -> List[str]:
        """List all collections (indices)."""
        return list(self.client.indices.get_alias().keys())

    def delete_col(self) -> None:
        """Delete a collection (index)."""
        self.client.indices.delete(index=self.collection_name)

    def col_info(self, name: str) -> Any:
        """Get information about a collection (index)."""
        return self.client.indices.get(index=name)

    def list(self, filters: Optional[Dict] = None, limit: Optional[int] = None) -> List[List[OutputData]]:
        """List all memories."""
        query: Dict[str, Any] = {"query": {"match_all": {}}}

        if filters:
            query["query"] = {"bool": {"must": [{"match": {f"payload.{k}": v}} for k, v in filters.items()]}}

        if limit:
            query["size"] = limit

        response = self.client.search(index=self.collection_name, body=query)

        results = []
        for hit in response["hits"]["hits"]:
            results.append(
                OutputData(
                    id=hit["_id"],
                    score=1.0,  # Default score for list operation
                    payload=hit["_source"].get("payload", {}),
                )
            )

        return [results]
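
The store can also be exercised standalone. A minimal sketch with placeholder connection settings (assumes a reachable Elasticsearch server that accepts the `knn` query clause used by `search` above):

```python
from mem0.vector_stores.elasticsearch import ElasticsearchDB

# Placeholder host and credentials; auth is mandatory per ElasticsearchConfig.
db = ElasticsearchDB(
    collection_name="demo",
    host="http://localhost",
    port=9200,
    user="elastic",
    password="changeme",     # placeholder
    embedding_model_dims=4,  # tiny dimension purely for illustration
)

# Index one vector, then run a filtered k-NN search against it.
db.insert(vectors=[[0.1, 0.2, 0.3, 0.4]], payloads=[{"user_id": "alice"}], ids=["m1"])
hits = db.search(query=[0.1, 0.2, 0.3, 0.4], limit=1, filters={"user_id": "alice"})
print(hits[0].id, hits[0].score)
```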

poetry.lock (generated, 2403 lines)
File diff suppressed because it is too large.

@@ -37,7 +37,6 @@ ruff = "^0.6.5"
 isort = "^5.13.2"
 pytest = "^8.2.2"

 [build-system]
 requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

tests/vector_stores/test_elasticsearch.py (new file, 337 lines)
@@ -0,0 +1,337 @@
import os
import unittest
from unittest.mock import MagicMock, patch

import dotenv

try:
    from elasticsearch import Elasticsearch
except ImportError:
    raise ImportError(
        "Elasticsearch requires extra dependencies. Install with `pip install elasticsearch`"
    ) from None

from mem0.vector_stores.elasticsearch import ElasticsearchDB, OutputData


class TestElasticsearchDB(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Load environment variables before any test
        dotenv.load_dotenv()

        # Save the original environment variables
        cls.original_env = {
            'ES_URL': os.getenv('ES_URL', 'http://localhost:9200'),
            'ES_USERNAME': os.getenv('ES_USERNAME', 'test_user'),
            'ES_PASSWORD': os.getenv('ES_PASSWORD', 'test_password'),
            'ES_CLOUD_ID': os.getenv('ES_CLOUD_ID', 'test_cloud_id')
        }

        # Set test environment variables
        os.environ['ES_URL'] = 'http://localhost'
        os.environ['ES_USERNAME'] = 'test_user'
        os.environ['ES_PASSWORD'] = 'test_password'

    def setUp(self):
        # Create a mock Elasticsearch client with the attributes the store uses
        self.client_mock = MagicMock(spec=Elasticsearch)
        self.client_mock.indices = MagicMock()
        self.client_mock.indices.exists = MagicMock(return_value=False)
        self.client_mock.indices.create = MagicMock()
        self.client_mock.indices.delete = MagicMock()
        self.client_mock.indices.get_alias = MagicMock()

        # Start patches BEFORE creating the ElasticsearchDB instance
        patcher = patch('mem0.vector_stores.elasticsearch.Elasticsearch', return_value=self.client_mock)
        self.mock_es = patcher.start()
        self.addCleanup(patcher.stop)

        # Initialize ElasticsearchDB with the test config and auto_create_index=False
        self.es_db = ElasticsearchDB(
            host=os.getenv('ES_URL'),
            port=9200,
            collection_name="test_collection",
            embedding_model_dims=1536,
            user=os.getenv('ES_USERNAME'),
            password=os.getenv('ES_PASSWORD'),
            verify_certs=False,
            use_ssl=False,
            auto_create_index=False  # Disable auto creation for tests
        )

        # Reset mock call counts accumulated during initialization
        self.client_mock.reset_mock()

    @classmethod
    def tearDownClass(cls):
        # Restore the original environment variables
        for key, value in cls.original_env.items():
            if value is not None:
                os.environ[key] = value
            else:
                os.environ.pop(key, None)

    def tearDown(self):
        self.client_mock.reset_mock()
        # No need to stop patches here as we're using addCleanup

    def test_create_index(self):
        # Test when the index doesn't exist
        self.client_mock.indices.exists.return_value = False
        self.es_db.create_index()

        # Verify index creation was called with the correct settings
        self.client_mock.indices.create.assert_called_once()
        create_args = self.client_mock.indices.create.call_args[1]

        # Verify basic index settings
        self.assertEqual(create_args["index"], "test_collection")
        self.assertIn("mappings", create_args["body"])

        # Verify field mappings
        mappings = create_args["body"]["mappings"]["properties"]
        self.assertEqual(mappings["text"]["type"], "text")
        self.assertEqual(mappings["embedding"]["type"], "dense_vector")
        self.assertEqual(mappings["embedding"]["dims"], 1536)
        self.assertEqual(mappings["embedding"]["index"], True)
        self.assertEqual(mappings["embedding"]["similarity"], "cosine")
        self.assertEqual(mappings["metadata"]["type"], "object")
        self.assertEqual(mappings["user_id"]["type"], "keyword")
        self.assertEqual(mappings["hash"]["type"], "keyword")

        # Reset mocks for the next case
        self.client_mock.reset_mock()

        # Test when the index already exists
        self.client_mock.indices.exists.return_value = True
        self.es_db.create_index()

        # Verify create was not called when the index exists
        self.client_mock.indices.create.assert_not_called()

    def test_auto_create_index(self):
        # Reset mock
        self.client_mock.reset_mock()

        # Test with auto_create_index=True
        ElasticsearchDB(
            host=os.getenv('ES_URL'),
            port=9200,
            collection_name="test_collection",
            embedding_model_dims=1536,
            user=os.getenv('ES_USERNAME'),
            password=os.getenv('ES_PASSWORD'),
            verify_certs=False,
            use_ssl=False,
            auto_create_index=True
        )

        # Verify create_index was called during initialization
        self.client_mock.indices.exists.assert_called_once()

        # Reset mock
        self.client_mock.reset_mock()

        # Test with auto_create_index=False
        ElasticsearchDB(
            host=os.getenv('ES_URL'),
            port=9200,
            collection_name="test_collection",
            embedding_model_dims=1536,
            user=os.getenv('ES_USERNAME'),
            password=os.getenv('ES_PASSWORD'),
            verify_certs=False,
            use_ssl=False,
            auto_create_index=False
        )

        # Verify create_index was not called during initialization
        self.client_mock.indices.exists.assert_not_called()

    def test_insert(self):
        # Test data
        vectors = [[0.1] * 1536, [0.2] * 1536]
        payloads = [{"key1": "value1"}, {"key2": "value2"}]
        ids = ["id1", "id2"]

        # Mock the bulk helper
        with patch('mem0.vector_stores.elasticsearch.bulk') as mock_bulk:
            mock_bulk.return_value = (2, [])  # Simulate a successful bulk insert

            # Perform insert
            results = self.es_db.insert(vectors=vectors, payloads=payloads, ids=ids)

            # Verify bulk was called
            mock_bulk.assert_called_once()

            # Verify the bulk actions format
            actions = mock_bulk.call_args[0][1]
            self.assertEqual(len(actions), 2)
            self.assertEqual(actions[0]["_index"], "test_collection")
            self.assertEqual(actions[0]["_id"], "id1")
            self.assertEqual(actions[0]["vector"], vectors[0])
            self.assertEqual(actions[0]["payload"], payloads[0])

            # Verify the returned objects
            self.assertEqual(len(results), 2)
            self.assertIsInstance(results[0], OutputData)
            self.assertEqual(results[0].id, "id1")
            self.assertEqual(results[0].payload, payloads[0])

    def test_search(self):
        # Mock search response
        mock_response = {
            "hits": {
                "hits": [
                    {
                        "_id": "id1",
                        "_score": 0.8,
                        "_source": {
                            "vector": [0.1] * 1536,
                            "payload": {"key1": "value1"}
                        }
                    }
                ]
            }
        }
        self.client_mock.search.return_value = mock_response

        # Perform search
        query_vector = [0.1] * 1536
        results = self.es_db.search(query=query_vector, limit=5)

        # Verify the search call
        self.client_mock.search.assert_called_once()
        search_args = self.client_mock.search.call_args[1]

        # Verify the search parameters: the implementation builds a bool query
        # whose last must-clause is the KNN vector search
        self.assertEqual(search_args["index"], "test_collection")
        body = search_args["body"]
        knn_clause = body["query"]["bool"]["must"][-1]["knn"]
        self.assertEqual(knn_clause["vector"]["vector"], query_vector)
        self.assertEqual(knn_clause["vector"]["k"], 5)

        # Verify results
        self.assertEqual(len(results), 1)
        self.assertIsInstance(results[0], OutputData)
        self.assertEqual(results[0].id, "id1")
        self.assertEqual(results[0].score, 0.8)
        self.assertEqual(results[0].payload, {"key1": "value1"})

    def test_get(self):
        # Mock get response with the correct structure
        mock_response = {
            "_id": "id1",
            "_source": {
                "vector": [0.1] * 1536,
                "payload": {"key": "value"},
                "text": "sample text",
                "user_id": "test_user",
                "hash": "sample_hash"
            }
        }
        self.client_mock.get.return_value = mock_response

        # Perform get
        result = self.es_db.get(vector_id="id1")

        # Verify the get call
        self.client_mock.get.assert_called_once_with(
            index="test_collection",
            id="id1"
        )

        # Basic assertions that should pass if OutputData is created correctly
        self.assertIsNotNone(result)
        self.assertTrue(hasattr(result, 'id'))
        self.assertTrue(hasattr(result, 'score'))
        self.assertTrue(hasattr(result, 'payload'))

        # If the above assertions pass, we can safely check the values
        if result is not None:  # This satisfies the linter
            self.assertEqual(result.id, "id1")
            self.assertEqual(result.score, 1.0)
            self.assertEqual(result.payload, {"key": "value"})

    def test_get_not_found(self):
        # Mock get raising an exception
        self.client_mock.get.side_effect = Exception("Not found")

        # Verify get returns None when the document is not found
        result = self.es_db.get(vector_id="nonexistent")
        self.assertIsNone(result)

    def test_list(self):
        # Mock search response with scores
        mock_response = {
            "hits": {
                "hits": [
                    {
                        "_id": "id1",
                        "_source": {
                            "vector": [0.1] * 1536,
                            "payload": {"key1": "value1"}
                        },
                        "_score": 1.0
                    },
                    {
                        "_id": "id2",
                        "_source": {
                            "vector": [0.2] * 1536,
                            "payload": {"key2": "value2"}
                        },
                        "_score": 0.8
                    }
                ]
            }
        }
        self.client_mock.search.return_value = mock_response

        # Perform the list operation
        results = self.es_db.list(limit=10)

        # Verify the search call
        self.client_mock.search.assert_called_once()

        # Verify results
        self.assertEqual(len(results), 1)  # Outer list
        self.assertEqual(len(results[0]), 2)  # Inner list
        self.assertIsInstance(results[0][0], OutputData)
        self.assertEqual(results[0][0].id, "id1")
        self.assertEqual(results[0][0].payload, {"key1": "value1"})
        self.assertEqual(results[0][1].id, "id2")
        self.assertEqual(results[0][1].payload, {"key2": "value2"})

    def test_delete(self):
        # Perform delete
        self.es_db.delete(vector_id="id1")

        # Verify the delete call
        self.client_mock.delete.assert_called_once_with(
            index="test_collection",
            id="id1"
        )

    def test_list_cols(self):
        # Mock indices response
        mock_indices = {"index1": {}, "index2": {}}
        self.client_mock.indices.get_alias.return_value = mock_indices

        # Get collections
        result = self.es_db.list_cols()

        # Verify the result
        self.assertEqual(result, ["index1", "index2"])

    def test_delete_col(self):
        # Delete the collection
        self.es_db.delete_col()

        # Verify the delete call
        self.client_mock.indices.delete.assert_called_once_with(
            index="test_collection"
        )
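
The suite does not cover `update()`; a sketch of an additional case that would slot into `TestElasticsearchDB` under the same mocks (hypothetical, not part of the diff):

```python
    def test_update(self):
        # Update both the vector and the payload of an existing document.
        self.es_db.update(vector_id="id1", vector=[0.3] * 1536, payload={"key": "new"})

        # The implementation issues a partial-document update.
        self.client_mock.update.assert_called_once_with(
            index="test_collection",
            id="id1",
            body={"doc": {"vector": [0.3] * 1536, "payload": {"key": "new"}}},
        )
```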