Rename embedchain to mem0 and open-source code for long-term memory (#1474)

Co-authored-by: Deshraj Yadav <deshrajdry@gmail.com>
This commit is contained in:
Taranjeet Singh
2024-07-12 07:51:33 -07:00
committed by GitHub
parent 83e8c97295
commit f842a92e25
665 changed files with 9427 additions and 6592 deletions

View File

@@ -0,0 +1,49 @@
import pytest
from chromadb.api.types import Documents, Embeddings
from embedchain.config.embedder.base import BaseEmbedderConfig
from embedchain.embedder.base import BaseEmbedder
@pytest.fixture
def base_embedder():
    """Provide a fresh, default-configured BaseEmbedder for each test."""
    embedder = BaseEmbedder()
    return embedder
def test_initialization(base_embedder):
    """A new BaseEmbedder carries a config but no embedding state yet."""
    assert isinstance(base_embedder.config, BaseEmbedderConfig)
    # Neither attribute exists until its setter is explicitly called.
    for attr in ("embedding_fn", "vector_dimension"):
        assert not hasattr(base_embedder, attr)
def test_set_embedding_fn(base_embedder):
    """set_embedding_fn stores a callable that is invoked verbatim."""

    def fake_embedding_fn(texts: Documents) -> Embeddings:
        return [f"Embedding for {text}" for text in texts]

    base_embedder.set_embedding_fn(fake_embedding_fn)
    # The function is stored on the instance and remains callable.
    assert callable(getattr(base_embedder, "embedding_fn", None))
    result = base_embedder.embedding_fn(["text1", "text2"])
    assert result == ["Embedding for text1", "Embedding for text2"]
def test_set_embedding_fn_when_not_a_function(base_embedder):
    """Passing a non-callable to set_embedding_fn raises ValueError."""
    with pytest.raises(ValueError):
        base_embedder.set_embedding_fn(None)
def test_set_vector_dimension(base_embedder):
    """set_vector_dimension records the given dimension on the instance."""
    dimension = 256
    base_embedder.set_vector_dimension(dimension)
    assert getattr(base_embedder, "vector_dimension", None) == dimension
def test_set_vector_dimension_type_error(base_embedder):
    """A non-integer dimension is rejected with TypeError."""
    with pytest.raises(TypeError):
        base_embedder.set_vector_dimension(None)
def test_embedder_with_config():
    """An explicitly supplied config is kept as-is on the embedder."""
    config = BaseEmbedderConfig()
    embedder = BaseEmbedder(config)
    assert isinstance(embedder.config, BaseEmbedderConfig)

View File

@@ -0,0 +1,18 @@
from unittest.mock import patch
from embedchain.config import BaseEmbedderConfig
from embedchain.embedder.huggingface import HuggingFaceEmbedder
def test_huggingface_embedder_with_model():
    """HuggingFaceEmbedder forwards model name and kwargs from its config
    to the underlying HuggingFaceEmbeddings constructor.

    Fix: dropped the unused ``monkeypatch`` fixture parameter (the body
    never referenced it) and aligned quote style with the rest of the
    test suite.
    """
    config = BaseEmbedderConfig(model="test-model", model_kwargs={"param": "value"})
    # Patch the embeddings class so no real model is downloaded.
    with patch("embedchain.embedder.huggingface.HuggingFaceEmbeddings") as mock_embeddings:
        embedder = HuggingFaceEmbedder(config=config)
        # The config values are stored unchanged ...
        assert embedder.config.model == "test-model"
        assert embedder.config.model_kwargs == {"param": "value"}
        # ... and forwarded exactly once to the embeddings backend.
        mock_embeddings.assert_called_once_with(
            model_name="test-model",
            model_kwargs={"param": "value"},
        )