fix: elastic search (#600)
@@ -1,3 +1,4 @@
+import os
 from typing import Dict, List, Optional, Union

 from embedchain.config.vectordbs.BaseVectorDbConfig import BaseVectorDbConfig

@@ -26,7 +27,20 @@ class ElasticsearchDBConfig(BaseVectorDbConfig):
         :type ES_EXTRA_PARAMS: Dict[str, Any], optional
         """
-        # self, es_url: Union[str, List[str]] = None, **ES_EXTRA_PARAMS: Dict[str, any]):
-        self.ES_URL = es_url
+        self.ES_URL = es_url or os.environ.get("ELASTICSEARCH_URL")
+        if not self.ES_URL:
+            raise AttributeError(
+                "Elasticsearch needs a URL attribute, "
+                "this can either be passed to `ElasticsearchDBConfig` or as `ELASTICSEARCH_URL` in `.env`"
+            )
         self.ES_EXTRA_PARAMS = ES_EXTRA_PARAMS

+        # Load API key from .env if it's not explicitly passed.
+        # Can only set one of 'api_key', 'basic_auth', and 'bearer_auth'
+        if (
+            not self.ES_EXTRA_PARAMS.get("api_key")
+            and not self.ES_EXTRA_PARAMS.get("basic_auth")
+            and not self.ES_EXTRA_PARAMS.get("bearer_auth")
+            and not self.ES_EXTRA_PARAMS.get("http_auth")
+        ):
+            self.ES_EXTRA_PARAMS["api_key"] = os.environ.get("ELASTICSEARCH_API_KEY")
         super().__init__(collection_name=collection_name, dir=dir)

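With this change the Elasticsearch connection no longer has to be configured in code: `ES_URL` falls back to the `ELASTICSEARCH_URL` environment variable, and an API key is read from `ELASTICSEARCH_API_KEY` whenever no other auth parameter is supplied. A minimal sketch of both styles, assuming the module path mirrors the `BaseVectorDbConfig` import above (URL and key values are placeholders):

    import os

    from embedchain.config.vectordbs.ElasticsearchDBConfig import ElasticsearchDBConfig

    # Explicit configuration: extra keyword arguments are forwarded to the
    # Elasticsearch client via **ES_EXTRA_PARAMS.
    explicit = ElasticsearchDBConfig(
        es_url="https://localhost:9200",
        api_key="my-elasticsearch-api-key",
    )

    # Environment-based configuration (e.g. from a .env file); raises
    # AttributeError if no URL can be found anywhere.
    os.environ["ELASTICSEARCH_URL"] = "https://localhost:9200"
    os.environ["ELASTICSEARCH_API_KEY"] = "my-elasticsearch-api-key"
    implicit = ElasticsearchDBConfig()
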
@@ -51,6 +51,8 @@ class BaseEmbedder:
         :param vector_dimension: vector dimension size
         :type vector_dimension: int
         """
+        if not isinstance(vector_dimension, int):
+            raise TypeError("vector dimension must be int")
         self.vector_dimension = vector_dimension

     @staticmethod

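The new isinstance check makes a bad `vector_dimension` fail fast with a `TypeError` instead of silently propagating into the vector database. A small illustration, assuming `BaseEmbedder` can be constructed without arguments:

    from embedchain.embedder.base import BaseEmbedder

    embedder = BaseEmbedder()                    # assumption: no required constructor args
    embedder.set_vector_dimension(1536)          # OK, stored on the instance

    try:
        embedder.set_vector_dimension("1536")    # wrong type
    except TypeError as err:
        print(err)                               # "vector dimension must be int"
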
@@ -4,7 +4,7 @@ from chromadb.utils import embedding_functions

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions


 class GPT4AllEmbedder(BaseEmbedder):
@@ -17,5 +17,5 @@ class GPT4AllEmbedder(BaseEmbedder):
         embedding_fn = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=self.config.model)
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        vector_dimension = EmbeddingFunctions.GPT4ALL.value
+        vector_dimension = VectorDimensions.GPT4ALL.value
         self.set_vector_dimension(vector_dimension=vector_dimension)

@@ -4,7 +4,7 @@ from langchain.embeddings import HuggingFaceEmbeddings

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions


 class HuggingFaceEmbedder(BaseEmbedder):
@@ -15,5 +15,5 @@ class HuggingFaceEmbedder(BaseEmbedder):
         embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        vector_dimension = EmbeddingFunctions.HUGGING_FACE.value
+        vector_dimension = VectorDimensions.HUGGING_FACE.value
         self.set_vector_dimension(vector_dimension=vector_dimension)

@@ -5,7 +5,7 @@ from langchain.embeddings import OpenAIEmbeddings

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions

 try:
     from chromadb.utils import embedding_functions
@@ -37,4 +37,4 @@ class OpenAiEmbedder(BaseEmbedder):
         )

         self.set_embedding_fn(embedding_fn=embedding_fn)
-        self.set_vector_dimension(vector_dimension=EmbeddingFunctions.OPENAI.value)
+        self.set_vector_dimension(vector_dimension=VectorDimensions.OPENAI.value)

@@ -4,7 +4,7 @@ from langchain.embeddings import VertexAIEmbeddings

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions


 class VertexAiEmbedder(BaseEmbedder):
@@ -15,5 +15,5 @@ class VertexAiEmbedder(BaseEmbedder):
         embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        vector_dimension = EmbeddingFunctions.VERTEX_AI.value
+        vector_dimension = VectorDimensions.VERTEX_AI.value
         self.set_vector_dimension(vector_dimension=vector_dimension)

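All four embedders (GPT4All, Hugging Face, OpenAI, Vertex AI) now take their dimension from the `VectorDimensions` enum instead of `EmbeddingFunctions`, keeping "which embedding function" separate from "how many dimensions it produces". The enum presumably looks roughly like the sketch below; the numeric values are illustrative, not taken from this diff:

    from enum import Enum

    class VectorDimensions(Enum):
        # Illustrative sizes for the default models; the real values live in embedchain.models.
        GPT4ALL = 384
        HUGGING_FACE = 384
        OPENAI = 1536
        VERTEX_AI = 768
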
@@ -87,7 +87,7 @@ class ChromaDB(BaseVectorDB):
         )
         return self.collection

-    def get(self, ids=None, where=None, limit=None):
+    def get(self, ids: Optional[List[str]] = None, where: Optional[Dict[str, any]] = None, limit: Optional[int] = None):
         """
         Get existing doc ids present in vector database

@@ -95,6 +95,8 @@ class ChromaDB(BaseVectorDB):
         :type ids: List[str]
         :param where: Optional. to filter data
         :type where: Dict[str, Any]
+        :param limit: Optional. maximum number of documents
+        :type limit: Optional[int]
         :return: Existing documents.
         :rtype: List[str]
         """
@@ -180,6 +182,8 @@ class ChromaDB(BaseVectorDB):
         :param name: Name of the collection.
         :type name: str
         """
+        if not isinstance(name, str):
+            raise TypeError("Collection name must be a string")
         self.config.collection_name = name
         self._get_or_create_collection(self.config.collection_name)

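`ChromaDB.get()` now documents the optional `limit` parameter alongside `ids` and `where`, and `set_collection_name()` rejects non-string names up front. A hypothetical call against an already-initialized `ChromaDB` instance (ids and filter values are placeholders):

    # chroma_db: an already-initialized ChromaDB instance
    existing = chroma_db.get(
        ids=["doc-1", "doc-2"],        # restrict the lookup to known document ids
        where={"app_id": "my-app"},    # metadata filter
        limit=100,                     # cap the number of returned documents
    )
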
@@ -1,3 +1,4 @@
+import logging
 from typing import Dict, List, Optional, Set

 try:
@@ -34,9 +35,15 @@ class ElasticsearchDB(BaseVectorDB):
         :raises ValueError: No config provided
         """
         if config is None and es_config is None:
-            raise ValueError("ElasticsearchDBConfig is required")
-        self.config = config or es_config
-        self.client = Elasticsearch(es_config.ES_URL, **es_config.ES_EXTRA_PARAMS)
+            self.config = ElasticsearchDBConfig()
+        else:
+            if not isinstance(config, ElasticsearchDBConfig):
+                raise TypeError(
+                    "config is not a `ElasticsearchDBConfig` instance. "
+                    "Please make sure the type is right and that you are passing an instance."
+                )
+            self.config = config or es_config
+        self.client = Elasticsearch(self.config.ES_URL, **self.config.ES_EXTRA_PARAMS)

         # Call parent init here because embedder is needed
         super().__init__(config=self.config)

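`ElasticsearchDB` now builds a default `ElasticsearchDBConfig()` when none is given (which in turn pulls `ELASTICSEARCH_URL` / `ELASTICSEARCH_API_KEY` from the environment) and rejects configs of the wrong type. Roughly, with the `ElasticsearchDB` import path assumed rather than shown in this diff:

    from embedchain.config.vectordbs.ElasticsearchDBConfig import ElasticsearchDBConfig
    from embedchain.vectordb.elasticsearch_db import ElasticsearchDB  # path assumed, not shown here

    # Explicit config
    db = ElasticsearchDB(config=ElasticsearchDBConfig(es_url="https://localhost:9200"))

    # No config: falls back to ELASTICSEARCH_URL / ELASTICSEARCH_API_KEY from the environment
    db_from_env = ElasticsearchDB()

    # Wrong type is rejected early
    try:
        ElasticsearchDB(config={"ES_URL": "https://localhost:9200"})
    except TypeError as err:
        print(err)
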
@@ -45,6 +52,7 @@ class ElasticsearchDB(BaseVectorDB):
         """
         This method is needed because `embedder` attribute needs to be set externally before it can be initialized.
         """
+        logging.info(self.client.info())
         index_settings = {
             "mappings": {
                 "properties": {
@@ -66,7 +74,9 @@ class ElasticsearchDB(BaseVectorDB):
     def _get_or_create_collection(self, name):
         """Note: nothing to return here. Discuss later"""

-    def get(self, ids: List[str], where: Dict[str, any]) -> Set[str]:
+    def get(
+        self, ids: Optional[List[str]] = None, where: Optional[Dict[str, any]] = None, limit: Optional[int] = None
+    ) -> Set[str]:
         """
         Get existing doc ids present in vector database

@@ -77,14 +87,18 @@ class ElasticsearchDB(BaseVectorDB):
         :return: ids
         :rtype: Set[str]
         """
-        query = {"bool": {"must": [{"ids": {"values": ids}}]}}
+        if ids:
+            query = {"bool": {"must": [{"ids": {"values": ids}}]}}
+        else:
+            query = {"bool": {"must": []}}
         if "app_id" in where:
             app_id = where["app_id"]
             query["bool"]["must"].append({"term": {"metadata.app_id": app_id}})
-        response = self.client.search(index=self.es_index, query=query, _source=False)
+
+        response = self.client.search(index=self._get_index(), query=query, _source=False, size=limit)
         docs = response["hits"]["hits"]
         ids = [doc["_id"] for doc in docs]
-        return set(ids)
+        return {"ids": set(ids)}

     def add(self, documents: List[str], metadatas: List[object], ids: List[str]):
         """add data in vector database

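`get()` now tolerates a missing `ids` argument, passes `limit` through as the search `size`, resolves the index via `_get_index()`, and returns a dict rather than a bare set. For a call like `get(ids=["a", "b"], where={"app_id": "my-app"}, limit=10)` the query it assembles looks like this (values are placeholders):

    query = {
        "bool": {
            "must": [
                {"ids": {"values": ["a", "b"]}},          # only present when ids are given
                {"term": {"metadata.app_id": "my-app"}},  # only present when where contains app_id
            ]
        }
    }
    # sent as: client.search(index=self._get_index(), query=query, _source=False, size=10)
    # the method now returns {"ids": {...matching _id values...}}
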
@@ -150,6 +164,8 @@ class ElasticsearchDB(BaseVectorDB):
         :param name: Name of the collection.
         :type name: str
         """
+        if not isinstance(name, str):
+            raise TypeError("Collection name must be a string")
         self.config.collection_name = name

     def count(self) -> int:

@@ -181,4 +197,4 @@ class ElasticsearchDB(BaseVectorDB):
         """
         # NOTE: The method is preferred to an attribute, because if collection name changes,
         # it's always up-to-date.
-        return f"{self.config.collection_name}_{self.embedder.vector_dimension}"
+        return f"{self.config.collection_name}_{self.embedder.vector_dimension}".lower()

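Lower-casing the derived index name matters because Elasticsearch requires index names to be lowercase; a capitalized collection name would otherwise yield an invalid index. For example:

    collection_name = "MyApp"
    vector_dimension = 1536
    index = f"{collection_name}_{vector_dimension}".lower()   # "myapp_1536" is a valid ES index name
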