fix: elastic search (#600)

Authored by cachho on 2023-09-13 19:58:18 +02:00, committed by GitHub
parent 79efa51941
commit 119ec5e405
11 changed files with 135 additions and 55 deletions

View File

@@ -51,6 +51,8 @@ class BaseEmbedder:
         :param vector_dimension: vector dimension size
         :type vector_dimension: int
         """
+        if not isinstance(vector_dimension, int):
+            raise TypeError("vector dimension must be int")
         self.vector_dimension = vector_dimension

     @staticmethod
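A minimal sketch of what the new guard changes (not part of the diff, and it assumes BaseEmbedder can be constructed without arguments): a non-integer dimension now fails fast with a TypeError instead of being passed on to the vector database.

from embedchain.embedder.base import BaseEmbedder

embedder = BaseEmbedder()  # assumption: the config argument is optional
embedder.set_vector_dimension(vector_dimension=1536)    # ok, int is accepted
embedder.set_vector_dimension(vector_dimension="1536")  # now raises TypeError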

View File

@@ -4,7 +4,7 @@ from chromadb.utils import embedding_functions

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions


 class GPT4AllEmbedder(BaseEmbedder):
@@ -17,5 +17,5 @@ class GPT4AllEmbedder(BaseEmbedder):
         embedding_fn = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=self.config.model)
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        vector_dimension = EmbeddingFunctions.GPT4ALL.value
+        vector_dimension = VectorDimensions.GPT4ALL.value
         self.set_vector_dimension(vector_dimension=vector_dimension)
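The renamed import refers to a VectorDimensions enum in embedchain.models. A hedged reconstruction is sketched below: the member names are taken from the usages in this diff, but the numeric values are assumptions based on common embedding model output sizes, not on the commit itself.

from enum import Enum

# Hypothetical sketch of embedchain.models.VectorDimensions; values are assumed.
class VectorDimensions(Enum):
    GPT4ALL = 384       # default SentenceTransformer models emit 384-d vectors
    HUGGING_FACE = 384
    VERTEX_AI = 768
    OPENAI = 1536       # text-embedding-ada-002 emits 1536-d vectors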

View File

@@ -4,7 +4,7 @@ from langchain.embeddings import HuggingFaceEmbeddings

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions


 class HuggingFaceEmbedder(BaseEmbedder):
@@ -15,5 +15,5 @@ class HuggingFaceEmbedder(BaseEmbedder):
         embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        vector_dimension = EmbeddingFunctions.HUGGING_FACE.value
+        vector_dimension = VectorDimensions.HUGGING_FACE.value
         self.set_vector_dimension(vector_dimension=vector_dimension)

View File

@@ -5,7 +5,7 @@ from langchain.embeddings import OpenAIEmbeddings

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions

 try:
     from chromadb.utils import embedding_functions
@@ -37,4 +37,4 @@ class OpenAiEmbedder(BaseEmbedder):
             )
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        self.set_vector_dimension(vector_dimension=EmbeddingFunctions.OPENAI.value)
+        self.set_vector_dimension(vector_dimension=VectorDimensions.OPENAI.value)

View File

@@ -4,7 +4,7 @@ from langchain.embeddings import VertexAIEmbeddings

 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
-from embedchain.models import EmbeddingFunctions
+from embedchain.models import VectorDimensions


 class VertexAiEmbedder(BaseEmbedder):
@@ -15,5 +15,5 @@ class VertexAiEmbedder(BaseEmbedder):
         embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
         self.set_embedding_fn(embedding_fn=embedding_fn)

-        vector_dimension = EmbeddingFunctions.VERTEX_AI.value
+        vector_dimension = VectorDimensions.VERTEX_AI.value
         self.set_vector_dimension(vector_dimension=vector_dimension)
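Given the commit title ("fix: elastic search"), the vector_dimension that these embedders now set is presumably what the Elasticsearch vector store uses to size its index; those files are among the 11 changed but are not shown above. The snippet below is only an illustration of how a dense_vector mapping typically consumes such a dimension with the official elasticsearch Python client (8.x), not the commit's actual code.

from elasticsearch import Elasticsearch

vector_dimension = 1536  # e.g. VectorDimensions.OPENAI.value
es = Elasticsearch("http://localhost:9200")  # assumed local cluster

# Illustration: the embedding field is sized by the embedder's dimension.
es.indices.create(
    index="embedchain_store",  # hypothetical index name
    mappings={
        "properties": {
            "embeddings": {
                "type": "dense_vector",
                "dims": vector_dimension,
                "index": True,
                "similarity": "cosine",
            }
        }
    },
)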