[Improvement] Add support for gpt4all through langchain (#838)
@@ -1,7 +1,5 @@
 from typing import Optional
 
-from chromadb.utils import embedding_functions
-
 from embedchain.config import BaseEmbedderConfig
 from embedchain.embedder.base import BaseEmbedder
 from embedchain.models import VectorDimensions
@@ -9,12 +7,13 @@ from embedchain.models import VectorDimensions
 
 class GPT4AllEmbedder(BaseEmbedder):
     def __init__(self, config: Optional[BaseEmbedderConfig] = None):
-        # Note: We could use langchains GPT4ALL embedding, but it's not available in all versions.
         super().__init__(config=config)
-        if self.config.model is None:
-            self.config.model = "all-MiniLM-L6-v2"
 
-        embedding_fn = embedding_functions.SentenceTransformerEmbeddingFunction(model_name=self.config.model)
+        from langchain.embeddings import \
+            GPT4AllEmbeddings as LangchainGPT4AllEmbeddings
+
+        embeddings = LangchainGPT4AllEmbeddings()
+        embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
         self.set_embedding_fn(embedding_fn=embedding_fn)
 
         vector_dimension = VectorDimensions.GPT4ALL.value
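For context, the adapter the new code relies on, BaseEmbedder._langchain_default_concept, is defined outside this diff (in embedchain/embedder/base.py, per the import above). A minimal sketch of what it is assumed to do, namely wrap a langchain Embeddings object's embed_documents batch call into a plain callable the vector store can invoke; details may differ from the real helper:

# Hypothetical sketch of BaseEmbedder._langchain_default_concept (not part of this commit).
from typing import Any, Callable, List


def _langchain_default_concept(embeddings: Any) -> Callable[[List[str]], List[List[float]]]:
    def embed_function(texts: List[str]) -> List[List[float]]:
        # Delegate to langchain; embed_documents returns one vector per input text.
        return embeddings.embed_documents(texts)

    return embed_function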
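A rough usage sketch of the updated embedder, assuming the gpt4all and langchain packages are installed, the module path matches the imports shown above, and set_embedding_fn stores the callable on an embedding_fn attribute (attribute name assumed, not shown in this diff):

# Hypothetical usage sketch, not part of the commit. Module path and attribute
# names are assumptions based on the imports shown above.
from embedchain.embedder.gpt4all import GPT4AllEmbedder

embedder = GPT4AllEmbedder()                       # constructs langchain's GPT4AllEmbeddings internally
vectors = embedder.embedding_fn(["hello world"])   # assumed attribute set by set_embedding_fn()
print(len(vectors[0]))                             # dimensionality should match VectorDimensions.GPT4ALL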