Fix ollama embeddings for remote machine (#1394)

This commit is contained in:
Dev Khant
2024-06-08 22:38:15 +05:30
committed by GitHub
parent 00c1fa1ec7
commit 4070fc1bf0
2 changed files with 6 additions and 5 deletions

View File

@@ -12,4 +12,4 @@ class OllamaEmbedderConfig(BaseEmbedderConfig):
         base_url: Optional[str] = None,
     ):
         super().__init__(model)
-        self.base_url = base_url or "http://127.0.0.1:11434"
+        self.base_url = base_url or "http://localhost:11434"

View File

@@ -2,7 +2,7 @@ import logging
 from typing import Optional

 try:
-    import ollama
+    from ollama import Client
 except ImportError:
     raise ImportError("Ollama Embedder requires extra dependencies. Install with `pip install ollama`") from None
@@ -19,11 +19,12 @@ class OllamaEmbedder(BaseEmbedder):
     def __init__(self, config: Optional[OllamaEmbedderConfig] = None):
         super().__init__(config=config)

-        local_models = ollama.list()["models"]
+        client = Client(host=config.base_url)
+        local_models = client.list()["models"]
         if not any(model.get("name") == self.config.model for model in local_models):
             logger.info(f"Pulling {self.config.model} from Ollama!")
-            ollama.pull(self.config.model)
-        embeddings = OllamaEmbeddings(model=self.config.model, base_url=self.config.base_url)
+            client.pull(self.config.model)
+        embeddings = OllamaEmbeddings(model=self.config.model, base_url=config.base_url)
         embedding_fn = BaseEmbedder._langchain_default_concept(embeddings)
         self.set_embedding_fn(embedding_fn=embedding_fn)