Update llms card to properly use local ollama (#1395)
This commit is contained in:
@@ -330,6 +330,7 @@ Setup Ollama using https://github.com/jmorganca/ollama
```python main.py
import os

os.environ["OLLAMA_HOST"] = "http://127.0.0.1:11434"

from embedchain import App

# load llm configuration from config.yaml file
```
@@ -345,6 +346,12 @@ llm:
    top_p: 1
    stream: true
    base_url: 'http://localhost:11434'

embedder:
  provider: ollama
  config:
    model: znbang/bge:small-en-v1.5-q8_0
    base_url: http://localhost:11434
```

</CodeGroup>
Reference in New Issue
Block a user