Added Support for Ollama for local model inference. (#1045)
Co-authored-by: Deshraj Yadav <deshraj@gatech.edu>
This commit is contained in:
12
configs/ollama.yaml
Normal file
12
configs/ollama.yaml
Normal file
@@ -0,0 +1,12 @@
llm:
  provider: ollama
  config:
    model: 'llama2'
    temperature: 0.5
    top_p: 1
    stream: true

embedder:
  provider: huggingface
  config:
    model: 'BAAI/bge-small-en-v1.5'
Reference in New Issue
Block a user