[Feature] Add support for vllm as llm source (#1149)
This commit is contained in:
14
configs/vllm.yaml
Normal file
14
configs/vllm.yaml
Normal file
@@ -0,0 +1,14 @@
---
# Example configuration: vLLM as the LLM provider, with a local
# Hugging Face model as the embedder.

llm:
  provider: vllm
  config:
    model: 'meta-llama/Llama-2-70b-hf'
    # Sampling parameters passed through to the vLLM engine.
    temperature: 0.5
    top_p: 1
    top_k: 10
    # Stream tokens back as they are generated.
    stream: true
    # NOTE(review): trust_remote_code lets the model repository execute
    # custom code at load time — only enable for model sources you trust.
    trust_remote_code: true

embedder:
  provider: huggingface
  config:
    model: 'BAAI/bge-small-en-v1.5'
Reference in New Issue
Block a user