feat: system prompt (#448)
@@ -2,7 +2,7 @@ import os
 
 from langchain.llms import Replicate
 
-from embedchain.config import AppConfig
+from embedchain.config import AppConfig, ChatConfig
 from embedchain.embedchain import EmbedChain
 
 
@@ -27,8 +27,10 @@ class Llama2App(EmbedChain):
         super().__init__(config)
 
-    def get_llm_model_answer(self, prompt, config: AppConfig = None):
+    def get_llm_model_answer(self, prompt, config: ChatConfig = None):
         # TODO: Move the model and other inputs into config
+        if config.system_prompt:
+            raise ValueError("Llama2App does not support `system_prompt`")
         llm = Replicate(
             model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
             input={"temperature": 0.75, "max_length": 500, "top_p": 1},
         )
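For illustration, here is a minimal standalone sketch of the behavior this diff introduces: `get_llm_model_answer` now takes a `ChatConfig`, and `Llama2App` rejects a configured `system_prompt` up front because the Replicate-backed Llama 2 call here has no system-prompt plumbing. The stub classes below are simplified stand-ins for the real embedchain classes, the placeholder return value replaces the actual `Replicate` call, and the assumption that callers always pass a non-None config mirrors how the surrounding app code fills in defaults.

# Simplified stand-ins for embedchain's ChatConfig and Llama2App,
# re-implemented only to demonstrate the guard added in this commit.

class ChatConfig:
    def __init__(self, system_prompt: str = None):
        # `system_prompt` is the field this commit threads through the config.
        self.system_prompt = system_prompt


class Llama2App:
    def get_llm_model_answer(self, prompt: str, config: ChatConfig = None):
        # Mirrors the guard in the diff: a configured system_prompt is
        # rejected early since this backend cannot honor it.
        if config.system_prompt:
            raise ValueError("Llama2App does not support `system_prompt`")
        return f"(model answer for {prompt!r})"  # stand-in for the Replicate call


app = Llama2App()
print(app.get_llm_model_answer("Hello", ChatConfig()))
try:
    app.get_llm_model_answer("Hello", ChatConfig(system_prompt="You are terse."))
except ValueError as err:
    print(err)  # -> Llama2App does not support `system_prompt`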