Allow ollama llm to take custom callback for handling streaming (#1376)
committed by GitHub
parent c558eae9ce
commit dc0d8e0932
@@ -33,14 +33,17 @@ class OllamaLlm(BaseLlm):
     @staticmethod
     def _get_answer(prompt: str, config: BaseLlmConfig) -> Union[str, Iterable]:
-        callback_manager = [StreamingStdOutCallbackHandler()] if config.stream else [StdOutCallbackHandler()]
+        if config.stream:
+            callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
+        else:
+            callbacks = [StdOutCallbackHandler()]
 
         llm = Ollama(
             model=config.model,
             system=config.system_prompt,
             temperature=config.temperature,
             top_p=config.top_p,
-            callback_manager=CallbackManager(callback_manager),
+            callback_manager=CallbackManager(callbacks),
             base_url=config.base_url,
         )
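After this change, a caller can route streamed output through a custom LangChain callback handler by setting config.callbacks; when none is supplied, the previous StreamingStdOutCallbackHandler default still applies. Below is a minimal usage sketch, assuming BaseLlmConfig accepts stream and callbacks keyword arguments; the handler class, model name, and import path are illustrative, not part of this commit.

# Usage sketch, not part of the commit: assumes BaseLlmConfig accepts
# `stream` and `callbacks` keyword arguments as read by _get_answer above.
from langchain.callbacks.base import BaseCallbackHandler

from embedchain.config import BaseLlmConfig  # assumed import path


class TokenCollector(BaseCallbackHandler):
    """Hypothetical handler that collects streamed tokens instead of printing them."""

    def __init__(self):
        self.tokens = []

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        # LangChain invokes this once per token while the Ollama response streams.
        self.tokens.append(token)


# With stream=True and callbacks set, _get_answer uses the custom handler;
# with callbacks unset, it falls back to StreamingStdOutCallbackHandler.
config = BaseLlmConfig(model="llama2", stream=True, callbacks=[TokenCollector()])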