Allow Ollama LLM to take a custom callback for handling streaming (#1376)

Author: Aditya Veer Parmar
Date: 2024-06-17 21:14:52 +05:30
Committed by: GitHub
Parent: c558eae9ce
Commit: dc0d8e0932
2 changed files with 23 additions and 2 deletions


@@ -33,14 +33,17 @@ class OllamaLlm(BaseLlm):
     @staticmethod
     def _get_answer(prompt: str, config: BaseLlmConfig) -> Union[str, Iterable]:
-        callback_manager = [StreamingStdOutCallbackHandler()] if config.stream else [StdOutCallbackHandler()]
+        if config.stream:
+            callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
+        else:
+            callbacks = [StdOutCallbackHandler()]
         llm = Ollama(
             model=config.model,
             system=config.system_prompt,
             temperature=config.temperature,
             top_p=config.top_p,
-            callback_manager=CallbackManager(callback_manager),
+            callback_manager=CallbackManager(callbacks),
             base_url=config.base_url,
         )
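
The practical effect of the change: when streaming is enabled, a caller-supplied handler replaces the default StreamingStdOutCallbackHandler, so tokens can go somewhere other than stdout. A minimal sketch of how this could be exercised, assuming the embedchain import paths shown and that BaseLlmConfig now accepts a callbacks argument alongside stream (the companion change in the second file of this commit, not shown here); TokenCollector is a hypothetical handler written for this example:

    from langchain.callbacks.base import BaseCallbackHandler

    from embedchain.config import BaseLlmConfig  # assumed import path
    from embedchain.llm.ollama import OllamaLlm  # assumed import path


    class TokenCollector(BaseCallbackHandler):
        """Hypothetical handler: collects streamed tokens in memory instead of printing them."""

        def __init__(self):
            self.tokens = []

        def on_llm_new_token(self, token: str, **kwargs) -> None:
            # LangChain invokes this once per token while the model streams.
            self.tokens.append(token)


    collector = TokenCollector()
    # With stream=True and a non-empty callbacks list, the custom handler is used;
    # with callbacks unset, the old StreamingStdOutCallbackHandler default still applies.
    config = BaseLlmConfig(stream=True, callbacks=[collector])
    answer = OllamaLlm._get_answer("Why is the sky blue?", config)
    print("".join(collector.tokens))

Note the design choice in the diff: falling back to StreamingStdOutCallbackHandler when config.callbacks is empty preserves the previous default behavior, and the non-streaming path is unchanged.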