feat: add logging (#206)

Author: cachho
Date: 2023-07-10 19:58:58 +02:00
Committed by: GitHub
Parent: c597b1939d
Commit: b3cf834186
3 changed files with 28 additions and 3 deletions

@@ -1,5 +1,6 @@
 import openai
 import os
+import logging
 from string import Template
 from chromadb.utils import embedding_functions
@@ -181,7 +182,9 @@ class EmbedChain:
         config = QueryConfig()
         context = self.retrieve_from_database(input_query)
         prompt = self.generate_prompt(input_query, context, config.template)
+        logging.info(f"Prompt: {prompt}")
         answer = self.get_answer_from_llm(prompt, config)
+        logging.info(f"Answer: {answer}")
         return answer

     def generate_chat_prompt(self, input_query, context, chat_history=''):
@@ -224,13 +227,16 @@ class EmbedChain:
             context,
             chat_history=chat_history,
         )
+        logging.info(f"Prompt: {prompt}")
         answer = self.get_answer_from_llm(prompt, config)
         memory.chat_memory.add_user_message(input_query)
         if isinstance(answer, str):
             memory.chat_memory.add_ai_message(answer)
+            logging.info(f"Answer: {answer}")
             return answer
         else:
-            #this is a streamed response and needs to be handled differently
+            #this is a streamed response and needs to be handled differently.
             return self._stream_chat_response(answer)

     def _stream_chat_response(self, answer):
@@ -239,6 +245,7 @@ class EmbedChain:
             streamed_answer.join(chunk)
             yield chunk
         memory.chat_memory.add_ai_message(streamed_answer)
+        logging.info(f"Answer: {streamed_answer}")

     def dry_run(self, input_query, config: QueryConfig = None):
@@ -258,6 +265,7 @@ class EmbedChain:
         config = QueryConfig()
         context = self.retrieve_from_database(input_query)
         prompt = self.generate_prompt(input_query, context, config.template)
+        logging.info(f"Prompt: {prompt}")
         return prompt

     def count(self):
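
Note on using the new logs: the added logging.info() calls go through Python's standard logging module, whose root logger defaults to the WARNING level, so the Prompt/Answer messages stay silent unless the host application opts in. A minimal sketch (standard library only; the format string is just an example, not part of this commit) of enabling INFO-level output before running queries:

    import logging

    # Route INFO-level records (including the new Prompt/Answer lines) to stderr.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s [%(levelname)s] %(message)s",
    )

With this in place, each query logs the rendered prompt and the model's answer; applications with their own logging setup would attach handlers instead of calling basicConfig.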