feat: Add config based template to query function (#177)

Anupam Singh
2023-07-07 17:56:43 +05:30
committed by GitHub
parent 90d5b1a800
commit 6a61fd38c3
3 changed files with 38 additions and 10 deletions

README.md

@@ -317,7 +317,9 @@ This section describes all possible config options.
 #### **Query Config**

-*coming soon*
+|option|description|type|default|
+|---|---|---|---|
+|template|custom template for prompt|Template|Template("Use the following pieces of context to answer the query at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer. $context Query: $query Helpful Answer:")|

 #### **Chat Config**
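As a usage sketch of the new option (the `App` entry point, the `add` call, and the import paths below are assumptions, not part of this diff):

```python
from string import Template

from embedchain import App  # assumed entry point; not shown in this diff
from embedchain.config.QueryConfig import QueryConfig

# A custom prompt template: it must reference both $context and $query,
# otherwise QueryConfig rejects it at construction time.
custom_template = Template(
    "Answer only from the context below.\n"
    "$context\n"
    "Query: $query\n"
    "Helpful Answer:"
)

app = App()
app.add("web_page", "https://example.com/docs")  # hypothetical data source
answer = app.query("What does the page say?", QueryConfig(template=custom_template))
```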

embedchain/config/QueryConfig.py

@@ -1,8 +1,36 @@
 from embedchain.config.BaseConfig import BaseConfig
+from string import Template
+import re
+
+DEFAULT_PROMPT_TEMPLATE = Template("""
+  Use the following pieces of context to answer the query at the end.
+  If you don't know the answer, just say that you don't know, don't try to make up an answer.
+
+  $context
+
+  Query: $query
+
+  Helpful Answer:
+""")
+
+query_re = re.compile(r"\$\{*query\}*")
+context_re = re.compile(r"\$\{*context\}*")
+
+
 class QueryConfig(BaseConfig):
     """
     Config for the `query` method.
     """
-    def __init__(self):
-        pass
+
+    def __init__(self, template: Template = None):
+        """
+        Initializes the QueryConfig instance.
+
+        :param template: Optional. The `Template` instance to use as the prompt template.
+        :raises ValueError: If the template is invalid; it must contain both $context and $query.
+        """
+        if template is None:
+            template = DEFAULT_PROMPT_TEMPLATE
+        if not (re.search(query_re, template.template)
+                and re.search(context_re, template.template)):
+            raise ValueError("`template` should have `query` and `context` keys")
+        self.template = template
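A note on the validation above: the compiled patterns `\$\{*query\}*` and `\$\{*context\}*` accept both the bare (`$query`) and braced (`${query}`) placeholder spellings that `string.Template` supports. A minimal sketch of the resulting behavior (the import path is assumed from the `BaseConfig` import style above):

```python
from string import Template

from embedchain.config.QueryConfig import QueryConfig  # path assumed from the import above

# Both placeholder spellings satisfy the check:
QueryConfig(Template("Context: $context\nQuery: ${query}\nHelpful Answer:"))

# A template missing either key is rejected up front:
try:
    QueryConfig(Template("Query: $query\nHelpful Answer:"))
except ValueError as err:
    print(err)  # `template` should have `query` and `context` keys
```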

embedchain/embedchain.py

@@ -1,5 +1,6 @@
 import openai
 import os
+from string import Template
 from chromadb.utils import embedding_functions
 from dotenv import load_dotenv
@@ -192,19 +193,16 @@ class EmbedChain:
             content = ""
         return content

-    def generate_prompt(self, input_query, context):
+    def generate_prompt(self, input_query, context, template: Template = None):
         """
         Generates a prompt based on the given query and context, ready to be passed to an LLM.

         :param input_query: The query to use.
         :param context: Similar documents to the query used as context.
+        :param template: Optional. The `Template` instance to use as the prompt template.
         :return: The prompt
         """
-        prompt = f"""Use the following pieces of context to answer the query at the end. If you don't know the answer, just say that you don't know, don't try to make up an answer.
-        {context}
-        Query: {input_query}
-        Helpful Answer:
-        """
+        prompt = template.substitute(context=context, query=input_query)
         return prompt

     def get_answer_from_llm(self, prompt):
@@ -232,7 +230,7 @@ class EmbedChain:
         if config is None:
             config = QueryConfig()
         context = self.retrieve_from_database(input_query)
-        prompt = self.generate_prompt(input_query, context)
+        prompt = self.generate_prompt(input_query, context, config.template)
         answer = self.get_answer_from_llm(prompt)
         return answer
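One reason the up-front check in `QueryConfig` matters: `Template.substitute` ignores unused keyword arguments, so a template that merely omits `$query` would substitute cleanly and silently drop the user's question from the prompt. (Also worth noting: `generate_prompt` declares `template: Template = None`, but `query` always passes `config.template`, so the `None` default is never exercised in this diff.) A quick standard-library illustration:

```python
from string import Template

good = Template("Use: $context\nQuery: $query\nHelpful Answer:")
print(good.substitute(context="retrieved documents", query="What is Embedchain?"))

# Without validation, a template lacking $query substitutes without error
# and the user's question is silently dropped from the prompt:
bad = Template("Use: $context\nHelpful Answer:")
print(bad.substitute(context="retrieved documents", query="What is Embedchain?"))
```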