[Improvements] Improve the default prompt and data loader util functions (#1272)

Deshraj Yadav authored on 2024-02-18 14:06:32 -08:00 (committed by GitHub)
parent 9a11683003
commit 6c12bc9044
19 changed files with 79 additions and 62 deletions
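The loader changes shown below are mechanical: each loader's local meta_data variable is renamed to metadata, while the "meta_data" key in the returned payload is left unchanged, so the output schema seen by callers does not change. A minimal sketch of the resulting loader shape follows; the DemoLoader name and its trivial content handling are illustrative only, not part of this commit.

import hashlib


class DemoLoader:
    """Illustrative loader following the post-rename convention."""

    def load_data(self, content: str) -> dict:
        url = "local"
        # The local variable is now called `metadata`...
        metadata = {"url": url}
        doc_id = hashlib.sha256((content + url).encode()).hexdigest()
        return {
            "doc_id": doc_id,
            "data": [
                {
                    "content": content,
                    # ...but the serialized key stays "meta_data",
                    # so downstream consumers see no change.
                    "meta_data": metadata,
                }
            ],
        }


if __name__ == "__main__":
    # Quick sanity check on the hypothetical loader above.
    result = DemoLoader().load_data("hello world")
    assert "meta_data" in result["data"][0]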


@@ -131,7 +131,7 @@ class DiscordLoader(BaseLoader):
         client = DiscordClient(intents=intents)
         client.run(self.token)
-        meta_data = {
+        metadata = {
             "url": channel_id,
         }
@@ -144,7 +144,7 @@ class DiscordLoader(BaseLoader):
             "data": [
                 {
                     "content": messages,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             ],
         }


@@ -39,7 +39,7 @@ class DiscourseLoader(BaseLoader):
             return
         response_data = response.json()
         post_contents = clean_string(response_data.get("raw"))
-        meta_data = {
+        metadata = {
            "url": post_url,
            "created_at": response_data.get("created_at", ""),
            "username": response_data.get("username", ""),
@@ -48,7 +48,7 @@ class DiscourseLoader(BaseLoader):
         }
         data = {
             "content": post_contents,
-            "meta_data": meta_data,
+            "meta_data": metadata,
         }
         return data


@@ -18,9 +18,9 @@ class DocxFileLoader(BaseLoader):
         output = []
         data = loader.load()
         content = data[0].page_content
-        meta_data = data[0].metadata
-        meta_data["url"] = "local"
-        output.append({"content": content, "meta_data": meta_data})
+        metadata = data[0].metadata
+        metadata["url"] = "local"
+        output.append({"content": content, "meta_data": metadata})
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()
         return {
             "doc_id": doc_id,


@@ -11,14 +11,14 @@ class LocalQnaPairLoader(BaseLoader):
         question, answer = content
         content = f"Q: {question}\nA: {answer}"
         url = "local"
-        meta_data = {"url": url, "question": question}
+        metadata = {"url": url, "question": question}
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()
         return {
             "doc_id": doc_id,
             "data": [
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             ],
         }


@@ -9,7 +9,7 @@ class LocalTextLoader(BaseLoader):
     def load_data(self, content):
         """Load data from a local text file."""
         url = "local"
-        meta_data = {
+        metadata = {
             "url": url,
         }
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()
@@ -18,7 +18,7 @@ class LocalTextLoader(BaseLoader):
             "data": [
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             ],
         }


@@ -10,7 +10,7 @@ class MdxLoader(BaseLoader):
         """Load data from a mdx file."""
         with open(url, "r", encoding="utf-8") as infile:
             content = infile.read()
-        meta_data = {
+        metadata = {
             "url": url,
         }
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()
@@ -19,7 +19,7 @@ class MdxLoader(BaseLoader):
             "data": [
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             ],
         }


@@ -35,8 +35,8 @@ class OpenAPILoader(BaseLoader):
             yaml_data = yaml.load(file, Loader=yaml.SafeLoader)
             for i, (key, value) in enumerate(yaml_data.items()):
                 string_data = f"{key}: {value}"
-                meta_data = {"url": file_path, "row": i + 1}
-                data.append({"content": string_data, "meta_data": meta_data})
+                metadata = {"url": file_path, "row": i + 1}
+                data.append({"content": string_data, "meta_data": metadata})
                 data_content.append(string_data)
         doc_id = hashlib.sha256((content + ", ".join(data_content)).encode()).hexdigest()
         return {"doc_id": doc_id, "data": data}


@@ -27,12 +27,12 @@ class PdfFileLoader(BaseLoader):
         for page in pages:
             content = page.page_content
             content = clean_string(content)
-            meta_data = page.metadata
-            meta_data["url"] = url
+            metadata = page.metadata
+            metadata["url"] = url
             data.append(
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             )
             all_content.append(content)


@@ -41,12 +41,12 @@ class RSSFeedLoader(BaseLoader):
         data = loader.load()
         for entry in data:
-            meta_data = RSSFeedLoader.serialize_metadata(entry.metadata)
-            meta_data.update({"url": url})
+            metadata = RSSFeedLoader.serialize_metadata(entry.metadata)
+            metadata.update({"url": url})
             output.append(
                 {
                     "content": entry.page_content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             )


@@ -88,16 +88,16 @@ class SlackLoader(BaseLoader):
             content = clean_string(text)
             message_meta_data_keys = ["iid", "team", "ts", "type", "user", "username"]
-            meta_data = {}
+            metadata = {}
             for key in message.keys():
                 if key in message_meta_data_keys:
-                    meta_data[key] = message.get(key)
-            meta_data.update({"url": url})
+                    metadata[key] = message.get(key)
+            metadata.update({"url": url})
             data.append(
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             )
             data_content.append(content)


@@ -17,14 +17,14 @@ class TextFileLoader(BaseLoader):
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()
-        meta_data = {"url": url, "file_size": os.path.getsize(url), "file_type": url.split(".")[-1]}
+        metadata = {"url": url, "file_size": os.path.getsize(url), "file_type": url.split(".")[-1]}
         return {
             "doc_id": doc_id,
             "data": [
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             ],
         }


@@ -26,12 +26,12 @@ class UnstructuredLoader(BaseLoader):
         for page in pages:
             content = page.page_content
             content = clean_string(content)
-            meta_data = page.metadata
-            meta_data["url"] = url
+            metadata = page.metadata
+            metadata["url"] = url
             data.append(
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             )
             all_content.append(content)


@@ -30,7 +30,7 @@ class WebPageLoader(BaseLoader):
         data = response.content
         content = self._get_clean_content(data, url)
-        meta_data = {"url": url}
+        metadata = {"url": url}
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()
         return {
@@ -38,7 +38,7 @@ class WebPageLoader(BaseLoader):
             "data": [
                 {
                     "content": content,
-                    "meta_data": meta_data,
+                    "meta_data": metadata,
                 }
             ],
         }


@@ -19,10 +19,10 @@ class XmlLoader(BaseLoader):
         data = loader.load()
         content = data[0].page_content
         content = clean_string(content)
-        meta_data = data[0].metadata
-        meta_data["url"] = meta_data["source"]
-        del meta_data["source"]
-        output = [{"content": content, "meta_data": meta_data}]
+        metadata = data[0].metadata
+        metadata["url"] = metadata["source"]
+        del metadata["source"]
+        output = [{"content": content, "meta_data": metadata}]
         doc_id = hashlib.sha256((content + xml_url).encode()).hexdigest()
         return {
             "doc_id": doc_id,


@@ -22,13 +22,13 @@ class YoutubeVideoLoader(BaseLoader):
             raise ValueError(f"No data found for url: {url}")
         content = doc[0].page_content
         content = clean_string(content)
-        meta_data = doc[0].metadata
-        meta_data["url"] = url
+        metadata = doc[0].metadata
+        metadata["url"] = url
         output.append(
             {
                 "content": content,
-                "meta_data": meta_data,
+                "meta_data": metadata,
             }
         )
         doc_id = hashlib.sha256((content + url).encode()).hexdigest()