diff --git a/embedchain/embedchain.py b/embedchain/embedchain.py
index 190d2f55..4be92b3b 100644
--- a/embedchain/embedchain.py
+++ b/embedchain/embedchain.py
@@ -7,7 +7,9 @@ from typing import Any, Dict, List, Optional, Tuple, Union
 from dotenv import load_dotenv
 from langchain.docstore.document import Document
 
-from embedchain.cache import adapt, get_gptcache_session, gptcache_data_convert, gptcache_update_cache_callback
+from embedchain.cache import (adapt, get_gptcache_session,
+                              gptcache_data_convert,
+                              gptcache_update_cache_callback)
 from embedchain.chunkers.base_chunker import BaseChunker
 from embedchain.config import AddConfig, BaseLlmConfig, ChunkerConfig
 from embedchain.config.base_app_config import BaseAppConfig
@@ -17,7 +19,8 @@ from embedchain.embedder.base import BaseEmbedder
 from embedchain.helpers.json_serializable import JSONSerializable
 from embedchain.llm.base import BaseLlm
 from embedchain.loaders.base_loader import BaseLoader
-from embedchain.models.data_type import DataType, DirectDataType, IndirectDataType, SpecialDataType
+from embedchain.models.data_type import (DataType, DirectDataType,
+                                         IndirectDataType, SpecialDataType)
 from embedchain.telemetry.posthog import AnonymousTelemetry
 from embedchain.utils.misc import detect_datatype, is_valid_json_string
 from embedchain.vectordb.base import BaseVectorDB
diff --git a/examples/sadhguru-ai/app.py b/examples/sadhguru-ai/app.py
index 055f68bc..67d4c62d 100644
--- a/examples/sadhguru-ai/app.py
+++ b/examples/sadhguru-ai/app.py
@@ -93,8 +93,9 @@ if prompt := st.chat_input("Ask me anything!"):
         answer, citations = results["answer"], results["citations"]
         if citations:
             full_response += "\n\n**Sources**:\n"
-            for i, citations in enumerate(citations):
-                full_response += f"{i+1}. {citations[1]}\n"
+            sources = list(set(map(lambda x: x[1]["url"], citations)))
+            for i, source in enumerate(sources):
+                full_response += f"{i+1}. {source}\n"
 
         msg_placeholder.markdown(full_response)
         st.session_state.messages.append({"role": "assistant", "content": full_response})
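
Note on the app.py hunk: the old loop re-bound the citations name as its loop variable and appended citations[1] (the whole metadata object) for every chunk, so the same page could appear several times in the "Sources" list. The replacement collects only the unique source URLs. Below is a minimal, runnable sketch of that logic, assuming each citation is a (chunk, metadata) tuple whose metadata dict carries a "url" key, as the new code implies; the sample data is illustrative only.

    # Sketch of the new citation handling; the data shape is an assumption
    # inferred from the diff, not taken from the sadhguru-ai example itself.
    citations = [
        ("chunk about inner engineering", {"url": "https://example.com/page-a"}),
        ("second chunk from the same page", {"url": "https://example.com/page-a"}),
        ("chunk from another page", {"url": "https://example.com/page-b"}),
    ]

    full_response = "\n\n**Sources**:\n"
    # set() drops duplicate URLs when several chunks cite the same page;
    # note that it does not preserve the original citation order.
    sources = list(set(map(lambda x: x[1]["url"], citations)))
    for i, source in enumerate(sources):
        full_response += f"{i+1}. {source}\n"

    print(full_response)  # prints two numbered sources instead of three repeated tuples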