diff --git a/docs/integration/streamlit-mistral.mdx b/docs/integration/streamlit-mistral.mdx
new file mode 100644
index 00000000..d63b6d47
--- /dev/null
+++ b/docs/integration/streamlit-mistral.mdx
@@ -0,0 +1,113 @@
+---
+title: '🚀 Streamlit'
+description: 'Build a Mistral-powered RAG chatbot with Embedchain and Streamlit'
+---
+
+In this example, we will learn how to use `mistralai/Mistral-7B-v0.1` and Embedchain together with Streamlit to build a simple RAG chatbot.
+
+## Setup
+
+
+ 1. Install Embedchain and Streamlit:
+ ```bash
+ pip install embedchain
+ pip install streamlit
+ ```
+
+ 2. Create an `app.py` file and add the following code:
+
+ ```python
+ import os
+ from embedchain import Pipeline as App
+ import streamlit as st
+
+ with st.sidebar:
+        huggingface_access_token = st.text_input("Hugging Face Token", key="chatbot_api_key", type="password")
+        "[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"
+        "[View the source code](https://github.com/embedchain/examples/mistral-streamlit)"
+
+
+ st.title("💬 Chatbot")
+ st.caption("🚀 An Embedchain app powered by Mistral!")
+ if "messages" not in st.session_state:
+ st.session_state.messages = [
+ {
+ "role": "assistant",
+ "content": """
+ Hi! I'm a chatbot. I can answer questions and learn new things!\n
+            Ask me anything, and if you want me to learn something, use `/add <source>`.\n
+            I can learn almost anything. :)
+ """,
+ }
+ ]
+
+ for message in st.session_state.messages:
+ with st.chat_message(message["role"]):
+ st.markdown(message["content"])
+
+ if prompt := st.chat_input("Ask me anything!"):
+ if not st.session_state.chatbot_api_key:
+ st.error("Please enter your Hugging Face Access Token")
+ st.stop()
+
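+        # Export the token so the Embedchain Hugging Face LLM and embedder can use it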
+ os.environ["HUGGINGFACE_ACCESS_TOKEN"] = st.session_state.chatbot_api_key
+ app = App.from_config(config_path="config.yaml")
+
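+        # `/add <source>`: strip the command prefix and add the rest to the knowledge base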
+ if prompt.startswith("/add"):
+ with st.chat_message("user"):
+ st.markdown(prompt)
+ st.session_state.messages.append({"role": "user", "content": prompt})
+ prompt = prompt.replace("/add", "").strip()
+ with st.chat_message("assistant"):
+ message_placeholder = st.empty()
+ message_placeholder.markdown("Adding to knowledge base...")
+ app.add(prompt)
+ message_placeholder.markdown(f"Added {prompt} to knowledge base!")
+ st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
+ st.stop()
+
+ with st.chat_message("user"):
+ st.markdown(prompt)
+ st.session_state.messages.append({"role": "user", "content": prompt})
+
+ with st.chat_message("assistant"):
+ msg_placeholder = st.empty()
+ msg_placeholder.markdown("Thinking...")
+ full_response = ""
+
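+            # `stream: true` in config.yaml makes app.chat() yield the answer in chunks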
+ for response in app.chat(prompt):
+ msg_placeholder.empty()
+ full_response += response
+
+ msg_placeholder.markdown(full_response)
+ st.session_state.messages.append({"role": "assistant", "content": full_response})
+ ```
+
+ 3. Create a `config.yaml` file in the same directory:
+
+ ```yaml
+ app:
+ config:
+ name: 'mistral-streamlit-app'
+
+ llm:
+ provider: huggingface
+ config:
+ model: 'mistralai/Mistral-7B-v0.1'
+ temperature: 0.1
+ max_tokens: 250
+ top_p: 0.1
+ stream: true
+
+ embedder:
+ provider: huggingface
+ config:
+ model: 'sentence-transformers/all-mpnet-base-v2'
+ ```
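+
+To sanity-check the configuration outside of Streamlit, a minimal sketch along the following lines should work. The token value and the source passed to `app.add` are placeholders; substitute your own token and whatever URL or text you want the bot to learn.
+
+```python
+import os
+from embedchain import Pipeline as App
+
+os.environ["HUGGINGFACE_ACCESS_TOKEN"] = "hf_..."  # placeholder: your Hugging Face access token
+
+app = App.from_config(config_path="config.yaml")
+app.add("https://www.example.com/article")  # placeholder source to learn from
+for chunk in app.chat("What is this article about?"):  # `stream: true` yields chunks
+    print(chunk, end="")
+```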
+
+
+
+
+## Run the app locally
+
+```bash
+streamlit run app.py
+```
diff --git a/docs/mint.json b/docs/mint.json
index 4aeaf195..110c67bb 100644
--- a/docs/mint.json
+++ b/docs/mint.json
@@ -77,7 +77,8 @@
"group": "🔗 Integrations",
"pages": [
"integration/langsmith",
- "integration/chainlit"
+ "integration/chainlit",
+ "integration/streamlit-mistral"
]
},
"get-started/faq"
diff --git a/examples/mistral-streamlit/README.md b/examples/mistral-streamlit/README.md
new file mode 100644
index 00000000..1bd80f83
--- /dev/null
+++ b/examples/mistral-streamlit/README.md
@@ -0,0 +1,7 @@
+### Streamlit Chatbot App (Embedchain + Mistral)
+
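+Install the dependencies first (a `requirements.txt` is included):
+
+```bash
+pip install -r requirements.txt
+```
+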
+To run it locally:
+
+```bash
+streamlit run app.py
+```
diff --git a/examples/mistral-streamlit/app.py b/examples/mistral-streamlit/app.py
new file mode 100644
index 00000000..65dd78a1
--- /dev/null
+++ b/examples/mistral-streamlit/app.py
@@ -0,0 +1,64 @@
+import os
+from embedchain import Pipeline as App
+import streamlit as st
+
+with st.sidebar:
+    huggingface_access_token = st.text_input("Hugging Face Token", key="chatbot_api_key", type="password")
+ "[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"
+ "[View the source code](https://github.com/embedchain/examples/mistral-streamlit)"
+
+
+st.title("💬 Chatbot")
+st.caption("🚀 An Embedchain app powered by Mistral!")
+if "messages" not in st.session_state:
+ st.session_state.messages = [
+ {
+ "role": "assistant",
+ "content": """
+ Hi! I'm a chatbot. I can answer questions and learn new things!\n
+            Ask me anything, and if you want me to learn something, use `/add <source>`.\n
+            I can learn almost anything. :)
+ """,
+ }
+ ]
+
+for message in st.session_state.messages:
+ with st.chat_message(message["role"]):
+ st.markdown(message["content"])
+
+if prompt := st.chat_input("Ask me anything!"):
+ if not st.session_state.chatbot_api_key:
+ st.error("Please enter your Hugging Face Access Token")
+ st.stop()
+
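+    # Export the token so the Embedchain Hugging Face LLM and embedder can use it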
+ os.environ["HUGGINGFACE_ACCESS_TOKEN"] = st.session_state.chatbot_api_key
+ app = App.from_config(config_path="config.yaml")
+
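+    # `/add <source>`: strip the command prefix and add the rest to the knowledge base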
+ if prompt.startswith("/add"):
+ with st.chat_message("user"):
+ st.markdown(prompt)
+ st.session_state.messages.append({"role": "user", "content": prompt})
+ prompt = prompt.replace("/add", "").strip()
+ with st.chat_message("assistant"):
+ message_placeholder = st.empty()
+ message_placeholder.markdown("Adding to knowledge base...")
+ app.add(prompt)
+ message_placeholder.markdown(f"Added {prompt} to knowledge base!")
+ st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
+ st.stop()
+
+ with st.chat_message("user"):
+ st.markdown(prompt)
+ st.session_state.messages.append({"role": "user", "content": prompt})
+
+ with st.chat_message("assistant"):
+ msg_placeholder = st.empty()
+ msg_placeholder.markdown("Thinking...")
+ full_response = ""
+
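+        # `stream: true` in config.yaml makes app.chat() yield the answer in chunks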
+ for response in app.chat(prompt):
+ msg_placeholder.empty()
+ full_response += response
+
+ msg_placeholder.markdown(full_response)
+ st.session_state.messages.append({"role": "assistant", "content": full_response})
diff --git a/examples/mistral-streamlit/config.yaml b/examples/mistral-streamlit/config.yaml
new file mode 100644
index 00000000..6b597134
--- /dev/null
+++ b/examples/mistral-streamlit/config.yaml
@@ -0,0 +1,17 @@
+app:
+ config:
+ name: 'mistral-streamlit-app'
+
+llm:
+ provider: huggingface
+ config:
+    model: 'mistralai/Mistral-7B-v0.1'
+ temperature: 0.1
+ max_tokens: 250
+ top_p: 0.1
+ stream: true
+
+embedder:
+ provider: huggingface
+ config:
+ model: 'sentence-transformers/all-mpnet-base-v2'
diff --git a/examples/mistral-streamlit/requirements.txt b/examples/mistral-streamlit/requirements.txt
new file mode 100644
index 00000000..b864076a
--- /dev/null
+++ b/examples/mistral-streamlit/requirements.txt
@@ -0,0 +1,2 @@
+streamlit==1.29.0
+embedchain