[new] streamlit deployment (#1034)

This commit is contained in:
Sidharth Mohanty
2023-12-20 23:02:51 +05:30
committed by GitHub
parent 737837ae0b
commit 1020a4121f
7 changed files with 168 additions and 1 deletion

View File

@@ -98,6 +98,11 @@ def setup_render_com_app():
    )


def setup_streamlit_io_app():
    # nothing needs to be done here
    console.print("Great! Now you can install the dependencies by doing `pip install -r requirements.txt`")


@cli.command()
@click.option("--template", default="fly.io", help="The template to use.")
@click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
@@ -113,6 +118,8 @@ def create(template, extra_args):
        setup_modal_com_app(extra_args)
    elif template == "render.com":
        setup_render_com_app()
    elif template == "streamlit.io":
        setup_streamlit_io_app()
    else:
        raise ValueError(f"Unknown template '{template}'.")
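
For reference, a minimal sketch (not part of this commit) of exercising the new template branch through click's test runner; the `embedchain.cli` import path is an assumption, since the diff does not name the module that defines `cli`.

from click.testing import CliRunner

from embedchain.cli import cli  # assumed module path; not shown in this diff

runner = CliRunner()
# Invoke the `create` command with the new streamlit.io template.
result = runner.invoke(cli, ["create", "--template", "streamlit.io"])
print(result.output)  # should include the `pip install -r requirements.txt` hint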
@@ -152,6 +159,16 @@ def run_dev_modal_com():
console.print("\n🛑 [bold yellow]FastAPI server stopped[/bold yellow]")
def run_dev_streamlit_io():
streamlit_run_cmd = ["streamlit", "run", "app.py"]
try:
console.print(f"🚀 [bold cyan]Running Streamlit app with command: {' '.join(streamlit_run_cmd)}[/bold cyan]")
subprocess.run(streamlit_run_cmd, check=True)
except subprocess.CalledProcessError as e:
console.print(f"❌ [bold red]An error occurred: {e}[/bold red]")
except KeyboardInterrupt:
console.print("\n🛑 [bold yellow]Streamlit server stopped[/bold yellow]")
def run_dev_render_com(debug, host, port):
uvicorn_command = ["uvicorn", "app:app"]
@@ -186,6 +203,8 @@ def dev(debug, host, port):
        run_dev_modal_com()
    elif template == "render.com":
        run_dev_render_com(debug, host, port)
    elif template == "streamlit.io":
        run_dev_streamlit_io()
    else:
        raise ValueError(f"Unknown template '{template}'.")
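
Note that, unlike run_dev_render_com, the Streamlit branch ignores the debug/host/port options that `dev` accepts. A hypothetical variant (not in this commit) could forward them through Streamlit's standard --server.address and --server.port flags:

import subprocess


def run_dev_streamlit_io(host="127.0.0.1", port=8501):
    # Hypothetical sketch: pass dev's host/port through to Streamlit's server flags.
    streamlit_run_cmd = [
        "streamlit", "run", "app.py",
        "--server.address", host,
        "--server.port", str(port),
    ]
    subprocess.run(streamlit_run_cmd, check=True)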
@@ -260,6 +279,26 @@ def deploy_modal():
    )


def deploy_streamlit():
    streamlit_deploy_cmd = ["streamlit", "run", "app.py"]
    try:
        console.print(f"🚀 [bold cyan]Running: {' '.join(streamlit_deploy_cmd)}[/bold cyan]")
        console.print(
            """\n\n✅ [bold yellow]To deploy a Streamlit app, you can deploy it directly from the UI.\n
            Click on the 'Deploy' button on the top right corner of the app.\n
            For more information, please refer to https://docs.embedchain.ai/deployment/streamlit_io
            [/bold yellow]
            \n\n"""
        )
        subprocess.run(streamlit_deploy_cmd, check=True)
    except subprocess.CalledProcessError as e:
        console.print(f"❌ [bold red]An error occurred: {e}[/bold red]")
    except FileNotFoundError:
        console.print(
            """❌ [bold red]'streamlit' command not found.\n
            Please ensure Streamlit CLI is installed and in your PATH.[/bold red]"""
        )


def deploy_render():
    render_deploy_cmd = ["render", "blueprint", "launch"]
@@ -290,5 +329,7 @@ def deploy():
        deploy_modal()
    elif template == "render.com":
        deploy_render()
    elif template == "streamlit.io":
        deploy_streamlit()
    else:
        console.print("❌ [bold red]No recognized deployment platform found.[/bold red]")

View File

@@ -0,0 +1 @@
OPENAI_API_KEY="sk-xxx"
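
The template's .env ships a placeholder OpenAI key. The commit does not show how the key is loaded; a minimal sketch, assuming python-dotenv is available:

import os

from dotenv import load_dotenv  # assumption: python-dotenv is installed

load_dotenv()  # copies OPENAI_API_KEY from .env into the process environment
assert os.getenv("OPENAI_API_KEY"), "replace the sk-xxx placeholder with a real key"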

View File

@@ -0,0 +1,59 @@
import streamlit as st

from embedchain import Pipeline as App


@st.cache_resource
def embedchain_bot():
    return App()


st.title("💬 Chatbot")
st.caption("🚀 An Embedchain app powered by OpenAI!")

if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": """
        Hi! I'm a chatbot. I can answer questions and learn new things!\n
        Ask me anything and if you want me to learn something do `/add <source>`.\n
        I can learn mostly everything. :)
        """,
        }
    ]

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask me anything!"):
    app = embedchain_bot()

    if prompt.startswith("/add"):
        with st.chat_message("user"):
            st.markdown(prompt)
            st.session_state.messages.append({"role": "user", "content": prompt})
        prompt = prompt.replace("/add", "").strip()
        with st.chat_message("assistant"):
            message_placeholder = st.empty()
            message_placeholder.markdown("Adding to knowledge base...")
            app.add(prompt)
            message_placeholder.markdown(f"Added {prompt} to knowledge base!")
            st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
        st.stop()

    with st.chat_message("user"):
        st.markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("assistant"):
        msg_placeholder = st.empty()
        msg_placeholder.markdown("Thinking...")
        full_response = ""

        for response in app.chat(prompt):
            msg_placeholder.empty()
            full_response += response

        msg_placeholder.markdown(full_response)

    st.session_state.messages.append({"role": "assistant", "content": full_response})
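
The chat UI above boils down to two Pipeline calls: app.add() behind the `/add <source>` command and app.chat() for questions. A minimal command-line sketch using the same calls (the source URL and question are just examples, not part of the template):

from embedchain import Pipeline as App

app = App()
app.add("https://docs.embedchain.ai")  # example source, as a user would pass with `/add <source>`
# app.py iterates over app.chat(...) to build the reply incrementally; joining works either way.
answer = "".join(chunk for chunk in app.chat("What is Embedchain?"))
print(answer)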

View File

@@ -0,0 +1,2 @@
streamlit==1.29.0
embedchain