Rename embedchain to mem0 and open sourcing code for long term memory (#1474)

Co-authored-by: Deshraj Yadav <deshrajdry@gmail.com>
This commit is contained in:
Taranjeet Singh
2024-07-12 07:51:33 -07:00
committed by GitHub
parent 83e8c97295
commit f842a92e25
665 changed files with 9427 additions and 6592 deletions

View File

@@ -0,0 +1,8 @@
__pycache__/
database
db
pyenv
venv
.env
.git
trash_files/

View File

@@ -0,0 +1,8 @@
__pycache__
db
database
pyenv
venv
.env
trash_files/
.ideas.md

View File

@@ -0,0 +1,16 @@
FROM python:3.11 AS backend
WORKDIR /usr/src/api
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .
EXPOSE 5000
ENV FLASK_APP=api_server.py
ENV FLASK_RUN_EXTRA_FILES=/usr/src/api/*
ENV FLASK_ENV=development
CMD ["flask", "run", "--host=0.0.0.0", "--reload"]

View File

@@ -0,0 +1,3 @@
# API Server
This is a Docker template to create your own API Server using the embedchain package. To learn more about the API Server and how to use it, see the docs [here](https://docs.embedchain.ai/examples/api_server).
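Once the container is up (port 5000, per the Dockerfile and `docker-compose.yml` in this template), the `/add`, `/query`, and `/chat` routes defined in `api_server.py` can be exercised with any HTTP client. Below is a minimal, illustrative sketch using the `requests` package (not part of this template's requirements, so install it separately); the URL and question are placeholders:
```python
import requests

BASE_URL = "http://localhost:5000"  # default port mapping from docker-compose.yml

# Index a data source; the JSON fields mirror what /add expects
resp = requests.post(
    f"{BASE_URL}/add",
    json={"data_type": "web_page", "url_or_text": "https://example.com"},
)
print(resp.json())

# Ask a question over the indexed data
resp = requests.post(f"{BASE_URL}/query", json={"question": "What is this page about?"})
print(resp.json())
```
The `/chat` route accepts the same `question` payload.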

View File

@@ -0,0 +1,57 @@
import logging
from flask import Flask, jsonify, request
from embedchain import App
app = Flask(__name__)
logger = logging.getLogger(__name__)
@app.route("/add", methods=["POST"])
def add():
data = request.get_json()
data_type = data.get("data_type")
url_or_text = data.get("url_or_text")
if data_type and url_or_text:
try:
App().add(url_or_text, data_type=data_type)
return jsonify({"data": f"Added {data_type}: {url_or_text}"}), 200
except Exception:
logger.exception(f"Failed to add {data_type=}: {url_or_text=}")
return jsonify({"error": f"Failed to add {data_type}: {url_or_text}"}), 500
return jsonify({"error": "Invalid request. Please provide 'data_type' and 'url_or_text' in JSON format."}), 400
@app.route("/query", methods=["POST"])
def query():
data = request.get_json()
question = data.get("question")
if question:
try:
response = App().query(question)
return jsonify({"data": response}), 200
except Exception:
logger.exception(f"Failed to query {question=}")
return jsonify({"error": "An error occurred. Please try again!"}), 500
return jsonify({"error": "Invalid request. Please provide 'question' in JSON format."}), 400
@app.route("/chat", methods=["POST"])
def chat():
data = request.get_json()
question = data.get("question")
if question:
try:
response = App().chat(question)
return jsonify({"data": response}), 200
except Exception:
logger.exception(f"Failed to chat {question=}")
return jsonify({"error": "An error occurred. Please try again!"}), 500
return jsonify({"error": "Invalid request. Please provide 'question' in JSON format."}), 400
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=False)

View File

@@ -0,0 +1,15 @@
version: "3.9"
services:
backend:
container_name: embedchain_api
restart: unless-stopped
build:
context: .
dockerfile: Dockerfile
env_file:
- variables.env
ports:
- "5000:5000"
volumes:
- .:/usr/src/api

View File

@@ -0,0 +1,12 @@
flask==2.3.2
youtube-transcript-api==0.6.1
pytube==15.0.0
beautifulsoup4==4.12.3
slack-sdk==3.21.3
huggingface_hub==0.23.0
gitpython==3.1.38
yt_dlp==2023.11.14
PyGithub==1.59.1
feedparser==6.0.10
newspaper3k==0.2.8
listparser==0.19

View File

@@ -0,0 +1 @@
OPENAI_API_KEY=""

View File

@@ -0,0 +1 @@
.chainlit

View File

@@ -0,0 +1,17 @@
## Chainlit + Embedchain Demo
In this example, we will learn how to use Chainlit and Embedchain together.
## Setup
First, install the required packages:
```bash
pip install -r requirements.txt
```
## Run the app locally
```bash
chainlit run app.py
```

View File

@@ -0,0 +1,35 @@
import os
import chainlit as cl
from embedchain import App
os.environ["OPENAI_API_KEY"] = "sk-xxx"
@cl.on_chat_start
async def on_chat_start():
app = App.from_config(
config={
"app": {"config": {"name": "chainlit-app"}},
"llm": {
"config": {
"stream": True,
}
},
}
)
# import your data here
app.add("https://www.forbes.com/profile/elon-musk/")
app.collect_metrics = False
cl.user_session.set("app", app)
@cl.on_message
async def on_message(message: cl.Message):
app = cl.user_session.get("app")
msg = cl.Message(content="")
for chunk in await cl.make_async(app.chat)(message.content):
await msg.stream_token(chunk)
await msg.send()

View File

@@ -0,0 +1,15 @@
# Welcome to Embedchain! 🚀
Hello! 👋 We're excited to see you join us. With Embedchain and Chainlit, you can create ChatGPT-like apps effortlessly.
## Quick Start 🌟
- **Embedchain Docs:** Get started with our comprehensive [Embedchain Documentation](https://docs.embedchain.ai/) 📚
- **Discord Community:** Join our [Embedchain Discord](https://discord.gg/CUU9FPhRNt) to ask questions, share your projects, and connect with other developers! 💬
- **UI Guide**: Master Chainlit with [Chainlit Documentation](https://docs.chainlit.io/) ⛓️
Happy building with Embedchain! 🎉
## Customize welcome screen
Edit chainlit.md in your project root to change this welcome message.

View File

@@ -0,0 +1,2 @@
chainlit==0.7.700
embedchain==0.1.31

View File

@@ -0,0 +1,32 @@
# Embedchain Chat with PDF App
You can easily create and deploy your own `Chat-with-PDF` App using Embedchain.
Check out the live demo we created for [chat with PDF](https://embedchain.ai/demo/chat-pdf).
Here are a few simple steps to create and deploy your app:
1. Fork the embedchain repo from [GitHub](https://github.com/embedchain/embedchain).
If you run into problems with forking, please refer to the [GitHub docs](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo) on forking a repo.
2. Navigate to the `chat-pdf` example app in your forked repo:
```bash
cd <your_fork_repo>/examples/chat-pdf
```
3. Run your app in a development environment with these simple commands:
```bash
pip install -r requirements.txt
ec dev
```
Feel free to improve our simple `chat-pdf` Streamlit app and create a pull request to showcase your app [here](https://docs.embedchain.ai/examples/showcase).
4. You can easily deploy your app using the Streamlit interface.
Connect your GitHub account with Streamlit and refer to this [guide](https://docs.streamlit.io/streamlit-community-cloud/deploy-your-app) to deploy your app.
You can also use the deploy button on the Streamlit site you see when running the `ec dev` command.

View File

@@ -0,0 +1,161 @@
import os
import queue
import re
import tempfile
import threading
import streamlit as st
from embedchain import App
from embedchain.config import BaseLlmConfig
from embedchain.helpers.callbacks import (StreamingStdOutCallbackHandlerYield,
generate)
def embedchain_bot(db_path, api_key):
return App.from_config(
config={
"llm": {
"provider": "openai",
"config": {
"model": "gpt-3.5-turbo-1106",
"temperature": 0.5,
"max_tokens": 1000,
"top_p": 1,
"stream": True,
"api_key": api_key,
},
},
"vectordb": {
"provider": "chroma",
"config": {"collection_name": "chat-pdf", "dir": db_path, "allow_reset": True},
},
"embedder": {"provider": "openai", "config": {"api_key": api_key}},
"chunker": {"chunk_size": 2000, "chunk_overlap": 0, "length_function": "len"},
}
)
def get_db_path():
tmpdirname = tempfile.mkdtemp()
return tmpdirname
def get_ec_app(api_key):
if "app" in st.session_state:
print("Found app in session state")
app = st.session_state.app
else:
print("Creating app")
db_path = get_db_path()
app = embedchain_bot(db_path, api_key)
st.session_state.app = app
return app
with st.sidebar:
openai_access_token = st.text_input("OpenAI API Key", key="api_key", type="password")
"WE DO NOT STORE YOUR OPENAI KEY."
"Just paste your OpenAI API key here and we'll use it to power the chatbot. [Get your OpenAI API key](https://platform.openai.com/api-keys)" # noqa: E501
if st.session_state.api_key:
app = get_ec_app(st.session_state.api_key)
pdf_files = st.file_uploader("Upload your PDF files", accept_multiple_files=True, type="pdf")
add_pdf_files = st.session_state.get("add_pdf_files", [])
for pdf_file in pdf_files:
file_name = pdf_file.name
if file_name in add_pdf_files:
continue
try:
if not st.session_state.api_key:
st.error("Please enter your OpenAI API Key")
st.stop()
temp_file_name = None
with tempfile.NamedTemporaryFile(mode="wb", delete=False, prefix=file_name, suffix=".pdf") as f:
f.write(pdf_file.getvalue())
temp_file_name = f.name
if temp_file_name:
st.markdown(f"Adding {file_name} to knowledge base...")
app.add(temp_file_name, data_type="pdf_file")
st.markdown("")
add_pdf_files.append(file_name)
os.remove(temp_file_name)
st.session_state.messages.append({"role": "assistant", "content": f"Added {file_name} to knowledge base!"})
except Exception as e:
st.error(f"Error adding {file_name} to knowledge base: {e}")
st.stop()
st.session_state["add_pdf_files"] = add_pdf_files
st.title("📄 Embedchain - Chat with PDF")
styled_caption = '<p style="font-size: 17px; color: #aaa;">🚀 An <a href="https://github.com/embedchain/embedchain">Embedchain</a> app powered by OpenAI!</p>' # noqa: E501
st.markdown(styled_caption, unsafe_allow_html=True)
if "messages" not in st.session_state:
st.session_state.messages = [
{
"role": "assistant",
"content": """
Hi! I'm a chatbot powered by Embedchain, which can answer questions about your PDF documents.\n
Upload your PDF documents here and I'll answer your questions about them!
""",
}
]
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
if prompt := st.chat_input("Ask me anything!"):
if not st.session_state.api_key:
st.error("Please enter your OpenAI API Key", icon="🤖")
st.stop()
app = get_ec_app(st.session_state.api_key)
with st.chat_message("user"):
st.session_state.messages.append({"role": "user", "content": prompt})
st.markdown(prompt)
with st.chat_message("assistant"):
msg_placeholder = st.empty()
msg_placeholder.markdown("Thinking...")
full_response = ""
q = queue.Queue()
def app_response(result):
llm_config = app.llm.config.as_dict()
llm_config["callbacks"] = [StreamingStdOutCallbackHandlerYield(q=q)]
config = BaseLlmConfig(**llm_config)
answer, citations = app.chat(prompt, config=config, citations=True)
result["answer"] = answer
result["citations"] = citations
results = {}
thread = threading.Thread(target=app_response, args=(results,))
thread.start()
for answer_chunk in generate(q):
full_response += answer_chunk
msg_placeholder.markdown(full_response)
thread.join()
answer, citations = results["answer"], results["citations"]
if citations:
full_response += "\n\n**Sources**:\n"
sources = []
for i, citation in enumerate(citations):
source = citation[1]["url"]
pattern = re.compile(r"([^/]+)\.[^\.]+\.pdf$")
match = pattern.search(source)
if match:
source = match.group(1) + ".pdf"
sources.append(source)
sources = list(set(sources))
for source in sources:
full_response += f"- {source}\n"
msg_placeholder.markdown(full_response)
print("Answer: ", full_response)
st.session_state.messages.append({"role": "assistant", "content": full_response})

View File

@@ -0,0 +1,3 @@
{
"provider": "streamlit.io"
}

View File

@@ -0,0 +1,4 @@
streamlit
embedchain
langchain-text-splitters
pysqlite3-binary

View File

@@ -0,0 +1,8 @@
__pycache__/
database
db
pyenv
venv
.env
.git
trash_files/

View File

@@ -0,0 +1,7 @@
__pycache__
db
database
pyenv
venv
.env
trash_files/

View File

@@ -0,0 +1,9 @@
FROM python:3.11-slim
WORKDIR /usr/src/discord_bot
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .
CMD ["python", "discord_bot.py"]

View File

@@ -0,0 +1,9 @@
# Discord Bot
This is a Docker template to create your own Discord bot using the embedchain package. To learn more about the bot and how to use it, see the docs [here](https://docs.embedchain.ai/examples/discord_bot).
To run this, use the following command:
```bash
docker run --name discord-bot -e OPENAI_API_KEY=sk-xxx -e DISCORD_BOT_TOKEN=xxx -p 8080:8080 embedchain/discord-bot:latest
```
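Once the bot is running and invited to your server, you interact with it in chat using the `/ec ` command prefix defined in `discord_bot.py`; for example (the URL and questions below are only illustrative):
```
/ec add web_page https://example.com
/ec query What does that page talk about?
/ec chat Can you summarize it for me?
```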

View File

@@ -0,0 +1,76 @@
import os
import discord
from discord.ext import commands
from dotenv import load_dotenv
from embedchain import App
load_dotenv()
intents = discord.Intents.default()
intents.message_content = True
bot = commands.Bot(command_prefix="/ec ", intents=intents)
root_folder = os.getcwd()
def initialize_chat_bot():
global chat_bot
chat_bot = App()
@bot.event
async def on_ready():
print(f"Logged in as {bot.user.name}")
initialize_chat_bot()
@bot.event
async def on_command_error(ctx, error):
if isinstance(error, commands.CommandNotFound):
await send_response(ctx, "Invalid command. Please refer to the documentation for correct syntax.")
else:
print("Error occurred during command execution:", error)
@bot.command()
async def add(ctx, data_type: str, *, url_or_text: str):
print(f"User: {ctx.author.name}, Data Type: {data_type}, URL/Text: {url_or_text}")
try:
chat_bot.add(data_type, url_or_text)
await send_response(ctx, f"Added {data_type} : {url_or_text}")
except Exception as e:
await send_response(ctx, f"Failed to add {data_type} : {url_or_text}")
print("Error occurred during 'add' command:", e)
@bot.command()
async def query(ctx, *, question: str):
print(f"User: {ctx.author.name}, Query: {question}")
try:
response = chat_bot.query(question)
await send_response(ctx, response)
except Exception as e:
await send_response(ctx, "An error occurred. Please try again!")
print("Error occurred during 'query' command:", e)
@bot.command()
async def chat(ctx, *, question: str):
print(f"User: {ctx.author.name}, Query: {question}")
try:
response = chat_bot.chat(question)
await send_response(ctx, response)
except Exception as e:
await send_response(ctx, "An error occurred. Please try again!")
print("Error occurred during 'chat' command:", e)
async def send_response(ctx, message):
if ctx.guild is None:
await ctx.send(message)
else:
await ctx.reply(message)
bot.run(os.environ["DISCORD_BOT_TOKEN"])

View File

@@ -0,0 +1,11 @@
version: "3.9"
services:
backend:
container_name: embedchain_discord_bot
restart: unless-stopped
build:
context: .
dockerfile: Dockerfile
env_file:
- variables.env

View File

@@ -0,0 +1,3 @@
discord==2.3.1
embedchain==0.0.58
python-dotenv==1.0.0

View File

@@ -0,0 +1,2 @@
OPENAI_API_KEY=""
DISCORD_BOT_TOKEN=""

View File

@@ -0,0 +1 @@
.git

View File

@@ -0,0 +1,18 @@
## 🐳 Docker Setup
- To set up the full-stack app using Docker, run the following command inside this folder from your terminal.
```bash
docker-compose up --build
```
📝 Note: The build command might take a while to install all the packages depending on your system resources.
## 🚀 Usage Instructions
- Go to [http://localhost:3000/](http://localhost:3000/) in your browser to view the dashboard.
- Add your `OpenAI API key` 🔑 in the Settings.
- Create a new bot and you'll be navigated to its page.
- Here you can add your data sources and then chat with the bot.
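If you prefer to script against the backend directly instead of clicking through the dashboard, the Flask routes shipped with this example (`/api/set_key`, `/api/create_bot`, `/api/add_sources`, `/api/get_answer`) can be called over plain HTTP. Here is a rough sketch with the `requests` package, assuming the default `localhost:8000` port mapping and the `open_ai` embedding model (the key and URL below are placeholders):
```python
import requests

BACKEND = "http://localhost:8000"  # backend port exposed in docker-compose.yml

# Save your OpenAI key (same as the Settings section of the dashboard)
requests.post(f"{BACKEND}/api/set_key", json={"openAIKey": "sk-..."})

# Create a bot; the backend derives the slug from the name
requests.post(f"{BACKEND}/api/create_bot", json={"name": "My Bot"})

# Add a data source to the knowledge base
requests.post(
    f"{BACKEND}/api/add_sources",
    json={"embedding_model": "open_ai", "name": "web_page", "value": "https://example.com"},
)

# Ask a question
answer = requests.post(
    f"{BACKEND}/api/get_answer",
    json={"query": "What does that page cover?", "embedding_model": "open_ai", "app_type": "app"},
).json()
print(answer["response"])
```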
🎉 Happy Chatting! 🎉

View File

@@ -0,0 +1,7 @@
__pycache__/
database
pyenv
venv
.env
.git
trash_files/

View File

@@ -0,0 +1,6 @@
__pycache__
database
pyenv
venv
.env
trash_files/

View File

@@ -0,0 +1,11 @@
FROM python:3.11-slim AS backend
WORKDIR /usr/src/app/backend
COPY requirements.txt .
RUN pip install -r requirements.txt
COPY . .
EXPOSE 8000
CMD ["python", "server.py"]

View File

@@ -0,0 +1,14 @@
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class APIKey(db.Model):
id = db.Column(db.Integer, primary_key=True)
key = db.Column(db.String(255), nullable=False)
class BotList(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(255), nullable=False)
slug = db.Column(db.String(255), nullable=False, unique=True)

View File

@@ -0,0 +1,5 @@
import os
ROOT_DIRECTORY = os.getcwd()
DB_DIRECTORY_OPEN_AI = os.path.join(os.getcwd(), "database", "open_ai")
DB_DIRECTORY_OPEN_SOURCE = os.path.join(os.getcwd(), "database", "open_source")

View File

@@ -0,0 +1,32 @@
import os
from flask import Blueprint, jsonify, make_response, request
from models import APIKey
from paths import DB_DIRECTORY_OPEN_AI
from embedchain import App
chat_response_bp = Blueprint("chat_response", __name__)
# Chat Response for user query
@chat_response_bp.route("/api/get_answer", methods=["POST"])
def get_answer():
try:
data = request.get_json()
query = data.get("query")
embedding_model = data.get("embedding_model")
app_type = data.get("app_type")
if embedding_model == "open_ai":
os.chdir(DB_DIRECTORY_OPEN_AI)
api_key = APIKey.query.first().key
os.environ["OPENAI_API_KEY"] = api_key
if app_type == "app":
chat_bot = App()
response = chat_bot.chat(query)
return make_response(jsonify({"response": response}), 200)
except Exception as e:
return make_response(jsonify({"error": str(e)}), 400)

View File

@@ -0,0 +1,72 @@
from flask import Blueprint, jsonify, make_response, request
from models import APIKey, BotList, db
dashboard_bp = Blueprint("dashboard", __name__)
# Set Open AI Key
@dashboard_bp.route("/api/set_key", methods=["POST"])
def set_key():
data = request.get_json()
api_key = data["openAIKey"]
existing_key = APIKey.query.first()
if existing_key:
existing_key.key = api_key
else:
new_key = APIKey(key=api_key)
db.session.add(new_key)
db.session.commit()
return make_response(jsonify(message="API key saved successfully"), 200)
# Check OpenAI Key
@dashboard_bp.route("/api/check_key", methods=["GET"])
def check_key():
existing_key = APIKey.query.first()
if existing_key:
return make_response(jsonify(status="ok", message="OpenAI Key exists"), 200)
else:
return make_response(jsonify(status="fail", message="No OpenAI Key present"), 200)
# Create a bot
@dashboard_bp.route("/api/create_bot", methods=["POST"])
def create_bot():
data = request.get_json()
name = data["name"]
slug = name.lower().replace(" ", "_")
existing_bot = BotList.query.filter_by(slug=slug).first()
if existing_bot:
return make_response(jsonify(message="Bot already exists"), 400)
new_bot = BotList(name=name, slug=slug)
db.session.add(new_bot)
db.session.commit()
return make_response(jsonify(message="Bot created successfully"), 200)
# Delete a bot
@dashboard_bp.route("/api/delete_bot", methods=["POST"])
def delete_bot():
data = request.get_json()
slug = data.get("slug")
bot = BotList.query.filter_by(slug=slug).first()
if bot:
db.session.delete(bot)
db.session.commit()
return make_response(jsonify(message="Bot deleted successfully"), 200)
return make_response(jsonify(message="Bot not found"), 400)
# Get the list of bots
@dashboard_bp.route("/api/get_bots", methods=["GET"])
def get_bots():
bots = BotList.query.all()
bot_list = []
for bot in bots:
bot_list.append(
{
"name": bot.name,
"slug": bot.slug,
}
)
return jsonify(bot_list)

View File

@@ -0,0 +1,27 @@
import os
from flask import Blueprint, jsonify, make_response, request
from models import APIKey
from paths import DB_DIRECTORY_OPEN_AI
from embedchain import App
sources_bp = Blueprint("sources", __name__)
# API route to add data sources
@sources_bp.route("/api/add_sources", methods=["POST"])
def add_sources():
try:
embedding_model = request.json.get("embedding_model")
name = request.json.get("name")
value = request.json.get("value")
if embedding_model == "open_ai":
os.chdir(DB_DIRECTORY_OPEN_AI)
api_key = APIKey.query.first().key
os.environ["OPENAI_API_KEY"] = api_key
chat_bot = App()
chat_bot.add(name, value)
return make_response(jsonify(message="Sources added successfully"), 200)
except Exception as e:
return make_response(jsonify(message=f"Error adding sources: {str(e)}"), 400)

View File

@@ -0,0 +1,27 @@
import os
from flask import Flask
from models import db
from paths import DB_DIRECTORY_OPEN_AI, ROOT_DIRECTORY
from routes.chat_response import chat_response_bp
from routes.dashboard import dashboard_bp
from routes.sources import sources_bp
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + os.path.join(ROOT_DIRECTORY, "database", "user_data.db")
app.register_blueprint(dashboard_bp)
app.register_blueprint(sources_bp)
app.register_blueprint(chat_response_bp)
# Initialize the app on startup
def load_app():
os.makedirs(DB_DIRECTORY_OPEN_AI, exist_ok=True)
db.init_app(app)
with app.app_context():
db.create_all()
if __name__ == "__main__":
load_app()
app.run(host="0.0.0.0", debug=True, port=8000)

View File

@@ -0,0 +1,24 @@
version: "3.9"
services:
backend:
container_name: embedchain-backend
restart: unless-stopped
build:
context: backend
dockerfile: Dockerfile
image: embedchain/backend
ports:
- "8000:8000"
frontend:
container_name: embedchain-frontend
restart: unless-stopped
build:
context: frontend
dockerfile: Dockerfile
image: embedchain/frontend
ports:
- "3000:3000"
depends_on:
- "backend"

View File

@@ -0,0 +1,7 @@
node_modules/
build
dist
.env
.git
.next/
trash_files/

View File

@@ -0,0 +1,3 @@
{
"extends": ["next/babel", "next/core-web-vitals"]
}

View File

@@ -0,0 +1,38 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.js
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# local env files
.env*.local
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts
vscode/
trash_files/

View File

@@ -0,0 +1,14 @@
FROM node:18-slim AS frontend
WORKDIR /usr/src/app/frontend
COPY package.json .
COPY package-lock.json .
RUN npm install
COPY . .
RUN npm run build
EXPOSE 3000
CMD ["npm", "start"]

View File

@@ -0,0 +1,7 @@
{
"compilerOptions": {
"paths": {
"@/*": ["./src/*"]
}
}
}

View File

@@ -0,0 +1,26 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
async rewrites() {
return [
{
source: "/api/:path*",
destination: "http://backend:8000/api/:path*",
},
];
},
reactStrictMode: true,
experimental: {
proxyTimeout: 6000000,
},
webpack(config) {
config.module.rules.push({
test: /\.svg$/i,
issuer: /\.[jt]sx?$/,
use: ["@svgr/webpack"],
});
return config;
},
};
module.exports = nextConfig;

File diff suppressed because it is too large

View File

@@ -0,0 +1,25 @@
{
"name": "frontend",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start",
"lint": "next lint"
},
"dependencies": {
"autoprefixer": "^10.4.14",
"eslint": "8.44.0",
"eslint-config-next": "13.4.9",
"flowbite": "^1.7.0",
"next": "13.4.9",
"postcss": "8.4.25",
"react": "18.2.0",
"react-dom": "18.2.0",
"tailwindcss": "3.3.2"
},
"devDependencies": {
"@svgr/webpack": "^8.0.1"
}
}

View File

@@ -0,0 +1,6 @@
module.exports = {
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

Binary file not shown.


View File

@@ -0,0 +1,20 @@
<svg
viewBox="0 0 24 24"
fill="currentColor"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M14 2C14 2.74028 13.5978 3.38663 13 3.73244V4H20C21.6569 4 23 5.34315 23 7V19C23 20.6569 21.6569 22 20 22H4C2.34315 22 1 20.6569 1 19V7C1 5.34315 2.34315 4 4 4H11V3.73244C10.4022 3.38663 10 2.74028 10 2C10 0.895431 10.8954 0 12 0C13.1046 0 14 0.895431 14 2ZM4 6H11H13H20C20.5523 6 21 6.44772 21 7V19C21 19.5523 20.5523 20 20 20H4C3.44772 20 3 19.5523 3 19V7C3 6.44772 3.44772 6 4 6ZM15 11.5C15 10.6716 15.6716 10 16.5 10C17.3284 10 18 10.6716 18 11.5C18 12.3284 17.3284 13 16.5 13C15.6716 13 15 12.3284 15 11.5ZM16.5 8C14.567 8 13 9.567 13 11.5C13 13.433 14.567 15 16.5 15C18.433 15 20 13.433 20 11.5C20 9.567 18.433 8 16.5 8ZM7.5 10C6.67157 10 6 10.6716 6 11.5C6 12.3284 6.67157 13 7.5 13C8.32843 13 9 12.3284 9 11.5C9 10.6716 8.32843 10 7.5 10ZM4 11.5C4 9.567 5.567 8 7.5 8C9.433 8 11 9.567 11 11.5C11 13.433 9.433 15 7.5 15C5.567 15 4 13.433 4 11.5ZM10.8944 16.5528C10.6474 16.0588 10.0468 15.8586 9.55279 16.1056C9.05881 16.3526 8.85858 16.9532 9.10557 17.4472C9.68052 18.5971 10.9822 19 12 19C13.0178 19 14.3195 18.5971 14.8944 17.4472C15.1414 16.9532 14.9412 16.3526 14.4472 16.1056C13.9532 15.8586 13.3526 16.0588 13.1056 16.5528C13.0139 16.7362 12.6488 17 12 17C11.3512 17 10.9861 16.7362 10.8944 16.5528Z"
fill="currentColor"
></path>
</g>
</svg>


View File

@@ -0,0 +1,14 @@
<svg
aria-hidden="true"
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 14 14"
>
<path
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="m1 1 6 6m0 0 6 6M7 7l6-6M7 7l-6 6"
/>
</svg>


View File

@@ -0,0 +1,15 @@
<svg
fill="currentColor"
viewBox="0 0 32 32"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<path d="M18.8,16l5.5-5.5c0.8-0.8,0.8-2,0-2.8l0,0C24,7.3,23.5,7,23,7c-0.5,0-1,0.2-1.4,0.6L16,13.2l-5.5-5.5 c-0.8-0.8-2.1-0.8-2.8,0C7.3,8,7,8.5,7,9.1s0.2,1,0.6,1.4l5.5,5.5l-5.5,5.5C7.3,21.9,7,22.4,7,23c0,0.5,0.2,1,0.6,1.4 C8,24.8,8.5,25,9,25c0.5,0,1-0.2,1.4-0.6l5.5-5.5l5.5,5.5c0.8,0.8,2.1,0.8,2.8,0c0.8-0.8,0.8-2.1,0-2.8L18.8,16z"></path>
</g>
</svg>


View File

@@ -0,0 +1,9 @@
<svg
aria-hidden="true"
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
viewBox="0 0 22 21"
>
<path d="M16.975 11H10V4.025a1 1 0 0 0-1.066-.998 8.5 8.5 0 1 0 9.039 9.039.999.999 0 0 0-1-1.066h.002Z" />
<path d="M12.5 0c-.157 0-.311.01-.565.027A1 1 0 0 0 11 1.02V10h8.975a1 1 0 0 0 1-.935c.013-.188.028-.374.028-.565A8.51 8.51 0 0 0 12.5 0Z" />
</svg>


View File

@@ -0,0 +1,15 @@
<svg
fill="currentColor"
viewBox="0 0 56 56"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<path d="M 15.5547 53.125 L 40.4453 53.125 C 45.2969 53.125 47.7109 50.6640 47.7109 45.7890 L 47.7109 24.5078 L 30.7422 24.5078 C 27.7422 24.5078 26.3359 23.0781 26.3359 20.0781 L 26.3359 2.8750 L 15.5547 2.8750 C 10.7266 2.8750 8.2891 5.3594 8.2891 10.2344 L 8.2891 45.7890 C 8.2891 50.6875 10.7266 53.125 15.5547 53.125 Z M 30.8125 21.2969 L 47.4531 21.2969 C 47.2891 20.3359 46.6094 19.3984 45.5078 18.2500 L 32.5703 5.1015 C 31.4922 3.9766 30.5078 3.2969 29.5234 3.1328 L 29.5234 20.0313 C 29.5234 20.875 29.9687 21.2969 30.8125 21.2969 Z M 18.9766 34.6562 C 18.0156 34.6562 17.3359 33.9766 17.3359 33.0625 C 17.3359 32.1484 18.0156 31.4687 18.9766 31.4687 L 37.0469 31.4687 C 37.9844 31.4687 38.7109 32.1484 38.7109 33.0625 C 38.7109 33.9766 37.9844 34.6562 37.0469 34.6562 Z M 18.9766 43.5859 C 18.0156 43.5859 17.3359 42.9062 17.3359 41.9922 C 17.3359 41.0781 18.0156 40.3984 18.9766 40.3984 L 37.0469 40.3984 C 37.9844 40.3984 38.7109 41.0781 38.7109 41.9922 C 38.7109 42.9062 37.9844 43.5859 37.0469 43.5859 Z"></path>
</g>
</svg>


View File

@@ -0,0 +1,12 @@
<svg
aria-hidden="true"
fill="currentColor"
viewBox="0 0 20 20"
xmlns="http://www.w3.org/2000/svg"
>
<path
clip-rule="evenodd"
fill-rule="evenodd"
d="M2 4.75A.75.75 0 012.75 4h14.5a.75.75 0 010 1.5H2.75A.75.75 0 012 4.75zm0 10.5a.75.75 0 01.75-.75h7.5a.75.75 0 010 1.5h-7.5a.75.75 0 01-.75-.75zM2 10a.75.75 0 01.75-.75h14.5a.75.75 0 010 1.5H2.75A.75.75 0 012 10z"
></path>
</svg>


View File

@@ -0,0 +1,14 @@
<svg
aria-hidden="true"
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 10 6"
>
<path
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="m1 1 4 4 4-4"
/>
</svg>


View File

@@ -0,0 +1,14 @@
<svg
aria-hidden="true"
xmlns="http://www.w3.org/2000/svg"
fill="none"
viewBox="0 0 10 6"
>
<path
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M9 5 5 1 1 5"
/>
</svg>


View File

@@ -0,0 +1,39 @@
<svg
viewBox="0 0 20 20"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
fill="currentColor"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<title>github [#142]</title> <desc>Created with Sketch.</desc>
<defs> </defs>
<g
id="Page-1"
stroke="none"
stroke-width="1"
fill="none"
fill-rule="evenodd"
>
<g
id="Dribbble-Light-Preview"
transform="translate(-140.000000, -7559.000000)"
fill="currentColor"
>
<g id="icons" transform="translate(56.000000, 160.000000)">
<path
d="M94,7399 C99.523,7399 104,7403.59 104,7409.253 C104,7413.782 101.138,7417.624 97.167,7418.981 C96.66,7419.082 96.48,7418.762 96.48,7418.489 C96.48,7418.151 96.492,7417.047 96.492,7415.675 C96.492,7414.719 96.172,7414.095 95.813,7413.777 C98.04,7413.523 100.38,7412.656 100.38,7408.718 C100.38,7407.598 99.992,7406.684 99.35,7405.966 C99.454,7405.707 99.797,7404.664 99.252,7403.252 C99.252,7403.252 98.414,7402.977 96.505,7404.303 C95.706,7404.076 94.85,7403.962 94,7403.958 C93.15,7403.962 92.295,7404.076 91.497,7404.303 C89.586,7402.977 88.746,7403.252 88.746,7403.252 C88.203,7404.664 88.546,7405.707 88.649,7405.966 C88.01,7406.684 87.619,7407.598 87.619,7408.718 C87.619,7412.646 89.954,7413.526 92.175,7413.785 C91.889,7414.041 91.63,7414.493 91.54,7415.156 C90.97,7415.418 89.522,7415.871 88.63,7414.304 C88.63,7414.304 88.101,7413.319 87.097,7413.247 C87.097,7413.247 86.122,7413.234 87.029,7413.87 C87.029,7413.87 87.684,7414.185 88.139,7415.37 C88.139,7415.37 88.726,7417.2 91.508,7416.58 C91.513,7417.437 91.522,7418.245 91.522,7418.489 C91.522,7418.76 91.338,7419.077 90.839,7418.982 C86.865,7417.627 84,7413.783 84,7409.253 C84,7403.59 88.478,7399 94,7399"
id="github-[#142]"
>
</path>
</g>
</g>
</g>
</g>
</svg>


View File

@@ -0,0 +1,17 @@
<svg
fill="currentColor"
viewBox="0 0 32 32"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<title>linkedin</title>
<path d="M28.778 1.004h-25.56c-0.008-0-0.017-0-0.027-0-1.199 0-2.172 0.964-2.186 2.159v25.672c0.014 1.196 0.987 2.161 2.186 2.161 0.010 0 0.019-0 0.029-0h25.555c0.008 0 0.018 0 0.028 0 1.2 0 2.175-0.963 2.194-2.159l0-0.002v-25.67c-0.019-1.197-0.994-2.161-2.195-2.161-0.010 0-0.019 0-0.029 0h0.001zM9.9 26.562h-4.454v-14.311h4.454zM7.674 10.293c-1.425 0-2.579-1.155-2.579-2.579s1.155-2.579 2.579-2.579c1.424 0 2.579 1.154 2.579 2.578v0c0 0.001 0 0.002 0 0.004 0 1.423-1.154 2.577-2.577 2.577-0.001 0-0.002 0-0.003 0h0zM26.556 26.562h-4.441v-6.959c0-1.66-0.034-3.795-2.314-3.795-2.316 0-2.669 1.806-2.669 3.673v7.082h-4.441v-14.311h4.266v1.951h0.058c0.828-1.395 2.326-2.315 4.039-2.315 0.061 0 0.121 0.001 0.181 0.003l-0.009-0c4.5 0 5.332 2.962 5.332 6.817v7.855z"></path>
</g>
</svg>


View File

@@ -0,0 +1,28 @@
<svg
viewBox="0 0 15 15"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<path
d="M3.5 8H3V7H3.5C3.77614 7 4 7.22386 4 7.5C4 7.77614 3.77614 8 3.5 8Z"
fill="currentColor"
></path>
<path
d="M7 10V7H7.5C7.77614 7 8 7.22386 8 7.5V9.5C8 9.77614 7.77614 10 7.5 10H7Z"
fill="currentColor"
></path>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M1 1.5C1 0.671573 1.67157 0 2.5 0H10.7071L14 3.29289V13.5C14 14.3284 13.3284 15 12.5 15H2.5C1.67157 15 1 14.3284 1 13.5V1.5ZM3.5 6H2V11H3V9H3.5C4.32843 9 5 8.32843 5 7.5C5 6.67157 4.32843 6 3.5 6ZM7.5 6H6V11H7.5C8.32843 11 9 10.3284 9 9.5V7.5C9 6.67157 8.32843 6 7.5 6ZM10 11V6H13V7H11V8H12V9H11V11H10Z"
fill="currentColor"
></path>
</g>
</svg>


View File

@@ -0,0 +1,13 @@
<svg
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth="2"
d="M12 6v6m0 0v6m0-6h6m-6 0H6"
/>
</svg>


View File

@@ -0,0 +1,57 @@
<svg
viewBox="-0.5 0 25 25"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
{" "}
<path
d="M12 7.82001H22"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="round"
stroke-linejoin="round"
></path>{" "}
<path
d="M2 7.82001H4"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="round"
stroke-linejoin="round"
></path>{" "}
<path
d="M20 16.82H22"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="round"
stroke-linejoin="round"
></path>{" "}
<path
d="M2 16.82H12"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="round"
stroke-linejoin="round"
></path>{" "}
<path
d="M8 11.82C10.2091 11.82 12 10.0291 12 7.82001C12 5.61087 10.2091 3.82001 8 3.82001C5.79086 3.82001 4 5.61087 4 7.82001C4 10.0291 5.79086 11.82 8 11.82Z"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="round"
stroke-linejoin="round"
></path>{" "}
<path
d="M16 20.82C18.2091 20.82 20 19.0291 20 16.82C20 14.6109 18.2091 12.82 16 12.82C13.7909 12.82 12 14.6109 12 16.82C12 19.0291 13.7909 20.82 16 20.82Z"
stroke="currentColor"
stroke-width="1.5"
stroke-linecap="round"
stroke-linejoin="round"
></path>{" "}
</g>
</svg>


View File

@@ -0,0 +1,61 @@
<svg
viewBox="0 0 24 24"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<rect
height="4"
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
width="4"
x="10"
y="4"
></rect>
<rect
height="4"
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
width="4"
x="10"
y="16"
></rect>
<rect
height="4"
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
width="4"
x="3"
y="16"
></rect>
<rect
height="4"
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
width="4"
x="17"
y="16"
></rect>
<path
d="M12 8V12M12 16V12M12 12H17C18.1046 12 19 12.8954 19 14V16M12 12H7C5.89543 12 5 12.8954 5 14V16"
stroke="currentColor"
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
></path>
</g>
</svg>


View File

@@ -0,0 +1,21 @@
<svg
viewBox="0 0 24 24"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<path
d="M12 3V21M9 21H15M19 6V3H5V6"
stroke="currentColor"
stroke-width="2"
stroke-linecap="round"
stroke-linejoin="round"
></path>
</g>
</svg>


View File

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 27.5.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="svg5" xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 1668.56 1221.19"
style="enable-background:new 0 0 1668.56 1221.19;" xml:space="preserve">
<g id="layer1" transform="translate(52.390088,-25.058597)">
<path id="path1009" d="M283.94,167.31l386.39,516.64L281.5,1104h87.51l340.42-367.76L984.48,1104h297.8L874.15,558.3l361.92-390.99
h-87.51l-313.51,338.7l-253.31-338.7H283.94z M412.63,231.77h136.81l604.13,807.76h-136.81L412.63,231.77z"/>
</g>
</svg>


View File

@@ -0,0 +1,20 @@
<svg
fill="currentColor"
version="1.1"
id="Layer_1"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
viewBox="0 0 24 24"
enable-background="new 0 0 24 24"
xml:space="preserve"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<path d="M18.41,0.01H4V9h2V2.01h10v6h6v14H4v2h20V5.6L18.41,0.01z M18,2.43l3.59,3.59H18V2.43z M21,18v2h-1h-1h-1v-8h2v6H21z M7,12 H6v2h1v6h2v-6h1v-2H9H7z M15,12l-1,1.61L13,12v0h-2v8h2v-4.21l1,1.61l1-1.61V20h2v-8L15,12L15,12z M3,15H2v-3H0v8h2v-3h1v3h2v-8H3 V15z"></path>
</g>
</svg>


View File

@@ -0,0 +1,39 @@
<svg
viewBox="0 -3 20 20"
version="1.1"
xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
fill="currentColor"
>
<g id="SVGRepo_bgCarrier" stroke-width="0"></g>
<g
id="SVGRepo_tracerCarrier"
stroke-linecap="round"
stroke-linejoin="round"
></g>
<g id="SVGRepo_iconCarrier">
<title>youtube [#168]</title> <desc>Created with Sketch.</desc>
<defs> </defs>
<g
id="Page-1"
stroke="none"
stroke-width="1"
fill="none"
fill-rule="evenodd"
>
<g
id="Dribbble-Light-Preview"
transform="translate(-300.000000, -7442.000000)"
fill="currentColor"
>
<g id="icons" transform="translate(56.000000, 160.000000)">
<path
d="M251.988432,7291.58588 L251.988432,7285.97425 C253.980638,7286.91168 255.523602,7287.8172 257.348463,7288.79353 C255.843351,7289.62824 253.980638,7290.56468 251.988432,7291.58588 M263.090998,7283.18289 C262.747343,7282.73013 262.161634,7282.37809 261.538073,7282.26141 C259.705243,7281.91336 248.270974,7281.91237 246.439141,7282.26141 C245.939097,7282.35515 245.493839,7282.58153 245.111335,7282.93357 C243.49964,7284.42947 244.004664,7292.45151 244.393145,7293.75096 C244.556505,7294.31342 244.767679,7294.71931 245.033639,7294.98558 C245.376298,7295.33761 245.845463,7295.57995 246.384355,7295.68865 C247.893451,7296.0008 255.668037,7296.17532 261.506198,7295.73552 C262.044094,7295.64178 262.520231,7295.39147 262.895762,7295.02447 C264.385932,7293.53455 264.28433,7285.06174 263.090998,7283.18289"
id="youtube-[#168]"
>
</path>
</g>
</g>
</g>
</g>
</svg>


Binary file not shown.


View File

@@ -0,0 +1,9 @@
export default function PageWrapper({ children }) {
return (
<>
<div className="flex pt-4 px-4 sm:ml-64 min-h-screen">
<div className="flex-grow pt-4 px-4 rounded-lg">{children}</div>
</div>
</>
);
}

View File

@@ -0,0 +1,16 @@
export default function BotWrapper({ children }) {
return (
<>
<div className="rounded-lg">
<div className="flex flex-row items-center">
<div className="flex items-center justify-center h-10 w-10 rounded-full bg-black text-white flex-shrink-0">
B
</div>
<div className="ml-3 text-sm bg-white py-2 px-4 shadow-lg rounded-xl">
<div>{children}</div>
</div>
</div>
</div>
</>
);
}

View File

@@ -0,0 +1,16 @@
export default function HumanWrapper({ children }) {
return (
<>
<div className="rounded-lg">
<div className="flex items-center justify-start flex-row-reverse">
<div className="flex items-center justify-center h-10 w-10 rounded-full bg-blue-800 text-white flex-shrink-0">
H
</div>
<div className="mr-3 text-sm bg-blue-200 py-2 px-4 shadow-lg rounded-xl">
<div>{children}</div>
</div>
</div>
</div>
</>
);
}

View File

@@ -0,0 +1,73 @@
import { useState } from "react";
import { useRouter } from "next/router";
export default function CreateBot() {
const [botName, setBotName] = useState("");
const [status, setStatus] = useState("");
const router = useRouter();
const handleCreateBot = async (e) => {
e.preventDefault();
const data = {
name: botName,
};
const response = await fetch("/api/create_bot", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(data),
});
if (response.ok) {
const botSlug = botName.toLowerCase().replace(/\s+/g, "_");
router.push(`/${botSlug}/app`);
} else {
setBotName("");
setStatus("fail");
setTimeout(() => {
setStatus("");
}, 3000);
}
};
return (
<>
<div className="w-full">
{/* Create Bot */}
<h2 className="text-xl font-bold text-gray-800">CREATE BOT</h2>
<form className="py-2" onSubmit={handleCreateBot}>
<label
htmlFor="bot_name"
className="block mb-2 text-sm font-medium text-gray-900"
>
Name of Bot
</label>
<div className="flex flex-col sm:flex-row gap-x-4 gap-y-4">
<input
type="text"
id="bot_name"
className="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
placeholder="Eg. Naval Ravikant"
required
value={botName}
onChange={(e) => setBotName(e.target.value)}
/>
<button
type="submit"
className="h-fit text-white bg-black hover:bg-blue-800 focus:ring-4 focus:outline-none focus:ring-blue-300 font-medium rounded-lg text-sm w-full sm:w-auto px-5 py-2.5 text-center"
>
Submit
</button>
</div>
{status === "fail" && (
<div className="text-red-600 text-sm font-bold py-1">
An error occurred while creating your bot!
</div>
)}
</form>
</div>
</>
);
}

View File

@@ -0,0 +1,71 @@
import { useEffect, useState } from "react";
import { useRouter } from "next/router";
export default function DeleteBot() {
const [bots, setBots] = useState([]);
const router = useRouter();
useEffect(() => {
const fetchBots = async () => {
const response = await fetch("/api/get_bots");
const data = await response.json();
setBots(data);
};
fetchBots();
}, []);
const handleDeleteBot = async (event) => {
event.preventDefault();
const selectedBotSlug = event.target.bot_name.value;
if (selectedBotSlug === "none") {
return;
}
const response = await fetch("/api/delete_bot", {
method: "POST",
body: JSON.stringify({ slug: selectedBotSlug }),
headers: {
"Content-Type": "application/json",
},
});
if (response.ok) {
router.reload();
}
};
return (
<>
{bots.length !== 0 && (
<div className="w-full">
{/* Delete Bot */}
<h2 className="text-xl font-bold text-gray-800">DELETE BOTS</h2>
<form className="py-2" onSubmit={handleDeleteBot}>
<label className="block mb-2 text-sm font-medium text-gray-900">
List of Bots
</label>
<div className="flex flex-col sm:flex-row gap-x-4 gap-y-4">
<select
name="bot_name"
defaultValue="none"
className="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
>
<option value="none">Select a Bot</option>
{bots.map((bot) => (
<option key={bot.slug} value={bot.slug}>
{bot.name}
</option>
))}
</select>
<button
type="submit"
className="h-fit text-white bg-red-600 hover:bg-red-600/90 focus:ring-4 focus:outline-none focus:ring-blue-300 font-medium rounded-lg text-sm w-full sm:w-auto px-5 py-2.5 text-center"
>
Delete
</button>
</div>
</form>
</div>
)}
</>
);
}

View File

@@ -0,0 +1,47 @@
import { useState } from "react";
export default function PurgeChats() {
const [status, setStatus] = useState("");
const handleChatsPurge = (event) => {
event.preventDefault();
localStorage.clear();
setStatus("success");
setTimeout(() => {
setStatus(false);
}, 3000);
};
return (
<>
<div className="w-full">
{/* Purge Chats */}
<h2 className="text-xl font-bold text-gray-800">PURGE CHATS</h2>
<form className="py-2" onSubmit={handleChatsPurge}>
<label className="block mb-2 text-sm font-medium text-red-600">
Warning
</label>
<div className="flex flex-col sm:flex-row gap-x-4 gap-y-4">
<div
className="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
>
The following action will clear all your chat logs. Proceed with
caution!
</div>
<button
type="submit"
className="h-fit text-white bg-red-600 hover:bg-red-600/80 focus:ring-4 focus:outline-none focus:ring-blue-300 font-medium rounded-lg text-sm w-full sm:w-auto px-5 py-2.5 text-center"
>
Purge
</button>
</div>
{status === "success" && (
<div className="text-green-600 text-sm font-bold py-1">
Your chats have been purged!
</div>
)}
</form>
</div>
</>
);
}

View File

@@ -0,0 +1,73 @@
import { useState } from "react";
export default function SetOpenAIKey({ setIsKeyPresent }) {
const [openAIKey, setOpenAIKey] = useState("");
const [status, setStatus] = useState("");
const handleOpenAIKey = async (e) => {
e.preventDefault();
const response = await fetch("/api/set_key", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ openAIKey }),
});
if (response.ok) {
setOpenAIKey("");
setStatus("success");
setIsKeyPresent(true);
} else {
setStatus("fail");
}
setTimeout(() => {
setStatus("");
}, 3000);
};
return (
<>
<div className="w-full">
{/* Set Open AI Key */}
<h2 className="text-xl font-bold text-gray-800">SET OPENAI KEY</h2>
<form className="py-2" onSubmit={handleOpenAIKey}>
<label
htmlFor="openai_key"
className="block mb-2 text-sm font-medium text-gray-900"
>
OpenAI Key
</label>
<div className="flex flex-col sm:flex-row gap-x-4 gap-y-4">
<input
type="password"
id="openai_key"
className="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5"
placeholder="Enter Open AI Key here"
required
value={openAIKey}
onChange={(e) => setOpenAIKey(e.target.value)}
/>
<button
type="submit"
className="h-fit text-white bg-black hover:bg-blue-800 focus:ring-4 focus:outline-none focus:ring-blue-300 font-medium rounded-lg text-sm w-full sm:w-auto px-5 py-2.5 text-center"
>
Submit
</button>
</div>
{status === "success" && (
<div className="text-green-600 text-sm font-bold py-1">
Your Open AI key has been saved successfully!
</div>
)}
{status === "fail" && (
<div className="text-red-600 text-sm font-bold py-1">
An error occurred while saving your OpenAI Key!
</div>
)}
</form>
</div>
</>
);
}

View File

@@ -0,0 +1,156 @@
import { useRouter } from "next/router";
import React, { useState, useEffect } from "react";
import BotWrapper from "@/components/chat/BotWrapper";
import HumanWrapper from "@/components/chat/HumanWrapper";
import SetSources from "@/containers/SetSources";
export default function ChatWindow({ embedding_model, app_type, setBotTitle }) {
const [bot, setBot] = useState(null);
const [chats, setChats] = useState([]);
const [isLoading, setIsLoading] = useState(false);
const [selectChat, setSelectChat] = useState(true);
const router = useRouter();
const { bot_slug } = router.query;
useEffect(() => {
if (bot_slug) {
const fetchBots = async () => {
const response = await fetch("/api/get_bots");
const data = await response.json();
const matchingBot = data.find((item) => item.slug === bot_slug);
setBot(matchingBot);
setBotTitle(matchingBot.name);
};
fetchBots();
}
}, [bot_slug]);
useEffect(() => {
const storedChats = localStorage.getItem(`chat_${bot_slug}_${app_type}`);
if (storedChats) {
const parsedChats = JSON.parse(storedChats);
setChats(parsedChats.chats);
}
}, [app_type, bot_slug]);
const handleChatResponse = async (e) => {
e.preventDefault();
setIsLoading(true);
const queryInput = e.target.query.value;
e.target.query.value = "";
const chatEntry = {
sender: "H",
message: queryInput,
};
setChats((prevChats) => [...prevChats, chatEntry]);
const response = await fetch("/api/get_answer", {
method: "POST",
body: JSON.stringify({
query: queryInput,
embedding_model,
app_type,
}),
headers: {
"Content-Type": "application/json",
},
});
const data = await response.json();
if (response.ok) {
const botResponse = data.response;
const botEntry = {
sender: "B",
message: botResponse,
};
setIsLoading(false);
setChats((prevChats) => [...prevChats, botEntry]);
const savedChats = {
chats: [...chats, chatEntry, botEntry],
};
localStorage.setItem(
`chat_${bot_slug}_${app_type}`,
JSON.stringify(savedChats)
);
} else {
router.reload();
}
};
return (
<>
<div className="flex flex-col justify-between h-full">
<div className="space-y-4 overflow-x-auto h-full pb-8">
{/* Greeting Message */}
<BotWrapper>
Hi, I am {bot?.name}. How can I help you today?
</BotWrapper>
{/* Chat Messages */}
{chats.map((chat, index) => (
<React.Fragment key={index}>
{chat.sender === "B" ? (
<BotWrapper>{chat.message}</BotWrapper>
) : (
<HumanWrapper>{chat.message}</HumanWrapper>
)}
</React.Fragment>
))}
{/* Loader */}
{isLoading && (
<BotWrapper>
<div className="flex items-center justify-center space-x-2 animate-pulse">
<div className="w-2 h-2 bg-black rounded-full"></div>
<div className="w-2 h-2 bg-black rounded-full"></div>
<div className="w-2 h-2 bg-black rounded-full"></div>
</div>
</BotWrapper>
)}
</div>
<div className="bg-white fixed bottom-0 left-0 right-0 h-28 sm:h-16"></div>
{/* Query Form */}
<div className="flex flex-row gap-x-2 sticky bottom-3">
<SetSources
setChats={setChats}
embedding_model={embedding_model}
setSelectChat={setSelectChat}
/>
{selectChat && (
<form
onSubmit={handleChatResponse}
className="w-full flex flex-col sm:flex-row gap-y-2 gap-x-2"
>
<div className="w-full">
<input
id="query"
name="query"
type="text"
placeholder="Enter your query..."
className="text-sm w-full border-2 border-black rounded-xl focus:outline-none focus:border-blue-800 sm:pl-4 h-11"
required
/>
</div>
<div className="w-full sm:w-fit">
<button
type="submit"
id="sender"
disabled={isLoading}
className={`${
isLoading ? "opacity-60" : ""
} w-full bg-black hover:bg-blue-800 rounded-xl text-lg text-white px-6 h-11`}
>
Send
</button>
</div>
</form>
)}
</div>
</div>
</>
);
}

View File

@@ -0,0 +1,160 @@
import { useState } from "react";
import PlusIcon from "../../public/icons/plus.svg";
import CrossIcon from "../../public/icons/cross.svg";
import YoutubeIcon from "../../public/icons/youtube.svg";
import PDFIcon from "../../public/icons/pdf.svg";
import WebIcon from "../../public/icons/web.svg";
import DocIcon from "../../public/icons/doc.svg";
import SitemapIcon from "../../public/icons/sitemap.svg";
import TextIcon from "../../public/icons/text.svg";
export default function SetSources({
setChats,
embedding_model,
setSelectChat,
}) {
const [sourceName, setSourceName] = useState("");
const [sourceValue, setSourceValue] = useState("");
const [isDropdownOpen, setIsDropdownOpen] = useState(false);
const [isLoading, setIsLoading] = useState(false);
const dataTypes = {
youtube_video: "YouTube Video",
pdf_file: "PDF File",
web_page: "Web Page",
doc_file: "Doc File",
sitemap: "Sitemap",
text: "Text",
};
const dataIcons = {
youtube_video: <YoutubeIcon className="w-5 h-5 mr-3" />,
pdf_file: <PDFIcon className="w-5 h-5 mr-3" />,
web_page: <WebIcon className="w-5 h-5 mr-3" />,
doc_file: <DocIcon className="w-5 h-5 mr-3" />,
sitemap: <SitemapIcon className="w-5 h-5 mr-3" />,
text: <TextIcon className="w-5 h-5 mr-3" />,
};
const handleDropdownClose = () => {
setIsDropdownOpen(false);
setSourceName("");
setSelectChat(true);
};
const handleDropdownSelect = (dataType) => {
setSourceName(dataType);
setSourceValue("");
setIsDropdownOpen(false);
setSelectChat(false);
};
const handleAddDataSource = async (e) => {
e.preventDefault();
setIsLoading(true);
const addDataSourceEntry = {
sender: "B",
message: `Adding the following ${dataTypes[sourceName]}: ${sourceValue}`,
};
setChats((prevChats) => [...prevChats, addDataSourceEntry]);
let name = sourceName;
let value = sourceValue;
setSourceValue("");
const response = await fetch("/api/add_sources", {
method: "POST",
body: JSON.stringify({
embedding_model,
name,
value,
}),
headers: {
"Content-Type": "application/json",
},
});
if (response.ok) {
const successEntry = {
sender: "B",
message: `Successfully added ${dataTypes[sourceName]}!`,
};
setChats((prevChats) => [...prevChats, successEntry]);
} else {
const errorEntry = {
sender: "B",
message: `Failed to add ${dataTypes[sourceName]}. Please try again.`,
};
setChats((prevChats) => [...prevChats, errorEntry]);
}
setSourceName("");
setIsLoading(false);
setSelectChat(true);
};
return (
<>
<div className="w-fit">
<button
type="button"
onClick={() => setIsDropdownOpen(!isDropdownOpen)}
className="w-fit p-2.5 rounded-xl text-white bg-black hover:bg-blue-800 focus:ring-4 focus:outline-none focus:ring-blue-300"
>
<PlusIcon className="w-6 h-6" />
</button>
{isDropdownOpen && (
<div className="absolute left-0 bottom-full bg-white border border-gray-300 rounded-lg shadow-lg mb-2">
<ul className="py-1">
<li
className="block px-4 py-2 text-sm text-black cursor-pointer hover:bg-gray-200"
onClick={handleDropdownClose}
>
<span className="flex items-center text-red-600">
<CrossIcon className="w-5 h-5 mr-3" />
Close
</span>
</li>
{Object.entries(dataTypes).map(([key, value]) => (
<li
key={key}
className="block px-4 py-2 text-sm text-black cursor-pointer hover:bg-gray-200"
onClick={() => handleDropdownSelect(key)}
>
<span className="flex items-center">
{dataIcons[key]}
{value}
</span>
</li>
))}
</ul>
</div>
)}
</div>
{sourceName && (
<form
onSubmit={handleAddDataSource}
className="w-full flex flex-col sm:flex-row gap-y-2 gap-x-2 items-center"
>
<div className="w-full">
<input
type="text"
placeholder="Enter URL, Data or File path here..."
className="text-sm w-full border-2 border-black rounded-xl focus:outline-none focus:border-blue-800 sm:pl-4 h-11"
required
value={sourceValue}
onChange={(e) => setSourceValue(e.target.value)}
/>
</div>
<div className="w-full sm:w-fit">
<button
type="submit"
disabled={isLoading}
className={`${
isLoading ? "opacity-60" : ""
} w-full bg-black hover:bg-blue-800 rounded-xl text-lg text-white px-6 h-11`}
>
Send
</button>
</div>
</form>
)}
</>
);
}

View File

@@ -0,0 +1,131 @@
import Link from "next/link";
import Image from "next/image";
import React, { useState, useEffect } from "react";
import DrawerIcon from "../../public/icons/drawer.svg";
import SettingsIcon from "../../public/icons/settings.svg";
import BotIcon from "../../public/icons/bot.svg";
import DropdownIcon from "../../public/icons/dropdown.svg";
import TwitterIcon from "../../public/icons/twitter.svg";
import GithubIcon from "../../public/icons/github.svg";
import LinkedinIcon from "../../public/icons/linkedin.svg";
export default function Sidebar() {
const [bots, setBots] = useState([]);
useEffect(() => {
const fetchBots = async () => {
const response = await fetch("/api/get_bots");
const data = await response.json();
setBots(data);
};
fetchBots();
}, []);
const toggleDropdown = () => {
const dropdown = document.getElementById("dropdown-toggle");
dropdown.classList.toggle("hidden");
};
return (
<>
{/* Mobile Toggle */}
<button
data-drawer-target="logo-sidebar"
data-drawer-toggle="logo-sidebar"
aria-controls="logo-sidebar"
type="button"
className="inline-flex items-center p-2 mt-2 ml-3 text-sm text-gray-500 rounded-lg sm:hidden hover:bg-gray-200 focus:outline-none focus:ring-2 focus:ring-gray-200"
>
<DrawerIcon className="w-6 h-6" />
</button>
{/* Sidebar */}
<div
id="logo-sidebar"
className="fixed top-0 left-0 z-40 w-64 h-screen transition-transform -translate-x-full sm:translate-x-0"
>
<div className="flex flex-col h-full px-3 py-4 overflow-y-auto bg-gray-100">
<div className="pb-10">
<Link href="/" className="flex items-center justify-evenly mb-5">
<Image
src="/images/embedchain.png"
alt="Embedchain Logo"
width={45}
height={0}
className="block h-auto w-auto"
/>
<span className="self-center text-2xl font-bold whitespace-nowrap">
Embedchain
</span>
</Link>
<ul className="space-y-2 font-medium text-lg">
{/* Settings */}
<li>
<Link
href="/"
className="flex items-center p-2 text-gray-900 rounded-lg hover:bg-gray-200 group"
>
<SettingsIcon className="w-6 h-6 text-gray-600 transition duration-75 group-hover:text-gray-900" />
<span className="ml-3">Settings</span>
</Link>
</li>
{/* Bots */}
{bots.length !== 0 && (
<li>
<button
type="button"
className="flex items-center w-full p-2 text-base text-gray-900 transition duration-75 rounded-lg group hover:bg-gray-200"
onClick={toggleDropdown}
>
<BotIcon className="w-6 h-6 text-gray-600 transition duration-75 group-hover:text-gray-900" />
<span className="flex-1 ml-3 text-left whitespace-nowrap">
Bots
</span>
<DropdownIcon className="w-3 h-3" />
</button>
<ul
id="dropdown-toggle"
className="hidden text-sm py-2 space-y-2"
>
{bots.map((bot, index) => (
<React.Fragment key={index}>
<li>
<Link
href={`/${bot.slug}/app`}
className="flex items-center w-full p-2 text-gray-900 transition duration-75 rounded-lg pl-11 group hover:bg-gray-200"
>
{bot.name}
</Link>
</li>
</React.Fragment>
))}
</ul>
</li>
)}
</ul>
</div>
<div className="bg-gray-200 absolute bottom-0 left-0 right-0 h-20"></div>
{/* Social Icons */}
<div className="mt-auto mb-3 flex flex-row justify-evenly sticky bottom-3">
<a href="https://twitter.com/embedchain" target="blank">
<TwitterIcon className="w-6 h-6 text-gray-600 transition duration-75 hover:text-gray-900" />
</a>
<a href="https://github.com/embedchain/embedchain" target="blank">
<GithubIcon className="w-6 h-6 text-gray-600 transition duration-75 hover:text-gray-900" />
</a>
<a
href="https://www.linkedin.com/company/embedchain"
target="blank"
>
<LinkedinIcon className="w-6 h-6 text-gray-600 transition duration-75 hover:text-gray-900" />
</a>
</div>
</div>
</div>
</>
);
}

View File

@@ -0,0 +1,25 @@
import Wrapper from "@/components/PageWrapper";
import Sidebar from "@/containers/Sidebar";
import ChatWindow from "@/containers/ChatWindow";
import { useState } from "react";
import Head from "next/head";
export default function App() {
const [botTitle, setBotTitle] = useState("");
return (
<>
<Head>
<title>{botTitle}</title>
</Head>
<Sidebar />
<Wrapper>
<ChatWindow
embedding_model="open_ai"
app_type="app"
setBotTitle={setBotTitle}
/>
</Wrapper>
</>
);
}

View File

@@ -0,0 +1,14 @@
import "@/styles/globals.css";
import Script from "next/script";
export default function App({ Component, pageProps }) {
return (
<>
<Script
src="https://cdnjs.cloudflare.com/ajax/libs/flowbite/1.7.0/flowbite.min.js"
strategy="beforeInteractive"
/>
<Component {...pageProps} />
</>
);
}

View File

@@ -0,0 +1,18 @@
import { Html, Head, Main, NextScript } from "next/document";
export default function Document() {
return (
<Html lang="en">
<Head>
<link
href="https://cdnjs.cloudflare.com/ajax/libs/flowbite/1.7.0/flowbite.min.css"
rel="stylesheet"
/>
</Head>
<body>
<Main />
<NextScript />
</body>
</Html>
);
}

View File

@@ -0,0 +1,52 @@
import Wrapper from "@/components/PageWrapper";
import Sidebar from "@/containers/Sidebar";
import CreateBot from "@/components/dashboard/CreateBot";
import SetOpenAIKey from "@/components/dashboard/SetOpenAIKey";
import PurgeChats from "@/components/dashboard/PurgeChats";
import DeleteBot from "@/components/dashboard/DeleteBot";
import { useEffect, useState } from "react";
export default function Home() {
const [isKeyPresent, setIsKeyPresent] = useState(false);
useEffect(() => {
fetch("/api/check_key")
.then((response) => response.json())
.then((data) => {
if (data.status === "ok") {
setIsKeyPresent(true);
}
});
}, []);
return (
<>
<Sidebar />
<Wrapper>
<div className="text-center">
<h1 className="mb-4 text-4xl font-extrabold leading-none tracking-tight text-gray-900 md:text-5xl">
Welcome to Embedchain Playground
</h1>
<p className="mb-6 text-lg font-normal text-gray-500 lg:text-xl">
              Embedchain is a Data Platform for LLMs - Load, index, retrieve,
              and sync any unstructured data
</p>
</div>
<div
className={`pt-6 gap-y-4 gap-x-8 ${
isKeyPresent ? "grid lg:grid-cols-2" : "w-[50%] mx-auto"
}`}
>
<SetOpenAIKey setIsKeyPresent={setIsKeyPresent} />
{isKeyPresent && (
<>
<CreateBot />
<DeleteBot />
<PurgeChats />
</>
)}
</div>
</Wrapper>
</>
);
}

View File

@@ -0,0 +1,3 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

View File

@@ -0,0 +1,15 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: [
"./src/**/*.{js,ts,jsx,tsx,mdx}",
"./src/pages/**/*.{js,ts,jsx,tsx,mdx}",
"./src/containers/**/*.{js,ts,jsx,tsx,mdx}",
"./src/components/**/*.{js,ts,jsx,tsx,mdx}",
"./src/app/**/*.{js,ts,jsx,tsx,mdx}",
"./node_modules/flowbite/**/*.js",
],
theme: {
extend: {},
},
plugins: [require("flowbite/plugin")],
};

View File

@@ -0,0 +1,7 @@
### Streamlit Chatbot App (Embedchain + Mistral)
To run it locally:
```bash
streamlit run app.py
```

View File

@@ -0,0 +1,72 @@
import os
import streamlit as st
from embedchain import App
@st.cache_resource
def ec_app():
return App.from_config(config_path="config.yaml")
with st.sidebar:
    huggingface_access_token = st.text_input("Hugging Face Token", key="chatbot_api_key", type="password")
"[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"
"[View the source code](https://github.com/embedchain/examples/mistral-streamlit)"
st.title("💬 Chatbot")
st.caption("🚀 An Embedchain app powered by Mistral!")
if "messages" not in st.session_state:
st.session_state.messages = [
{
"role": "assistant",
"content": """
Hi! I'm a chatbot. I can answer questions and learn new things!\n
Ask me anything and if you want me to learn something do `/add <source>`.\n
I can learn mostly everything. :)
""",
}
]
for message in st.session_state.messages:
with st.chat_message(message["role"]):
st.markdown(message["content"])
if prompt := st.chat_input("Ask me anything!"):
if not st.session_state.chatbot_api_key:
st.error("Please enter your Hugging Face Access Token")
st.stop()
os.environ["HUGGINGFACE_ACCESS_TOKEN"] = st.session_state.chatbot_api_key
app = ec_app()
if prompt.startswith("/add"):
with st.chat_message("user"):
st.markdown(prompt)
st.session_state.messages.append({"role": "user", "content": prompt})
prompt = prompt.replace("/add", "").strip()
with st.chat_message("assistant"):
message_placeholder = st.empty()
message_placeholder.markdown("Adding to knowledge base...")
app.add(prompt)
message_placeholder.markdown(f"Added {prompt} to knowledge base!")
st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
st.stop()
with st.chat_message("user"):
st.markdown(prompt)
st.session_state.messages.append({"role": "user", "content": prompt})
with st.chat_message("assistant"):
msg_placeholder = st.empty()
msg_placeholder.markdown("Thinking...")
full_response = ""
for response in app.chat(prompt):
msg_placeholder.empty()
full_response += response
msg_placeholder.markdown(full_response)
st.session_state.messages.append({"role": "assistant", "content": full_response})

View File

@@ -0,0 +1,17 @@
app:
config:
name: 'mistral-streamlit-app'
llm:
provider: huggingface
config:
model: 'mistralai/Mixtral-8x7B-Instruct-v0.1'
temperature: 0.1
max_tokens: 250
top_p: 0.1
stream: true
embedder:
provider: huggingface
config:
model: 'sentence-transformers/all-mpnet-base-v2'

View File

@@ -0,0 +1,2 @@
streamlit==1.29.0
embedchain

View File

@@ -0,0 +1,129 @@
Fork this repo on [Github](https://github.com/embedchain/embedchain) to create your own NextJS Discord and Slack bots powered by an Embedchain app.
If you run into problems with forking, please refer to the [github docs](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo) on forking a repo.
We will work from the `examples/nextjs` folder, so change your current working directory by running `cd <your_forked_repo>/examples/nextjs`.
# Installation
First, let's install all the required packages and dependencies.
- Install all the required Python packages by running `pip install -r requirements.txt`.
- We will use [Fly.io](https://fly.io/) to deploy our Embedchain app and the Discord/Slack bots. Follow step one of the deployment guide to install the [Fly.io CLI](https://docs.embedchain.ai/deployment/fly_io#step-1-install-flyctl-command-line).
# Development
## Embedchain App
Let's get started by creating an Embedchain app powered by NextJS knowledge. We have already created an Embedchain app using FastAPI in the `ec_app` folder for you. Feel free to ingest data of your choice to power the app; a minimal sketch of how that might look follows.
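For example, a minimal sketch of ingesting an extra source directly with embedchain (the URL below is just an illustration, and `OPENAI_API_KEY` must be set as described in the note that follows):
```python
from embedchain import App

# The same default App that ec_app/app.py wraps behind FastAPI endpoints.
app = App()

# Illustrative source; swap in whatever data you want the bot to know about.
app.add("https://nextjs.org/docs")
```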
---
**NOTE**
Create a `.env` file in this folder and set your OpenAI API key as shown in the `.env.example` file. If you want to use other open-source models, feel free to change the app config in `app.py`; more details on using a custom configuration for an Embedchain app are [available here](https://docs.embedchain.ai/api-reference/advanced/configuration). A sketch of loading such a config follows this note.
---
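As a hedged sketch (the `config.yaml` file name here is illustrative), swapping to a custom configuration might look like this:
```python
from embedchain import App

# Illustrative config file; structure it as described in the configuration docs
# linked in the note above (llm / embedder providers, model names, etc.).
app = App.from_config(config_path="config.yaml")
```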
Before running the `ec` commands to develop or deploy the app, open the `fly.toml` file and update the `name` variable to something unique. This is important because `fly.io` requires every deployment to have a globally unique app name.
Now we need to launch this application with fly.io; once launched, you can see your app on the [fly.io dashboard](https://fly.io/dashboard). Run the following command to launch your app on fly.io:
```bash
fly launch --no-deploy
```
To run the app in development:
```bash
ec dev #To run the app in development environment
```
Run `ec deploy` to deploy your app on Fly.io. Once the app is deployed, save its endpoint; the Discord and Slack bots will send their requests to it (a quick sanity check is sketched below).
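For instance, a quick hedged sanity check with `requests` (the hostname below is a placeholder for whatever endpoint Fly.io gives you) that mirrors the payload the bots send:
```python
import requests

# Placeholder hostname; use the endpoint printed after `ec deploy`.
EC_APP_URL = "https://<your-ec-app>.fly.dev"

# Same shape as the FastAPI /query route used by the Discord and Slack bots.
response = requests.post(f"{EC_APP_URL}/query", json={"question": "What is NextJS?"})
print(response.json()["answer"])
```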
## Discord Bot
For the Discord bot, you will need to create the bot on the Discord developer portal and get the Discord bot token and your Discord bot name.
Keeping the following note in mind, create the Discord bot by following the instructions in our [discord bot docs](https://docs.embedchain.ai/examples/discord_bot) and get the Discord bot token.
---
**NOTE**
You do not need to set `OPENAI_API_KEY` to run this Discord bot. Follow the remaining instructions to create a Discord bot app. We recommend granting the bot the following permissions so it runs without errors:
```
(General Permissions)
Read Message/View Channels
(Text Permissions)
Send Messages
Create Public Thread
Create Private Thread
Send Messages in Thread
Manage Threads
Embed Links
Read Message History
```
---
Once you have your Discord bot token and Discord app name, navigate to the `nextjs_discord` folder, create a `.env` file, and define your Discord bot token, Discord bot name, and the endpoint of your Embedchain app as shown in the `.env.example` file.
To run the app in development:
```bash
python app.py #To run the app in development environment
```
Before deploying the app, open the `fly.toml` file and update the `name` variable to something unique. This is important because `fly.io` requires every deployment to have a globally unique app name.
Now we need to launch this application with fly.io; once launched, you can see your app on the [fly.io dashboard](https://fly.io/dashboard). Run the following command to launch your app on fly.io:
```bash
fly launch --no-deploy
```
Run `ec deploy` to deploy your app on Fly.io. Once your app is deployed, your Discord bot will be live!
## Slack Bot
For the Slack bot, you will need to create the bot on the Slack developer portal and get the Slack bot token and Slack app token.
### Setup
- If you don't already have one, create a Slack workspace by clicking [here](https://slack.com/intl/en-in/).
- Create a new App on your Slack account by going [here](https://api.slack.com/apps).
- Select `From Scratch`, then enter the Bot Name and select your workspace.
- Go to `App Credentials` section on the `Basic Information` tab from the left sidebar, create your app token and save it in your `.env` file as `SLACK_APP_TOKEN`.
- Go to the `Socket Mode` tab in the left sidebar and enable Socket Mode so the bot can listen to Slack messages from your workspace.
- (Optional) Under the `App Home` tab, you can change your app's display name and default name.
- Navigate to the `Event Subscriptions` tab and enable event subscriptions so that we can listen to Slack events.
- Once event subscriptions are enabled, subscribe to bot events so the bot is authorized to listen to its app mention events: click the `Add Bot User Event` button and select `app_mention` (a minimal listener sketch follows this list).
- On the left sidebar, go to `OAuth & Permissions` and add the following scopes under `Bot Token Scopes`:
```text
app_mentions:read
channels:history
channels:read
chat:write
emoji:read
reactions:write
reactions:read
```
- Now select the option `Install to Workspace` and after it's done, copy the `Bot User OAuth Token` and set it in your `.env` file as `SLACK_BOT_TOKEN`.
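To tie the steps above together, here is a minimal, hypothetical sketch of a Socket Mode listener (assuming the `slack_bolt` package and an `EC_APP_URL` variable like the Discord bot's `.env`); the actual bot in the `nextjs_slack` folder remains the source of truth:
```python
import os

import requests
from slack_bolt import App
from slack_bolt.adapter.socket_mode import SocketModeHandler

# SLACK_BOT_TOKEN is the Bot User OAuth Token; SLACK_APP_TOKEN enables Socket Mode.
app = App(token=os.environ["SLACK_BOT_TOKEN"])


@app.event("app_mention")
def handle_mention(event, say):
    # Forward the mention text to the deployed Embedchain app's /query route.
    question = event["text"]
    response = requests.post(os.environ["EC_APP_URL"] + "/query", json={"question": question})
    say(response.json().get("answer", "Sorry, something went wrong."))


if __name__ == "__main__":
    SocketModeHandler(app, os.environ["SLACK_APP_TOKEN"]).start()
```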
Once you have your Slack bot token and Slack app token, navigate to the `nextjs_slack` folder, create a `.env` file, and define your Slack bot token, Slack app token, and the endpoint of your Embedchain app as shown in the `.env.example` file.
To run the app in development:
```bash
python app.py #To run the app in development environment
```
Before deploying the app, open the `fly.toml` file and update the `name` variable to something unique. This is important because `fly.io` requires every deployment to have a globally unique app name.
Now we need to launch this application with fly.io; once launched, you can see your app on the [fly.io dashboard](https://fly.io/dashboard). Run the following command to launch your app on fly.io:
```bash
fly launch --no-deploy
```
Run `ec deploy` to deploy your app on Fly.io. Once your app is deployed, your Slack bot will be live!

View File

@@ -0,0 +1 @@
db/

View File

@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx

View File

@@ -0,0 +1,13 @@
FROM python:3.11-slim
WORKDIR /app
COPY requirements.txt /app/
RUN pip install -r requirements.txt
COPY . /app
EXPOSE 8080
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]

View File

@@ -0,0 +1,56 @@
from dotenv import load_dotenv
from fastapi import FastAPI, responses
from pydantic import BaseModel
from embedchain import App
load_dotenv(".env")
app = FastAPI(title="Embedchain FastAPI App")
embedchain_app = App()
class SourceModel(BaseModel):
source: str
class QuestionModel(BaseModel):
question: str
@app.post("/add")
async def add_source(source_model: SourceModel):
"""
Adds a new source to the EmbedChain app.
Expects a JSON with a "source" key.
"""
source = source_model.source
embedchain_app.add(source)
return {"message": f"Source '{source}' added successfully."}
@app.post("/query")
async def handle_query(question_model: QuestionModel):
"""
Handles a query to the EmbedChain app.
Expects a JSON with a "question" key.
"""
question = question_model.question
answer = embedchain_app.query(question)
return {"answer": answer}
@app.post("/chat")
async def handle_chat(question_model: QuestionModel):
"""
Handles a chat request to the EmbedChain app.
Expects a JSON with a "question" key.
"""
question = question_model.question
response = embedchain_app.chat(question)
return {"response": response}
@app.get("/")
async def root():
return responses.RedirectResponse(url="/docs")

View File

@@ -0,0 +1,3 @@
{
"provider": "fly.io"
}

View File

@@ -0,0 +1,22 @@
# fly.toml app configuration file generated for ec-app-crimson-dew-123 on 2024-01-04T06:48:40+05:30
#
# See https://fly.io/docs/reference/configuration/ for information about how to use this file.
#
app = "ec-app-crimson-dew-123"
primary_region = "sjc"
[build]
[http_service]
internal_port = 8080
force_https = true
auto_stop_machines = false
auto_start_machines = true
min_machines_running = 0
processes = ["app"]
[[vm]]
cpu_kind = "shared"
cpus = 1
memory_mb = 1024

View File

@@ -0,0 +1,4 @@
fastapi==0.104.0
uvicorn==0.23.2
embedchain
beautifulsoup4

View File

@@ -0,0 +1 @@
db/

View File

@@ -0,0 +1,3 @@
DISCORD_BOT_TOKEN=xxxx
DISCORD_BOT_NAME=your_bot_name
EC_APP_URL=your_embedchain_app_url

View File

@@ -0,0 +1,11 @@
FROM python:3.11-slim
WORKDIR /app
COPY requirements.txt /app
RUN pip install -r requirements.txt
COPY . /app
CMD ["python", "app.py"]

View File

@@ -0,0 +1,111 @@
import logging
import os
import discord
import dotenv
import requests
dotenv.load_dotenv(".env")
intents = discord.Intents.default()
intents.message_content = True
client = discord.Client(intents=intents)
discord_bot_name = os.environ["DISCORD_BOT_NAME"]
logger = logging.getLogger(__name__)
class NextJSBot:
def __init__(self) -> None:
logger.info("NextJS Bot powered with embedchain.")
def add(self, _):
raise ValueError("Add is not implemented yet")
def query(self, message, citations: bool = False):
url = os.environ["EC_APP_URL"] + "/query"
payload = {
"question": message,
"citations": citations,
}
try:
response = requests.request("POST", url, json=payload)
try:
response = response.json()
except Exception:
logger.error(f"Failed to parse response: {response}")
response = {}
return response
except Exception:
logger.exception(f"Failed to query {message}.")
response = "An error occurred. Please try again!"
return response
def start(self):
discord_token = os.environ["DISCORD_BOT_TOKEN"]
client.run(discord_token)
NEXTJS_BOT = NextJSBot()
@client.event
async def on_ready():
logger.info(f"User {client.user.name} logged in with id: {client.user.id}!")
def _get_question(message):
user_ids = message.raw_mentions
if len(user_ids) > 0:
for user_id in user_ids:
# remove mentions from message
question = message.content.replace(f"<@{user_id}>", "").strip()
return question
async def answer_query(message):
if (
message.channel.type == discord.ChannelType.public_thread
or message.channel.type == discord.ChannelType.private_thread
):
await message.channel.send(
"🧵 Currently, we don't support answering questions in threads. Could you please send your message in the channel for a swift response? Appreciate your understanding! 🚀" # noqa: E501
)
return
question = _get_question(message)
print("Answering question: ", question)
thread = await message.create_thread(name=question)
await thread.send("🎭 Putting on my thinking cap, brb with an epic response!")
response = NEXTJS_BOT.query(question, citations=True)
default_answer = "Sorry, I don't know the answer to that question. Please refer to the documentation.\nhttps://nextjs.org/docs" # noqa: E501
answer = response.get("answer", default_answer)
contexts = response.get("contexts", [])
if contexts:
sources = list(set(map(lambda x: x[1]["url"], contexts)))
answer += "\n\n**Sources**:\n"
        for source in sources:
answer += f"- {source}\n"
sent_message = await thread.send(answer)
await sent_message.add_reaction("😮")
await sent_message.add_reaction("👍")
await sent_message.add_reaction("❤️")
await sent_message.add_reaction("👎")
@client.event
async def on_message(message):
mentions = message.mentions
if len(mentions) > 0 and any([user.bot and user.name == discord_bot_name for user in mentions]):
await answer_query(message)
def start_bot():
NEXTJS_BOT.start()
if __name__ == "__main__":
start_bot()

View File

@@ -0,0 +1,3 @@
{
"provider": "fly.io"
}

Some files were not shown because too many files have changed in this diff