Rename embedchain to mem0 and open-source the code for long-term memory (#1474)
Co-authored-by: Deshraj Yadav <deshrajdry@gmail.com>
embedchain/embedchain/deployment/fly.io/.dockerignore (new file, 1 line)
@@ -0,0 +1 @@
db/
embedchain/embedchain/deployment/fly.io/.env.example (new file, 1 line)
@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx
embedchain/embedchain/deployment/fly.io/Dockerfile (new file, 13 lines)
@@ -0,0 +1,13 @@
FROM python:3.11-slim

WORKDIR /app

COPY requirements.txt /app/

RUN pip install -r requirements.txt

COPY . /app

EXPOSE 8080

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]
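Note: before `fly deploy`, the image can be smoke-tested locally, e.g. with `docker build -t embedchain-fly .` followed by `docker run --env-file .env -p 8080:8080 embedchain-fly` (the image tag here is illustrative, not part of this commit).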
embedchain/embedchain/deployment/fly.io/app.py (new file, 56 lines)
@@ -0,0 +1,56 @@
from dotenv import load_dotenv
from fastapi import FastAPI, responses
from pydantic import BaseModel

from embedchain import App

load_dotenv(".env")

app = FastAPI(title="Embedchain FastAPI App")
embedchain_app = App()


class SourceModel(BaseModel):
    source: str


class QuestionModel(BaseModel):
    question: str


@app.post("/add")
async def add_source(source_model: SourceModel):
    """
    Adds a new source to the EmbedChain app.
    Expects a JSON with a "source" key.
    """
    source = source_model.source
    embedchain_app.add(source)
    return {"message": f"Source '{source}' added successfully."}


@app.post("/query")
async def handle_query(question_model: QuestionModel):
    """
    Handles a query to the EmbedChain app.
    Expects a JSON with a "question" key.
    """
    question = question_model.question
    answer = embedchain_app.query(question)
    return {"answer": answer}


@app.post("/chat")
async def handle_chat(question_model: QuestionModel):
    """
    Handles a chat request to the EmbedChain app.
    Expects a JSON with a "question" key.
    """
    question = question_model.question
    response = embedchain_app.chat(question)
    return {"response": response}


@app.get("/")
async def root():
    return responses.RedirectResponse(url="/docs")
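Note: a minimal client sketch for this API, assuming the `requests` library and a local run on port 8080 (neither is part of this commit; the deployed fly.io URL would replace BASE_URL):

import requests

BASE_URL = "http://localhost:8080"  # assumption: app running locally via the Dockerfile above

# Both endpoints take a JSON object, matching the SourceModel / QuestionModel above.
requests.post(f"{BASE_URL}/add", json={"source": "https://example.com"}).raise_for_status()
print(requests.post(f"{BASE_URL}/query", json={"question": "What is on that page?"}).json()["answer"])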
embedchain/embedchain/deployment/fly.io/requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
fastapi==0.104.0
uvicorn==0.23.2
embedchain
beautifulsoup4
embedchain/embedchain/deployment/gradio.app/app.py (new file, 18 lines)
@@ -0,0 +1,18 @@
import os

import gradio as gr

from embedchain import App

os.environ["OPENAI_API_KEY"] = "sk-xxx"

app = App()


def query(message, history):
    return app.chat(message)


demo = gr.ChatInterface(query)

demo.launch()
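Note: running `python app.py` serves the Gradio chat UI (on http://127.0.0.1:7860 by default); `gr.ChatInterface` manages the message history and routes each user message through `app.chat`.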
embedchain/embedchain/deployment/gradio.app/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
gradio==4.11.0
embedchain
embedchain/embedchain/deployment/modal.com/.env.example (new file, 1 line)
@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx
embedchain/embedchain/deployment/modal.com/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
.env
embedchain/embedchain/deployment/modal.com/app.py (new file, 86 lines)
@@ -0,0 +1,86 @@
from dotenv import load_dotenv
from fastapi import Body, FastAPI, responses
from modal import Image, Secret, Stub, asgi_app

from embedchain import App

load_dotenv(".env")

image = Image.debian_slim().pip_install(
    "embedchain",
    "langchain_community==0.2.6",
    "youtube-transcript-api==0.6.1",
    "pytube==15.0.0",
    "beautifulsoup4==4.12.3",
    "slack-sdk==3.21.3",
    "huggingface_hub==0.23.0",
    "gitpython==3.1.38",
    "yt_dlp==2023.11.14",
    "PyGithub==1.59.1",
    "feedparser==6.0.10",
    "newspaper3k==0.2.8",
    "listparser==0.19",
)

stub = Stub(
    name="embedchain-app",
    image=image,
    secrets=[Secret.from_dotenv(".env")],
)

web_app = FastAPI()
embedchain_app = App(name="embedchain-modal-app")


@web_app.post("/add")
async def add(
    source: str = Body(..., description="Source to be added"),
    data_type: str | None = Body(None, description="Type of the data source"),
):
    """
    Adds a new source to the EmbedChain app.
    Expects a JSON with a "source" and "data_type" key.
    "data_type" is optional.
    """
    if source and data_type:
        embedchain_app.add(source, data_type)
    elif source:
        embedchain_app.add(source)
    else:
        return {"message": "No source provided."}
    return {"message": f"Source '{source}' added successfully."}


@web_app.post("/query")
async def query(question: str = Body(..., description="Question to be answered")):
    """
    Handles a query to the EmbedChain app.
    Expects a JSON with a "question" key.
    """
    if not question:
        return {"message": "No question provided."}
    answer = embedchain_app.query(question)
    return {"answer": answer}


@web_app.post("/chat")  # POST, since the endpoint reads a JSON request body
async def chat(question: str = Body(..., description="Question to be answered")):
    """
    Handles a chat request to the EmbedChain app.
    Expects a JSON with a "question" key.
    """
    if not question:
        return {"message": "No question provided."}
    response = embedchain_app.chat(question)
    return {"response": response}


@web_app.get("/")
async def root():
    return responses.RedirectResponse(url="/docs")


@stub.function(image=image)
@asgi_app()
def fastapi_app():
    return web_app
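Note: a minimal client sketch for the endpoints above, assuming the `requests` library (the URL is a placeholder; `modal deploy app.py` prints the real one on deployment):

import requests

BASE_URL = "https://<your-workspace>--embedchain-app-fastapi-app.modal.run"  # placeholder

# /add declares two Body(...) parameters, so FastAPI embeds them as object keys.
requests.post(f"{BASE_URL}/add", json={"source": "https://example.com", "data_type": "web_page"})

# /query declares a single non-embedded scalar Body(...) parameter; FastAPI then
# treats the whole request body as that value, so a bare JSON string is sent
# (despite the docstring mentioning a "question" key).
print(requests.post(f"{BASE_URL}/query", json="What is on that page?").json())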
embedchain/embedchain/deployment/modal.com/requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
modal==0.56.4329
fastapi==0.104.0
uvicorn==0.23.2
embedchain
embedchain/embedchain/deployment/render.com/.env.example (new file, 1 line)
@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx
embedchain/embedchain/deployment/render.com/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
.env
embedchain/embedchain/deployment/render.com/app.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from fastapi import FastAPI, responses
from pydantic import BaseModel

from embedchain import App

app = FastAPI(title="Embedchain FastAPI App")
embedchain_app = App()


class SourceModel(BaseModel):
    source: str


class QuestionModel(BaseModel):
    question: str


@app.post("/add")
async def add_source(source_model: SourceModel):
    """
    Adds a new source to the EmbedChain app.
    Expects a JSON with a "source" key.
    """
    source = source_model.source
    embedchain_app.add(source)
    return {"message": f"Source '{source}' added successfully."}


@app.post("/query")
async def handle_query(question_model: QuestionModel):
    """
    Handles a query to the EmbedChain app.
    Expects a JSON with a "question" key.
    """
    question = question_model.question
    answer = embedchain_app.query(question)
    return {"answer": answer}


@app.post("/chat")
async def handle_chat(question_model: QuestionModel):
    """
    Handles a chat request to the EmbedChain app.
    Expects a JSON with a "question" key.
    """
    question = question_model.question
    response = embedchain_app.chat(question)
    return {"response": response}


@app.get("/")
async def root():
    return responses.RedirectResponse(url="/docs")
embedchain/embedchain/deployment/render.com/render.yaml (new file, 16 lines)
@@ -0,0 +1,16 @@
services:
  - type: web
    name: ec-render-app
    runtime: python
    repo: https://github.com/<your-username>/<repo-name>
    scaling:
      minInstances: 1
      maxInstances: 3
      targetMemoryPercent: 60 # optional if targetCPUPercent is set
      targetCPUPercent: 60 # optional if targetMemoryPercent is set
    buildCommand: pip install -r requirements.txt
    startCommand: uvicorn app:app --host 0.0.0.0
    envVars:
      - key: OPENAI_API_KEY
        value: sk-xxx
    autoDeploy: false # optional
embedchain/embedchain/deployment/render.com/requirements.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
fastapi==0.104.0
uvicorn==0.23.2
embedchain
beautifulsoup4
embedchain/embedchain/deployment/streamlit.io/.env.example (new file, 1 line)
@@ -0,0 +1 @@
OPENAI_API_KEY="sk-xxx"
embedchain/embedchain/deployment/streamlit.io/app.py (new file, 59 lines)
@@ -0,0 +1,59 @@
import streamlit as st

from embedchain import App


@st.cache_resource
def embedchain_bot():
    return App()


st.title("💬 Chatbot")
st.caption("🚀 An Embedchain app powered by OpenAI!")
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": """
        Hi! I'm a chatbot. I can answer questions and learn new things!\n
        Ask me anything and if you want me to learn something do `/add <source>`.\n
        I can learn mostly everything. :)
        """,
        }
    ]

for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

if prompt := st.chat_input("Ask me anything!"):
    app = embedchain_bot()

    if prompt.startswith("/add"):
        with st.chat_message("user"):
            st.markdown(prompt)
            st.session_state.messages.append({"role": "user", "content": prompt})
        prompt = prompt.replace("/add", "").strip()
        with st.chat_message("assistant"):
            message_placeholder = st.empty()
            message_placeholder.markdown("Adding to knowledge base...")
            app.add(prompt)
            message_placeholder.markdown(f"Added {prompt} to knowledge base!")
            st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
        st.stop()

    with st.chat_message("user"):
        st.markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("assistant"):
        msg_placeholder = st.empty()
        msg_placeholder.markdown("Thinking...")
        full_response = ""

        for response in app.chat(prompt):
            msg_placeholder.empty()
            full_response += response

        msg_placeholder.markdown(full_response)
        st.session_state.messages.append({"role": "assistant", "content": full_response})
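Note: with the requirements below installed and OPENAI_API_KEY set (see the .env.example above), the app runs locally with `streamlit run app.py`. In the default configuration `app.chat(prompt)` returns a plain string, so the for loop accumulates it character by character; the displayed answer is the same either way.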
embedchain/embedchain/deployment/streamlit.io/requirements.txt (new file, 2 lines)
@@ -0,0 +1,2 @@
streamlit==1.29.0
embedchain