Rename embedchain to mem0 and open-source code for long-term memory (#1474)

Co-authored-by: Deshraj Yadav <deshrajdry@gmail.com>
This commit is contained in:
Taranjeet Singh
2024-07-12 07:51:33 -07:00
committed by GitHub
parent 83e8c97295
commit f842a92e25
665 changed files with 9427 additions and 6592 deletions

View File

@@ -0,0 +1 @@
db/

View File

@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx

View File

@@ -0,0 +1,13 @@
# Minimal image for serving the Embedchain FastAPI example with uvicorn.
FROM python:3.11-slim

WORKDIR /app

# Install dependencies first so this layer is cached across source-only changes.
COPY requirements.txt /app/
RUN pip install -r requirements.txt

# Copy the application source.
COPY . /app

EXPOSE 8080

# Serve the FastAPI app object defined in app.py on all interfaces.
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]

View File

@@ -0,0 +1,56 @@
from dotenv import load_dotenv
from fastapi import FastAPI, responses
from pydantic import BaseModel

from embedchain import App

# Pull configuration (e.g. the OpenAI key) from the local .env file.
load_dotenv(".env")

app = FastAPI(title="Embedchain FastAPI App")
embedchain_app = App()


class SourceModel(BaseModel):
    # Request schema for /add: the data source to ingest.
    source: str


class QuestionModel(BaseModel):
    # Request schema for /query and /chat.
    question: str


@app.post("/add")
async def add_source(source_model: SourceModel):
    """Ingest a new source into the EmbedChain app.

    Expects a JSON body with a "source" key.
    """
    new_source = source_model.source
    embedchain_app.add(new_source)
    return {"message": f"Source '{new_source}' added successfully."}


@app.post("/query")
async def handle_query(question_model: QuestionModel):
    """Answer a one-shot question against the EmbedChain app.

    Expects a JSON body with a "question" key.
    """
    asked = question_model.question
    return {"answer": embedchain_app.query(asked)}


@app.post("/chat")
async def handle_chat(question_model: QuestionModel):
    """Answer a question using conversational (chat) context.

    Expects a JSON body with a "question" key.
    """
    asked = question_model.question
    return {"response": embedchain_app.chat(asked)}


@app.get("/")
async def root():
    """Redirect the bare root URL to the interactive API docs."""
    return responses.RedirectResponse(url="/docs")

View File

@@ -0,0 +1,4 @@
fastapi==0.104.0
uvicorn==0.23.2
embedchain
beautifulsoup4

View File

@@ -0,0 +1,18 @@
import os

import gradio as gr

from embedchain import App

# Placeholder key for the example; replace with a real key before running.
os.environ["OPENAI_API_KEY"] = "sk-xxx"

app = App()


def query(message, history):
    """Gradio chat callback: answer *message* via the EmbedChain app.

    *history* is supplied by gr.ChatInterface but is not used here —
    the EmbedChain app keeps its own conversation state.
    """
    return app.chat(message)


demo = gr.ChatInterface(query)
demo.launch()

View File

@@ -0,0 +1,2 @@
gradio==4.11.0
embedchain

View File

@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx

View File

@@ -0,0 +1 @@
.env

View File

@@ -0,0 +1,86 @@
from dotenv import load_dotenv
from fastapi import Body, FastAPI, responses
from modal import Image, Secret, Stub, asgi_app

from embedchain import App

# Load OPENAI_API_KEY (and any other secrets) from the local .env file.
load_dotenv(".env")

# Container image with embedchain plus the loaders for the supported
# data sources (YouTube, web pages, Slack, GitHub, RSS feeds, ...).
image = Image.debian_slim().pip_install(
    "embedchain",
    # Bug fix: was "lanchain_community", which does not exist on PyPI
    # and made the image build fail.
    "langchain_community==0.2.6",
    "youtube-transcript-api==0.6.1",
    "pytube==15.0.0",
    "beautifulsoup4==4.12.3",
    "slack-sdk==3.21.3",
    "huggingface_hub==0.23.0",
    "gitpython==3.1.38",
    "yt_dlp==2023.11.14",
    "PyGithub==1.59.1",
    "feedparser==6.0.10",
    "newspaper3k==0.2.8",
    "listparser==0.19",
)

stub = Stub(
    name="embedchain-app",
    image=image,
    secrets=[Secret.from_dotenv(".env")],
)

web_app = FastAPI()
embedchain_app = App(name="embedchain-modal-app")


@web_app.post("/add")
async def add(
    source: str = Body(..., description="Source to be added"),
    data_type: str | None = Body(None, description="Type of the data source"),
):
    """Add a new source to the EmbedChain app.

    Expects a JSON body with a "source" key and an optional "data_type" key.
    """
    if not source:
        return {"message": "No source provided."}
    if data_type:
        embedchain_app.add(source, data_type)
    else:
        # Let embedchain auto-detect the data type.
        embedchain_app.add(source)
    return {"message": f"Source '{source}' added successfully."}


@web_app.post("/query")
async def query(question: str = Body(..., description="Question to be answered")):
    """Handle a one-shot query to the EmbedChain app.

    Expects a JSON body with a "question" key.
    """
    if not question:
        return {"message": "No question provided."}
    answer = embedchain_app.query(question)
    return {"answer": answer}


# Bug fix: this endpoint was registered as GET while requiring a JSON
# request body; GET requests with bodies are rejected or stripped by many
# HTTP clients and proxies. POST also matches the sibling example apps,
# which all expose /chat as POST.
@web_app.post("/chat")
async def chat(question: str = Body(..., description="Question to be answered")):
    """Handle a chat request (with conversational context) to the EmbedChain app.

    Expects a JSON body with a "question" key.
    """
    if not question:
        return {"message": "No question provided."}
    response = embedchain_app.chat(question)
    return {"response": response}


@web_app.get("/")
async def root():
    """Redirect the bare root URL to the interactive API docs."""
    return responses.RedirectResponse(url="/docs")


@stub.function(image=image)
@asgi_app()
def fastapi_app():
    """Modal entry point: serve the FastAPI application as an ASGI app."""
    return web_app

View File

@@ -0,0 +1,4 @@
modal==0.56.4329
fastapi==0.104.0
uvicorn==0.23.2
embedchain

View File

@@ -0,0 +1 @@
OPENAI_API_KEY=sk-xxx

View File

@@ -0,0 +1 @@
.env

View File

@@ -0,0 +1,53 @@
from fastapi import FastAPI, responses
from pydantic import BaseModel

from embedchain import App

app = FastAPI(title="Embedchain FastAPI App")
embedchain_app = App()


class SourceModel(BaseModel):
    # Request schema for /add: the data source to ingest.
    source: str


class QuestionModel(BaseModel):
    # Request schema for /query and /chat.
    question: str


@app.post("/add")
async def add_source(source_model: SourceModel):
    """Ingest a new source into the EmbedChain app.

    Expects a JSON body with a "source" key.
    """
    incoming = source_model.source
    embedchain_app.add(incoming)
    return {"message": f"Source '{incoming}' added successfully."}


@app.post("/query")
async def handle_query(question_model: QuestionModel):
    """Answer a one-shot question against the EmbedChain app.

    Expects a JSON body with a "question" key.
    """
    return {"answer": embedchain_app.query(question_model.question)}


@app.post("/chat")
async def handle_chat(question_model: QuestionModel):
    """Answer a question using conversational (chat) context.

    Expects a JSON body with a "question" key.
    """
    return {"response": embedchain_app.chat(question_model.question)}


@app.get("/")
async def root():
    """Redirect the bare root URL to the interactive API docs."""
    return responses.RedirectResponse(url="/docs")

View File

@@ -0,0 +1,16 @@
# Render Blueprint (render.yaml) describing one autoscaled Python web service.
services:
  - type: web
    name: ec-render-app
    runtime: python
    # Point this at your fork of the repository.
    repo: https://github.com/<your-username>/<repo-name>
    scaling:
      minInstances: 1
      maxInstances: 3
      targetMemoryPercent: 60 # optional if targetCPUPercent is set
      targetCPUPercent: 60 # optional if targetMemoryPercent is set
    buildCommand: pip install -r requirements.txt
    # Render injects the listening port; uvicorn binds all interfaces.
    startCommand: uvicorn app:app --host 0.0.0.0
    envVars:
      - key: OPENAI_API_KEY
        value: sk-xxx
    autoDeploy: false # optional

View File

@@ -0,0 +1,4 @@
fastapi==0.104.0
uvicorn==0.23.2
embedchain
beautifulsoup4

View File

@@ -0,0 +1 @@
OPENAI_API_KEY="sk-xxx"

View File

@@ -0,0 +1,59 @@
import streamlit as st

from embedchain import App


@st.cache_resource
def embedchain_bot():
    """Create (and cache across Streamlit reruns) the shared EmbedChain app."""
    return App()


st.title("💬 Chatbot")
st.caption("🚀 An Embedchain app powered by OpenAI!")

# Seed the conversation with a greeting on the very first run.
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": """
Hi! I'm a chatbot. I can answer questions and learn new things!\n
Ask me anything and if you want me to learn something do `/add <source>`.\n
I can learn mostly everything. :)
""",
        }
    ]

# Replay the conversation so far.
for past in st.session_state.messages:
    with st.chat_message(past["role"]):
        st.markdown(past["content"])

if prompt := st.chat_input("Ask me anything!"):
    app = embedchain_bot()

    # "/add <source>" ingests a new source instead of asking a question.
    if prompt.startswith("/add"):
        with st.chat_message("user"):
            st.markdown(prompt)
        st.session_state.messages.append({"role": "user", "content": prompt})
        prompt = prompt.replace("/add", "").strip()
        with st.chat_message("assistant"):
            placeholder = st.empty()
            placeholder.markdown("Adding to knowledge base...")
            app.add(prompt)
            placeholder.markdown(f"Added {prompt} to knowledge base!")
            st.session_state.messages.append({"role": "assistant", "content": f"Added {prompt} to knowledge base!"})
        # Halt this rerun; the next user input triggers a fresh run.
        st.stop()

    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    with st.chat_message("assistant"):
        answer_slot = st.empty()
        answer_slot.markdown("Thinking...")
        full_response = ""
        # Accumulate whatever app.chat returns chunk by chunk — presumably
        # streamed response pieces; verify against the embedchain config.
        for piece in app.chat(prompt):
            answer_slot.empty()
            full_response += piece
            answer_slot.markdown(full_response)
        st.session_state.messages.append({"role": "assistant", "content": full_response})

View File

@@ -0,0 +1,2 @@
streamlit==1.29.0
embedchain