feat: add development docker compose (#2411)
@@ -7,7 +7,7 @@ iconType: "solid"
 Mem0 provides a REST API server (written using FastAPI). Users can perform all operations through REST endpoints. The API also includes OpenAPI documentation, accessible at `/docs` when the server is running.
 
 <Frame caption="APIs supported by Mem0 REST API Server">
-  <img src="/images/rest-api-server.png" />
+  <img src="/images/rest-api-server.png"/>
 </Frame>
 
 ## Features
@@ -23,67 +23,90 @@ Mem0 provides a REST API server (written using FastAPI). Users can perform all o
 ## Running Locally
 
 <Tabs>
+<Tab title="With Docker Compose">
+The Development Docker Compose comes pre-configured with postgres pgvector, neo4j and a `server/history/history.db` volume for the history database.
+
+The only required environment variable to run the server is `OPENAI_API_KEY`.
+
+1. Create a `.env` file in the `server/` directory and set your environment variables. For example:
+
+```txt
+OPENAI_API_KEY=your-openai-api-key
+```
+
+2. Run the Docker container using Docker Compose:
+
+```bash
+cd server
+docker compose up
+```
+
+3. Access the API at http://localhost:8888.
+
+4. Making changes to the server code or the library code will automatically reload the server.
+</Tab>
+
 <Tab title="With Docker">
 
 1. Create a `.env` file in the current directory and set your environment variables. For example:
 
 ```txt
 OPENAI_API_KEY=your-openai-api-key
 ```
 
 2. Either pull the docker image from docker hub or build the docker image locally.
 
 <Tabs>
 <Tab title="Pull from Docker Hub">
 
 ```bash
 docker pull mem0/mem0-api-server
 ```
 
 </Tab>
 
 <Tab title="Build Locally">
 
 ```bash
 docker build -t mem0-api-server .
 ```
 
 </Tab>
 </Tabs>
 
 3. Run the Docker container:
 
 ```bash
 docker run -p 8000:8000 mem0-api-server --env-file .env
 ```
 
 4. Access the API at http://localhost:8000.
 
 </Tab>
 
 <Tab title="Without Docker">
 
 1. Create a `.env` file in the current directory and set your environment variables. For example:
 
 ```txt
 OPENAI_API_KEY=your-openai-api-key
 ```
 
 2. Install dependencies:
 
 ```bash
 pip install -r requirements.txt
 ```
 
 3. Start the FastAPI server:
 
 ```bash
 uvicorn main:app --reload
 ```
 
 4. Access the API at http://localhost:8000.
 
 </Tab>
 </Tabs>
 
 ## Usage
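With the stack up via `docker compose up`, a quick end-to-end check is to call the REST endpoints directly. A minimal smoke test in Python, assuming the compose port mapping (8888) and the request shapes from the `server/main.py` changes in this diff; the `role`/`content` message fields are an assumption about the server's `Message` model:

```python
import json
import urllib.request

BASE = "http://localhost:8888"  # host port mapped in server/docker-compose.yaml

# Store one memory for a test user via POST /memories.
req = urllib.request.Request(
    f"{BASE}/memories",
    data=json.dumps({
        "messages": [{"role": "user", "content": "I am a vegetarian."}],
        "user_id": "alice",
    }).encode(),
    headers={"Content-Type": "application/json"},
)
print(urllib.request.urlopen(req).read().decode())

# Read it back through the list endpoint (user_id is a query parameter).
print(urllib.request.urlopen(f"{BASE}/memories?user_id=alice").read().decode())
```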
@@ -167,7 +167,7 @@ class MemoryGraph:
             for item in search_results["tool_calls"][0]["arguments"]["entities"]:
                 entity_type_map[item["entity"]] = item["entity_type"]
         except Exception as e:
-            logger.error(f"Error in search tool: {e}")
+            logger.exception(f"Error in search tool: {e}, llm_provider={self.llm_provider}, search_results={search_results}")
 
         entity_type_map = {k.lower().replace(" ", "_"): v.lower().replace(" ", "_") for k, v in entity_type_map.items()}
         logger.debug(f"Entity type map: {entity_type_map}")
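Worth noting why `logger.error` became `logger.exception` here: inside an `except` block, `logger.exception` logs the message at ERROR level and appends the active traceback, so the exact failing line in the tool-call parsing is preserved. A standalone sketch of the difference:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

search_results = {"tool_calls": []}  # an LLM response with no tool calls
try:
    entities = search_results["tool_calls"][0]["arguments"]["entities"]
except Exception as e:
    logger.error(f"error only: {e}")          # one line: "list index out of range"
    logger.exception(f"with traceback: {e}")  # same message plus the full stack trace
```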
@@ -203,14 +203,13 @@ class MemoryGraph:
             tools=_tools,
         )
 
-        if extracted_entities["tool_calls"]:
-            extracted_entities = extracted_entities["tool_calls"][0]["arguments"]["entities"]
-        else:
-            extracted_entities = []
+        entities = []
+        if extracted_entities["tool_calls"]:
+            entities = extracted_entities["tool_calls"][0]["arguments"]["entities"]
 
-        extracted_entities = self._remove_spaces_from_entities(extracted_entities)
-        logger.debug(f"Extracted entities: {extracted_entities}")
-        return extracted_entities
+        entities = self._remove_spaces_from_entities(entities)
+        logger.debug(f"Extracted entities: {entities}")
+        return entities
 
     def _search_graph_db(self, node_list, filters, limit=100):
         """Search similar nodes among and their respective incoming and outgoing relations."""
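The rewritten extraction body defaults `entities` to an empty list and only indexes into `tool_calls` when it is non-empty, so an LLM response with no tool calls falls through cleanly instead of raising `IndexError`. The guard in isolation, with hypothetical response dicts:

```python
def entities_from(response: dict) -> list:
    # Default to an empty list; only index into tool_calls when present.
    entities = []
    if response["tool_calls"]:
        entities = response["tool_calls"][0]["arguments"]["entities"]
    return entities

assert entities_from({"tool_calls": []}) == []
assert entities_from(
    {"tool_calls": [{"arguments": {"entities": [{"entity": "alice"}]}}]}
) == [{"entity": "alice"}]
```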
@@ -347,14 +346,9 @@ class MemoryGraph:
             params = {
                 "source_id": source_node_search_result[0]["elementId(source_candidate)"],
                 "destination_name": destination,
                 "relationship": relationship,
                 "destination_type": destination_type,
                 "destination_embedding": dest_embedding,
                 "user_id": user_id,
             }
             resp = self.graph.query(cypher, params=params)
             results.append(resp)
 
         elif destination_node_search_result and not source_node_search_result:
             cypher = f"""
             MATCH (destination)
@@ -372,14 +366,9 @@ class MemoryGraph:
             params = {
                 "destination_id": destination_node_search_result[0]["elementId(destination_candidate)"],
                 "source_name": source,
                 "relationship": relationship,
                 "source_type": source_type,
                 "source_embedding": source_embedding,
                 "user_id": user_id,
             }
             resp = self.graph.query(cypher, params=params)
             results.append(resp)
 
         elif source_node_search_result and destination_node_search_result:
             cypher = f"""
             MATCH (source)
@@ -396,12 +385,8 @@ class MemoryGraph:
                 "source_id": source_node_search_result[0]["elementId(source_candidate)"],
                 "destination_id": destination_node_search_result[0]["elementId(destination_candidate)"],
                 "user_id": user_id,
                 "relationship": relationship,
             }
             resp = self.graph.query(cypher, params=params)
             results.append(resp)
 
-        elif not source_node_search_result and not destination_node_search_result:
+        else:
             cypher = f"""
             MERGE (n:{source_type} {{name: $source_name, user_id: $user_id}})
             ON CREATE SET n.created = timestamp(), n.embedding = $source_embedding
@@ -415,15 +400,13 @@ class MemoryGraph:
             """
             params = {
                 "source_name": source,
                 "source_type": source_type,
                 "dest_name": destination,
                 "destination_type": destination_type,
                 "source_embedding": source_embedding,
                 "dest_embedding": dest_embedding,
                 "user_id": user_id,
             }
-            resp = self.graph.query(cypher, params=params)
-            results.append(resp)
+        result = self.graph.query(cypher, params=params)
+        results.append(result)
         return results
 
     def _remove_spaces_from_entities(self, entity_list):
@@ -67,6 +67,7 @@ class PGVector(VectorStoreBase):
         Args:
             embedding_model_dims (int): Dimension of the embedding vector.
         """
+        self.cur.execute("CREATE EXTENSION IF NOT EXISTS vector")
         self.cur.execute(
             f"""
             CREATE TABLE IF NOT EXISTS {self.collection_name} (
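The added `CREATE EXTENSION IF NOT EXISTS vector` makes the pgvector setup self-contained. To confirm it took effect against the compose Postgres (host port 8432 and postgres/postgres credentials, per `server/docker-compose.yaml` below), a sketch assuming `psycopg2` is installed on the host:

```python
import psycopg2

# Connection details mirror the postgres service in server/docker-compose.yaml.
conn = psycopg2.connect(
    host="localhost", port=8432,
    dbname="postgres", user="postgres", password="postgres",
)
with conn.cursor() as cur:
    cur.execute("SELECT extname FROM pg_extension WHERE extname = 'vector'")
    print(cur.fetchone())  # ('vector',) once the server has initialized PGVector
conn.close()
```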
server/.env.example (new file)
@@ -0,0 +1,12 @@
+OPENAI_API_KEY=
+NEO4J_URI=
+NEO4J_USERNAME=
+NEO4J_PASSWORD=
+
+
+POSTGRES_HOST=
+POSTGRES_PORT=
+POSTGRES_DB=
+POSTGRES_USER=
+POSTGRES_PASSWORD=
+POSTGRES_COLLECTION_NAME=
server/dev.Dockerfile (new file)
@@ -0,0 +1,25 @@
+FROM python:3.12
+
+WORKDIR /app
+
+# Install Poetry
+RUN curl -sSL https://install.python-poetry.org | python3 -
+ENV PATH="/root/.local/bin:$PATH"
+
+# Copy requirements first for better caching
+COPY server/requirements.txt .
+RUN pip install -r requirements.txt
+
+# Install mem0 in editable mode using Poetry
+WORKDIR /app/packages
+COPY pyproject.toml .
+COPY poetry.lock .
+COPY README.md .
+COPY mem0 ./mem0
+RUN pip install -e .[graph]
+
+# Return to app directory and copy server code
+WORKDIR /app
+COPY server .
+
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]
server/docker-compose.yaml (new file)
@@ -0,0 +1,74 @@
+name: mem0-dev
+
+services:
+  mem0:
+    build:
+      context: ..  # Set context to parent directory
+      dockerfile: server/dev.Dockerfile
+    ports:
+      - "8888:8000"
+    env_file:
+      - .env
+    networks:
+      - mem0_network
+    volumes:
+      - ./history:/app/history  # History db location. By default, it creates a history.db file on the server folder
+      - .:/app  # Server code. This allows to reload the app when the server code is updated
+      - ../mem0:/app/packages/mem0  # Mem0 library. This allows to reload the app when the library code is updated
+    depends_on:
+      postgres:
+        condition: service_healthy
+      neo4j:
+        condition: service_healthy
+    command: uvicorn main:app --host 0.0.0.0 --port 8000 --reload  # Enable auto-reload
+    environment:
+      - PYTHONDONTWRITEBYTECODE=1  # Prevents Python from writing .pyc files
+      - PYTHONUNBUFFERED=1  # Ensures Python output is sent straight to terminal
+
+  postgres:
+    image: ankane/pgvector:v0.5.1
+    restart: on-failure
+    shm_size: "128mb"  # Increase this if vacuuming fails with a "no space left on device" error
+    networks:
+      - mem0_network
+    environment:
+      - POSTGRES_USER=postgres
+      - POSTGRES_PASSWORD=postgres
+    healthcheck:
+      test: ["CMD", "pg_isready", "-q", "-d", "postgres", "-U", "postgres"]
+      interval: 5s
+      timeout: 5s
+      retries: 5
+    volumes:
+      - postgres_db:/var/lib/postgresql/data
+    ports:
+      - "8432:5432"
+  neo4j:
+    image: neo4j:5.26.4
+    networks:
+      - mem0_network
+    healthcheck:
+      test: wget http://localhost:7687 || exit 1
+      interval: 1s
+      timeout: 10s
+      retries: 20
+      start_period: 3s
+    ports:
+      - "8474:7474"  # HTTP
+      - "8687:7687"  # Bolt
+    volumes:
+      - neo4j_data:/data
+    environment:
+      - NEO4J_AUTH=neo4j/mem0graph
+      - NEO4J_PLUGINS=["apoc"]  # Add this line to install APOC
+      - NEO4J_apoc_export_file_enabled=true
+      - NEO4J_apoc_import_file_enabled=true
+      - NEO4J_apoc_import_file_use__neo4j__config=true
+
+volumes:
+  neo4j_data:
+  postgres_db:
+
+networks:
+  mem0_network:
+    driver: bridge
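Because `mem0` waits on the postgres and neo4j healthchecks, the stack is only fully usable once all published ports accept connections. A small host-side poller against the mappings above (8888 API, 8432 Postgres, 8474/8687 Neo4j):

```python
import socket
import time

PORTS = {"api": 8888, "postgres": 8432, "neo4j-http": 8474, "neo4j-bolt": 8687}

def wait(port: int, timeout: float = 120.0) -> bool:
    # Poll the port until it accepts a TCP connection or the deadline passes.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with socket.create_connection(("localhost", port), timeout=2):
                return True
        except OSError:
            time.sleep(1)
    return False

for name, port in PORTS.items():
    print(f"{name:10} :{port} ->", "up" if wait(port) else "timed out")
```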
@@ -6,10 +6,69 @@ from typing import Optional, List, Any, Dict
 from mem0 import Memory
 from dotenv import load_dotenv
 
+import logging
+
+logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
+
 # Load environment variables
 load_dotenv()
 
-MEMORY_INSTANCE = Memory()
+POSTGRES_HOST = os.environ.get("POSTGRES_HOST", "postgres")
+POSTGRES_PORT = os.environ.get("POSTGRES_PORT", "5432")
+POSTGRES_DB = os.environ.get("POSTGRES_DB", "postgres")
+POSTGRES_USER = os.environ.get("POSTGRES_USER", "postgres")
+POSTGRES_PASSWORD = os.environ.get("POSTGRES_PASSWORD", "postgres")
+POSTGRES_COLLECTION_NAME = os.environ.get("POSTGRES_COLLECTION_NAME", "memories")
+
+NEO4J_URI = os.environ.get("NEO4J_URI", "bolt://neo4j:7687")
+NEO4J_USERNAME = os.environ.get("NEO4J_USERNAME", "neo4j")
+NEO4J_PASSWORD = os.environ.get("NEO4J_PASSWORD", "mem0graph")
+
+OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
+HISTORY_DB_PATH = os.environ.get("HISTORY_DB_PATH", "/app/history/history.db")
+
+DEFAULT_CONFIG = {
+    "version": "v1.1",
+    "vector_store": {
+        "provider": "pgvector",
+        "config": {
+            "host": POSTGRES_HOST,
+            "port": int(POSTGRES_PORT),
+            "dbname": POSTGRES_DB,
+            "user": POSTGRES_USER,
+            "password": POSTGRES_PASSWORD,
+            "collection_name": POSTGRES_COLLECTION_NAME,
+        }
+    },
+    "graph_store": {
+        "provider": "neo4j",
+        "config": {
+            "url": NEO4J_URI,
+            "username": NEO4J_USERNAME,
+            "password": NEO4J_PASSWORD
+        }
+    },
+    "llm": {
+        "provider": "openai",
+        "config": {
+            "api_key": OPENAI_API_KEY,
+            "temperature": 0.2,
+            "model": "gpt-4o"
+        }
+    },
+    "embedder": {
+        "provider": "openai",
+        "config": {
+            "api_key": OPENAI_API_KEY,
+            "model": "text-embedding-3-small"
+        }
+    },
+    "history_db_path": HISTORY_DB_PATH,
+}
+
+
+MEMORY_INSTANCE = Memory.from_config(DEFAULT_CONFIG)
 
 app = FastAPI(
     title="Mem0 REST APIs",
@@ -36,6 +95,7 @@ class SearchRequest(BaseModel):
     user_id: Optional[str] = None
     run_id: Optional[str] = None
     agent_id: Optional[str] = None
+    filters: Optional[Dict[str, Any]] = None
 
 
 @app.post("/configure", summary="Configure Mem0")
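The new `filters` field rides along to `MEMORY_INSTANCE.search(...)` via `search_req.model_dump()` in the handler below. A hypothetical `/search` request exercising it, assuming the compose server on port 8888; which filter keys are honored depends on the configured vector store:

```python
import json
import urllib.request

body = {
    "query": "dietary preferences",
    "user_id": "alice",
    "filters": {"agent_id": "travel-bot"},  # hypothetical filter payload
}
req = urllib.request.Request(
    "http://localhost:8888/search",
    data=json.dumps(body).encode(),
    headers={"Content-Type": "application/json"},
)
print(urllib.request.urlopen(req).read().decode())
```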
@@ -59,6 +119,7 @@ def add_memory(memory_create: MemoryCreate):
         response = MEMORY_INSTANCE.add(messages=[m.model_dump() for m in memory_create.messages], **params)
         return JSONResponse(content=response)
     except Exception as e:
+        logging.exception("Error in add_memory:")  # This will log the full traceback
         raise HTTPException(status_code=500, detail=str(e))
@@ -75,6 +136,7 @@ def get_all_memories(
         params = {k: v for k, v in {"user_id": user_id, "run_id": run_id, "agent_id": agent_id}.items() if v is not None}
         return MEMORY_INSTANCE.get_all(**params)
     except Exception as e:
+        logging.exception("Error in get_all_memories:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -84,6 +146,7 @@ def get_memory(memory_id: str):
     try:
         return MEMORY_INSTANCE.get(memory_id)
     except Exception as e:
+        logging.exception("Error in get_memory:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -94,6 +157,7 @@ def search_memories(search_req: SearchRequest):
         params = {k: v for k, v in search_req.model_dump().items() if v is not None and k != "query"}
         return MEMORY_INSTANCE.search(query=search_req.query, **params)
     except Exception as e:
+        logging.exception("Error in search_memories:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -103,6 +167,7 @@ def update_memory(memory_id: str, updated_memory: Dict[str, Any]):
     try:
         return MEMORY_INSTANCE.update(memory_id=memory_id, data=updated_memory)
     except Exception as e:
+        logging.exception("Error in update_memory:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -112,6 +177,7 @@ def memory_history(memory_id: str):
     try:
         return MEMORY_INSTANCE.history(memory_id=memory_id)
     except Exception as e:
+        logging.exception("Error in memory_history:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -122,6 +188,7 @@ def delete_memory(memory_id: str):
         MEMORY_INSTANCE.delete(memory_id=memory_id)
         return {"message": "Memory deleted successfully"}
     except Exception as e:
+        logging.exception("Error in delete_memory:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -139,6 +206,7 @@ def delete_all_memories(
         MEMORY_INSTANCE.delete_all(**params)
         return {"message": "All relevant memories deleted"}
     except Exception as e:
+        logging.exception("Error in delete_all_memories:")
         raise HTTPException(status_code=500, detail=str(e))
@@ -149,6 +217,7 @@ def reset_memory():
         MEMORY_INSTANCE.reset()
         return {"message": "All memories reset"}
     except Exception as e:
+        logging.exception("Error in reset_memory:")
         raise HTTPException(status_code=500, detail=str(e))