improvement(OpenMemory MCP): Improves Docker Compose commands (#2681)

Author: Saket Aryan
Date: 2025-05-14 13:44:08 +05:30
Committed by: GitHub
Co-authored-by: Deshraj Yadav <deshrajdry@gmail.com>
Parent: da59412150
Commit: a22287a3ba
10 changed files with 45 additions and 78 deletions

View File

@@ -19,17 +19,13 @@ git clone https://github.com/mem0ai/mem0.git
cd mem0/openmemory
# Create the backend .env file with your OpenAI key
pushd api && echo "OPENAI_API_KEY=your_key_here" > .env && popd
make env
# Build the Docker images
make build
# Start all services (API server, vector database, and MCP server components)
make up
# Start the frontend
cp ui/.env.example ui/.env
make ui
make up
```
You can configure the MCP client using the following command (replace username with your username):
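The command itself is collapsed out of this hunk. Purely as a hypothetical sketch (the endpoint path, client name, and config shape below are assumptions, not taken from this commit), an SSE-based MCP client entry for the local server started above usually only needs the API base URL from this setup plus your username:
```bash
# Hypothetical sketch only -- the real command is in the repository docs and
# is not shown in this hunk. Exact config schema varies by MCP client, and the
# /mcp/<client>/sse/<username> path shape is an assumption, not confirmed here.
USERNAME="your-username"   # placeholder; substitute your actual username

cat <<EOF
{
  "mcpServers": {
    "openmemory": {
      "url": "http://localhost:8765/mcp/claude/sse/${USERNAME}"
    }
  }
}
EOF
```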
@@ -44,4 +40,4 @@ Once set up, OpenMemory runs locally on your machine, ensuring all your AI memor
### Getting Started Today
- Github Repository: https://github.com/mem0ai/mem0/openmemory

View File

@@ -1,4 +1,4 @@
.PHONY: help up down logs shell migrate test test-clean env ui-install ui-start ui-dev
.PHONY: help up down logs shell migrate test test-clean env ui-install ui-start ui-dev ui-build ui-dev-start
NEXT_PUBLIC_USER_ID=$(USER)
NEXT_PUBLIC_API_URL=http://localhost:8765
@@ -16,55 +16,37 @@ help:
@echo " make test-clean - Run tests and clean up volumes"
@echo " make ui-install - Install frontend dependencies"
@echo " make ui-start - Start the frontend development server"
@echo " make ui - Install dependencies and start the frontend"
@echo " make ui-dev - Install dependencies and start the frontend in dev mode"
@echo " make ui - Install dependencies and start the frontend in production mode"
env:
cd api && cp .env.example .env
cd ui && cp .env.example .env
build:
cd api && docker-compose build
docker compose build
up:
cd api && docker-compose up
NEXT_PUBLIC_USER_ID=$(USER) NEXT_PUBLIC_API_URL=$(NEXT_PUBLIC_API_URL) docker compose up
down:
cd api && docker-compose down -v
docker compose down -v
rm -f api/openmemory.db
logs:
cd api && docker-compose logs -f
docker compose logs -f
shell:
cd api && docker-compose exec api bash
docker compose exec api bash
upgrade:
cd api && docker-compose exec api alembic upgrade head
docker compose exec api alembic upgrade head
migrate:
cd api && docker-compose exec api alembic upgrade head
docker compose exec api alembic upgrade head
downgrade:
cd api && docker-compose exec api alembic downgrade -1
docker compose exec api alembic downgrade -1
test:
cd api && docker-compose run --rm api pytest tests/ -v
test-clean:
cd api && docker-compose run --rm api pytest tests/ -v && docker-compose down -v
# Frontend commands
ui-install:
cd ui && pnpm install
ui-build:
cd ui && pnpm build
ui-start:
cd ui && NEXT_PUBLIC_USER_ID=$(USER) NEXT_PUBLIC_API_URL=$(NEXT_PUBLIC_API_URL) pnpm start
ui-dev-start:
cd ui && NEXT_PUBLIC_USER_ID=$(USER) NEXT_PUBLIC_API_URL=$(NEXT_PUBLIC_API_URL) && pnpm dev
ui-dev: ui-install ui-dev-start
ui: ui-install ui-build ui-start
ui-dev:
cd ui && NEXT_PUBLIC_USER_ID=$(USER) NEXT_PUBLIC_API_URL=$(NEXT_PUBLIC_API_URL) pnpm install && pnpm dev

View File

@@ -15,9 +15,8 @@ OpenMemory is your personal memory layer for LLMs - private, portable, and open-
You can run the project using the following two commands:
```bash
make build # builds the mcp server
make up # runs openmemory mcp server
make ui # runs openmemory ui
make build # builds the mcp server and ui
make up # runs openmemory mcp server and ui
```
After running these commands, you will have:
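The list that follows this line is collapsed in the diff. As a rough way to confirm the stack came up, the ports wired together in the docker-compose.yml later in this commit can be probed directly; the `/docs` path below assumes the API is the FastAPI app served by the `uvicorn main:app` command shown there:
```bash
# Rough sanity checks against the ports declared in this commit's compose file.
curl -I http://localhost:8765/docs   # openmemory-mcp API (assumed FastAPI docs route)
curl -I http://localhost:3000        # openmemory-ui frontend
curl -I http://localhost:6333        # mem0_store vector database
```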

View File

@@ -1,2 +1 @@
OPENAI_API_KEY=sk-...
USER=username
OPENAI_API_KEY=sk-xxx

View File

@@ -11,7 +11,14 @@ The easiest way to get started is using Docker. Make sure you have Docker and Do
make build
```
2. Start the services:
2. Create `.env` file:
```bash
make env
```
Once you run this command, edit the file `api/.env` and enter the `OPENAI_API_KEY`.
3. Start the services:
```bash
make up
```
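Step 2 above asks you to edit `api/.env`. Based on the `api/.env.example` change elsewhere in this commit, the finished file is just the one key; a sketch with a placeholder value:
```bash
# api/.env -- created by `make env`, which copies api/.env.example
OPENAI_API_KEY=sk-xxx   # replace with your real OpenAI API key
```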

View File

@@ -5,22 +5,31 @@ services:
- "6333:6333"
volumes:
- mem0_storage:/mem0/storage
api:
openmemory-mcp:
image: mem0/openmemory-mcp
build: .
build: api/
environment:
- OPENAI_API_KEY
- USER
env_file:
- .env
- api/.env
depends_on:
- mem0_store
ports:
- "8765:8765"
volumes:
- .:/usr/src/openmemory
- ./api:/usr/src/openmemory
command: >
sh -c "uvicorn main:app --host 0.0.0.0 --port 8765 --reload --workers 4"
openmemory-ui:
build:
context: ui/
dockerfile: Dockerfile
image: mem0/openmemory-ui:latest
ports:
- "3000:3000"
environment:
- NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
- NEXT_PUBLIC_USER_ID=${USER}
volumes:
mem0_storage:

View File

@@ -1,2 +0,0 @@
NEXT_PUBLIC_API_URL=NEXT_PUBLIC_API_URL
NEXT_PUBLIC_USER_ID=NEXT_PUBLIC_USER_ID

View File

@@ -1,2 +1,2 @@
NEXT_PUBLIC_API_URL=http://localhost:8765
NEXT_PUBLIC_USER_ID=default-user
NEXT_PUBLIC_API_URL=NEXT_PUBLIC_API_URL
NEXT_PUBLIC_USER_ID=NEXT_PUBLIC_USER_ID

View File

@@ -10,16 +10,12 @@ RUN apk add --no-cache libc6-compat curl && \
WORKDIR /app
# Dependencies stage
FROM base AS deps
# Copy lockfile and manifest
COPY package.json pnpm-lock.yaml ./
# Install dependencies using pnpm
RUN pnpm install --frozen-lockfile
# Builder stage
FROM base AS builder
WORKDIR /app
@@ -28,30 +24,24 @@ COPY --from=deps /app/pnpm-lock.yaml ./pnpm-lock.yaml
COPY . .
RUN cp next.config.dev.mjs next.config.mjs
RUN cp .env.dev .env
RUN cp .env.example .env
RUN pnpm build
# Production runner stage
FROM base AS runner
WORKDIR /app
ENV NODE_ENV=production
# Create non-root user for security
RUN addgroup --system --gid 1001 nodejs && \
adduser --system --uid 1001 nextjs
# Copy production dependencies and built artifacts
COPY --from=builder /app/public ./public
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
# Copy and prepare entrypoint script
COPY --chown=nextjs:nodejs entrypoint.sh /home/nextjs/entrypoint.sh
RUN chmod +x /home/nextjs/entrypoint.sh
# Switch to non-root user
USER nextjs
EXPOSE 3000

View File

@@ -1,13 +0,0 @@
services:
frontend:
build:
context: .
dockerfile: Dockerfile
image: mem0/openmemory-ui:latest
ports:
- "3000:3000"
env_file:
- .env
environment:
- NEXT_PUBLIC_API_URL=NEXT_PUBLIC_API_URL
- NEXT_PUBLIC_USER_ID=NEXT_PUBLIC_USER_ID