diff --git a/docs/openmemory/overview.mdx b/docs/openmemory/overview.mdx
index ff46d9d7..edd32fa3 100644
--- a/docs/openmemory/overview.mdx
+++ b/docs/openmemory/overview.mdx
@@ -16,6 +16,33 @@ The OpenMemory MCP Server is a private, local-first memory server that creates a
The OpenMemory MCP Server ensures all memory stays local, structured, and under your control with no cloud sync or external storage.
+## OpenMemory Easy Setup
+
+### Prerequisites
+- Docker
+- OpenAI API Key
+
+You can quickly run OpenMemory with the following command:
+
+```bash
+curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | bash
+```
+
+You should set the `OPENAI_API_KEY` as a global environment variable:
+
+```bash
+export OPENAI_API_KEY=your_api_key
+```
+
+You can also pass the `OPENAI_API_KEY` directly to the script:
+
+```bash
+curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | OPENAI_API_KEY=your_api_key bash
+```
+
+This starts the OpenMemory server and the OpenMemory UI. Note that deleting the containers also deletes the memory store.
+We suggest following the instructions [here](/openmemory/quickstart#setting-up-openmemory) to set up OpenMemory on your local machine for a more persistent memory store.
+
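+When you are done, you can stop everything the script started. This is a minimal sketch, assuming the install script wrote `docker-compose.yml` into the directory you ran it from:
+
+```bash
+# Stop and remove the containers created by run.sh; this also removes the memory store
+docker compose down
+```
+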
## How the OpenMemory MCP Server Works
Built around the Model Context Protocol (MCP), the OpenMemory MCP Server exposes a standardized set of memory tools:
diff --git a/docs/openmemory/quickstart.mdx b/docs/openmemory/quickstart.mdx
index ae379507..e153e712 100644
--- a/docs/openmemory/quickstart.mdx
+++ b/docs/openmemory/quickstart.mdx
@@ -6,6 +6,32 @@ iconType: "solid"
+## OpenMemory Easy Setup
+
+### Prerequisites
+- Docker
+- OpenAI API Key
+
+You can quickly run OpenMemory with the following command:
+
+```bash
+curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | bash
+```
+
+You should set the `OPENAI_API_KEY` as a global environment variable:
+
+```bash
+export OPENAI_API_KEY=your_api_key
+```
+
+You can also pass the `OPENAI_API_KEY` directly to the script:
+
+```bash
+curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | OPENAI_API_KEY=your_api_key bash
+```
+
+This starts the OpenMemory server and the OpenMemory UI. Note that deleting the containers also deletes the memory store.
+We suggest following the instructions below to set up OpenMemory on your local machine for a more persistent memory store.
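+
+Once the script finishes, you can sanity-check that the API is reachable. This is a minimal sketch, assuming the API is exposed on port 8765, the default `NEXT_PUBLIC_API_URL` used by the install script:
+
+```bash
+# Assumes the OpenMemory API started by run.sh is listening on the default port 8765
+curl -I http://localhost:8765
+```
+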
## Setting Up OpenMemory
diff --git a/openmemory/README.md b/openmemory/README.md
index 9404bdbb..0e32c101 100644
--- a/openmemory/README.md
+++ b/openmemory/README.md
@@ -4,6 +4,30 @@ OpenMemory is your personal memory layer for LLMs - private, portable, and open-

+## Easy Setup
+
+### Prerequisites
+- Docker
+- OpenAI API Key
+
+You can quickly run OpenMemory with the following command:
+
+```bash
+curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | bash
+```
+
+You should set the `OPENAI_API_KEY` as a global environment variable:
+
+```bash
+export OPENAI_API_KEY=your_api_key
+```
+
+You can also pass the `OPENAI_API_KEY` directly to the script:
+
+```bash
+curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | OPENAI_API_KEY=your_api_key bash
+```
+
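+To follow what the containers are doing after installation, here is a quick sketch, assuming the script wrote `docker-compose.yml` into the directory you ran it from:
+
+```bash
+# Tail logs from the services started by run.sh
+docker compose logs -f
+```
+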
## Prerequisites
- Docker and Docker Compose
diff --git a/openmemory/api/.dockerignore b/openmemory/api/.dockerignore
new file mode 100644
index 00000000..7909beb9
--- /dev/null
+++ b/openmemory/api/.dockerignore
@@ -0,0 +1,23 @@
+# Ignore all .env files
+**/.env
+**/.env.*
+
+# Ignore all database files
+**/*.db
+**/*.sqlite
+**/*.sqlite3
+
+# Ignore logs
+**/*.log
+
+# Ignore runtime data
+**/node_modules
+**/__pycache__
+**/.pytest_cache
+**/.coverage
+**/coverage
+
+# Ignore Docker runtime files
+**/.dockerignore
+**/Dockerfile
+**/docker-compose*.yml
\ No newline at end of file
diff --git a/openmemory/run.sh b/openmemory/run.sh
new file mode 100755
index 00000000..029b7d4b
--- /dev/null
+++ b/openmemory/run.sh
@@ -0,0 +1,111 @@
+#!/bin/bash
+
+set -e
+
+echo "🚀 Starting OpenMemory installation..."
+
+# Set environment variables
+OPENAI_API_KEY="${OPENAI_API_KEY:-}"
+USER="${USER:-$(whoami)}"
+NEXT_PUBLIC_API_URL="${NEXT_PUBLIC_API_URL:-http://localhost:8765}"
+
+if [ -z "$OPENAI_API_KEY" ]; then
+ echo "❌ OPENAI_API_KEY not set. Please run with: curl -sL https://raw.githubusercontent.com/mem0ai/mem0/main/openmemory/run.sh | OPENAI_API_KEY=your_api_key bash"
+ echo "❌ OPENAI_API_KEY not set. You can also set it as global environment variable: export OPENAI_API_KEY=your_api_key"
+ exit 1
+fi
+
+# Check if Docker is installed
+if ! command -v docker &> /dev/null; then
+ echo "❌ Docker not found. Please install Docker first."
+ exit 1
+fi
+
+# Check if docker compose is available
+if ! docker compose version &> /dev/null; then
+ echo "❌ Docker Compose not found. Please install Docker Compose V2."
+ exit 1
+fi
+
+# Check if the container "mem0_ui" already exists and remove it if necessary
+if [ -n "$(docker ps -aq -f name=mem0_ui)" ]; then
+ echo "⚠️ Found existing container 'mem0_ui'. Removing it..."
+ docker rm -f mem0_ui
+fi
+
+# Find an available port starting from 3000
+echo "🔍 Looking for available port for frontend..."
+for port in {3000..3010}; do
+ if ! lsof -i:$port >/dev/null 2>&1; then
+ FRONTEND_PORT=$port
+ break
+ fi
+done
+
+if [ -z "$FRONTEND_PORT" ]; then
+ echo "❌ Could not find an available port between 3000 and 3010"
+ exit 1
+fi
+
+# Export required variables for Compose and frontend
+export OPENAI_API_KEY
+export USER
+export NEXT_PUBLIC_API_URL
+export NEXT_PUBLIC_USER_ID="$USER"
+export FRONTEND_PORT
+
+# Create docker-compose.yml file
+echo "📝 Creating docker-compose.yml..."
+cat > docker-compose.yml <<EOF
+# ... compose service definitions for the OpenMemory API and UI (elided in this diff) ...
+EOF
+
+# Start the containers (remaining setup steps elided in this diff)
+docker compose up -d
+
+# Open the UI in the default browser
+URL="http://localhost:${FRONTEND_PORT}"
+if command -v xdg-open > /dev/null; then
+ xdg-open "$URL" # Linux
+elif command -v open > /dev/null; then
+ open "$URL" # macOS
+elif command -v start > /dev/null; then
+ start "$URL" # Windows (if run via Git Bash or similar)
+else
+ echo "⚠️ Could not detect a method to open the browser. Please open $URL manually."
+fi
diff --git a/openmemory/ui/.dockerignore b/openmemory/ui/.dockerignore
new file mode 100644
index 00000000..dc61851a
--- /dev/null
+++ b/openmemory/ui/.dockerignore
@@ -0,0 +1,23 @@
+# Ignore all .env files
+**/.env
+
+# Ignore all database files
+**/*.db
+**/*.sqlite
+**/*.sqlite3
+
+# Ignore logs
+**/*.log
+
+# Ignore runtime data
+**/node_modules
+**/__pycache__
+**/.pytest_cache
+**/.coverage
+**/coverage
+
+# Ignore Docker runtime files
+**/.dockerignore
+**/Dockerfile
+**/docker-compose*.yml
\ No newline at end of file