#!/usr/bin/env python3
"""
LangMem MCP Server - Model Context Protocol Server for Claude Code Integration
Provides LangMem memory capabilities to Claude Code via MCP protocol
"""

import asyncio
import json
import logging
import os
from typing import Dict, List, Optional, Any, Union
from uuid import UUID

import httpx
from mcp.server import Server
from mcp.server.models import InitializationOptions
from mcp.server.stdio import stdio_server
from mcp.types import (
    Resource,
    Tool,
    TextContent,
    EmbeddedResource,
    ListResourcesResult,
    ListToolsResult,
    ReadResourceResult,
    CallToolResult,
)
from pydantic import BaseModel, Field

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Configuration
LANGMEM_API_URL = os.getenv("LANGMEM_API_URL", "http://localhost:8765")
LANGMEM_API_KEY = os.getenv("LANGMEM_API_KEY", "langmem_api_key_2025")
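# Example configuration (illustrative only): both values are read from the
# environment by the os.getenv() calls above, so a deployment might export them
# before launching the process. The sample values below are placeholders, not
# real credentials.
#
#   export LANGMEM_API_URL="http://localhost:8765"
#   export LANGMEM_API_KEY="<your-langmem-api-key>"
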
class LangMemMCPServer:
    """LangMem MCP Server implementation"""

    def __init__(self):
        self.server = Server("langmem")
        self.setup_handlers()

    def setup_handlers(self):
        """Setup MCP server handlers"""

        @self.server.list_resources()
        async def list_resources() -> ListResourcesResult:
            """List available LangMem resources"""
            return ListResourcesResult(
                resources=[
                    Resource(
                        uri="langmem://memories",
                        name="Memory Storage",
                        description="Long-term memory storage with AI relationship extraction",
                        mimeType="application/json"
                    ),
                    Resource(
                        uri="langmem://search",
                        name="Memory Search",
                        description="Hybrid vector and graph search capabilities",
                        mimeType="application/json"
                    ),
                    Resource(
                        uri="langmem://relationships",
                        name="AI Relationships",
                        description="AI-extracted relationships between entities",
                        mimeType="application/json"
                    ),
                    Resource(
                        uri="langmem://health",
                        name="System Health",
                        description="LangMem system health and status",
                        mimeType="application/json"
                    )
                ]
            )

        @self.server.read_resource()
        async def read_resource(uri: str) -> ReadResourceResult:
            """Read a specific LangMem resource"""
            try:
                if uri == "langmem://health":
                    # Get system health from the LangMem API
                    async with httpx.AsyncClient() as client:
                        response = await client.get(f"{LANGMEM_API_URL}/health")
                        if response.status_code == 200:
                            health_data = response.json()
                            return ReadResourceResult(
                                contents=[
                                    TextContent(
                                        type="text",
                                        text=json.dumps(health_data, indent=2)
                                    )
                                ]
                            )
                        # Non-200 responses must still return a result instead of
                        # falling through and implicitly returning None
                        return ReadResourceResult(
                            contents=[
                                TextContent(
                                    type="text",
                                    text=f"LangMem API health check failed: {response.status_code}"
                                )
                            ]
                        )

                elif uri == "langmem://memories":
                    # Memory storage summary
                    return ReadResourceResult(
                        contents=[
                            TextContent(
                                type="text",
                                text="LangMem Memory Storage - Use store_memory tool to add memories"
                            )
                        ]
                    )

                elif uri == "langmem://search":
                    # Search capabilities info
                    return ReadResourceResult(
                        contents=[
                            TextContent(
                                type="text",
                                text="LangMem Search - Use search_memories tool for hybrid vector + graph search"
                            )
                        ]
                    )

                elif uri == "langmem://relationships":
                    # AI relationship info
                    return ReadResourceResult(
                        contents=[
                            TextContent(
                                type="text",
                                text="LangMem AI Relationships - Automatic extraction of meaningful relationships using Llama3.2"
                            )
                        ]
                    )

                else:
                    return ReadResourceResult(
                        contents=[
                            TextContent(
                                type="text",
                                text=f"Unknown resource: {uri}"
                            )
                        ]
                    )

            except Exception as e:
                logger.error(f"Error reading resource {uri}: {e}")
                return ReadResourceResult(
                    contents=[
                        TextContent(
                            type="text",
                            text=f"Error reading resource: {str(e)}"
                        )
                    ]
                )

        @self.server.list_tools()
        async def list_tools() -> ListToolsResult:
            """List available LangMem tools"""
            return ListToolsResult(
                tools=[
                    Tool(
                        name="store_memory",
                        description="Store a memory with AI relationship extraction",
                        inputSchema={
                            "type": "object",
                            "properties": {
                                "content": {
                                    "type": "string",
                                    "description": "Content to store in memory"
                                },
                                "user_id": {
                                    "type": "string",
                                    "description": "User identifier"
                                },
                                "session_id": {
                                    "type": "string",
                                    "description": "Session identifier (optional)"
                                },
                                "metadata": {
                                    "type": "object",
                                    "description": "Additional metadata (optional)"
                                }
                            },
                            "required": ["content", "user_id"]
                        }
                    ),
                    Tool(
                        name="search_memories",
                        description="Search memories using hybrid vector + graph search",
                        inputSchema={
                            "type": "object",
                            "properties": {
                                "query": {
                                    "type": "string",
                                    "description": "Search query"
                                },
                                "user_id": {
                                    "type": "string",
                                    "description": "User identifier"
                                },
                                "limit": {
                                    "type": "integer",
                                    "description": "Maximum number of results (default: 10)"
                                },
                                "threshold": {
                                    "type": "number",
                                    "description": "Similarity threshold (default: 0.7)"
                                },
                                "include_graph": {
                                    "type": "boolean",
                                    "description": "Include graph relationships (default: true)"
                                }
                            },
                            "required": ["query", "user_id"]
                        }
                    ),
                    Tool(
                        name="retrieve_memories",
                        description="Retrieve relevant memories for conversation context",
                        inputSchema={
                            "type": "object",
                            "properties": {
                                "messages": {
                                    "type": "array",
                                    "items": {
                                        "type": "object",
                                        "properties": {
                                            "role": {"type": "string"},
                                            "content": {"type": "string"}
                                        }
                                    },
                                    "description": "Conversation messages"
                                },
                                "user_id": {
                                    "type": "string",
                                    "description": "User identifier"
                                },
                                "session_id": {
                                    "type": "string",
                                    "description": "Session identifier (optional)"
                                }
                            },
                            "required": ["messages", "user_id"]
                        }
                    ),
                    Tool(
                        name="get_user_memories",
                        description="Get all memories for a specific user",
                        inputSchema={
                            "type": "object",
                            "properties": {
                                "user_id": {
                                    "type": "string",
                                    "description": "User identifier"
                                },
                                "limit": {
                                    "type": "integer",
                                    "description": "Maximum number of results (default: 50)"
                                },
                                "offset": {
                                    "type": "integer",
                                    "description": "Number of results to skip (default: 0)"
                                }
                            },
                            "required": ["user_id"]
                        }
                    ),
                    Tool(
                        name="delete_memory",
                        description="Delete a specific memory",
                        inputSchema={
                            "type": "object",
                            "properties": {
                                "memory_id": {
                                    "type": "string",
                                    "description": "Memory ID to delete"
                                }
                            },
                            "required": ["memory_id"]
                        }
                    ),
                    Tool(
                        name="health_check",
                        description="Check LangMem system health",
                        inputSchema={
                            "type": "object",
                            "properties": {}
                        }
                    )
                ]
            )

        @self.server.call_tool()
        async def call_tool(name: str, arguments: Dict[str, Any]) -> CallToolResult:
            """Handle tool calls"""
            try:
                headers = {"Authorization": f"Bearer {LANGMEM_API_KEY}"}

                if name == "store_memory":
                    # Store memory
                    async with httpx.AsyncClient() as client:
                        response = await client.post(
                            f"{LANGMEM_API_URL}/v1/memories/store",
                            json=arguments,
                            headers=headers,
                            timeout=60.0
                        )

                        if response.status_code == 200:
                            result = response.json()
                            return CallToolResult(
                                content=[
                                    TextContent(
                                        type="text",
                                        text=f"Memory stored successfully!\nID: {result['id']}\nStatus: {result['status']}\nAI relationship extraction running in background..."
                                    )
                                ]
                            )
                        else:
                            return CallToolResult(
                                content=[
                                    TextContent(
                                        type="text",
                                        text=f"Error storing memory: {response.status_code} - {response.text}"
                                    )
                                ]
                            )

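                # Illustrative store_memory exchange (assumed shapes, not taken from an API
                # spec): the tool arguments are forwarded verbatim as the JSON body, and the
                # success path above only relies on the response containing "id" and "status".
                #
                #   request:  {"content": "Alice prefers dark mode", "user_id": "alice",
                #              "metadata": {"source": "settings"}}
                #   response: {"id": "<memory-uuid>", "status": "stored"}
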
elif name == "search_memories":
|
|
# Search memories
|
|
# Set defaults
|
|
arguments.setdefault("limit", 10)
|
|
arguments.setdefault("threshold", 0.7)
|
|
arguments.setdefault("include_graph", True)
|
|
|
|
async with httpx.AsyncClient() as client:
|
|
response = await client.post(
|
|
f"{LANGMEM_API_URL}/v1/memories/search",
|
|
json=arguments,
|
|
headers=headers,
|
|
timeout=30.0
|
|
)
|
|
|
|
if response.status_code == 200:
|
|
result = response.json()
|
|
|
|
# Format results
|
|
formatted_results = []
|
|
formatted_results.append(f"Found {result['total_count']} memories for query: '{arguments['query']}'")
|
|
formatted_results.append("")
|
|
|
|
for i, memory in enumerate(result['memories'], 1):
|
|
formatted_results.append(f"{i}. {memory['content']}")
|
|
formatted_results.append(f" Similarity: {memory['similarity']:.3f}")
|
|
formatted_results.append(f" ID: {memory['id']}")
|
|
|
|
if 'relationships' in memory and memory['relationships']:
|
|
formatted_results.append(f" Relationships: {len(memory['relationships'])}")
|
|
for rel in memory['relationships'][:3]: # Show first 3
|
|
formatted_results.append(f" → {rel['relationship']} {rel['entity_name']} (conf: {rel['confidence']})")
|
|
formatted_results.append("")
|
|
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text="\n".join(formatted_results)
|
|
)
|
|
]
|
|
)
|
|
else:
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text=f"Error searching memories: {response.status_code} - {response.text}"
|
|
)
|
|
]
|
|
)
|
|
|
|
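                # Sketch of the search response the formatter above assumes (field names come
                # from the dictionary accesses; the concrete values are made up):
                #
                #   {"total_count": 1,
                #    "memories": [
                #      {"id": "<uuid>", "content": "...", "similarity": 0.91,
                #       "relationships": [
                #         {"relationship": "WORKS_AT", "entity_name": "Acme", "confidence": 0.8}
                #       ]}
                #    ]}
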
elif name == "retrieve_memories":
|
|
# Retrieve memories for conversation context
|
|
async with httpx.AsyncClient() as client:
|
|
response = await client.post(
|
|
f"{LANGMEM_API_URL}/v1/memories/retrieve",
|
|
json=arguments,
|
|
headers=headers,
|
|
timeout=30.0
|
|
)
|
|
|
|
if response.status_code == 200:
|
|
result = response.json()
|
|
|
|
# Format results
|
|
formatted_results = []
|
|
formatted_results.append(f"Retrieved {result['total_count']} relevant memories")
|
|
formatted_results.append("")
|
|
|
|
for i, memory in enumerate(result['memories'], 1):
|
|
formatted_results.append(f"{i}. {memory['content']}")
|
|
formatted_results.append(f" Similarity: {memory['similarity']:.3f}")
|
|
|
|
if 'relationships' in memory and memory['relationships']:
|
|
formatted_results.append(f" Relationships:")
|
|
for rel in memory['relationships'][:3]: # Show first 3
|
|
formatted_results.append(f" → {rel['relationship']} {rel['entity_name']}")
|
|
formatted_results.append("")
|
|
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text="\n".join(formatted_results)
|
|
)
|
|
]
|
|
)
|
|
else:
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text=f"Error retrieving memories: {response.status_code} - {response.text}"
|
|
)
|
|
]
|
|
)
|
|
|
|
elif name == "get_user_memories":
|
|
# Get user memories
|
|
user_id = arguments['user_id']
|
|
limit = arguments.get('limit', 50)
|
|
offset = arguments.get('offset', 0)
|
|
|
|
async with httpx.AsyncClient() as client:
|
|
response = await client.get(
|
|
f"{LANGMEM_API_URL}/v1/memories/users/{user_id}?limit={limit}&offset={offset}",
|
|
headers=headers,
|
|
timeout=30.0
|
|
)
|
|
|
|
if response.status_code == 200:
|
|
result = response.json()
|
|
|
|
# Format results
|
|
formatted_results = []
|
|
formatted_results.append(f"User {user_id} has {result['total_count']} memories")
|
|
formatted_results.append(f"Showing {len(result['memories'])} results (offset: {offset})")
|
|
formatted_results.append("")
|
|
|
|
for i, memory in enumerate(result['memories'], 1):
|
|
formatted_results.append(f"{i}. {memory['content']}")
|
|
formatted_results.append(f" ID: {memory['id']}")
|
|
formatted_results.append(f" Created: {memory['created_at']}")
|
|
if memory.get('metadata'):
|
|
formatted_results.append(f" Metadata: {memory['metadata']}")
|
|
formatted_results.append("")
|
|
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text="\n".join(formatted_results)
|
|
)
|
|
]
|
|
)
|
|
else:
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text=f"Error getting user memories: {response.status_code} - {response.text}"
|
|
)
|
|
]
|
|
)
|
|
|
|
elif name == "delete_memory":
|
|
# Delete memory
|
|
memory_id = arguments['memory_id']
|
|
|
|
async with httpx.AsyncClient() as client:
|
|
response = await client.delete(
|
|
f"{LANGMEM_API_URL}/v1/memories/{memory_id}",
|
|
headers=headers,
|
|
timeout=30.0
|
|
)
|
|
|
|
if response.status_code == 200:
|
|
result = response.json()
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text=f"Memory deleted successfully!\nID: {result['id']}\nStatus: {result['status']}"
|
|
)
|
|
]
|
|
)
|
|
else:
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text=f"Error deleting memory: {response.status_code} - {response.text}"
|
|
)
|
|
]
|
|
)
|
|
|
|
elif name == "health_check":
|
|
# Health check
|
|
async with httpx.AsyncClient() as client:
|
|
response = await client.get(
|
|
f"{LANGMEM_API_URL}/health",
|
|
timeout=30.0
|
|
)
|
|
|
|
if response.status_code == 200:
|
|
result = response.json()
|
|
|
|
# Format health status
|
|
formatted_results = []
|
|
formatted_results.append(f"LangMem System Status: {result['status']}")
|
|
formatted_results.append("")
|
|
formatted_results.append("Service Status:")
|
|
for service, status in result['services'].items():
|
|
status_emoji = "✅" if status == "healthy" else "❌"
|
|
formatted_results.append(f" {status_emoji} {service}: {status}")
|
|
formatted_results.append("")
|
|
formatted_results.append(f"Last checked: {result['timestamp']}")
|
|
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text="\n".join(formatted_results)
|
|
)
|
|
]
|
|
)
|
|
else:
|
|
return CallToolResult(
|
|
content=[
|
|
TextContent(
|
|
type="text",
|
|
text=f"Error checking health: {response.status_code} - {response.text}"
|
|
)
|
|
]
|
|
)
|
|
|
|
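                # Sketch of the /health payload the formatter above assumes (keys inferred from
                # the accesses; service names are illustrative, not confirmed by this file):
                #
                #   {"status": "healthy",
                #    "services": {"postgres": "healthy", "neo4j": "healthy", "ollama": "healthy"},
                #    "timestamp": "2025-01-01T00:00:00Z"}
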
                else:
                    return CallToolResult(
                        content=[
                            TextContent(
                                type="text",
                                text=f"Unknown tool: {name}"
                            )
                        ]
                    )

            except Exception as e:
                logger.error(f"Error calling tool {name}: {e}")
                return CallToolResult(
                    content=[
                        TextContent(
                            type="text",
                            text=f"Error calling tool {name}: {str(e)}"
                        )
                    ]
                )

    async def run(self):
        """Run the MCP server"""
        try:
            logger.info("Starting LangMem MCP Server...")

            # Test connection to LangMem API
            try:
                async with httpx.AsyncClient() as client:
                    response = await client.get(f"{LANGMEM_API_URL}/health", timeout=10.0)
                    if response.status_code == 200:
                        logger.info("✅ LangMem API is healthy")
                    else:
                        logger.warning(f"⚠️ LangMem API returned status {response.status_code}")
            except Exception as e:
                logger.error(f"❌ Failed to connect to LangMem API: {e}")
                logger.error(f"Make sure LangMem API is running on {LANGMEM_API_URL}")

            # Start the server over stdio
            async with stdio_server() as (read_stream, write_stream):
                await self.server.run(
                    read_stream,
                    write_stream,
                    InitializationOptions(
                        server_name="langmem",
                        server_version="1.0.0",
                        capabilities={
                            "resources": {
                                "subscribe": True,
                                "list_changed": True
                            },
                            "tools": {
                                "list_changed": True
                            }
                        }
                    )
                )

        except Exception as e:
            logger.error(f"Error running MCP server: {e}")
            raise

async def main():
    """Main entry point"""
    server = LangMemMCPServer()
    await server.run()


if __name__ == "__main__":
    asyncio.run(main())
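
# Illustrative usage (assumptions, not part of this file): an MCP client such as
# Claude Code launches this script over stdio. One possible project-level
# registration, assuming the file is saved as mcp_server.py, is a .mcp.json entry:
#
#   {"mcpServers": {"langmem": {"command": "python3", "args": ["mcp_server.py"],
#    "env": {"LANGMEM_API_URL": "http://localhost:8765"}}}}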