Major Changes:
- Implemented MCP HTTP/SSE transport server for n8n and web clients
- Created mcp_server/http_server.py with FastAPI for JSON-RPC 2.0 over HTTP
- Added health check endpoint (/health) for container monitoring
- Refactored mcp-server/ to mcp_server/ (Python module structure)
- Updated Dockerfile.mcp to run HTTP server with health checks

MCP Server Features:
- 7 memory tools exposed via MCP (add, search, get, update, delete)
- HTTP/SSE transport on port 8765 for n8n integration
- stdio transport for Claude Code integration
- JSON-RPC 2.0 protocol implementation
- CORS support for web clients

n8n Integration:
- Successfully tested with AI Agent workflows
- MCP Client Tool configuration documented
- Working webhook endpoint tested and verified
- System prompt optimized for automatic user_id usage

Documentation:
- Created comprehensive Mintlify documentation site
- Added docs/mcp/introduction.mdx - MCP server overview
- Added docs/mcp/installation.mdx - Installation guide
- Added docs/mcp/tools.mdx - Complete tool reference
- Added docs/examples/n8n.mdx - n8n integration guide
- Added docs/examples/claude-code.mdx - Claude Code setup
- Updated README.md with MCP HTTP server info
- Updated roadmap to mark Phase 1 as complete

Bug Fixes:
- Fixed synchronized delete operations across Supabase and Neo4j
- Updated memory_service.py with proper error handling
- Fixed Neo4j connection issues in delete operations

Configuration:
- Added MCP_HOST and MCP_PORT environment variables
- Updated .env.example with MCP server configuration
- Updated docker-compose.yml with MCP container health checks

Testing:
- Added test scripts for MCP HTTP endpoint verification
- Created test workflows in n8n
- Verified all 7 memory tools working correctly
- Tested synchronized operations across both stores

Version: 1.0.0
Status: Phase 1 Complete - Production Ready

🤖 Generated with Claude Code

Co-Authored-By: Claude <noreply@anthropic.com>
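The commit describes an HTTP transport with a /health endpoint on port 8765 and a JSON-RPC 2.0 interface for the memory tools. Below is a minimal smoke-test sketch of that flow; the /mcp endpoint path, the search_memory tool name, and the argument fields are assumptions for illustration, not the server's confirmed API.

#!/usr/bin/env python3
"""Smoke test for the MCP HTTP server (hedged sketch).

Only the /health endpoint and port 8765 come from the commit message;
the /mcp path, tool name, and argument schema below are guesses.
"""
import requests

BASE_URL = "http://localhost:8765"

# Health check used by the container monitoring described in the commit
resp = requests.get(f"{BASE_URL}/health", timeout=5)
print("health:", resp.status_code, resp.json())

# JSON-RPC 2.0 request listing the exposed MCP tools
list_request = {"jsonrpc": "2.0", "id": 1, "method": "tools/list", "params": {}}
resp = requests.post(f"{BASE_URL}/mcp", json=list_request, timeout=10)  # /mcp path is assumed
print("tools:", resp.json())

# JSON-RPC 2.0 call to one of the memory tools (tool name and arguments assumed)
call_request = {
    "jsonrpc": "2.0",
    "id": 2,
    "method": "tools/call",
    "params": {
        "name": "search_memory",
        "arguments": {"query": "favorite color", "user_id": "demo-user"},
    },
}
resp = requests.post(f"{BASE_URL}/mcp", json=call_request, timeout=10)
print("search result:", resp.json())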
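The Configuration items mention new MCP_HOST and MCP_PORT environment variables, and the sync-check script below reads Neo4j credentials from a settings object in config.py. Here is a sketch of how those pieces could fit together, assuming config.py uses pydantic-settings; the class layout and defaults are guesses, not the repository's actual code.

# config.py (hedged sketch; fields beyond those used by the script below are unknown)
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", extra="ignore")

    # Fields referenced by the sync-check script below
    neo4j_uri: str = "bolt://localhost:7687"
    neo4j_user: str = "neo4j"
    neo4j_password: str = ""

    # MCP HTTP server settings added in this commit (per .env.example)
    mcp_host: str = "0.0.0.0"
    mcp_port: int = 8765


settings = Settings()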
#!/usr/bin/env python3
"""Check synchronization between Supabase and Neo4j stores"""
import asyncio

from mem0 import Memory

from config import mem0_config
from neo4j import GraphDatabase


async def check_stores():
    """Check memory counts in both stores"""
    print("=" * 60)
    print("Memory Store Synchronization Check")
    print("=" * 60)

    # Initialize mem0
    memory = Memory.from_config(mem0_config)

    # Check Supabase (vector store)
    print("\n[1] Checking Supabase (Vector Store)...")
    try:
        # Get all memories - this queries the vector store
        result = memory.get_all()
        supabase_memories = result.get('results', []) if isinstance(result, dict) else result
        print(f"✓ Supabase memories count: {len(supabase_memories)}")

        if supabase_memories:
            print("\nMemories in Supabase:")
            for i, mem in enumerate(supabase_memories[:5], 1):  # Show first 5
                if isinstance(mem, dict):
                    print(f"  {i}. ID: {mem.get('id')}, Memory: {mem.get('memory', 'N/A')[:50]}")
                else:
                    print(f"  {i}. {str(mem)[:50]}")
            if len(supabase_memories) > 5:
                print(f"  ... and {len(supabase_memories) - 5} more")
    except Exception as e:
        print(f"✗ Error checking Supabase: {e}")
        supabase_memories = []

    # Check Neo4j (graph store)
    print("\n[2] Checking Neo4j (Graph Store)...")
    try:
        from config import settings

        driver = GraphDatabase.driver(
            settings.neo4j_uri,
            auth=(settings.neo4j_user, settings.neo4j_password)
        )

        with driver.session() as session:
            # Count all nodes
            result = session.run("MATCH (n) RETURN count(n) as count")
            total_nodes = result.single()['count']
            print(f"✓ Total Neo4j nodes: {total_nodes}")

            # Count by label
            result = session.run("""
                MATCH (n)
                RETURN labels(n) as labels, count(n) as count
                ORDER BY count DESC
            """)
            print("\nNodes by label:")
            for record in result:
                labels = record['labels']
                count = record['count']
                print(f"  • {labels}: {count}")

            # Count relationships
            result = session.run("MATCH ()-[r]->() RETURN count(r) as count")
            total_rels = result.single()['count']
            print(f"\n✓ Total relationships: {total_rels}")

            # Show sample nodes
            result = session.run("""
                MATCH (n)
                RETURN n, labels(n) as labels
                LIMIT 10
            """)
            print("\nSample nodes:")
            for i, record in enumerate(result, 1):
                node = record['n']
                labels = record['labels']
                props = dict(node)
                print(f"  {i}. Labels: {labels}, Properties: {list(props.keys())[:3]}")

        driver.close()

    except Exception as e:
        print(f"✗ Error checking Neo4j: {e}")
        import traceback
        traceback.print_exc()

    # Summary
    print("\n" + "=" * 60)
    print("SUMMARY")
    print("=" * 60)
    print(f"Supabase (Vector Store): {len(supabase_memories)} memories")
    print(f"Neo4j (Graph Store): {total_nodes if 'total_nodes' in locals() else 'ERROR'} nodes, {total_rels if 'total_rels' in locals() else 'ERROR'} relationships")

    # Guard against a NameError when the Neo4j check above failed
    if len(supabase_memories) == 0 and 'total_nodes' in locals() and total_nodes > 0:
        print("\n⚠️ WARNING: Inconsistency detected!")
        print("   → Supabase has 0 memories but Neo4j has nodes")
        print("   → This suggests orphaned graph data")


if __name__ == "__main__":
    asyncio.run(check_stores())
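The bug-fix entry about synchronized deletes pairs directly with this check: deletes issued through the mem0 Memory API should remove the vector entry in Supabase and the related graph data in Neo4j together, so the counts above stay consistent. A minimal sketch, assuming mem0's delete()/delete_all() methods; the check_sync module name and the user_id value are illustrative.

#!/usr/bin/env python3
"""Hedged sketch: delete through mem0 so both stores stay in sync, then re-check."""
import asyncio

from mem0 import Memory

from config import mem0_config
from check_sync import check_stores  # module name for the script above is assumed

memory = Memory.from_config(mem0_config)

# Delete a single memory by id (uncomment and fill in a real id):
# memory.delete(memory_id="<memory-id>")

# Or clear every memory for one user across the vector and graph stores.
memory.delete_all(user_id="demo-user")  # user_id is illustrative

# Re-run the synchronization check to confirm both stores agree.
asyncio.run(check_stores())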