Integrate self-hosted Supabase with mem0 system

- Configure mem0 to use self-hosted Supabase instead of Qdrant for vector storage (see the config sketch below)
- Update docker-compose to connect containers to localai network
- Install vecs library for Supabase pgvector integration (a vecs smoke test is sketched below)
- Create comprehensive test suite for Supabase + mem0 integration
- Update documentation to reflect Supabase configuration
- All containers now connected to shared localai network
- Successful vector storage and retrieval tests completed
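
For reference, a minimal sketch of the kind of mem0 configuration this change points at the self-hosted Supabase instance. The provider name, option keys, connection string, and model names are assumptions for illustration, not copied from this repository's config.py:

# Hypothetical mem0 config for a self-hosted Supabase vector store.
# Key names and connection details are assumptions, not this repo's config.py.
from mem0 import Memory

mem0_config = {
    "vector_store": {
        "provider": "supabase",  # assumes mem0's vecs-backed Supabase provider
        "config": {
            # placeholder connection string for the local Supabase Postgres
            "connection_string": "postgresql://postgres:postgres@localhost:5432/postgres",
            "collection_name": "mem0_memories",
        },
    },
    "llm": {
        "provider": "openai",
        "config": {"model": "gpt-4o-mini"},
    },
    "embedder": {
        "provider": "openai",
        "config": {"model": "text-embedding-3-small"},
    },
}

memory = Memory.from_config(config_dict=mem0_config)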
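
A vecs smoke test against the same Postgres instance can verify the pgvector side independently of mem0; the connection string, collection name, and dimension below are placeholders:

# Minimal vecs smoke test against the self-hosted Supabase Postgres.
# Connection string, collection name, and dimension are placeholders.
import vecs

DB_URL = "postgresql://postgres:postgres@localhost:5432/postgres"

client = vecs.create_client(DB_URL)
docs = client.get_or_create_collection(name="mem0_smoke_test", dimension=3)

# Upsert one record: (id, vector, metadata)
docs.upsert(records=[("vec1", [0.1, 0.2, 0.3], {"source": "smoke-test"})])

# A nearest-neighbour query should return the record we just inserted
print(docs.query(data=[0.1, 0.2, 0.3], limit=1))

client.disconnect()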

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
Docker Config Backup
2025-07-31 06:57:10 +02:00
parent 724c553a2e
commit 41cd78207a
36 changed files with 2533 additions and 405 deletions

test_openai.py (new file, 77 lines)

@@ -0,0 +1,77 @@
#!/usr/bin/env python3
"""
Test OpenAI integration with mem0
"""
import os

from dotenv import load_dotenv
from mem0 import Memory

from config import load_config, get_mem0_config

# Load environment variables from .env file if it exists
load_dotenv()


def test_openai_integration():
    """Test mem0 with OpenAI integration"""
    # Load configuration
    config = load_config()

    if not config.llm.openai_api_key:
        print("❌ OPENAI_API_KEY not found in environment variables")
        print("Please set your OpenAI API key in .env file or environment")
        return False

    try:
        print("Testing mem0 with OpenAI integration...")

        # Get mem0 configuration for OpenAI
        mem0_config = get_mem0_config(config, "openai")
        print(f"✅ Configuration loaded: {list(mem0_config.keys())}")

        # Initialize Memory with OpenAI
        print("Initializing mem0 Memory with OpenAI...")
        memory = Memory.from_config(config_dict=mem0_config)
        print("✅ Memory initialized successfully")

        # Test basic memory operations
        print("\nTesting basic memory operations...")

        # Add a memory
        print("Adding test memory...")
        messages = [
            {"role": "user", "content": "I love machine learning and AI. My favorite framework is PyTorch."},
            {"role": "assistant", "content": "That's great! PyTorch is indeed a powerful framework for AI development."}
        ]
        result = memory.add(messages, user_id="test_user")
        print(f"✅ Memory added: {result}")

        # Search memories
        print("\nSearching memories...")
        search_results = memory.search(query="AI framework", user_id="test_user")
        print(f"✅ Search results: {len(search_results)} memories found")
        for i, result in enumerate(search_results):
            print(f" {i+1}. {result['memory'][:100]}...")

        # Get all memories
        print("\nRetrieving all memories...")
        all_memories = memory.get_all(user_id="test_user")
        print(f"✅ Total memories: {len(all_memories)}")

        return True

    except Exception as e:
        print(f"❌ Error during OpenAI integration test: {e}")
        return False


if __name__ == "__main__":
    success = test_openai_integration()

    if success:
        print("\n🎉 OpenAI integration test passed!")
    else:
        print("\n💥 OpenAI integration test failed!")
        print("\nTo run this test:")
        print("1. Copy .env.example to .env")
        print("2. Add your OpenAI API key to .env")
        print("3. Run: python test_openai.py")