- Configure mem0 to use self-hosted Supabase instead of Qdrant for vector storage
- Update docker-compose to connect containers to localai network
- Install vecs library for Supabase pgvector integration
- Create comprehensive test suite for Supabase + mem0 integration
- Update documentation to reflect Supabase configuration
- All containers now connected to shared localai network
- Successful vector storage and retrieval tests completed

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
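The test suite mentioned above is not shown on this page. Below is a minimal smoke-test sketch of the Supabase + vecs round trip it covers, assuming the vecs package is installed and the self-hosted Supabase Postgres from the compose setup is reachable. The connection string, collection name, and embedding dimension mirror the defaults in the config module below; the SUPABASE_CONNECTION_STRING variable and all names here are illustrative, not the actual test code.

#!/usr/bin/env python3
"""Smoke test sketch: store and query one vector in Supabase (pgvector) via vecs."""

import os

import vecs

# Illustrative override; the default mirrors the self-hosted connection string
# used by the mem0 config module below.
DB_CONNECTION = os.getenv(
    "SUPABASE_CONNECTION_STRING",
    "postgresql://supabase_admin:CzkaYmRvc26Y@localhost:5435/postgres",
)


def main() -> None:
    vx = vecs.create_client(DB_CONNECTION)
    # Same dimension as OpenAI text-embedding-3-small (1536); separate test collection.
    docs = vx.get_or_create_collection(name="mem0_vectors_smoke_test", dimension=1536)

    # Upsert one dummy vector, then query it back.
    docs.upsert(records=[("smoke-test", [0.1] * 1536, {"source": "smoke_test"})])
    matches = docs.query(data=[0.1] * 1536, limit=1)
    assert matches, "expected the smoke-test vector to be returned"
    print("Supabase pgvector round trip OK:", matches)


if __name__ == "__main__":
    main()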
132 lines · 4.3 KiB · Python
#!/usr/bin/env python3
"""
Configuration management for mem0 system
"""

import os
from typing import Dict, Any, Optional
from dataclasses import dataclass


@dataclass
class DatabaseConfig:
    """Database configuration"""
    supabase_url: Optional[str] = None
    supabase_key: Optional[str] = None
    neo4j_uri: Optional[str] = None
    neo4j_username: Optional[str] = None
    neo4j_password: Optional[str] = None


@dataclass
class LLMConfig:
    """LLM configuration"""
    openai_api_key: Optional[str] = None
    ollama_base_url: Optional[str] = None


@dataclass
class SystemConfig:
    """Complete system configuration"""
    database: DatabaseConfig
    llm: LLMConfig


def load_config() -> SystemConfig:
    """Load configuration from environment variables"""
    database_config = DatabaseConfig(
        supabase_url=os.getenv("SUPABASE_URL"),
        supabase_key=os.getenv("SUPABASE_ANON_KEY"),
        neo4j_uri=os.getenv("NEO4J_URI", "bolt://localhost:7687"),
        neo4j_username=os.getenv("NEO4J_USERNAME", "neo4j"),
        neo4j_password=os.getenv("NEO4J_PASSWORD")
    )

    llm_config = LLMConfig(
        openai_api_key=os.getenv("OPENAI_API_KEY"),
        ollama_base_url=os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
    )

    return SystemConfig(database=database_config, llm=llm_config)
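
# Environment variables read by this module (names match the os.getenv calls;
# values are supplied by the deployment):
#
#   SUPABASE_URL                - Supabase API URL; with the anon key it enables
#                                 the Supabase vector store
#   SUPABASE_ANON_KEY           - Supabase anon key
#   SUPABASE_CONNECTION_STRING  - optional override for the Postgres connection
#                                 string used by the vector store (see below)
#   NEO4J_URI                   - defaults to bolt://localhost:7687
#   NEO4J_USERNAME              - defaults to neo4j
#   NEO4J_PASSWORD              - enables the Neo4j graph store when set
#   OPENAI_API_KEY              - enables the OpenAI LLM and embedder config
#   OLLAMA_BASE_URL             - defaults to http://localhost:11434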


def get_mem0_config(config: SystemConfig, provider: str = "openai") -> Dict[str, Any]:
    """Get mem0 configuration dictionary"""
    base_config = {}

    # Use Supabase for vector storage if configured
    if config.database.supabase_url and config.database.supabase_key:
        base_config["vector_store"] = {
            "provider": "supabase",
            "config": {
                # Self-hosted Supabase Postgres; can be overridden via
                # SUPABASE_CONNECTION_STRING instead of relying on the
                # hardcoded local default
                "connection_string": os.getenv(
                    "SUPABASE_CONNECTION_STRING",
                    "postgresql://supabase_admin:CzkaYmRvc26Y@localhost:5435/postgres"
                ),
                "collection_name": "mem0_vectors",
                "embedding_model_dims": 1536  # OpenAI text-embedding-3-small dimension
            }
        }
    else:
        # Fall back to Qdrant if Supabase is not configured
        base_config["vector_store"] = {
            "provider": "qdrant",
            "config": {
                "host": "localhost",
                "port": 6333,
            }
        }

    if provider == "openai" and config.llm.openai_api_key:
        base_config["llm"] = {
            "provider": "openai",
            "config": {
                "api_key": config.llm.openai_api_key,
                "model": "gpt-4o-mini",
                "temperature": 0.2,
                "max_tokens": 1500
            }
        }
        base_config["embedder"] = {
            "provider": "openai",
            "config": {
                "api_key": config.llm.openai_api_key,
                "model": "text-embedding-3-small"
            }
        }
    elif provider == "ollama":
        base_config["llm"] = {
            "provider": "ollama",
            "config": {
                "model": "llama2",
                "base_url": config.llm.ollama_base_url
            }
        }
        base_config["embedder"] = {
            "provider": "ollama",
            "config": {
                "model": "llama2",
                "base_url": config.llm.ollama_base_url
            }
        }

    # Add Neo4j graph store if configured
    if config.database.neo4j_uri and config.database.neo4j_password:
        base_config["graph_store"] = {
            "provider": "neo4j",
            "config": {
                "url": config.database.neo4j_uri,
                "username": config.database.neo4j_username,
                "password": config.database.neo4j_password
            }
        }
        base_config["version"] = "v1.1"  # Required for graph memory

    return base_config


if __name__ == "__main__":
    # Test configuration loading
    config = load_config()
    print("Configuration loaded:")
    print(f" OpenAI API Key: {'Set' if config.llm.openai_api_key else 'Not set'}")
    print(f" Supabase URL: {'Set' if config.database.supabase_url else 'Not set'}")
    print(f" Neo4j URI: {config.database.neo4j_uri}")
    print(f" Ollama URL: {config.llm.ollama_base_url}")

    # Test mem0 config generation
    print("\nMem0 OpenAI Config:")
    mem0_config = get_mem0_config(config, "openai")
    for key, value in mem0_config.items():
        print(f" {key}: {value}")
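
    # Optional end-to-end check (sketch; assumes the mem0ai package is installed
    # and the configured vector/graph stores are reachable). The generated dict
    # can be passed straight to mem0:
    #
    #   from mem0 import Memory
    #   memory = Memory.from_config(mem0_config)
    #   memory.add("Vectors are stored in self-hosted Supabase", user_id="demo")
    #   print(memory.search("Where are vectors stored?", user_id="demo"))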