- Added docker-compose.api-localai.yml for Docker network integration - Updated config.py to support dynamic Supabase connection strings via environment variables - Enhanced documentation with Docker network deployment instructions - Added specific N8N workflow integration guidance - Solved Docker networking issues for container-to-container communication Key improvements: * Container-to-container API access for N8N workflows * Automatic service dependency resolution (Ollama, Supabase) * Comprehensive deployment options for different use cases * Production-ready Docker network configuration 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
132 lines
4.3 KiB
Python
132 lines
4.3 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Configuration management for mem0 system
|
|
"""
|
|
|
|
import os
|
|
from typing import Dict, Any, Optional
|
|
from dataclasses import dataclass
|
|
|
|
@dataclass
class DatabaseConfig:
    """Connection settings for the storage backends.

    Every field is optional; a value of ``None`` means that backend
    has not been configured via the environment.
    """

    supabase_url: Optional[str] = None       # Supabase project URL
    supabase_key: Optional[str] = None       # Supabase anon key
    neo4j_uri: Optional[str] = None          # e.g. bolt://localhost:7687
    neo4j_username: Optional[str] = None     # Neo4j login user
    neo4j_password: Optional[str] = None     # Neo4j login password
|
|
|
|
@dataclass
class LLMConfig:
    """Credentials and endpoints for the LLM providers.

    Fields default to ``None`` when the corresponding provider is
    not configured.
    """

    openai_api_key: Optional[str] = None    # OpenAI API key, if any
    ollama_base_url: Optional[str] = None   # Ollama HTTP endpoint, if any
|
|
|
|
@dataclass
class SystemConfig:
    """Aggregate of every configuration section for the mem0 system."""

    database: DatabaseConfig  # storage backend settings
    llm: LLMConfig            # model provider settings
|
|
|
|
def load_config() -> SystemConfig:
    """Build a :class:`SystemConfig` from environment variables.

    Variables without a value fall back to local-development defaults
    where one exists (Neo4j URI/user, Ollama URL), otherwise to ``None``.
    """
    env = os.getenv
    return SystemConfig(
        database=DatabaseConfig(
            supabase_url=env("SUPABASE_URL"),
            supabase_key=env("SUPABASE_ANON_KEY"),
            neo4j_uri=env("NEO4J_URI", "bolt://localhost:7687"),
            neo4j_username=env("NEO4J_USERNAME", "neo4j"),
            neo4j_password=env("NEO4J_PASSWORD"),
        ),
        llm=LLMConfig(
            openai_api_key=env("OPENAI_API_KEY"),
            ollama_base_url=env("OLLAMA_BASE_URL", "http://localhost:11434"),
        ),
    )
|
|
|
|
def get_mem0_config(config: "SystemConfig", provider: str = "openai") -> Dict[str, Any]:
    """Build the mem0 configuration dictionary.

    Args:
        config: Loaded system configuration (API keys, endpoints).
        provider: Which LLM/embedder provider to configure. ``"openai"``
            is only honored when an OpenAI API key is set; ``"ollama"``
            needs no key. Any other value yields a config with no
            ``llm``/``embedder`` sections.

    Returns:
        A plain dict suitable for mem0's ``Memory.from_config()``.
    """
    # Supabase (pgvector) is always the vector store in this local
    # setup. The previous "if True: ... else: qdrant" fallback was
    # unreachable dead code and has been removed.
    base_config: Dict[str, Any] = {
        "vector_store": {
            "provider": "supabase",
            "config": {
                # SECURITY: the fallback string embeds a credential for
                # the local dev database only. Always set
                # SUPABASE_CONNECTION_STRING in real deployments.
                "connection_string": os.getenv(
                    "SUPABASE_CONNECTION_STRING",
                    "postgresql://supabase_admin:CzkaYmRvc26Y@localhost:5435/postgres",
                ),
                "collection_name": "mem0_working_test",
                "embedding_model_dims": 768,  # nomic-embed-text dimension
            },
        }
    }

    if provider == "openai" and config.llm.openai_api_key:
        base_config["llm"] = {
            "provider": "openai",
            "config": {
                "api_key": config.llm.openai_api_key,
                "model": "gpt-4o-mini",
                "temperature": 0.2,
                "max_tokens": 1500,
            },
        }
        base_config["embedder"] = {
            "provider": "openai",
            "config": {
                "api_key": config.llm.openai_api_key,
                "model": "text-embedding-3-small",
            },
        }
    elif provider == "ollama":
        base_config["llm"] = {
            "provider": "ollama",
            "config": {
                "model": "qwen2.5:7b",
                "ollama_base_url": config.llm.ollama_base_url,
            },
        }
        base_config["embedder"] = {
            "provider": "ollama",
            "config": {
                "model": "nomic-embed-text:latest",
                "ollama_base_url": config.llm.ollama_base_url,
            },
        }

    # Graph memory is optional: only wire up Neo4j when both a URI and
    # a password are present.
    if config.database.neo4j_uri and config.database.neo4j_password:
        base_config["graph_store"] = {
            "provider": "neo4j",
            "config": {
                "url": config.database.neo4j_uri,
                "username": config.database.neo4j_username,
                "password": config.database.neo4j_password,
            },
        }
        base_config["version"] = "v1.1"  # Required for graph memory

    return base_config
|
|
|
|
if __name__ == "__main__":
    # Manual smoke test: report which settings were detected from the
    # environment, then print the generated mem0 config for OpenAI.
    cfg = load_config()
    print("Configuration loaded:")
    print(f" OpenAI API Key: {'Set' if cfg.llm.openai_api_key else 'Not set'}")
    print(f" Supabase URL: {'Set' if cfg.database.supabase_url else 'Not set'}")
    print(f" Neo4j URI: {cfg.database.neo4j_uri}")
    print(f" Ollama URL: {cfg.llm.ollama_base_url}")

    print("\nMem0 OpenAI Config:")
    for key, value in get_mem0_config(cfg, "openai").items():
        print(f" {key}: {value}")