- Fix mem0 library hardcoded US/Pacific timezone in Docker build
- Add TZ=Europe/Prague environment variable to containers
- Add missing ollama Python library to requirements.txt
- Add Ollama environment variables to MCP container
- Include test scripts for Ollama configuration validation

This resolves timestamp issues where memories were created with the incorrect Pacific timezone (-07:00) instead of local time (+02:00).

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
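For context, a minimal sketch of the symptom the first two bullets address. On Linux, code that stamps "local time" resolves it from the `TZ` environment variable, so a container with a baked-in `US/Pacific` default emits -07:00 offsets until `TZ=Europe/Prague` is exported. None of this is mem0 code; it only demonstrates the mechanism:

```python
import os
import time
from datetime import datetime

# What the Docker image effectively hardcoded
os.environ["TZ"] = "US/Pacific"
time.tzset()  # re-read TZ (Unix only)
print(datetime.now().astimezone().isoformat())  # e.g. ...-07:00 (PDT)

# What the containers now export
os.environ["TZ"] = "Europe/Prague"
time.tzset()
print(datetime.now().astimezone().isoformat())  # e.g. ...+02:00 (CEST)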
98 lines · 4.0 KiB · Python
#!/usr/bin/env python3
"""
Test Ollama configuration
"""

import os
import sys
from config import get_settings, get_mem0_config


def test_config():
    """Test configuration loading with Ollama"""

    print("=" * 60)
    print("Testing Ollama Configuration")
    print("=" * 60)

    # Load settings
    print("\n1. Loading settings from .env...")
    try:
        settings = get_settings()
        print(" ✓ Settings loaded successfully")
    except Exception as e:
        print(f" ✗ Failed to load settings: {e}")
        return False

    # Display provider configuration
    print("\n2. Provider Configuration:")
    print(f" LLM Provider: {settings.llm_provider}")
    print(f" Embedder Provider: {settings.embedder_provider}")

    if settings.llm_provider.lower() == "ollama":
        print("\n3. Ollama LLM Settings:")
        print(f" Base URL: {settings.ollama_base_url}")
        print(f" LLM Model: {settings.ollama_llm_model}")

    if settings.embedder_provider.lower() == "ollama":
        print("\n4. Ollama Embedder Settings:")
        print(f" Base URL: {settings.ollama_base_url}")
        print(f" Embedding Model: {settings.ollama_embedding_model}")
        print(f" Embedding Dims: {settings.mem0_embedding_dims}")

    # Generate mem0 config
    print("\n5. Generating mem0 configuration...")
    try:
        mem0_config = get_mem0_config(settings)
        print(" ✓ Mem0 config generated successfully")
    except Exception as e:
        print(f" ✗ Failed to generate mem0 config: {e}")
        return False

    # Display mem0 config
    print("\n6. Mem0 Configuration:")
    print(f" Vector Store: {mem0_config['vector_store']['provider']}")
    print(f" Graph Store: {mem0_config['graph_store']['provider']}")
    print(f" LLM Provider: {mem0_config['llm']['provider']}")
    print(f" LLM Model: {mem0_config['llm']['config'].get('model', 'N/A')}")
    print(f" Embedder Provider: {mem0_config['embedder']['provider']}")
    print(f" Embedder Model: {mem0_config['embedder']['config'].get('model', 'N/A')}")

    # Test Ollama connectivity
    if settings.llm_provider.lower() == "ollama" or settings.embedder_provider.lower() == "ollama":
        print("\n7. Testing Ollama connectivity...")
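        # Imported here (rather than at module level) so httpx is only
        # required when an Ollama provider is actually configured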
        import httpx
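        # Ollama's GET /api/tags endpoint lists locally installed models;
        # a 200 response confirms the server is reachable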
        try:
            response = httpx.get(f"{settings.ollama_base_url}/api/tags", timeout=5.0)
            if response.status_code == 200:
                print(f" ✓ Ollama is reachable at {settings.ollama_base_url}")
                models = response.json()
                model_names = [m['name'] for m in models.get('models', [])]

                # Check if required models are available
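                # Ollama reports models with an explicit tag (e.g. "llama3:latest"),
                # so a bare configured name is also matched against its ":latest" form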
                if settings.llm_provider.lower() == "ollama":
                    if settings.ollama_llm_model in model_names or f"{settings.ollama_llm_model}:latest" in model_names:
                        print(f" ✓ LLM model '{settings.ollama_llm_model}' is available")
                    else:
                        print(f" ✗ LLM model '{settings.ollama_llm_model}' not found")
                        print(f" Available models: {', '.join(model_names[:5])}")

                if settings.embedder_provider.lower() == "ollama":
                    if settings.ollama_embedding_model in model_names or f"{settings.ollama_embedding_model}:latest" in model_names:
                        print(f" ✓ Embedding model '{settings.ollama_embedding_model}' is available")
                    else:
                        print(f" ✗ Embedding model '{settings.ollama_embedding_model}' not found")
            else:
                print(f" ✗ Ollama returned status code: {response.status_code}")
        except Exception as e:
            print(f" ✗ Cannot reach Ollama: {e}")

print("\n" + "=" * 60)
|
|
print("Configuration test completed successfully!")
|
|
print("=" * 60)
|
|
|
|
return True
|
|
|
|
if __name__ == "__main__":
|
|
success = test_config()
|
|
sys.exit(0 if success else 1)
|
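The script is meant to be run directly (the file name is not shown on this page; `test_ollama_config.py` is assumed): `python test_ollama_config.py`. It exits with status 0 when settings load and mem0 config generation succeed, and 1 otherwise, so it can be wired into a container healthcheck or a CI step.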