# LLM Provider Selection
# Options: "openai" or "ollama"
LLM_PROVIDER=openai
EMBEDDER_PROVIDER=openai

# OpenAI Configuration
# Required when LLM_PROVIDER=openai or EMBEDDER_PROVIDER=openai
OPENAI_API_KEY=sk-your-openai-api-key-here

# Ollama Configuration
# Required when LLM_PROVIDER=ollama or EMBEDDER_PROVIDER=ollama
# Ollama must be running and models must be pulled
OLLAMA_BASE_URL=http://localhost:11434
OLLAMA_LLM_MODEL=llama3.1:8b
OLLAMA_EMBEDDING_MODEL=nomic-embed-text

# Supabase Configuration
SUPABASE_CONNECTION_STRING=postgresql://user:password@host:5432/database

# Neo4j Configuration
NEO4J_URI=neo4j://neo4j:7687
NEO4J_USER=neo4j
NEO4J_PASSWORD=your-neo4j-password

# API Configuration
API_HOST=0.0.0.0
API_PORT=8080
API_KEY=your-secure-api-key-here

# MCP Server Configuration
MCP_HOST=0.0.0.0
MCP_PORT=8765

# Mem0 Configuration
MEM0_COLLECTION_NAME=t6_memories
MEM0_EMBEDDING_DIMS=1536
MEM0_VERSION=v1.1

# Docker Network
DOCKER_NETWORK=localai

# Logging
LOG_LEVEL=INFO
LOG_FORMAT=json

# Environment
ENVIRONMENT=development