fix(llm): consume LLM base URL config in a better way (#1861)

This commit is contained in:
Mathew Shen
2024-09-24 12:35:09 +08:00
committed by GitHub
parent 56ceecb4e3
commit 8511eca03b
5 changed files with 40 additions and 6 deletions

View File

@@ -22,9 +22,9 @@ class BaseLlmConfig(ABC):
# Openrouter specific
models: Optional[list[str]] = None,
route: Optional[str] = "fallback",
openrouter_base_url: Optional[str] = "https://openrouter.ai/api/v1",
openrouter_base_url: Optional[str] = None,
# Openai specific
openai_base_url: Optional[str] = "https://api.openai.com/v1",
openai_base_url: Optional[str] = None,
site_url: Optional[str] = None,
app_name: Optional[str] = None,
# Ollama specific