fix(llm): consume llm base url config with a better way (#1861)
@@ -22,9 +22,9 @@ class BaseLlmConfig(ABC):
         # Openrouter specific
         models: Optional[list[str]] = None,
         route: Optional[str] = "fallback",
-        openrouter_base_url: Optional[str] = "https://openrouter.ai/api/v1",
+        openrouter_base_url: Optional[str] = None,
         # Openai specific
-        openai_base_url: Optional[str] = "https://api.openai.com/v1",
+        openai_base_url: Optional[str] = None,
         site_url: Optional[str] = None,
         app_name: Optional[str] = None,
         # Ollama specific
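The hunk above removes the hard-coded provider URLs from the config defaults; the client hunks below now resolve the base URL themselves, preferring an explicit config value, then an environment variable, then the provider default. A minimal sketch of that resolution order, assuming a hypothetical helper (resolve_base_url is illustrative and not part of this change):

import os
from typing import Optional

def resolve_base_url(config_value: Optional[str], env_var: str, default: str) -> str:
    # Precedence introduced by this change: explicit config value,
    # then environment variable, then the provider's public endpoint.
    return config_value or os.getenv(env_var) or default

# Mirrors the OpenAI branch in the hunk below.
print(resolve_base_url(None, "OPENAI_API_BASE", "https://api.openai.com/v1"))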
@@ -18,11 +18,11 @@ class OpenAILLM(LLMBase):
         if os.environ.get("OPENROUTER_API_KEY"):  # Use OpenRouter
             self.client = OpenAI(
                 api_key=os.environ.get("OPENROUTER_API_KEY"),
-                base_url=self.config.openrouter_base_url,
+                base_url=self.config.openrouter_base_url or os.getenv("OPENROUTER_API_BASE") or "https://openrouter.ai/api/v1",
             )
         else:
             api_key = self.config.api_key or os.getenv("OPENAI_API_KEY")
-            base_url = os.getenv("OPENAI_API_BASE") or self.config.openai_base_url
+            base_url = self.config.openai_base_url or os.getenv("OPENAI_API_BASE") or "https://api.openai.com/v1"
             self.client = OpenAI(api_key=api_key, base_url=base_url)
 
     def _parse_response(self, response, tools):
@@ -16,7 +16,7 @@ class OpenAIStructuredLLM(LLMBase):
             self.config.model = "gpt-4o-2024-08-06"
 
         api_key = self.config.api_key or os.getenv("OPENAI_API_KEY")
-        base_url = self.config.openai_base_url or os.getenv("OPENAI_API_BASE")
+        base_url = self.config.openai_base_url or os.getenv("OPENAI_API_BASE") or "https://api.openai.com/v1"
         self.client = OpenAI(api_key=api_key, base_url=base_url)
 
     def _parse_response(self, response, tools):
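Both client hunks apply the same precedence, so the OpenAI endpoint can be overridden either through the config field or through OPENAI_API_BASE, with the config value winning when both are set. A small, self-contained illustration of how the `or` chain resolves (the proxy URLs are made up for the example):

import os

os.environ["OPENAI_API_BASE"] = "https://proxy.internal/v1"

for label, config_value in [
    ("config set ", "http://localhost:8000/v1"),  # config wins over the env var
    ("config None", None),                        # env var wins over the default
]:
    resolved = config_value or os.getenv("OPENAI_API_BASE") or "https://api.openai.com/v1"
    print(label, "->", resolved)
# config set  -> http://localhost:8000/v1
# config None -> https://proxy.internal/v1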