Add support for http clients in config (#1355)

Author:    Abdur Rahman Nawaz
Date:      2024-05-06 23:02:46 +05:30
Committer: GitHub
Parent:    78301ee63d
Commit:    ebc5e25f98

2 changed files with 6 additions and 0 deletions

@@ -98,6 +98,8 @@ class BaseLlmConfig(BaseConfig):
         base_url: Optional[str] = None,
         endpoint: Optional[str] = None,
         model_kwargs: Optional[dict[str, Any]] = None,
+        http_client: Optional[Any] = None,
+        http_async_client: Optional[Any] = None,
         local: Optional[bool] = False,
         default_headers: Optional[Mapping[str, str]] = None,
     ):
@@ -175,6 +177,8 @@ class BaseLlmConfig(BaseConfig):
         self.base_url = base_url
         self.endpoint = endpoint
         self.model_kwargs = model_kwargs
+        self.http_client = http_client
+        self.http_async_client = http_async_client
         self.local = local
         self.default_headers = default_headers
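
For context, a minimal sketch of how the new config options could be used, assuming the config class is importable as embedchain.config.BaseLlmConfig and that the clients are httpx clients; the proxy URL is purely illustrative:

import httpx

from embedchain.config import BaseLlmConfig  # import path assumed

# Route LLM traffic through a proxy by supplying custom HTTP clients.
# The proxy keyword needs a recent httpx (older releases use proxies=).
proxy_url = "http://localhost:8080"
config = BaseLlmConfig(
    http_client=httpx.Client(proxy=proxy_url),
    http_async_client=httpx.AsyncClient(proxy=proxy_url),
)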

@@ -52,6 +52,8 @@ class OpenAILlm(BaseLlm):
                 callbacks=callbacks,
                 api_key=api_key,
                 base_url=base_url,
+                http_client=config.http_client,
+                http_async_client=config.http_async_client,
             )
         else:
             chat = ChatOpenAI(**kwargs, api_key=api_key, base_url=base_url)
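
Downstream, the config values become the http_client / http_async_client keyword arguments of ChatOpenAI, so the hunk above is roughly equivalent to constructing the chat model directly. A sketch, assuming ChatOpenAI here is langchain_openai's chat model; the API key and proxy URL are placeholders:

import httpx

from langchain_openai import ChatOpenAI

# Same keyword arguments that OpenAILlm forwards from the config above.
chat = ChatOpenAI(
    api_key="sk-...",  # placeholder key
    base_url="https://api.openai.com/v1",
    http_client=httpx.Client(proxy="http://localhost:8080"),  # illustrative proxy
    http_async_client=httpx.AsyncClient(proxy="http://localhost:8080"),
)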