Add support for http clients in config (#1355)
commit ebc5e25f98
parent 78301ee63d
@@ -98,6 +98,8 @@ class BaseLlmConfig(BaseConfig):
         base_url: Optional[str] = None,
         endpoint: Optional[str] = None,
         model_kwargs: Optional[dict[str, Any]] = None,
+        http_client: Optional[Any] = None,
+        http_async_client: Optional[Any] = None,
         local: Optional[bool] = False,
         default_headers: Optional[Mapping[str, str]] = None,
     ):
@@ -175,6 +177,8 @@ class BaseLlmConfig(BaseConfig):
         self.base_url = base_url
         self.endpoint = endpoint
         self.model_kwargs = model_kwargs
+        self.http_client = http_client
+        self.http_async_client = http_async_client
         self.local = local
         self.default_headers = default_headers
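Taken together, the two BaseLlmConfig hunks simply accept and store caller-supplied HTTP client objects. Below is a minimal sketch of how a caller might use the new fields, assuming httpx clients (the type the OpenAI SDK and langchain's ChatOpenAI accept) and assuming BaseLlmConfig is importable from embedchain.config; the import path, model name, timeout, and CA bundle path are illustrative assumptions, not taken from the diff.

import httpx

from embedchain.config import BaseLlmConfig  # assumed import path

# Reuse one connection pool, pin a custom CA bundle, and raise the timeout.
sync_client = httpx.Client(timeout=60.0, verify="/etc/ssl/certs/corp-ca.pem")
async_client = httpx.AsyncClient(timeout=60.0, verify="/etc/ssl/certs/corp-ca.pem")

config = BaseLlmConfig(
    model="gpt-4o-mini",          # illustrative model name
    http_client=sync_client,
    http_async_client=async_client,
)

# The config stores the objects as-is and performs no validation, so any
# client object the consuming LLM wrapper understands can be passed.
assert config.http_client is sync_client
assert config.http_async_client is async_client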
@@ -52,6 +52,8 @@ class OpenAILlm(BaseLlm):
                 callbacks=callbacks,
                 api_key=api_key,
                 base_url=base_url,
+                http_client=config.http_client,
+                http_async_client=config.http_async_client,
             )
         else:
             chat = ChatOpenAI(**kwargs, api_key=api_key, base_url=base_url)
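In the OpenAILlm hunk, the stored clients are forwarded to the ChatOpenAI constructor in the call that also wires up callbacks; the ChatOpenAI call in the else: branch at the end of the hunk is left unchanged by this commit. A rough end-to-end sketch follows, assuming the import paths shown and that OpenAILlm accepts the config via its constructor; these details are inferred from the class names in the diff, not quoted from the repository.

import httpx

from embedchain.config import BaseLlmConfig   # assumed import path
from embedchain.llm.openai import OpenAILlm   # assumed import path

config = BaseLlmConfig(
    model="gpt-4o-mini",   # illustrative
    stream=True,           # assumption: the modified call is the streaming/callbacks branch
    http_client=httpx.Client(timeout=60.0),
    http_async_client=httpx.AsyncClient(timeout=60.0),
)

llm = OpenAILlm(config=config)
# When this LLM builds its ChatOpenAI instance, it now also passes
# http_client=config.http_client and http_async_client=config.http_async_client,
# so requests go through the caller-supplied transport instead of a default one.

Note that the other ChatOpenAI call visible in the hunk (chat = ChatOpenAI(**kwargs, api_key=api_key, base_url=base_url)) is not modified here, so the custom clients only apply to the call changed above.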