From ebc5e25f9825f1f3acef45593a1457550b089fd1 Mon Sep 17 00:00:00 2001
From: Abdur Rahman Nawaz
Date: Mon, 6 May 2024 23:02:46 +0530
Subject: [PATCH] Add support for http clients in config (#1355)

---
 embedchain/config/llm/base.py | 4 ++++
 embedchain/llm/openai.py      | 2 ++
 2 files changed, 6 insertions(+)

diff --git a/embedchain/config/llm/base.py b/embedchain/config/llm/base.py
index 26b094c9..cdb158c5 100644
--- a/embedchain/config/llm/base.py
+++ b/embedchain/config/llm/base.py
@@ -98,6 +98,8 @@ class BaseLlmConfig(BaseConfig):
         base_url: Optional[str] = None,
         endpoint: Optional[str] = None,
         model_kwargs: Optional[dict[str, Any]] = None,
+        http_client: Optional[Any] = None,
+        http_async_client: Optional[Any] = None,
         local: Optional[bool] = False,
         default_headers: Optional[Mapping[str, str]] = None,
     ):
@@ -175,6 +177,8 @@ class BaseLlmConfig(BaseConfig):
         self.base_url = base_url
         self.endpoint = endpoint
         self.model_kwargs = model_kwargs
+        self.http_client = http_client
+        self.http_async_client = http_async_client
         self.local = local
         self.default_headers = default_headers

diff --git a/embedchain/llm/openai.py b/embedchain/llm/openai.py
index c1b2d0af..aec91b2a 100644
--- a/embedchain/llm/openai.py
+++ b/embedchain/llm/openai.py
@@ -52,6 +52,8 @@ class OpenAILlm(BaseLlm):
                 callbacks=callbacks,
                 api_key=api_key,
                 base_url=base_url,
+                http_client=config.http_client,
+                http_async_client=config.http_async_client,
             )
         else:
             chat = ChatOpenAI(**kwargs, api_key=api_key, base_url=base_url)
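
Usage note (not part of the patch): a minimal sketch of how the new config
fields could be wired up, assuming the embedchain import paths shown below;
the httpx timeout value is an illustrative placeholder. The custom clients
are forwarded by the patch to the underlying ChatOpenAI instance.

    import httpx

    from embedchain.config import BaseLlmConfig
    from embedchain.llm.openai import OpenAILlm

    # Custom httpx clients, e.g. to control timeouts, proxies, or TLS settings.
    sync_client = httpx.Client(timeout=30.0)
    async_client = httpx.AsyncClient(timeout=30.0)

    config = BaseLlmConfig(
        http_client=sync_client,         # passed through as ChatOpenAI(http_client=...)
        http_async_client=async_client,  # passed through as ChatOpenAI(http_async_client=...)
    )
    llm = OpenAILlm(config=config)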