Adding proxy server settings to azure openai (#1753)

This commit is contained in:
Pranav Puranik
2024-08-29 04:48:50 -05:00
committed by GitHub
parent deeb4f2250
commit fee3c27af3
7 changed files with 46 additions and 6 deletions

View File

@@ -1,5 +1,7 @@
from abc import ABC
from typing import Optional
from typing import Optional, Union, Dict
import httpx
class BaseEmbedderConfig(ABC):
@@ -18,6 +20,8 @@ class BaseEmbedderConfig(ABC):
openai_base_url: Optional[str] = None,
# Huggingface specific
model_kwargs: Optional[dict] = None,
# AzureOpenAI specific
http_client_proxies: Optional[Union[Dict, str]] = None,
):
"""
Initializes a configuration class instance for the Embeddings.
@@ -34,6 +38,8 @@ class BaseEmbedderConfig(ABC):
:type model_kwargs: Optional[Dict[str, Any]], defaults to a dict inside init
:param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
:type openai_base_url: Optional[str], optional
:param http_client_proxies: The proxy server settings used to create self.http_client, defaults to None
:type http_client_proxies: Optional[Dict | str], optional
"""
self.model = model
@@ -41,6 +47,9 @@ class BaseEmbedderConfig(ABC):
self.openai_base_url = openai_base_url
self.embedding_dims = embedding_dims
# AzureOpenAI specific
self.http_client = httpx.Client(proxies=http_client_proxies) if http_client_proxies else None
# Ollama specific
self.ollama_base_url = ollama_base_url

View File

@@ -1,5 +1,7 @@
from abc import ABC
from typing import Optional
from typing import Optional, Union, Dict
import httpx
class BaseLlmConfig(ABC):
@@ -25,6 +27,9 @@ class BaseLlmConfig(ABC):
app_name: Optional[str] = None,
# Ollama specific
ollama_base_url: Optional[str] = None,
# AzureOpenAI specific
http_client_proxies: Optional[Union[Dict, str]] = None,
):
"""
Initializes a configuration class instance for the LLM.
@@ -57,6 +62,8 @@ class BaseLlmConfig(ABC):
:type ollama_base_url: Optional[str], optional
:param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
:type openai_base_url: Optional[str], optional
:param http_client_proxies: The proxy server(s) settings used to create self.http_client, defaults to None
:type http_client_proxies: Optional[Dict | str], optional
"""
self.model = model
@@ -66,6 +73,9 @@ class BaseLlmConfig(ABC):
self.top_p = top_p
self.top_k = top_k
# AzureOpenAI specific
self.http_client = httpx.Client(proxies=http_client_proxies) if http_client_proxies else None
# Openrouter specific
self.models = models
self.route = route

View File

@@ -17,7 +17,7 @@ class AzureOpenAIEmbedding(EmbeddingBase):
self.config.embedding_dims = 1536
api_key = os.getenv("AZURE_OPENAI_API_KEY") or self.config.api_key
self.client = AzureOpenAI(api_key=api_key)
self.client = AzureOpenAI(api_key=api_key, http_client=self.config.http_client)
def embed(self, text):
"""

View File

@@ -15,10 +15,9 @@ class AzureOpenAILLM(LLMBase):
# Model name should match the custom deployment name chosen for it.
if not self.config.model:
self.config.model = "gpt-4o"
self.client = AzureOpenAI()
api_key = os.getenv("AZURE_OPENAI_API_KEY") or self.config.api_key
self.client = AzureOpenAI(api_key=api_key)
self.client = AzureOpenAI(api_key=api_key, http_client=self.config.http_client)
def _parse_response(self, response, tools):
"""