Adding proxy server settings to azure openai (#1753)

Author: Pranav Puranik
Date: 2024-08-29 04:48:50 -05:00
Committed by: GitHub
Parent: deeb4f2250
Commit: fee3c27af3
7 changed files with 46 additions and 6 deletions

View File

@@ -48,6 +48,7 @@ Here's a comprehensive list of all parameters that can be used across different
 | `model` | Embedding model to use |
 | `api_key` | API key of the provider |
 | `embedding_dims` | Dimensions of the embedding model |
+| `http_client_proxies` | Allow proxy server settings |
 | `ollama_base_url` | Base URL for the Ollama embedding model |
 | `model_kwargs` | Key-Value arguments for the Huggingface embedding model |
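To illustrate the new embedder option, here is a minimal sketch of how `http_client_proxies` would be passed through mem0's config dict via `Memory.from_config`; the proxy URL below is a hypothetical placeholder, not taken from this commit:

from mem0 import Memory

config = {
    "embedder": {
        "provider": "azure_openai",
        "config": {
            # Hypothetical proxy URL; an httpx-style scheme -> proxy dict also works.
            "http_client_proxies": "http://proxy.example.com:8080",
        },
    },
}

m = Memory.from_config(config)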

View File

@@ -53,6 +53,7 @@ Here's the table based on the provided parameters:
 | `max_tokens` | Tokens to generate | All |
 | `top_p` | Probability threshold for nucleus sampling | All |
 | `top_k` | Number of highest probability tokens to keep | All |
+| `http_client_proxies`| Allow proxy server settings | AzureOpenAI |
 | `models` | List of models | Openrouter |
 | `route` | Routing strategy | Openrouter |
 | `openrouter_base_url`| Base URL for Openrouter API | Openrouter |
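The LLM side works the same way. A sketch using the dict form, assuming httpx's scheme-keyed proxies convention and a hypothetical proxy host:

from mem0 import Memory

config = {
    "llm": {
        "provider": "azure_openai",
        "config": {
            "model": "gpt-4o",
            # Per-scheme routing, as accepted by httpx.Client(proxies=...).
            "http_client_proxies": {
                "http://": "http://proxy.example.com:8080",
                "https://": "http://proxy.example.com:8080",
            },
        },
    },
}

m = Memory.from_config(config)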

View File

@@ -1,5 +1,7 @@
 from abc import ABC
-from typing import Optional
+from typing import Optional, Union, Dict
+
+import httpx


 class BaseEmbedderConfig(ABC):
@@ -18,6 +20,8 @@ class BaseEmbedderConfig(ABC):
         openai_base_url: Optional[str] = None,
         # Huggingface specific
         model_kwargs: Optional[dict] = None,
+        # AzureOpenAI specific
+        http_client_proxies: Optional[Union[Dict, str]] = None,
     ):
         """
         Initializes a configuration class instance for the Embeddings.
@@ -34,6 +38,8 @@ class BaseEmbedderConfig(ABC):
         :type model_kwargs: Optional[Dict[str, Any]], defaults to a dict inside init
         :param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
         :type openai_base_url: Optional[str], optional
+        :param http_client_proxies: The proxy server settings used to create self.http_client, defaults to None
+        :type http_client_proxies: Optional[Dict | str], optional
         """
         self.model = model
@@ -41,6 +47,9 @@ class BaseEmbedderConfig(ABC):
         self.openai_base_url = openai_base_url
         self.embedding_dims = embedding_dims
+        # AzureOpenAI specific
+        self.http_client = httpx.Client(proxies=http_client_proxies) if http_client_proxies else None
+
         # Ollama specific
         self.ollama_base_url = ollama_base_url
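In effect, the config eagerly builds an httpx.Client when proxy settings are given and leaves self.http_client as None otherwise, so the OpenAI SDK falls back to its default transport. A quick sketch of that behavior; the import path is inferred by symmetry with the `mem0.configs.llms.base` import seen in the test file:

import httpx
from mem0.configs.embeddings.base import BaseEmbedderConfig

cfg = BaseEmbedderConfig(http_client_proxies="http://proxy.example.com:8080")
assert isinstance(cfg.http_client, httpx.Client)   # client built with the proxy settings

assert BaseEmbedderConfig().http_client is None    # omitted -> SDK default transport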

View File

@@ -1,5 +1,7 @@
 from abc import ABC
-from typing import Optional
+from typing import Optional, Union, Dict
+
+import httpx


 class BaseLlmConfig(ABC):
@@ -25,6 +27,9 @@ class BaseLlmConfig(ABC):
         app_name: Optional[str] = None,
         # Ollama specific
         ollama_base_url: Optional[str] = None,
+        # AzureOpenAI specific
+        http_client_proxies: Optional[Union[Dict, str]] = None,
     ):
         """
         Initializes a configuration class instance for the LLM.
@@ -57,6 +62,8 @@ class BaseLlmConfig(ABC):
         :type ollama_base_url: Optional[str], optional
         :param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
         :type openai_base_url: Optional[str], optional
+        :param http_client_proxies: The proxy server settings used to create self.http_client, defaults to None
+        :type http_client_proxies: Optional[Dict | str], optional
         """
         self.model = model
@@ -66,6 +73,9 @@ class BaseLlmConfig(ABC):
         self.top_p = top_p
         self.top_k = top_k
+        # AzureOpenAI specific
+        self.http_client = httpx.Client(proxies=http_client_proxies) if http_client_proxies else None
+
         # Openrouter specific
         self.models = models
         self.route = route
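One caveat worth flagging: httpx deprecated the `proxies` argument in 0.26 and removed it in later releases in favor of `proxy`/`mounts`, so this construction assumes an httpx version that still accepts `proxies`. A direct-construction sketch with a hypothetical proxy host:

import httpx
from mem0.configs.llms.base import BaseLlmConfig

cfg = BaseLlmConfig(
    model="gpt-4o",
    # Route only HTTPS traffic through the proxy; HTTP goes direct.
    http_client_proxies={"https://": "http://proxy.example.com:8080"},
)
assert isinstance(cfg.http_client, httpx.Client)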

View File

@@ -17,7 +17,7 @@ class AzureOpenAIEmbedding(EmbeddingBase):
             self.config.embedding_dims = 1536

         api_key = os.getenv("AZURE_OPENAI_API_KEY") or self.config.api_key
-        self.client = AzureOpenAI(api_key=api_key)
+        self.client = AzureOpenAI(api_key=api_key, http_client=self.config.http_client)

     def embed(self, text):
         """

View File

@@ -15,10 +15,9 @@ class AzureOpenAILLM(LLMBase):
         # Model name should match the custom deployment name chosen for it.
         if not self.config.model:
             self.config.model = "gpt-4o"
-        self.client = AzureOpenAI()
         api_key = os.getenv("AZURE_OPENAI_API_KEY") or self.config.api_key
-        self.client = AzureOpenAI(api_key=api_key)
+        self.client = AzureOpenAI(api_key=api_key, http_client=self.config.http_client)

     def _parse_response(self, response, tools):
         """

View File

@@ -1,5 +1,8 @@
+import httpx
 import pytest
+
 from unittest.mock import Mock, patch

 from mem0.llms.azure_openai import AzureOpenAILLM
 from mem0.configs.llms.base import BaseLlmConfig
@@ -91,4 +94,21 @@ def test_generate_response_with_tools(mock_openai_client):
     assert len(response["tool_calls"]) == 1
     assert response["tool_calls"][0]["name"] == "add_memory"
     assert response["tool_calls"][0]["arguments"] == {'data': 'Today is a sunny day.'}
+
+
+def test_generate_with_http_proxies():
+    mock_http_client = Mock(spec=httpx.Client)
+    mock_http_client_instance = Mock(spec=httpx.Client)
+    mock_http_client.return_value = mock_http_client_instance
+
+    with (
+        patch("mem0.llms.azure_openai.AzureOpenAI") as mock_azure_openai,
+        patch("httpx.Client", new=mock_http_client),
+    ):
+        config = BaseLlmConfig(
+            model=MODEL, temperature=TEMPERATURE, max_tokens=MAX_TOKENS, top_p=TOP_P,
+            api_key="test", http_client_proxies="http://testproxy.mem0.net:8000",
+        )
+
+        _ = AzureOpenAILLM(config)
+
+        mock_azure_openai.assert_called_once_with(api_key="test", http_client=mock_http_client_instance)
+        mock_http_client.assert_called_once_with(proxies="http://testproxy.mem0.net:8000")