AzureOpenAI Embedding Model and LLM Model Initialisation from Config. (#1773)
@@ -55,4 +55,20 @@ class MemoryConfig(BaseModel):
        description="The version of the API",
        default="v1.0",
    )


class AzureConfig(BaseModel):
    """
    Configuration settings for Azure.

    Args:
        api_key (str): The API key used for authenticating with the Azure service.
        azure_deployment (str): The name of the Azure deployment.
        azure_endpoint (str): The endpoint URL for the Azure service.
        api_version (str): The version of the Azure API being used.
    """

    api_key: str = Field(description="The API key used for authenticating with the Azure service.", default=None)
    azure_deployment: str = Field(description="The name of the Azure deployment.", default=None)
    azure_endpoint: str = Field(description="The endpoint URL for the Azure service.", default=None)
    api_version: str = Field(description="The version of the Azure API being used.", default=None)
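AzureConfig is a plain Pydantic model, so it can be built directly from a dict of keyword arguments, which is how the azure_kwargs handling in the two config classes below consumes it. A minimal sketch (the key, deployment, endpoint, and version values are placeholders, not taken from this commit):

from mem0.configs.base import AzureConfig

# Placeholder values for illustration only.
azure_kwargs = {
    "api_key": "<your-azure-openai-key>",
    "azure_deployment": "my-embedding-deployment",
    "azure_endpoint": "https://my-resource.openai.azure.com/",
    "api_version": "2024-02-01",
}

config = AzureConfig(**azure_kwargs)
print(config.azure_deployment)  # my-embedding-deployment

Because every field declares default=None, AzureConfig() also constructs with no arguments (Pydantic does not validate defaults), with each field simply set to None.
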
@@ -1,4 +1,5 @@
from abc import ABC
from mem0.configs.base import AzureConfig
from typing import Optional, Union, Dict

import httpx
@@ -21,6 +22,7 @@ class BaseEmbedderConfig(ABC):
        # Huggingface specific
        model_kwargs: Optional[dict] = None,
        # AzureOpenAI specific
        azure_kwargs: Optional[AzureConfig] = {},
        http_client_proxies: Optional[Union[Dict, str]] = None,
    ):
        """
@@ -38,6 +40,8 @@ class BaseEmbedderConfig(ABC):
        :type model_kwargs: Optional[Dict[str, Any]], defaults to a dict inside init
        :param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
        :type openai_base_url: Optional[str], optional
        :param azure_kwargs: key-value arguments for the AzureOpenAI embedding model, defaults to a dict inside init
        :type azure_kwargs: Optional[Dict[str, Any]], defaults to a dict inside init
        :param http_client_proxies: The proxy server settings used to create self.http_client, defaults to None
        :type http_client_proxies: Optional[Dict | str], optional
        """
@@ -55,3 +59,6 @@ class BaseEmbedderConfig(ABC):

        # Huggingface specific
        self.model_kwargs = model_kwargs or {}

        # AzureOpenAI specific
        self.azure_kwargs = AzureConfig(**azure_kwargs) or {}
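With this change an Azure setup can be passed to the embedder config as an ordinary dict, which __init__ then validates into an AzureConfig instance. A hedged usage sketch; the import path and all values below are assumed for illustration, since the diff itself omits file paths:

from mem0.configs.embeddings.base import BaseEmbedderConfig  # assumed module path

# Placeholder values for illustration only.
config = BaseEmbedderConfig(
    model="text-embedding-3-small",
    azure_kwargs={
        "api_key": "<your-azure-openai-key>",
        "azure_deployment": "my-embedding-deployment",
        "azure_endpoint": "https://my-resource.openai.azure.com/",
        "api_version": "2024-02-01",
    },
)

# The dict is normalized into an AzureConfig instance in __init__.
print(type(config.azure_kwargs).__name__)  # AzureConfig
print(config.azure_kwargs.azure_endpoint)

One note on the assignment itself: AzureConfig(**azure_kwargs) always returns a model instance, and Pydantic model instances are truthy, so the "or {}" fallback never takes effect; self.azure_kwargs is always an AzureConfig.
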
@@ -1,4 +1,5 @@
from abc import ABC
from mem0.configs.base import AzureConfig
from typing import Optional, Union, Dict

import httpx
@@ -27,7 +28,8 @@ class BaseLlmConfig(ABC):
        app_name: Optional[str] = None,
        # Ollama specific
        ollama_base_url: Optional[str] = None,

        # AzureOpenAI specific
        azure_kwargs: Optional[AzureConfig] = {},
        # AzureOpenAI specific
        http_client_proxies: Optional[Union[Dict, str]] = None,
    ):
@@ -62,6 +64,8 @@ class BaseLlmConfig(ABC):
        :type ollama_base_url: Optional[str], optional
        :param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
        :type openai_base_url: Optional[str], optional
        :param azure_kwargs: key-value arguments for the AzureOpenAI LLM model, defaults to a dict inside init
        :type azure_kwargs: Optional[Dict[str, Any]], defaults to a dict inside init
        :param http_client_proxies: The proxy server(s) settings used to create self.http_client, defaults to None
        :type http_client_proxies: Optional[Dict | str], optional
        """
@@ -86,3 +90,6 @@ class BaseLlmConfig(ABC):

        # Ollama specific
        self.ollama_base_url = ollama_base_url

        # AzureOpenAI specific
        self.azure_kwargs = AzureConfig(**azure_kwargs) or {}
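The LLM side mirrors the embedder: azure_kwargs arrives as a dict and is stored on the config as a validated AzureConfig. How those fields are consumed afterwards is not part of this diff; the sketch below only illustrates the intended mapping onto the openai SDK's AzureOpenAI client, with the module path and all values assumed for illustration:

from openai import AzureOpenAI

from mem0.configs.llms.base import BaseLlmConfig  # assumed module path

# Placeholder values for illustration only.
config = BaseLlmConfig(
    model="gpt-4o",
    azure_kwargs={
        "api_key": "<your-azure-openai-key>",
        "azure_deployment": "my-gpt-deployment",
        "azure_endpoint": "https://my-resource.openai.azure.com/",
        "api_version": "2024-02-01",
    },
)

# Build an Azure client from the validated fields (this wiring is outside
# the diff and shown only to illustrate how the fields map onto the client).
client = AzureOpenAI(
    api_key=config.azure_kwargs.api_key,
    azure_deployment=config.azure_kwargs.azure_deployment,
    azure_endpoint=config.azure_kwargs.azure_endpoint,
    api_version=config.azure_kwargs.api_version,
    http_client=config.http_client,
)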