AzureOpenAI Embedding Model and LLM Model Initialisation from Config. (#1773)

This commit is contained in:
k10
2024-09-01 02:09:00 +05:30
committed by GitHub
parent ad233034ef
commit 077d0c47f9
10 changed files with 88 additions and 22 deletions


@@ -1,4 +1,5 @@
from abc import ABC
from mem0.configs.base import AzureConfig
from typing import Any, Dict, Optional, Union
import httpx
@@ -27,7 +28,8 @@ class BaseLlmConfig(ABC):
app_name: Optional[str] = None,
# Ollama specific
ollama_base_url: Optional[str] = None,
# AzureOpenAI specific
azure_kwargs: Optional[Dict[str, Any]] = None,
# Proxy settings for the underlying HTTP client
http_client_proxies: Optional[Union[Dict, str]] = None,
):
@@ -62,6 +64,8 @@ class BaseLlmConfig(ABC):
:type ollama_base_url: Optional[str], optional
:param openai_base_url: OpenAI base URL to be used, defaults to "https://api.openai.com/v1"
:type openai_base_url: Optional[str], optional
:param azure_kwargs: key-value arguments for the AzureOpenAI LLM model, defaults to None (an empty AzureConfig is created in __init__)
:type azure_kwargs: Optional[Dict[str, Any]], optional
:param http_client_proxies: The proxy server(s) settings used to create self.http_client, defaults to None
:type http_client_proxies: Optional[Union[Dict, str]], optional
"""
@@ -86,3 +90,6 @@ class BaseLlmConfig(ABC):
# Ollama specific
self.ollama_base_url = ollama_base_url
# AzureOpenAI specific
self.azure_kwargs = AzureConfig(**(azure_kwargs or {}))
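
For reference, a minimal usage sketch of the new option. It assumes this hunk is mem0/configs/llms/base.py and that AzureConfig (from mem0.configs.base) accepts fields such as api_key, azure_deployment, azure_endpoint and api_version; the import path and field names are assumptions, not shown in this diff.

# Hypothetical sketch; the AzureConfig field names below are assumed, not taken from this diff.
from mem0.configs.llms.base import BaseLlmConfig

config = BaseLlmConfig(
    azure_kwargs={
        "api_key": "<your-azure-openai-key>",                      # assumed AzureConfig field
        "azure_deployment": "<deployment-name>",                   # assumed AzureConfig field
        "azure_endpoint": "https://<resource>.openai.azure.com/",  # assumed AzureConfig field
        "api_version": "2024-02-01",                               # assumed AzureConfig field
    },
)

# __init__ wraps the dict, so downstream code works with a typed object:
# config.azure_kwargs is an AzureConfig instance, not a plain dict.

Wrapping the raw dict in AzureConfig at construction time keeps validation and defaulting of the Azure settings in one place, rather than repeating it in each LLM or embedder implementation that consumes the config.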