[Feature] Add support for running huggingface models locally (#1287)

Author: Deshraj Yadav
Date: 2024-02-27 15:05:17 -08:00
Committed by: GitHub
Parent: 752f638cfc
Commit: 56bf33ab7f
Changed files: 5 (+95 additions, -46 deletions)


@@ -95,6 +95,7 @@ class BaseLlmConfig(BaseConfig):
         api_key: Optional[str] = None,
         endpoint: Optional[str] = None,
         model_kwargs: Optional[dict[str, Any]] = None,
+        local: Optional[bool] = False,
     ):
         """
         Initializes a configuration class instance for the LLM.
@@ -138,6 +139,8 @@ class BaseLlmConfig(BaseConfig):
         :type callbacks: Optional[list], optional
         :param query_type: The type of query to use, defaults to None
         :type query_type: Optional[str], optional
+        :param local: If True, the model is run locally (huggingface provider only), defaults to False
+        :type local: Optional[bool], optional
         :raises ValueError: If the template is not valid as template should
             contain $context and $query (and optionally $history)
         :raises ValueError: Stream is not boolean
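
The two docstring lines added above are the only description of the new flag in this view. A minimal usage sketch, assuming BaseLlmConfig is exported from embedchain.config (the file path is not shown here) and that the class also accepts a model identifier:

from embedchain.config import BaseLlmConfig

# Ask the huggingface provider to run the model in-process rather than
# calling a hosted endpoint. The model id and model_kwargs values are
# illustrative, not taken from the commit.
config = BaseLlmConfig(
    model="mistralai/Mistral-7B-v0.1",
    model_kwargs={"max_new_tokens": 100},
    local=True,
)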
@@ -165,6 +168,7 @@ class BaseLlmConfig(BaseConfig):
         self.api_key = api_key
         self.endpoint = endpoint
         self.model_kwargs = model_kwargs
+        self.local = local
         if isinstance(prompt, str):
             prompt = Template(prompt)
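
The hunks shown here only touch the config class; the provider-side code that consumes config.local lives in one of the other changed files, which this view does not include. A sketch of how a provider could branch on the flag, using the real transformers.pipeline API; the HuggingFaceLlm wrapper and its generate method are hypothetical stand-ins, not code from this commit:

from transformers import pipeline

class HuggingFaceLlm:  # hypothetical wrapper, not from this diff
    def __init__(self, config):
        self.config = config
        if config.local:
            # Load the model into the local process instead of relying
            # on a hosted inference endpoint.
            self.pipe = pipeline(
                "text-generation",
                model=config.model,
                model_kwargs=config.model_kwargs or {},
            )

    def generate(self, prompt: str) -> str:
        if self.config.local:
            return self.pipe(prompt)[0]["generated_text"]
        # Remote path (config.endpoint / config.api_key) elided here.
        raise NotImplementedError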