Add support for Hugging Face Inference Endpoint as LLM (#1143)

Author: Madison Ebersole
Committed by: GitHub
Date: 2024-01-08 13:20:04 -05:00
Parent: e36198dcc2
Commit: 62c0c52e31
5 changed files with 93 additions and 1 deletion

View File

@@ -72,6 +72,8 @@ class BaseLlmConfig(BaseConfig):
         query_type: Optional[str] = None,
         callbacks: Optional[List] = None,
         api_key: Optional[str] = None,
+        endpoint: Optional[str] = None,
+        model_kwargs: Optional[Dict[str, Any]] = {},
     ):
         """
         Initializes a configuration class instance for the LLM.
@@ -105,6 +107,12 @@ class BaseLlmConfig(BaseConfig):
         :type system_prompt: Optional[str], optional
         :param where: A dictionary of key-value pairs to filter the database results, defaults to None
         :type where: Dict[str, Any], optional
+        :param api_key: The API key of the custom endpoint, defaults to None
+        :type api_key: Optional[str], optional
+        :param endpoint: The API URL of the custom endpoint, defaults to None
+        :type endpoint: Optional[str], optional
+        :param model_kwargs: A dictionary of key-value pairs to pass to the model, defaults to an empty dict
+        :type model_kwargs: Optional[Dict[str, Any]], optional
         :param callbacks: Langchain callback functions to use, defaults to None
         :type callbacks: Optional[List], optional
         :raises ValueError: If the template is not valid as template should
@@ -132,7 +140,8 @@ class BaseLlmConfig(BaseConfig):
         self.query_type = query_type
         self.callbacks = callbacks
         self.api_key = api_key
+        self.endpoint = endpoint
+        self.model_kwargs = model_kwargs
         if type(prompt) is str:
             prompt = Template(prompt)
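
In practice, the two new fields let a config point at a dedicated Inference Endpoint instead of a hub model name. A minimal sketch using only what this diff shows; the endpoint URL and the model_kwargs values are placeholders, not values from the commit:

    from embedchain.config import BaseLlmConfig

    # Sketch only: route generation to a custom Inference Endpoint.
    # The URL below is a placeholder, not a real endpoint.
    config = BaseLlmConfig(
        endpoint="https://example.endpoints.huggingface.cloud",
        model_kwargs={"max_new_tokens": 100, "top_p": 0.5},
    )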

View File

@@ -3,6 +3,7 @@ import logging
 import os
 from typing import Optional
 
+from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint
 from langchain.llms.huggingface_hub import HuggingFaceHub
 
 from embedchain.config import BaseLlmConfig
@@ -33,6 +34,15 @@ class HuggingFaceLlm(BaseLlm):
     @staticmethod
     def _get_answer(prompt: str, config: BaseLlmConfig) -> str:
+        if config.model:
+            return HuggingFaceLlm._from_model(prompt=prompt, config=config)
+        elif config.endpoint:
+            return HuggingFaceLlm._from_endpoint(prompt=prompt, config=config)
+        else:
+            raise ValueError("Either `model` or `endpoint` must be set")
+
+    @staticmethod
+    def _from_model(prompt: str, config: BaseLlmConfig) -> str:
         model_kwargs = {
             "temperature": config.temperature or 0.1,
             "max_new_tokens": config.max_tokens,
@@ -52,3 +62,13 @@
         )
         return llm(prompt)
+
+    @staticmethod
+    def _from_endpoint(prompt: str, config: BaseLlmConfig) -> str:
+        llm = HuggingFaceEndpoint(
+            huggingfacehub_api_token=os.environ["HUGGINGFACE_ACCESS_TOKEN"],
+            endpoint_url=config.endpoint,
+            task="text-generation",
+            model_kwargs=config.model_kwargs,
+        )
+        return llm(prompt)
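
End to end, the dispatch in _get_answer can be exercised as below. This is a hedged sketch: the module path for HuggingFaceLlm and the endpoint URL are assumptions not shown in this diff, and the token value is a placeholder:

    import os

    from embedchain.config import BaseLlmConfig
    from embedchain.llm.huggingface import HuggingFaceLlm  # assumed module path

    # _from_endpoint reads the token from the environment, not from config.api_key.
    os.environ["HUGGINGFACE_ACCESS_TOKEN"] = "hf_..."  # placeholder token

    # With `endpoint` set and `model` unset, _get_answer routes to _from_endpoint.
    config = BaseLlmConfig(
        endpoint="https://example.endpoints.huggingface.cloud",  # placeholder URL
        model_kwargs={"max_new_tokens": 100},
    )
    answer = HuggingFaceLlm._get_answer(prompt="What is embedchain?", config=config)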

View File

@@ -415,6 +415,7 @@ def validate_config(config_data):
                     Optional("where"): dict,
                     Optional("query_type"): str,
                     Optional("api_key"): str,
+                    Optional("endpoint"): str,
                 },
             },
             Optional("vectordb"): {