Feature/vllm support (#2981)

This commit is contained in:
NiLAy
2025-06-23 13:18:38 +05:30
committed by GitHub
parent 386d8b87ae
commit 89499aedbe
10 changed files with 430 additions and 1 deletion

View File

@@ -44,6 +44,8 @@ class BaseLlmConfig(ABC):
# LM Studio specific
lmstudio_base_url: Optional[str] = "http://localhost:1234/v1",
lmstudio_response_format: dict = None,
# vLLM specific
vllm_base_url: Optional[str] = "http://localhost:8000/v1",
# AWS Bedrock specific
aws_access_key_id: Optional[str] = None,
aws_secret_access_key: Optional[str] = None,
@@ -98,6 +100,8 @@ class BaseLlmConfig(ABC):
:type lmstudio_base_url: Optional[str], optional
    :param lmstudio_response_format: LM Studio response format to be used, defaults to None
:type lmstudio_response_format: Optional[Dict], optional
    :param vllm_base_url: vLLM base URL to be used, defaults to "http://localhost:8000/v1"
:type vllm_base_url: Optional[str], optional
"""
self.model = model
@@ -139,6 +143,9 @@ class BaseLlmConfig(ABC):
self.lmstudio_base_url = lmstudio_base_url
self.lmstudio_response_format = lmstudio_response_format
# vLLM specific
self.vllm_base_url = vllm_base_url
# AWS Bedrock specific
self.aws_access_key_id = aws_access_key_id
self.aws_secret_access_key = aws_secret_access_key