feat(LM Studio): Add response_format param for LM Studio to config (#2502)

This commit is contained in:
i-sun
2025-06-17 21:25:18 +09:00
committed by GitHub
parent c70dc7614b
commit 62c330e5b3
4 changed files with 34 additions and 0 deletions

View File

@@ -43,6 +43,7 @@ class BaseLlmConfig(ABC):
sarvam_base_url: Optional[str] = "https://api.sarvam.ai/v1",
# LM Studio specific
lmstudio_base_url: Optional[str] = "http://localhost:1234/v1",
lmstudio_response_format: dict = None,
# AWS Bedrock specific
aws_access_key_id: Optional[str] = None,
aws_secret_access_key: Optional[str] = None,
@@ -95,6 +96,8 @@ class BaseLlmConfig(ABC):
:type sarvam_base_url: Optional[str], optional
:param lmstudio_base_url: LM Studio base URL to be used, defaults to "http://localhost:1234/v1"
:type lmstudio_base_url: Optional[str], optional
:param lmstudio_response_format: LM Studio response format to be used, defaults to None
:type lmstudio_response_format: Optional[Dict], optional
"""
self.model = model
@@ -134,6 +137,7 @@ class BaseLlmConfig(ABC):
# LM Studio specific
self.lmstudio_base_url = lmstudio_base_url
self.lmstudio_response_format = lmstudio_response_format
# AWS Bedrock specific
self.aws_access_key_id = aws_access_key_id

View File

@@ -46,6 +46,8 @@ class LMStudioLLM(LLMBase):
}
if response_format:
params["response_format"] = response_format
if self.config.lmstudio_response_format is not None:
params["response_format"] = self.config.lmstudio_response_format
response = self.client.chat.completions.create(**params)
return response.choices[0].message.content