[Fix] Added missing provider for 'vllm' (#1316)

Abhishek Sharma
2024-03-14 12:31:30 +05:30
committed by GitHub
parent 3616eaadb4
commit f2122ed696


@@ -25,6 +25,7 @@ class LlmFactory:
         "mistralai": "embedchain.llm.mistralai.MistralAILlm",
         "groq": "embedchain.llm.groq.GroqLlm",
         "nvidia": "embedchain.llm.nvidia.NvidiaLlm",
+        "vllm": "embedchain.llm.vllm.VLLM",
     }
     provider_to_config_class = {
         "embedchain": "embedchain.config.llm.base.BaseLlmConfig",