[bugfix] Fix issue when llm config is not defined (#763)

Author: Deshraj Yadav
Date: 2023-10-04 12:08:21 -07:00
Committed by: GitHub
Parent: d0af018b8d
Commit: 87d0b5c76f
15 changed files with 100 additions and 88 deletions


@@ -22,7 +22,7 @@ class GPT4ALLLlm(BaseLlm):
            from gpt4all import GPT4All
        except ModuleNotFoundError:
            raise ModuleNotFoundError(
                "The GPT4All python package is not installed. Please install it with `pip install --upgrade embedchain[opensource]`"  # noqa E501
            ) from None
        return GPT4All(model_name=model)
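The commit title points at the actual bug: code downstream assumed an LLM config object was always supplied. The hunk above only shows the guarded `gpt4all` import; the sketch below illustrates the kind of "default the config when none is given" guard the fix implies. It is a minimal, self-contained illustration, not the exact embedchain code: `LlmConfig`, its fields, and the default model name are hypothetical stand-ins for embedchain's real config classes.

```python
# Illustrative sketch of the bugfix pattern (not the exact embedchain diff):
# fall back to a default config when the caller does not pass one, so that
# attribute access such as `self.config.model` never hits `None`.
from dataclasses import dataclass
from typing import Optional


@dataclass
class LlmConfig:
    # Hypothetical stand-in for embedchain's BaseLlmConfig.
    model: str = "default-gpt4all-model.gguf"  # placeholder default model name
    temperature: float = 0.0


class GPT4ALLLlmSketch:
    def __init__(self, config: Optional[LlmConfig] = None):
        # The guard: if no llm config is defined, construct a default one
        # instead of storing None and crashing later.
        self.config = config if config is not None else LlmConfig()

    def model_name(self) -> str:
        return self.config.model


if __name__ == "__main__":
    # Works both without and with an explicit config.
    print(GPT4ALLLlmSketch().model_name())
    print(GPT4ALLLlmSketch(LlmConfig(model="custom-model.gguf")).model_name())
```

As the error message in the hunk notes, the optional GPT4All dependency itself is installed separately, e.g. `pip install --upgrade "embedchain[opensource]"`.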