[Bug fix] import App shouldn't throw other llm deps errors (#837)
embedchain/llm/llama2.py
@@ -8,18 +8,17 @@ from embedchain.config import BaseLlmConfig
 from embedchain.helper.json_serializable import register_deserializable
 from embedchain.llm.base import BaseLlm
 
-try:
-    importlib.import_module("replicate")
-except ModuleNotFoundError:
-    raise ModuleNotFoundError(
-        "The required dependencies for Llama2 are not installed."
-        'Please install with `pip install --upgrade "embedchain[llama2]"`'
-    ) from None
-
 
 @register_deserializable
 class Llama2Llm(BaseLlm):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
+        try:
+            importlib.import_module("replicate")
+        except ModuleNotFoundError:
+            raise ModuleNotFoundError(
+                "The required dependencies for Llama2 are not installed."
+                'Please install with `pip install --upgrade "embedchain[llama2]"`'
+            ) from None
         if "REPLICATE_API_TOKEN" not in os.environ:
             raise ValueError("Please set the REPLICATE_API_TOKEN environment variable.")
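For context, the effect of this hunk is to defer the dependency check from
import time to instantiation time. A minimal sketch of the before/after
behavior, assuming `replicate` is not installed (module path as in the file
header above):

    # Before this change, the module-level check made any import chain that
    # touched llama2.py raise, including a plain `from embedchain import App`.
    # After it, the import succeeds and the error surfaces only on use:
    from embedchain.llm.llama2 import Llama2Llm  # no longer raises

    llm = Llama2Llm()  # raises ModuleNotFoundError: replicate is missing

The same fix is applied to the VertexAI LLM below.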
embedchain/llm/vertexai.py
@@ -6,18 +6,17 @@ from embedchain.config import BaseLlmConfig
 from embedchain.helper.json_serializable import register_deserializable
 from embedchain.llm.base import BaseLlm
 
-try:
-    importlib.import_module("vertexai")
-except ModuleNotFoundError:
-    raise ModuleNotFoundError(
-        "The required dependencies for VertexAI are not installed."
-        'Please install with `pip install --upgrade "embedchain[vertexai]"`'
-    ) from None
-
 
 @register_deserializable
 class VertexAILlm(BaseLlm):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
+        try:
+            importlib.import_module("vertexai")
+        except ModuleNotFoundError:
+            raise ModuleNotFoundError(
+                "The required dependencies for VertexAI are not installed."
+                'Please install with `pip install --upgrade "embedchain[vertexai]"`'
+            ) from None
         super().__init__(config=config)
 
     def get_llm_model_answer(self, prompt):
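Both constructors now duplicate the same guard. A possible follow-up refactor
(purely illustrative, not part of this commit; `require_package` is a
hypothetical helper) keeps the deferred-check pattern in one place:

    import importlib


    def require_package(package: str, extra: str) -> None:
        # Call from __init__, never at module level, so that importing
        # embedchain stays safe when an optional dependency is absent.
        try:
            importlib.import_module(package)
        except ModuleNotFoundError:
            raise ModuleNotFoundError(
                f"The required dependencies for {extra} are not installed. "
                f'Please install with `pip install --upgrade "embedchain[{extra}]"`'
            ) from None

Each LLM's __init__ would then reduce to a single call such as
require_package("replicate", "llama2") or require_package("vertexai", "vertexai").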