[bugfix] Fix issue when llm config is not defined (#763)

Deshraj Yadav authored 2023-10-04 12:08:21 -07:00 (committed by GitHub)
parent d0af018b8d
commit 87d0b5c76f
15 changed files with 100 additions and 88 deletions


@@ -1,11 +1,11 @@
-import os
-import logging
import hashlib
+import logging
+import os

from embedchain.loaders.base_loader import BaseLoader


class ImagesLoader(BaseLoader):
    def load_data(self, image_url):
        """
        Loads images from the supplied directory/file and applies CLIP model transformation to represent these images
@@ -15,6 +15,7 @@ class ImagesLoader(BaseLoader):
        """
        # load model and image preprocessing
        from embedchain.models.clip_processor import ClipProcessor

        model, preprocess = ClipProcessor.load_model()
        if os.path.isfile(image_url):
            data = [ClipProcessor.get_image_features(image_url, model, preprocess)]
@@ -28,8 +29,11 @@ class ImagesLoader(BaseLoader):
                    # Log the file that was not loaded
                    logging.exception("Failed to load the file {}. Exception {}".format(filepath, e))
        # Get the metadata like Size, Last Modified and Last Created timestamps
-        image_path_metadata = [str(os.path.getsize(image_url)), str(os.path.getmtime(image_url)),
-                               str(os.path.getctime(image_url))]
+        image_path_metadata = [
+            str(os.path.getsize(image_url)),
+            str(os.path.getmtime(image_url)),
+            str(os.path.getctime(image_url)),
+        ]
        doc_id = hashlib.sha256((" ".join(image_path_metadata) + image_url).encode()).hexdigest()
        return {
            "doc_id": doc_id,