t6_mem0/mem0/utils/factory.py

import importlib

from mem0.configs.llms.base import BaseLlmConfig


def load_class(class_type):
    module_path, class_name = class_type.rsplit(".", 1)
    module = importlib.import_module(module_path)
    return getattr(module, class_name)
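
# Note (added for clarity, not part of the original file): load_class resolves the
# dotted paths stored in the factory maps below. Assuming the mem0 package is
# importable, load_class("mem0.llms.openai.OpenAILLM") returns the OpenAILLM class
# object itself, not an instance of it.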


class LlmFactory:
    provider_to_class = {
        "ollama": "mem0.llms.ollama.OllamaLLM",
        "openai": "mem0.llms.openai.OpenAILLM",
        "groq": "mem0.llms.groq.GroqLLM",
        "together": "mem0.llms.together.TogetherLLM",
        "aws_bedrock": "mem0.llms.aws_bedrock.AWSBedrockLLM",
        "litellm": "mem0.llms.litellm.LiteLLM",
        "azure_openai": "mem0.llms.azure_openai.AzureOpenAILLM",
    }

    @classmethod
    def create(cls, provider_name, config):
        class_type = cls.provider_to_class.get(provider_name)
        if class_type:
            llm_instance = load_class(class_type)
            base_config = BaseLlmConfig(**config)
            return llm_instance(base_config)
        else:
            raise ValueError(f"Unsupported Llm provider: {provider_name}")


class EmbedderFactory:
    provider_to_class = {
        "openai": "mem0.embeddings.openai.OpenAIEmbedding",
        "ollama": "mem0.embeddings.ollama.OllamaEmbedding",
    }

    @classmethod
    def create(cls, provider_name):
        class_type = cls.provider_to_class.get(provider_name)
        if class_type:
            embedder_instance = load_class(class_type)()
            return embedder_instance
        else:
            raise ValueError(f"Unsupported Embedder provider: {provider_name}")


class VectorStoreFactory:
    provider_to_class = {
        "qdrant": "mem0.vector_stores.qdrant.Qdrant",
        "chromadb": "mem0.vector_stores.chroma.ChromaDB",
    }

    @classmethod
    def create(cls, provider_name, config):
        class_type = cls.provider_to_class.get(provider_name)
        if class_type:
            if not isinstance(config, dict):
                config = config.model_dump()
            vector_store_instance = load_class(class_type)
            return vector_store_instance(**config)
        else:
            raise ValueError(f"Unsupported VectorStore provider: {provider_name}")