[chore]: Rename modules for better readability and maintainability (#587)

Author: Deshraj Yadav
Date: 2023-09-10 18:31:40 -07:00
Committed by: GitHub
Parent: 6fed75bb45
Commit: 79f5a1d052
65 changed files with 109 additions and 108 deletions
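At a glance, the commit maps the old module paths and class names onto shorter ones. A minimal before/after sketch of the renames visible in these hunks (the module paths of the OpenAI and Azure OpenAI LLM files are not shown in the diff, so only their class names are noted in comments):

# Before this commit:
#   from embedchain.helper_classes.json_serializable import JSONSerializable, register_deserializable
#   from embedchain.llm.base_llm import BaseLlm
#   OpenAiLlm, AzureOpenAiLlm            # old class names

# After this commit:
from embedchain.helper.json_serializable import JSONSerializable, register_deserializable
from embedchain.llm.base import BaseLlm
# OpenAILlm, AzureOpenAILlm              # renamed classes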

View File

@@ -2,8 +2,8 @@ import logging
 from typing import Optional
 from embedchain.config import BaseLlmConfig
-from embedchain.helper_classes.json_serializable import register_deserializable
-from embedchain.llm.base_llm import BaseLlm
+from embedchain.helper.json_serializable import register_deserializable
+from embedchain.llm.base import BaseLlm
 @register_deserializable

View File

@@ -2,17 +2,17 @@ import logging
 from typing import Optional
 from embedchain.config import BaseLlmConfig
-from embedchain.helper_classes.json_serializable import register_deserializable
-from embedchain.llm.base_llm import BaseLlm
+from embedchain.helper.json_serializable import register_deserializable
+from embedchain.llm.base import BaseLlm
 @register_deserializable
-class AzureOpenAiLlm(BaseLlm):
+class AzureOpenAILlm(BaseLlm):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
         super().__init__(config=config)
     def get_llm_model_answer(self, prompt):
-        return AzureOpenAiLlm._get_azure_openai_answer(prompt=prompt, config=self.config)
+        return AzureOpenAILlm._get_azure_openai_answer(prompt=prompt, config=self.config)
     @staticmethod
     def _get_azure_openai_answer(prompt: str, config: BaseLlmConfig) -> str:

View File

@@ -8,7 +8,7 @@ from embedchain.config import BaseLlmConfig
 from embedchain.config.llm.base_llm_config import (
     DEFAULT_PROMPT, DEFAULT_PROMPT_WITH_HISTORY_TEMPLATE,
     DOCS_SITE_PROMPT_TEMPLATE)
-from embedchain.helper_classes.json_serializable import JSONSerializable
+from embedchain.helper.json_serializable import JSONSerializable
 class BaseLlm(JSONSerializable):
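
The new import paths come together in the subclass pattern the other files in this commit follow. A minimal sketch, assuming only the contract visible in these hunks (an optional BaseLlmConfig constructor argument and a get_llm_model_answer method); EchoLlm and its echoed answer are hypothetical and not part of the commit:

from typing import Optional

from embedchain.config import BaseLlmConfig
from embedchain.helper.json_serializable import register_deserializable
from embedchain.llm.base import BaseLlm


@register_deserializable
class EchoLlm(BaseLlm):  # hypothetical subclass, for illustration only
    def __init__(self, config: Optional[BaseLlmConfig] = None):
        super().__init__(config=config)

    def get_llm_model_answer(self, prompt):
        # Placeholder: a real provider would call its API here.
        return f"echo: {prompt}"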

View File

@@ -1,8 +1,8 @@
 from typing import Iterable, Optional, Union
 from embedchain.config import BaseLlmConfig
-from embedchain.helper_classes.json_serializable import register_deserializable
-from embedchain.llm.base_llm import BaseLlm
+from embedchain.helper.json_serializable import register_deserializable
+from embedchain.llm.base import BaseLlm
 @register_deserializable

View File

@@ -4,8 +4,8 @@ from typing import Optional
 from langchain.llms import Replicate
 from embedchain.config import BaseLlmConfig
-from embedchain.helper_classes.json_serializable import register_deserializable
-from embedchain.llm.base_llm import BaseLlm
+from embedchain.helper.json_serializable import register_deserializable
+from embedchain.llm.base import BaseLlm
 @register_deserializable

View File

@@ -3,12 +3,12 @@ from typing import Optional
 import openai
 from embedchain.config import BaseLlmConfig
-from embedchain.helper_classes.json_serializable import register_deserializable
-from embedchain.llm.base_llm import BaseLlm
+from embedchain.helper.json_serializable import register_deserializable
+from embedchain.llm.base import BaseLlm
 @register_deserializable
-class OpenAiLlm(BaseLlm):
+class OpenAILlm(BaseLlm):
     def __init__(self, config: Optional[BaseLlmConfig] = None):
         super().__init__(config=config)

View File

@@ -2,8 +2,8 @@ import logging
 from typing import Optional
 from embedchain.config import BaseLlmConfig
-from embedchain.helper_classes.json_serializable import register_deserializable
-from embedchain.llm.base_llm import BaseLlm
+from embedchain.helper.json_serializable import register_deserializable
+from embedchain.llm.base import BaseLlm
 @register_deserializable