Telemetry support for ec CLI commands (#1030)

Author: Sidharth Mohanty
Date: 2023-12-19 19:29:04 +05:30
Committed by: GitHub
Parent: b4f3bbbbc9
Commit: 48c38b5dc3
8 changed files with 47 additions and 21 deletions

View File

@@ -8,6 +8,8 @@ import click
 import pkg_resources
 from rich.console import Console
 
+from embedchain.telemetry.posthog import AnonymousTelemetry
+
 console = Console()
@@ -16,6 +18,26 @@ def cli():
     pass
 
 
+anonymous_telemetry = AnonymousTelemetry()
+
+
+def get_pkg_path_from_name(template: str):
+    try:
+        # Determine the installation location of the embedchain package
+        package_path = pkg_resources.resource_filename("embedchain", "")
+    except ImportError:
+        console.print("❌ [bold red]Failed to locate the 'embedchain' package. Is it installed?[/bold red]")
+        return
+
+    # Construct the source path from the embedchain package
+    src_path = os.path.join(package_path, "deployment", template)
+    if not os.path.exists(src_path):
+        console.print(f"❌ [bold red]Template '{template}' not found.[/bold red]")
+        return
+    return src_path
+
+
 def setup_fly_io_app(extra_args):
     fly_launch_command = ["fly", "launch", "--region", "sjc", "--no-deploy"] + list(extra_args)
     try:
@@ -49,20 +71,10 @@ def setup_modal_com_app(extra_args):
 @click.option("--template", default="fly.io", help="The template to use.")
 @click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
 def create(template, extra_args):
-    try:
-        # Determine the installation location of the embedchain package
-        package_path = pkg_resources.resource_filename("embedchain", "")
-    except ImportError:
-        console.print("❌ [bold red]Failed to locate the 'embedchain' package. Is it installed?[/bold red]")
-        return
-
-    # Construct the source path from the embedchain package
-    src_path = os.path.join(package_path, "deployment", template)
-    if not os.path.exists(src_path):
-        console.print(f"❌ [bold red]Template '{template}' not found.[/bold red]")
-        return
-
+    anonymous_telemetry.capture(
+        event_name="ec_create", properties={"template_used": template}
+    )
+    src_path = get_pkg_path_from_name(template)
     shutil.copytree(src_path, os.getcwd(), dirs_exist_ok=True)
     env_sample_path = os.path.join(src_path, ".env.example")
     if os.path.exists(env_sample_path):
@@ -122,6 +134,9 @@ def dev(debug, host, port):
         embedchain_config = json.load(file)
         template = embedchain_config["provider"]
 
+    anonymous_telemetry.capture(
+        event_name="ec_dev", properties={"template_used": template}
+    )
     if template == "fly.io":
         run_dev_fly_io(debug, host, port)
     elif template == "modal.com":
@@ -207,6 +222,10 @@ def deploy():
     with open("embedchain.json", "r") as file:
         embedchain_config = json.load(file)
         template = embedchain_config["provider"]
+
+    anonymous_telemetry.capture(
+        event_name="ec_deploy", properties={"template_used": template}
+    )
     if template == "fly.io":
         deploy_fly()
     elif template == "modal.com":
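
Taken together, the CLI changes follow one pattern: a single module-level AnonymousTelemetry client, plus one capture() call per command that records the event name (ec_create, ec_dev, ec_deploy) and the deployment template in use. Below is a minimal, self-contained sketch of that pattern, assuming embedchain is installed; the standalone command is illustrative, not the full ec CLI.

# Sketch of the telemetry pattern added above; the event name and the
# "template_used" property mirror the diff, the command itself is a toy.
import click

from embedchain.telemetry.posthog import AnonymousTelemetry

anonymous_telemetry = AnonymousTelemetry()


@click.command()
@click.option("--template", default="fly.io", help="The template to use.")
def create(template):
    # Fire one anonymous event before doing the actual work.
    anonymous_telemetry.capture(
        event_name="ec_create", properties={"template_used": template}
    )
    click.echo(f"Scaffolding app from template: {template}")


if __name__ == "__main__":
    create()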

View File

@@ -2,6 +2,9 @@ from fastapi import FastAPI, responses
 from pydantic import BaseModel
 
 from embedchain import Pipeline
+from dotenv import load_dotenv
+
+load_dotenv(".env")
 
 app = FastAPI(title="Embedchain FastAPI App")
 embedchain_app = Pipeline()
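
The FastAPI template now calls load_dotenv(".env") before the Pipeline is constructed, so values copied from .env.example into a local .env file become available through the environment. A minimal sketch of that behavior follows; OPENAI_API_KEY is an assumed example variable, not taken from the diff.

# load_dotenv(".env") reads KEY=VALUE pairs from a local .env file into
# os.environ; it is a no-op if the file is missing.
import os

from dotenv import load_dotenv

load_dotenv(".env")

# OPENAI_API_KEY is only an illustrative key name.
print("OPENAI_API_KEY set:", "OPENAI_API_KEY" in os.environ)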

View File

@@ -1,4 +1,4 @@
 fastapi==0.104.0
 uvicorn==0.23.2
-embedchain==0.1.34
+embedchain
 beautifulsoup4

View File

@@ -1,4 +1,4 @@
 modal==0.56.4329
 fastapi==0.104.0
 uvicorn==0.23.2
-embedchain==0.1.34
+embedchain

View File

@@ -33,14 +33,16 @@ class OpenAILlm(BaseLlm):
         if config.top_p:
             kwargs["model_kwargs"]["top_p"] = config.top_p
         if config.stream:
-            from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+            from langchain.callbacks.streaming_stdout import \
+                StreamingStdOutCallbackHandler
 
             callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
             chat = ChatOpenAI(**kwargs, streaming=config.stream, callbacks=callbacks)
         else:
             chat = ChatOpenAI(**kwargs)
         if self.functions is not None:
-            from langchain.chains.openai_functions import create_openai_fn_runnable
+            from langchain.chains.openai_functions import \
+                create_openai_fn_runnable
             from langchain.prompts import ChatPromptTemplate
 
             structured_prompt = ChatPromptTemplate.from_messages(messages)
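
The changes in this last file only re-wrap two deferred imports across lines (isort style); behavior is unchanged. For context, the streaming branch it touches uses the standard LangChain callback pattern, sketched below under the assumption that langchain is installed and OPENAI_API_KEY is set in the environment.

# Minimal sketch of the streaming pattern: tokens are printed to stdout
# as they arrive via StreamingStdOutCallbackHandler.
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chat_models import ChatOpenAI

chat = ChatOpenAI(streaming=True, callbacks=[StreamingStdOutCallbackHandler()])
chat.predict("Reply with a one-line greeting.")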