Telemetry support for ec cli commands (#1030)

Sidharth Mohanty authored on 2023-12-19 19:29:04 +05:30 (committed by GitHub)
parent b4f3bbbbc9
commit 48c38b5dc3
8 changed files with 47 additions and 21 deletions


@@ -70,7 +70,7 @@ Feel free to edit the files as required.
 - `.env`: Contains environment variables for production
 - `.env.example`: Contains dummy environment variables (can ignore this file)
 - `embedchain.json`: Contains embedchain specific configuration for deployment (you don't need to configure this)
-- `requirements.txt`: Contains python dependencies for your FastAPI application
+- `requirements.txt`: Contains python dependencies for your application

 ## Step-3: Test app locally


@@ -8,6 +8,8 @@ import click
 import pkg_resources
 from rich.console import Console
+
+from embedchain.telemetry.posthog import AnonymousTelemetry

 console = Console()
@@ -16,6 +18,26 @@ def cli():
     pass


+anonymous_telemetry = AnonymousTelemetry()
+
+
+def get_pkg_path_from_name(template: str):
+    try:
+        # Determine the installation location of the embedchain package
+        package_path = pkg_resources.resource_filename("embedchain", "")
+    except ImportError:
+        console.print("❌ [bold red]Failed to locate the 'embedchain' package. Is it installed?[/bold red]")
+        return
+
+    # Construct the source path from the embedchain package
+    src_path = os.path.join(package_path, "deployment", template)
+
+    if not os.path.exists(src_path):
+        console.print(f"❌ [bold red]Template '{template}' not found.[/bold red]")
+        return
+
+    return src_path
+

 def setup_fly_io_app(extra_args):
     fly_launch_command = ["fly", "launch", "--region", "sjc", "--no-deploy"] + list(extra_args)
     try:
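For reference, the lookup performed by the new get_pkg_path_from_name helper can be exercised on its own; a minimal sketch, assuming the embedchain package is installed and "fly.io" is one of its bundled deployment templates:

# Standalone sketch of the path lookup the helper above performs (illustrative only)
import os

import pkg_resources

package_path = pkg_resources.resource_filename("embedchain", "")  # install dir of embedchain
src_path = os.path.join(package_path, "deployment", "fly.io")      # expected template source
print(src_path, os.path.exists(src_path))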
@@ -49,20 +71,10 @@ def setup_modal_com_app(extra_args):
@click.option("--template", default="fly.io", help="The template to use.") @click.option("--template", default="fly.io", help="The template to use.")
@click.argument("extra_args", nargs=-1, type=click.UNPROCESSED) @click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
def create(template, extra_args): def create(template, extra_args):
try: anonymous_telemetry.capture(
# Determine the installation location of the embedchain package event_name="ec_create", properties={"template_used": template}
package_path = pkg_resources.resource_filename("embedchain", "") )
except ImportError: src_path = get_pkg_path_from_name(template)
console.print("❌ [bold red]Failed to locate the 'embedchain' package. Is it installed?[/bold red]")
return
# Construct the source path from the embedchain package
src_path = os.path.join(package_path, "deployment", template)
if not os.path.exists(src_path):
console.print(f"❌ [bold red]Template '{template}' not found.[/bold red]")
return
shutil.copytree(src_path, os.getcwd(), dirs_exist_ok=True) shutil.copytree(src_path, os.getcwd(), dirs_exist_ok=True)
env_sample_path = os.path.join(src_path, ".env.example") env_sample_path = os.path.join(src_path, ".env.example")
if os.path.exists(env_sample_path): if os.path.exists(env_sample_path):
@@ -122,6 +134,9 @@ def dev(debug, host, port):
         embedchain_config = json.load(file)
         template = embedchain_config["provider"]

+    anonymous_telemetry.capture(
+        event_name="ec_dev", properties={"template_used": template}
+    )
     if template == "fly.io":
         run_dev_fly_io(debug, host, port)
     elif template == "modal.com":
@@ -207,6 +222,10 @@ def deploy():
with open("embedchain.json", "r") as file: with open("embedchain.json", "r") as file:
embedchain_config = json.load(file) embedchain_config = json.load(file)
template = embedchain_config["provider"] template = embedchain_config["provider"]
anonymous_telemetry.capture(
event_name="ec_deploy", properties={"template_used": template}
)
if template == "fly.io": if template == "fly.io":
deploy_fly() deploy_fly()
elif template == "modal.com": elif template == "modal.com":
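All three commands (create, dev, deploy) now emit the same kind of event. A minimal sketch of the shared pattern, using only the names that appear in this diff:

# Sketch of the telemetry pattern added above; event name and properties mirror the diff.
from embedchain.telemetry.posthog import AnonymousTelemetry

anonymous_telemetry = AnonymousTelemetry()
anonymous_telemetry.capture(event_name="ec_create", properties={"template_used": "fly.io"})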


@@ -2,6 +2,9 @@ from fastapi import FastAPI, responses
 from pydantic import BaseModel

 from embedchain import Pipeline
+from dotenv import load_dotenv
+
+load_dotenv(".env")

 app = FastAPI(title="Embedchain FastAPI App")
 embedchain_app = Pipeline()
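The template now loads .env before the Pipeline is constructed, so keys defined there become visible through os.environ. A minimal sketch of that behaviour; the OPENAI_API_KEY name is only an example, not something this diff defines:

# Sketch: values from .env are exported into the process environment at import time.
import os

from dotenv import load_dotenv

load_dotenv(".env")                      # silently does nothing if .env is absent
print(os.environ.get("OPENAI_API_KEY"))  # hypothetical key name, for illustration only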


@@ -1,4 +1,4 @@
 fastapi==0.104.0
 uvicorn==0.23.2
-embedchain==0.1.34
+embedchain
 beautifulsoup4


@@ -1,4 +1,4 @@
 modal==0.56.4329
 fastapi==0.104.0
 uvicorn==0.23.2
-embedchain==0.1.34
+embedchain


@@ -33,14 +33,16 @@ class OpenAILlm(BaseLlm):
         if config.top_p:
             kwargs["model_kwargs"]["top_p"] = config.top_p
         if config.stream:
-            from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+            from langchain.callbacks.streaming_stdout import \
+                StreamingStdOutCallbackHandler
             callbacks = config.callbacks if config.callbacks else [StreamingStdOutCallbackHandler()]
             chat = ChatOpenAI(**kwargs, streaming=config.stream, callbacks=callbacks)
         else:
             chat = ChatOpenAI(**kwargs)

         if self.functions is not None:
-            from langchain.chains.openai_functions import create_openai_fn_runnable
+            from langchain.chains.openai_functions import \
+                create_openai_fn_runnable
             from langchain.prompts import ChatPromptTemplate

             structured_prompt = ChatPromptTemplate.from_messages(messages)
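The wrapped imports above are purely cosmetic; the streaming branch still builds a ChatOpenAI with a stdout callback. A minimal sketch of that construction outside the class, where the model name and API key handling are assumptions rather than part of this diff:

# Sketch of the streaming setup used in OpenAILlm; requires langchain and an OpenAI key in the env.
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.chat_models import ChatOpenAI

chat = ChatOpenAI(model_name="gpt-3.5-turbo", streaming=True,
                  callbacks=[StreamingStdOutCallbackHandler()])
# chat.predict("Hello")  # would stream tokens to stdout as they arrive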


@@ -1,7 +1,9 @@
 import os
-from embedchain import Pipeline as App
+
 import streamlit as st
+
+from embedchain import Pipeline as App

 with st.sidebar:
     huggingface_access_token = st.text_input("Hugging face Token", key="chatbot_api_key", type="password")
     "[Get Hugging Face Access Token](https://huggingface.co/settings/tokens)"


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "embedchain"
-version = "0.1.35"
+version = "0.1.36"
 description = "Data platform for LLMs - Load, index, retrieve and sync any unstructured data"
 authors = [
     "Taranjeet Singh <taranjeet@embedchain.ai>",