Update notebooks to use dict instead of yaml and remove dataloaders (#1075)

Sidharth Mohanty
2023-12-29 21:57:46 +05:30
committed by GitHub
parent 904baac153
commit 6df63d9ca7
16 changed files with 230 additions and 554 deletions
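
In plain Python, the change the hunks below make to this notebook amounts to the following sketch (assembled from the added lines of the diff; the model id and settings are copied verbatim from it, and the comments are explanatory only):

import os
from embedchain import App

os.environ["REPLICATE_API_TOKEN"] = "xxx"  # Replicate token, as in the notebook

# Before: the settings were written to llama2.yaml and loaded with
# App.from_config(config_path="llama2.yaml").
# After: the same settings are passed inline as a dict, so no YAML file is needed.
app = App.from_config(config={
    "provider": "llama2",
    "config": {
        "model": "a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
        "temperature": 0.5,
        "max_tokens": 1000,
        "top_p": 0.5,
        "stream": False
    }
})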


@@ -30,7 +30,7 @@
},
"outputs": [],
"source": [
"!pip install embedchain[dataloaders,llama2]"
"!pip install embedchain[llama2]"
]
},
{
@@ -59,46 +59,13 @@
"os.environ[\"REPLICATE_API_TOKEN\"] = \"xxx\""
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "Ns6RhPfbiitr"
},
"source": [
"### Step-3: Define your llm and embedding model config"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "S9CkxVjriotB"
},
"outputs": [],
"source": [
"config = \"\"\"\n",
"llm:\n",
" provider: llama2\n",
" config:\n",
" model: 'a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5'\n",
" temperature: 0.5\n",
" max_tokens: 1000\n",
" top_p: 0.5\n",
" stream: false\n",
"\"\"\"\n",
"\n",
"# Write the multi-line string to a YAML file\n",
"with open('llama2.yaml', 'w') as file:\n",
" file.write(config)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "PGt6uPLIi1CS"
},
"source": [
"### Step-4 Create embedchain app based on the config"
"### Step-3 Create embedchain app and define your config"
]
},
{
@@ -109,7 +76,16 @@
},
"outputs": [],
"source": [
"app = App.from_config(config_path=\"llama2.yaml\")"
"app = App.from_config(config={\n",
" \"provider\": \"llama2\",\n",
" \"config\": {\n",
" \"model\": \"a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5\",\n",
" \"temperature\": 0.5,\n",
" \"max_tokens\": 1000,\n",
" \"top_p\": 0.5,\n",
" \"stream\": False\n",
" }\n",
"})"
]
},
{
@@ -118,7 +94,7 @@
"id": "XNXv4yZwi7ef"
},
"source": [
"### Step-5: Add data sources to your app"
"### Step-4: Add data sources to your app"
]
},
{
@@ -143,7 +119,7 @@
"id": "_7W6fDeAjMAP"
},
"source": [
"### Step-6: All set. Now start asking questions related to your data"
"### Step-5: All set. Now start asking questions related to your data"
]
},
{