[chore] fix rest api docs and other minor fixes (#902)
@@ -1,3 +0,0 @@
---
openapi: post /{app_id}/chat
---
@@ -1,3 +0,0 @@
---
openapi: get /ping
---
@@ -1,3 +0,0 @@
---
openapi: post /create
---
@@ -1,3 +0,0 @@
---
openapi: delete /{app_id}/delete
---
@@ -1,3 +0,0 @@
---
openapi: get /apps
---
@@ -1,3 +0,0 @@
---
openapi: get /{app_id}/data
---
@@ -1,102 +0,0 @@
---
title: "🌍 Getting Started"
---

## Quickstart

To run Embedchain as a REST API server, use:

```bash
docker run -d --name embedchain -p 8080:8080 embedchain/rest-api:latest
```

Open up your browser and navigate to http://0.0.0.0:8080/docs to interact with the API. There you will find a full-fledged Swagger docs playground with all the information about the API endpoints.



## Creating your first App

An app requires an `app_id` to be created. The `app_id` is a unique identifier for your app.

By default, the open-source **gpt4all** model is used to perform operations. You can also specify your own config by uploading a config YAML file.

For example, create a `config.yaml` file (adjust according to your requirements):

```yaml
app:
  config:
    id: "default-app"

llm:
  provider: openai
  config:
    model: "gpt-3.5-turbo"
    temperature: 0.5
    max_tokens: 1000
    top_p: 1
    stream: false
    template: |
      Use the following pieces of context to answer the query at the end.
      If you don't know the answer, just say that you don't know, don't try to make up an answer.

      $context

      Query: $query

      Helpful Answer:

vectordb:
  provider: chroma
  config:
    collection_name: "rest-api-app"
    dir: db
    allow_reset: true

embedder:
  provider: openai
  config:
    model: "text-embedding-ada-002"
```

To learn more about custom configurations, check out the [Custom configurations](https://docs.embedchain.ai/advanced/configuration) page.
To explore more examples of config YAMLs for Embedchain, visit [embedchain/configs](https://github.com/embedchain/embedchain/tree/main/configs).

Now, you can upload this config file in the request body.
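
For instance, a create request could look like the following. This is a minimal sketch: the `app_id` query parameter and the `config` multipart field name are assumptions, so check the Swagger playground for the exact request schema.

```bash
# Create an app called "my-app" (POST /create), uploading the config.yaml shown above.
# The app_id query parameter and the "config" multipart field name are assumptions;
# verify them against the Swagger docs at /docs.
curl -X POST "http://localhost:8080/create?app_id=my-app" \
  -F "config=@config.yaml"
```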

**Note:** To use custom models, an **API key** might be required. Refer to the table below to determine the necessary API key for your provider.

| Keys                        | Providers                        |
| --------------------------- | -------------------------------- |
| `OPENAI_API_KEY`            | OpenAI, Azure OpenAI, Jina, etc. |
| `OPENAI_API_TYPE`           | Azure OpenAI                     |
| `OPENAI_API_BASE`           | Azure OpenAI                     |
| `OPENAI_API_VERSION`        | Azure OpenAI                     |
| `COHERE_API_KEY`            | Cohere                           |
| `ANTHROPIC_API_KEY`         | Anthropic                        |
| `JINACHAT_API_KEY`          | Jina                             |
| `HUGGINGFACE_ACCESS_TOKEN`  | Hugging Face                     |
| `REPLICATE_API_TOKEN`       | LLAMA2                           |

To provide them, simply run the docker command with the `-e` flag.

For example,

```bash
docker run -d --name embedchain -p 8080:8080 -e OPENAI_API_KEY=YOUR_API_KEY embedchain/rest-api:latest
```

Cool! This will create a new Embedchain App with the given `app_id`.
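
From here you can start querying the app over the `POST /{app_id}/query` endpoint. A minimal sketch, assuming the request body is JSON with a `query` field:

```bash
# Ask the app a question (POST /{app_id}/query).
# The JSON body with a "query" field is an assumption; confirm the schema in the Swagger docs.
curl -X POST "http://localhost:8080/my-app/query" \
  -H "Content-Type: application/json" \
  -d '{"query": "What is Embedchain?"}'
```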

## Deploying your App to Embedchain Platform

This feature is very powerful: it creates a public API endpoint for your app, enabling queries from anywhere. It sets up a _pipeline_ for your app that can sync your data from time to time and provide you with the best results.



To utilize this functionality, visit [app.embedchain.ai](https://app.embedchain.ai) and create an account. Subsequently, generate a new [API key](https://app.embedchain.ai/settings/keys/).



Using this API key, you can deploy your app to the platform.
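
For example, a deploy request might look like the sketch below. Both the `POST /{app_id}/deploy` path and the `api_key` body field are assumptions here, so confirm the exact route and schema in the Swagger docs.

```bash
# Deploy the app "my-app" to the Embedchain platform.
# The /deploy path and the "api_key" field are assumptions; check the Swagger docs.
curl -X POST "http://localhost:8080/my-app/deploy" \
  -H "Content-Type: application/json" \
  -d '{"api_key": "ec-xxxxxxxx"}'
```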

@@ -1,3 +0,0 @@
---
openapi: post /{app_id}/query
---
@@ -93,17 +93,17 @@
      },
      {
        "group": "REST API",
        "pages": [
          "api-reference/getting-started",
          "api-reference/check-status",
          "api-reference/get-all-apps",
          "api-reference/create-app",
          "api-reference/query-an-app",
          "api-reference/add-datasource-to-an-app",
          "api-reference/get-datasources-associated-with-app-id",
          "api-reference/deploy-app",
          "api-reference/delete-app"
        ]
        "pages": [
          "rest-api/getting-started",
          "rest-api/create",
          "rest-api/get-all-apps",
          "rest-api/add-data",
          "rest-api/get-data",
          "rest-api/query",
          "rest-api/deploy",
          "rest-api/delete",
          "rest-api/check-status"
        ]
      },
      {
        "group": "Examples",
@@ -1,4 +1,5 @@
import os
import logging
import yaml
from fastapi import FastAPI, UploadFile, Depends, HTTPException
from sqlalchemy.orm import Session
@@ -85,6 +86,7 @@ async def create_app_using_default_config(app_id: str, config: UploadFile = None

        return DefaultResponse(response=f"App created successfully. App ID: {app_id}")
    except Exception as e:
        logging.warn(str(e))
        raise HTTPException(detail=f"Error creating app: {str(e)}", status_code=400)
@@ -114,12 +116,13 @@ async def get_datasources_associated_with_app_id(app_id: str, db: Session = Depe
        response = app.get_data_sources()
        return {"results": response}
    except ValueError as ve:
        if "OPENAI_API_KEY" in str(ve) or "OPENAI_ORGANIZATION" in str(ve):
            raise HTTPException(
                detail=generate_error_message_for_api_keys(ve),
                status_code=400,
            )
        logging.warn(str(ve))
        raise HTTPException(
            detail=generate_error_message_for_api_keys(ve),
            status_code=400,
        )
    except Exception as e:
        logging.warn(str(e))
        raise HTTPException(detail=f"Error occurred: {str(e)}", status_code=400)
@@ -152,12 +155,13 @@ async def add_datasource_to_an_app(body: SourceApp, app_id: str, db: Session = D
        response = app.add(source=body.source, data_type=body.data_type)
        return DefaultResponse(response=response)
    except ValueError as ve:
        if "OPENAI_API_KEY" in str(ve) or "OPENAI_ORGANIZATION" in str(ve):
            raise HTTPException(
                detail=generate_error_message_for_api_keys(ve),
                status_code=400,
            )
        logging.warn(str(ve))
        raise HTTPException(
            detail=generate_error_message_for_api_keys(ve),
            status_code=400,
        )
    except Exception as e:
        logging.warn(str(e))
        raise HTTPException(detail=f"Error occurred: {str(e)}", status_code=400)
@@ -189,12 +193,13 @@ async def query_an_app(body: QueryApp, app_id: str, db: Session = Depends(get_db
        response = app.query(body.query)
        return DefaultResponse(response=response)
    except ValueError as ve:
        if "OPENAI_API_KEY" in str(ve) or "OPENAI_ORGANIZATION" in str(ve):
            raise HTTPException(
                detail=generate_error_message_for_api_keys(ve),
                status_code=400,
            )
        logging.warn(str(ve))
        raise HTTPException(
            detail=generate_error_message_for_api_keys(ve),
            status_code=400,
        )
    except Exception as e:
        logging.warn(str(e))
        raise HTTPException(detail=f"Error occurred: {str(e)}", status_code=400)
@@ -230,7 +235,6 @@ async def query_an_app(body: QueryApp, app_id: str, db: Session = Depends(get_db
#         response = app.chat(body.message)
#         return DefaultResponse(response=response)
#     except ValueError as ve:
#         if "OPENAI_API_KEY" in str(ve) or "OPENAI_ORGANIZATION" in str(ve):
#             raise HTTPException(
#                 detail=generate_error_message_for_api_keys(ve),
#                 status_code=400,
@@ -272,12 +276,13 @@ async def deploy_app(body: DeployAppRequest, app_id: str, db: Session = Depends(
        app.deploy()
        return DefaultResponse(response="App deployed successfully.")
    except ValueError as ve:
        if "OPENAI_API_KEY" in str(ve) or "OPENAI_ORGANIZATION" in str(ve):
            raise HTTPException(
                detail=generate_error_message_for_api_keys(ve),
                status_code=400,
            )
        logging.warn(str(ve))
        raise HTTPException(
            detail=generate_error_message_for_api_keys(ve),
            status_code=400,
        )
    except Exception as e:
        logging.warn(str(e))
        raise HTTPException(detail=f"Error occurred: {str(e)}", status_code=400)
@@ -1,5 +1,6 @@
fastapi==0.104.0
uvicorn==0.23.2
embedchain==0.0.86
embedchain[dataloaders]==0.0.86
sqlalchemy==2.0.22
embedchain==0.0.90
embedchain[streamlit, community, opensource, elasticsearch, opensearch, poe, discord, slack, whatsapp, weaviate, pinecone, qdrant, images, huggingface_hub, cohere, milvus, dataloaders, vertexai, llama2, gmail, json]==0.0.90
sqlalchemy==2.0.22
python-multipart==0.0.6
@@ -18,4 +18,4 @@ def generate_error_message_for_api_keys(error: ValueError) -> str:
            Example: `docker run -e {missing_keys[0]}=xxx embedchain/rest-api:latest`
        """
    else:
        return "Unknown error occurred."
        return "Error: " + str(error)