[Refactor] Converge Pipeline and App classes (#1021)
Co-authored-by: Deven Patel <deven298@yahoo.com>
@@ -29,7 +29,7 @@ Once you have obtained the key, you can use it like this:
 
 ```python
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ['OPENAI_API_KEY'] = 'xxx'
 
@@ -44,7 +44,7 @@ If you are looking to configure the different parameters of the LLM, you can do
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ['OPENAI_API_KEY'] = 'xxx'
 
@@ -71,7 +71,7 @@ Examples:
 <Accordion title="Using Pydantic Models">
 ```python
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 from embedchain.llm.openai import OpenAILlm
 import requests
 from pydantic import BaseModel, Field, ValidationError, field_validator
@@ -123,7 +123,7 @@ print(result)
 <Accordion title="Using OpenAI JSON schema">
 ```python
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 from embedchain.llm.openai import OpenAILlm
 import requests
 from pydantic import BaseModel, Field, ValidationError, field_validator
@@ -158,7 +158,7 @@ print(result)
 <Accordion title="Using actual python functions">
 ```python
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 from embedchain.llm.openai import OpenAILlm
 import requests
 from pydantic import BaseModel, Field, ValidationError, field_validator
@@ -192,7 +192,7 @@ To use Google AI model, you have to set the `GOOGLE_API_KEY` environment variabl
 <CodeGroup>
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["GOOGLE_API_KEY"] = "xxx"
 
@@ -235,7 +235,7 @@ To use Azure OpenAI model, you have to set some of the azure openai related envi
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["OPENAI_API_TYPE"] = "azure"
 os.environ["OPENAI_API_BASE"] = "https://xxx.openai.azure.com/"
@@ -274,7 +274,7 @@ To use anthropic's model, please set the `ANTHROPIC_API_KEY` which you find on t
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["ANTHROPIC_API_KEY"] = "xxx"
 
@@ -311,7 +311,7 @@ Once you have the API key, you are all set to use it with Embedchain.
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["COHERE_API_KEY"] = "xxx"
 
@@ -347,7 +347,7 @@ Once you have the API key, you are all set to use it with Embedchain.
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["TOGETHER_API_KEY"] = "xxx"
 
@@ -375,7 +375,7 @@ Setup Ollama using https://github.com/jmorganca/ollama
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 # load llm configuration from config.yaml file
 app = App.from_config(config_path="config.yaml")
@@ -406,7 +406,7 @@ GPT4all is a free-to-use, locally running, privacy-aware chatbot. No GPU or inte
 <CodeGroup>
 
 ```python main.py
-from embedchain import Pipeline as App
+from embedchain import App
 
 # load llm configuration from config.yaml file
 app = App.from_config(config_path="config.yaml")
@@ -438,7 +438,7 @@ Once you have the key, load the app using the config yaml file:
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["JINACHAT_API_KEY"] = "xxx"
 # load llm configuration from config.yaml file
@@ -474,7 +474,7 @@ Once you have the token, load the app using the config yaml file:
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["HUGGINGFACE_ACCESS_TOKEN"] = "xxx"
 
@@ -504,7 +504,7 @@ Once you have the token, load the app using the config yaml file:
 
 ```python main.py
 import os
-from embedchain import Pipeline as App
+from embedchain import App
 
 os.environ["REPLICATE_API_TOKEN"] = "xxx"
 
@@ -531,7 +531,7 @@ Setup Google Cloud Platform application credentials by following the instruction
 <CodeGroup>
 
 ```python main.py
-from embedchain import Pipeline as App
+from embedchain import App
 
 # load llm configuration from config.yaml file
 app = App.from_config(config_path="config.yaml")
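For reference, a minimal sketch of how the converged class is used after this change. It is assembled from the snippets touched above (the `config.yaml` path and the placeholder API key come from those hunks); the `add`/`query` calls at the end are ordinary embedchain usage shown only for illustration and are not part of this diff.

```python
import os

# Single import after the convergence; `Pipeline as App` is no longer needed.
from embedchain import App

os.environ["OPENAI_API_KEY"] = "xxx"  # placeholder key, as in the docs updated here

# Load the LLM configuration from a YAML file, mirroring the updated snippets.
app = App.from_config(config_path="config.yaml")

# Illustrative usage (not part of this diff): index a source and query it.
app.add("https://www.forbes.com/profile/elon-musk")
print(app.query("What is the net worth of Elon Musk?"))
```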