[Docs] Documentation updates (#1014)
@@ -1,6 +1,9 @@
<p>If you can't find the specific data source, please feel free to request it through one of the following channels and help us prioritize.</p>

<CardGroup cols={2}>
  <Card title="Google Form" icon="file" href="https://forms.gle/NDRCKsRpUHsz2Wcm8" color="#7387d0">
    Fill out this form
  </Card>
  <Card title="Slack" icon="slack" href="https://join.slack.com/t/embedchain/shared_invite/zt-22uwz3c46-Zg7cIh5rOBteT_xe1jwLDw" color="#4A154B">
    Let us know on our Slack community
  </Card>

docs/api-reference/pipeline/delete.mdx (new file, 19 lines)
@@ -0,0 +1,19 @@
---
title: 🗑 delete
---

The `delete_chat_history()` method allows you to delete all previous messages in a chat history.

## Usage

```python
from embedchain import Pipeline as App

app = App()

app.add("https://www.forbes.com/profile/elon-musk")

app.chat("What is the net worth of Elon Musk?")

app.delete_chat_history()
```
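
As a hedged follow-up to the snippet above (the second question and the `OPENAI_API_KEY` placeholder are illustrative, not part of the page): once the history is deleted, the next `chat()` call runs without the earlier exchange in memory.

```python
import os

from embedchain import Pipeline as App

os.environ["OPENAI_API_KEY"] = "sk-xxx"  # placeholder key, illustrative only

app = App()
app.add("https://www.forbes.com/profile/elon-musk")

app.chat("What is the net worth of Elon Musk?")
app.delete_chat_history()  # wipes the stored conversation for this app

# Illustrative follow-up: answered without the previous question/answer in memory,
# because the chat history was just deleted.
app.chat("How did he build that fortune?")
```
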
@@ -14,4 +14,4 @@ app.add("https://www.forbes.com/profile/elon-musk")

# Reset the app
app.reset()
```
```

@@ -4,7 +4,7 @@ description: 'Collections of all the frequently asked questions'
---
<AccordionGroup>
<Accordion title="Does Embedchain support OpenAI's Assistant APIs?">
-Yes, it does. Please refer to the [OpenAI Assistant docs page](/get-started/openai-assistant).
+Yes, it does. Please refer to the [OpenAI Assistant docs page](/examples/openai-assistant).
</Accordion>
<Accordion title="How to use MistralAI language model?">
Use the model provided on huggingface: `mistralai/Mistral-7B-v0.1`

@@ -116,6 +116,36 @@ embedder:
```
</CodeGroup>

</Accordion>
<Accordion title="How to stream responses while using an OpenAI model in Embedchain?">
You can achieve this by setting `stream` to `true` in the config file.

<CodeGroup>
```yaml openai.yaml
llm:
  provider: openai
  config:
    model: 'gpt-3.5-turbo'
    temperature: 0.5
    max_tokens: 1000
    top_p: 1
    stream: true
```

```python main.py
import os
from embedchain import Pipeline as App

os.environ['OPENAI_API_KEY'] = 'sk-xxx'

app = App.from_config(config_path="openai.yaml")

app.add("https://www.forbes.com/profile/elon-musk")

response = app.query("What is the net worth of Elon Musk?")
# The response is streamed to stdout as it is generated.
```
</CodeGroup>
</Accordion>
</AccordionGroup>

@@ -183,7 +183,8 @@
"api-reference/pipeline/chat",
"api-reference/pipeline/search",
"api-reference/pipeline/deploy",
-"api-reference/pipeline/reset"
+"api-reference/pipeline/reset",
+"api-reference/pipeline/delete"
]
},
"api-reference/store/openai-assistant",

@@ -233,5 +234,11 @@
},
"api": {
  "baseUrl": "http://localhost:8080"
}
},
"redirects": [
  {
    "source": "/changelog/command-line",
    "destination": "/get-started/introduction"
  }
]
}

@@ -650,7 +650,7 @@ class EmbedChain(JSONSerializable):
        self.db.reset()
        self.cursor.execute("DELETE FROM data_sources WHERE pipeline_id = ?", (self.config.id,))
        self.connection.commit()
-        self.delete_history()
+        self.delete_chat_history()
        # Send anonymous telemetry
        self.telemetry.capture(event_name="reset", properties=self._telemetry_props)

@@ -661,5 +661,6 @@ class EmbedChain(JSONSerializable):
            display_format=display_format,
        )

-    def delete_history(self):
+    def delete_chat_history(self):
        self.llm.memory.delete_chat_history(app_id=self.config.id)
+        self.llm.update_history(app_id=self.config.id)
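
A caller-side sketch of this rename (mirroring the delete.mdx page added earlier in this diff); the old `delete_history()` name no longer exists after this change:

```python
from embedchain import Pipeline as App

app = App()
app.add("https://www.forbes.com/profile/elon-musk")
app.chat("What is the net worth of Elon Musk?")

# Old call, removed by this change:
# app.delete_history()

# New call: deletes the persisted history for this app id and then refreshes
# the in-memory copy via llm.update_history().
app.delete_chat_history()
```
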
@@ -48,8 +48,7 @@ class BaseLlm(JSONSerializable):
    def update_history(self, app_id: str):
        """Update class history attribute with history in memory (for chat method)"""
        chat_history = self.memory.get_recent_memories(app_id=app_id, num_rounds=10)
-        if chat_history:
-            self.set_history([str(history) for history in chat_history])
+        self.set_history([str(history) for history in chat_history])

    def add_history(self, app_id: str, question: str, answer: str, metadata: Optional[Dict[str, Any]] = None):
        chat_message = ChatMessage()
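
Dropping the `if chat_history:` guard matters when memory is empty: `set_history` now runs unconditionally, so a cleared memory also clears the cached history, which the `delete_chat_history()` change above appears to rely on. A self-contained sketch of the difference (the class below is a stand-in with illustrative names, not embedchain's `BaseLlm`):

```python
class FakeLlm:
    """Stand-in for the history-caching behaviour; attribute names are illustrative."""

    def __init__(self, memories):
        self._memories = memories                      # plays the role of stored chat memory
        self.history = ["old question", "old answer"]  # stale cached history

    def update_history_old(self):
        chat_history = self._memories
        if chat_history:  # pre-change guard: an empty memory left the stale cache untouched
            self.history = [str(m) for m in chat_history]

    def update_history_new(self):
        chat_history = self._memories
        # post-change: always overwrite, so an empty memory clears the cache too
        self.history = [str(m) for m in chat_history]


llm = FakeLlm(memories=[])
llm.update_history_old()
print(llm.history)  # ['old question', 'old answer'] -- stale entries survive
llm.update_history_new()
print(llm.history)  # [] -- cache is cleared
```
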