Add Litellm support (#1493)
This commit is contained in:
@@ -11,6 +11,7 @@ Mem0 includes built-in support for various popular large language models. Memory
|
||||
<Card title="Groq" href="#groq"></Card>
|
||||
<Card title="Together" href="#together"></Card>
|
||||
<Card title="AWS Bedrock" href="#aws_bedrock"></Card>
|
||||
<Card title="Litellm" href="#litellm"></Card>
|
||||
</CardGroup>
|
||||
|
||||
## OpenAI
|
||||
@@ -123,3 +124,27 @@ config = {
|
||||
m = Memory.from_config(config)
|
||||
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
|
||||
```
|
||||
|
||||
## Litellm
|
||||
|
||||
[Litellm](https://litellm.vercel.app/docs/) is compatible with over 100 large language models (LLMs), all using a standardized input/output format. You can explore the [available models](https://litellm.vercel.app/docs/providers) to use with Litellm. Make sure to set the API key environment variable required by the provider of the model you choose to use.
|
||||
|
||||
```python
|
||||
import os
|
||||
from mem0 import Memory
|
||||
|
||||
config = {
|
||||
"llm": {
|
||||
"provider": "litellm",
|
||||
"config": {
|
||||
"model": "gpt-3.5-turbo",
|
||||
"temperature": 0.2,
|
||||
"max_tokens": 1500,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
m = Memory.from_config(config)
|
||||
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
|
||||
```
|
||||
|
||||
|
||||
Reference in New Issue
Block a user