Update embedder docs to show openai key is used for LLM (#2033)
@@ -15,6 +15,8 @@ os.environ["EMBEDDING_AZURE_DEPLOYMENT"] = "your-deployment-name"
 os.environ["EMBEDDING_AZURE_ENDPOINT"] = "your-api-base-url"
 os.environ["EMBEDDING_AZURE_API_VERSION"] = "version-to-use"
 
+os.environ["OPENAI_API_KEY"] = "your_api_key" # For LLM
+
 
 config = {
     "embedder": {
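For context, the Azure OpenAI snippet this hunk edits ends up roughly like the sketch below. The provider key "azure_openai", the embedding model name, and the EMBEDDING_AZURE_OPENAI_API_KEY variable are assumptions based on the surrounding doc, not part of this diff; the point of the change is that the EMBEDDING_AZURE_* variables authenticate the embedder, while OPENAI_API_KEY only feeds the default OpenAI LLM.

import os
from mem0 import Memory

# Embedder credentials (Azure OpenAI) -- assumed variable name for the key
os.environ["EMBEDDING_AZURE_OPENAI_API_KEY"] = "your-azure-api-key"
os.environ["EMBEDDING_AZURE_DEPLOYMENT"] = "your-deployment-name"
os.environ["EMBEDDING_AZURE_ENDPOINT"] = "your-api-base-url"
os.environ["EMBEDDING_AZURE_API_VERSION"] = "version-to-use"

# Separate key for the default LLM, which calls the regular OpenAI API
os.environ["OPENAI_API_KEY"] = "your_api_key"  # For LLM

config = {
    "embedder": {
        "provider": "azure_openai",                      # assumed provider key
        "config": {"model": "text-embedding-3-large"},   # assumed model name
    }
}

m = Memory.from_config(config)
m.add("Likes hiking on weekends", user_id="alice")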
@@ -11,6 +11,7 @@ import os
 from mem0 import Memory
 
 os.environ["GOOGLE_API_KEY"] = "key"
+os.environ["OPENAI_API_KEY"] = "your_api_key" # For LLM
 
 config = {
     "embedder": {
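A minimal sketch of why the extra key is needed here: GOOGLE_API_KEY serves the Gemini embedder, while OPENAI_API_KEY serves the LLM. The "gemini" provider key, the embedding model, and the explicit "llm" block with "gpt-4o-mini" are assumptions added for illustration, not taken from this diff.

import os
from mem0 import Memory

os.environ["GOOGLE_API_KEY"] = "key"            # used by the Gemini embedder
os.environ["OPENAI_API_KEY"] = "your_api_key"   # used only by the LLM

config = {
    "embedder": {
        "provider": "gemini",                               # assumed provider key
        "config": {"model": "models/text-embedding-004"},   # assumed model
    },
    "llm": {
        "provider": "openai",                # the part that actually reads OPENAI_API_KEY
        "config": {"model": "gpt-4o-mini"},  # assumed model
    },
}

m = Memory.from_config(config)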
@@ -10,7 +10,7 @@ You can use embedding models from Huggingface to run Mem0 locally.
 import os
 from mem0 import Memory
 
-os.environ["OPENAI_API_KEY"] = "your_api_key"
+os.environ["OPENAI_API_KEY"] = "your_api_key" # For LLM
 
 config = {
     "embedder": {
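The comment added above matters because the Huggingface embedder runs locally, so the OpenAI key plays no role in embedding. A rough sketch of the full snippet, with the "huggingface" provider key and the sentence-transformers model name assumed rather than taken from this diff:

import os
from mem0 import Memory

# The Huggingface embedder runs locally; this key is only for the LLM side.
os.environ["OPENAI_API_KEY"] = "your_api_key"  # For LLM

config = {
    "embedder": {
        "provider": "huggingface",                          # assumed provider key
        "config": {"model": "multi-qa-MiniLM-L6-cos-v1"},   # assumed local model
    }
}

m = Memory.from_config(config)
m.add("Prefers vegetarian food", user_id="alice")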
@@ -6,7 +6,7 @@ You can use embedding models from Ollama to run Mem0 locally.
 import os
 from mem0 import Memory
 
-os.environ["OPENAI_API_KEY"] = "your_api_key"
+os.environ["OPENAI_API_KEY"] = "your_api_key" # For LLM
 
 config = {
     "embedder": {
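Same clarification for Ollama: the key only exists for the default LLM. If the LLM is also pointed at Ollama, the OpenAI key can be dropped entirely. A sketch under that assumption; the provider keys and the "nomic-embed-text" and "llama3.1:8b" model names are illustrative guesses, not part of this diff.

from mem0 import Memory

# Fully local setup: no OPENAI_API_KEY needed when both sides use Ollama.
config = {
    "embedder": {
        "provider": "ollama",                     # assumed provider key
        "config": {"model": "nomic-embed-text"},  # assumed local embedding model
    },
    "llm": {
        "provider": "ollama",                     # assumed provider key
        "config": {"model": "llama3.1:8b"},       # assumed local chat model
    },
}

m = Memory.from_config(config)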
@@ -13,6 +13,7 @@ import os
 from mem0 import Memory
 
 os.environ["TOGETHER_API_KEY"] = "your_api_key"
+os.environ["OPENAI_API_KEY"] = "your_api_key" # For LLM
 
 config = {
     "embedder": {
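Here TOGETHER_API_KEY covers the embedder and OPENAI_API_KEY covers the LLM. A minimal sketch of the resulting snippet; the "together" provider key and the embedding model name are assumptions for illustration:

import os
from mem0 import Memory

os.environ["TOGETHER_API_KEY"] = "your_api_key"  # embedder
os.environ["OPENAI_API_KEY"] = "your_api_key"    # For LLM

config = {
    "embedder": {
        "provider": "together",  # assumed provider key
        "config": {"model": "togethercomputer/m2-bert-80M-8k-retrieval"},  # assumed model
    }
}

m = Memory.from_config(config)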
@@ -10,6 +10,7 @@ from mem0 import Memory
 
 # Set the path to your Google Cloud credentials JSON file
 os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/path/to/your/credentials.json"
+os.environ["OPENAI_API_KEY"] = "your_api_key" # For LLM
 
 config = {
     "embedder": {
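Vertex AI authenticates through the service-account JSON rather than an API key, so the added OPENAI_API_KEY line is again only for the default OpenAI LLM. A rough sketch of the full snippet; the "vertexai" provider key and the "text-embedding-004" model name are assumptions, not part of this diff:

import os
from mem0 import Memory

os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/path/to/your/credentials.json"
os.environ["OPENAI_API_KEY"] = "your_api_key"  # For LLM

config = {
    "embedder": {
        "provider": "vertexai",                     # assumed provider key
        "config": {"model": "text-embedding-004"},  # assumed Vertex embedding model
    }
}

m = Memory.from_config(config)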