Add OpenAI proxy (#1503)
Co-authored-by: Deshraj Yadav <deshrajdry@gmail.com>
This commit is contained in:
@@ -0,0 +1,39 @@
|
||||
import os
|
||||
from typing import Any, Dict, Optional
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from mem0.memory.setup import mem0_dir
|
||||
from mem0.vector_stores.configs import VectorStoreConfig
|
||||
from mem0.llms.configs import LlmConfig
|
||||
from mem0.embeddings.configs import EmbedderConfig
|
||||
|
||||
class MemoryItem(BaseModel):
    """A single memory record plus the metadata attached to it at retrieval time."""

    id: str = Field(..., description="The unique identifier for the text data")
    memory: str = Field(..., description="The memory deduced from the text data")  # TODO After prompt changes from platform, update this
    hash: Optional[str] = Field(None, description="The hash of the memory")
    # The metadata value can be anything and not just string. Fix it
    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional metadata for the text data")
    score: Optional[float] = Field(None, description="The score associated with the text data")
    created_at: Optional[str] = Field(None, description="The timestamp when the memory was created")
    updated_at: Optional[str] = Field(None, description="The timestamp when the memory was updated")
|
||||
|
||||
|
||||
class MemoryConfig(BaseModel):
    """Top-level memory configuration: vector store, LLM, embedder and history DB path."""

    vector_store: VectorStoreConfig = Field(
        default_factory=VectorStoreConfig,
        description="Configuration for the vector store",
    )
    llm: LlmConfig = Field(
        default_factory=LlmConfig,
        description="Configuration for the language model",
    )
    embedder: EmbedderConfig = Field(
        default_factory=EmbedderConfig,
        description="Configuration for the embedding model",
    )
    history_db_path: str = Field(
        default=os.path.join(mem0_dir, "history.db"),
        description="Path to the history database",
    )
|
||||
0
mem0/configs/embeddings/__init__.py
Normal file
0
mem0/configs/embeddings/__init__.py
Normal file
32
mem0/configs/embeddings/base.py
Normal file
32
mem0/configs/embeddings/base.py
Normal file
@@ -0,0 +1,32 @@
|
||||
from abc import ABC
|
||||
from typing import Optional
|
||||
|
||||
class BaseEmbedderConfig(ABC):
    """
    Base configuration shared by all embedding-model backends.
    """

    def __init__(
        self,
        model: Optional[str] = None,
        embedding_dims: Optional[int] = None,
        # Ollama specific
        base_url: Optional[str] = None,
    ):
        """
        Initializes a configuration class instance for the Embeddings.

        :param model: Embedding model to use, defaults to None
        :type model: Optional[str], optional
        :param embedding_dims: The number of dimensions in the embedding, defaults to None
        :type embedding_dims: Optional[int], optional
        :param base_url: Base URL for the Ollama API, defaults to None
        :type base_url: Optional[str], optional
        """
        # Common settings for every embedder backend.
        self.model = model
        self.embedding_dims = embedding_dims
        # Ollama specific
        self.base_url = base_url
|
||||
@@ -29,3 +29,14 @@ Constraint for deducing facts, preferences, and memories:
|
||||
|
||||
Deduced facts, preferences, and memories:
|
||||
"""
|
||||
|
||||
MEMORY_ANSWER_PROMPT = """
|
||||
You are an expert at answering questions based on the provided memories. Your task is to provide accurate and concise answers to the questions by leveraging the information given in the memories.
|
||||
|
||||
Guidelines:
|
||||
- Extract relevant information from the memories based on the question.
|
||||
- If no relevant information is found, make sure you don't say no information is found. Instead, accept the question and provide a general response.
|
||||
- Ensure that the answers are clear, concise, and directly address the question.
|
||||
|
||||
Here are the details of the task:
|
||||
"""
|
||||
|
||||
Reference in New Issue
Block a user