Support for Openrouter (#1628)

This commit is contained in:
Dev Khant
2024-08-03 22:51:03 +05:30
committed by GitHub
parent 5837991e5c
commit 04b4807145
4 changed files with 68 additions and 7 deletions

View File

@@ -42,6 +42,17 @@ config = {
}
}
# Use Openrouter by passing its API key
# os.environ["OPENROUTER_API_KEY"] = "your-api-key"
# config = {
# "llm": {
# "provider": "openai",
# "config": {
# "model": "meta-llama/llama-3.1-70b-instruct",
# }
# }
# }
m = Memory.from_config(config)
m.add("Likes to play cricket on weekends", user_id="alice", metadata={"category": "hobbies"})
```

View File

@@ -11,10 +11,18 @@ class BaseLlmConfig(ABC):
model: Optional[str] = None,
temperature: float = 0,
max_tokens: int = 3000,
top_p: float = 1,
top_p: float = 0,
top_k: int = 1,
# Openrouter specific
models: Optional[list[str]] = None,
route: Optional[str] = "fallback",
openrouter_base_url: Optional[str] = "https://openrouter.ai/api/v1",
site_url: Optional[str] = None,
app_name: Optional[str] = None,
# Ollama specific
base_url: Optional[str] = None
ollama_base_url: Optional[str] = None
):
"""
Initializes a configuration class instance for the LLM.
@@ -29,14 +37,34 @@ class BaseLlmConfig(ABC):
:param top_p: Controls the diversity of words. Higher values (closer to 1) make word selection more diverse,
defaults to 0
:type top_p: float, optional
:param base_url: The base URL of the LLM, defaults to None
:type base_url: Optional[str], optional
:param top_k: Controls the diversity of words. Higher values make word selection more diverse, defaults to 1
:type top_k: int, optional
:param models: Controls the Openrouter models used, defaults to None
:type models: Optional[list[str]], optional
:param route: Controls the Openrouter route used, defaults to "fallback"
:type route: Optional[str], optional
:param openrouter_base_url: Controls the Openrouter base URL used, defaults to "https://openrouter.ai/api/v1"
:type openrouter_base_url: Optional[str], optional
:param site_url: Controls the Openrouter site URL used, defaults to None
:type site_url: Optional[str], optional
:param app_name: Controls the Openrouter app name used, defaults to None
:type app_name: Optional[str], optional
:param ollama_base_url: The base URL of the Ollama server, defaults to None
:type ollama_base_url: Optional[str], optional
"""
self.model = model
self.temperature = temperature
self.max_tokens = max_tokens
self.top_p = top_p
self.top_k = top_k
# Openrouter specific
self.models = models
self.route = route
self.openrouter_base_url = openrouter_base_url
self.site_url = site_url
self.app_name = app_name
# Ollama specific
self.base_url = base_url
self.ollama_base_url = ollama_base_url

View File

@@ -14,7 +14,7 @@ class OllamaLLM(LLMBase):
if not self.config.model:
self.config.model="llama3.1:70b"
self.client = Client(host=self.config.base_url)
self.client = Client(host=self.config.ollama_base_url)
self._ensure_model_exists()
def _ensure_model_exists(self):

View File

@@ -1,3 +1,4 @@
import os
import json
from typing import Dict, List, Optional
@@ -12,7 +13,11 @@ class OpenAILLM(LLMBase):
if not self.config.model:
self.config.model="gpt-4o"
self.client = OpenAI()
if os.environ.get("OPENROUTER_API_KEY"):
self.client = OpenAI(api_key=os.environ.get("OPENROUTER_API_KEY"), base_url=self.config.openrouter_base_url)
else:
self.client = OpenAI()
def _parse_response(self, response, tools):
"""
@@ -68,6 +73,23 @@ class OpenAILLM(LLMBase):
"max_tokens": self.config.max_tokens,
"top_p": self.config.top_p
}
if os.getenv("OPENROUTER_API_KEY"):
openrouter_params = {}
if self.config.models:
openrouter_params["models"] = self.config.models
openrouter_params["route"] = self.config.route
params.pop("model")
if self.config.site_url and self.config.app_name:
extra_headers={
"HTTP-Referer": self.config.site_url,
"X-Title": self.config.app_name
}
openrouter_params["extra_headers"] = extra_headers
params.update(**openrouter_params)
if response_format:
params["response_format"] = response_format
if tools: