feat: support openai compatible llm provider by adding baseUrl to config (#2674)

Signed-off-by: frank-zsy <syzhao1988@126.com>
Co-authored-by: Saket Aryan <94069182+whysosaket@users.noreply.github.com>
Co-authored-by: Saket Aryan <saketaryan2002@gmail.com>
This commit is contained in:
Frank Zhao
2025-05-27 02:55:23 +08:00
committed by GitHub
parent 8a280b4a54
commit 9622ac7dff
6 changed files with 11 additions and 3 deletions

View File

@@ -288,6 +288,11 @@ mode: "wide"
<Tab title="TypeScript">
<Update label="2025-05-27" description="v2.1.27">
**Improvements:**
- **OSS:** Added `baseURL` param to LLM Config.
</Update>
<Update label="2025-05-23" description="v2.1.26">
**Improvements:**
- **Client:** Removed type `string` from `messages` interface

View File

@@ -1,6 +1,6 @@
{
"name": "mem0ai",
"version": "2.1.26",
"version": "2.1.27",
"description": "The Memory Layer For Your AI Apps",
"main": "./dist/index.js",
"module": "./dist/index.mjs",

View File

@@ -20,6 +20,7 @@ export const DEFAULT_MEMORY_CONFIG: MemoryConfig = {
llm: {
provider: "openai",
config: {
baseURL: "https://api.openai.com/v1",
apiKey: process.env.OPENAI_API_KEY || "",
model: "gpt-4-turbo-preview",
modelProperties: undefined,

View File

@@ -78,8 +78,9 @@ export class ConfigManager {
} else if (userConf?.model && typeof userConf.model === "string") {
finalModel = userConf.model;
}
return {
baseURL: userConf?.baseURL || defaultConf.baseURL,
apiKey:
userConf?.apiKey !== undefined
? userConf.apiKey

View File

@@ -7,7 +7,7 @@ export class OpenAILLM implements LLM {
private model: string;
constructor(config: LLMConfig) {
this.openai = new OpenAI({ apiKey: config.apiKey });
this.openai = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
this.model = config.model || "gpt-4o-mini";
}

View File

@@ -39,6 +39,7 @@ export interface HistoryStoreConfig {
export interface LLMConfig {
provider?: string;
baseURL?: string;
config?: Record<string, any>;
apiKey?: string;
model?: string | any;