feat: support openai compatible llm provider by adding baseUrl to config (#2674)

Signed-off-by: frank-zsy <syzhao1988@126.com>
Co-authored-by: Saket Aryan <94069182+whysosaket@users.noreply.github.com>
Co-authored-by: Saket Aryan <saketaryan2002@gmail.com>
This commit is contained in:
Frank Zhao
2025-05-27 02:55:23 +08:00
committed by GitHub
parent 8a280b4a54
commit 9622ac7dff
6 changed files with 11 additions and 3 deletions

View File

@@ -288,6 +288,11 @@ mode: "wide"
<Tab title="TypeScript">
<Update label="2025-05-27" description="v2.1.27">
**Improvements:**
- **OSS:** Added baseURL param in LLM Config.
</Update>
<Update label="2025-05-23" description="v2.1.26">
**Improvements:**
- **Client:** Removed type `string` from `messages` interface

View File

@@ -1,6 +1,6 @@
{
"name": "mem0ai",
"version": "2.1.26", "version": "2.1.27",
"description": "The Memory Layer For Your AI Apps",
"main": "./dist/index.js",
"module": "./dist/index.mjs",

View File

@@ -20,6 +20,7 @@ export const DEFAULT_MEMORY_CONFIG: MemoryConfig = {
llm: {
provider: "openai",
config: {
baseURL: "https://api.openai.com/v1",
apiKey: process.env.OPENAI_API_KEY || "",
model: "gpt-4-turbo-preview",
modelProperties: undefined,

View File

@@ -80,6 +80,7 @@ export class ConfigManager {
}
return {
baseURL: userConf?.baseURL || defaultConf.baseURL,
apiKey:
userConf?.apiKey !== undefined
? userConf.apiKey

View File

@@ -7,7 +7,7 @@ export class OpenAILLM implements LLM {
private model: string;
constructor(config: LLMConfig) {
this.openai = new OpenAI({ apiKey: config.apiKey }); this.openai = new OpenAI({ apiKey: config.apiKey, baseURL: config.baseURL });
this.model = config.model || "gpt-4o-mini";
}

View File

@@ -39,6 +39,7 @@ export interface HistoryStoreConfig {
export interface LLMConfig {
provider?: string;
baseURL?: string;
config?: Record<string, any>;
apiKey?: string;
model?: string | any;