Adds Azure OpenAI Embedding Model (#2545)
@@ -27,6 +27,10 @@ export class ConfigManager {
             : defaultConf.apiKey,
           model: finalModel,
           url: userConf?.url,
+          modelProperties:
+            userConf?.modelProperties !== undefined
+              ? userConf.modelProperties
+              : defaultConf.modelProperties,
         };
       })(),
     },
mem0-ts/src/oss/src/embeddings/azure.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
+import { AzureOpenAI } from "openai";
+import { Embedder } from "./base";
+import { EmbeddingConfig } from "../types";
+
+export class AzureOpenAIEmbedder implements Embedder {
+  private client: AzureOpenAI;
+  private model: string;
+
+  constructor(config: EmbeddingConfig) {
+    if (!config.apiKey || !config.modelProperties?.endpoint) {
+      throw new Error("Azure OpenAI requires both API key and endpoint");
+    }
+
+    const { endpoint, ...rest } = config.modelProperties;
+
+    this.client = new AzureOpenAI({
+      apiKey: config.apiKey,
+      endpoint: endpoint as string,
+      ...rest,
+    });
+    this.model = config.model || "text-embedding-3-small";
+  }
+
+  async embed(text: string): Promise<number[]> {
+    const response = await this.client.embeddings.create({
+      model: this.model,
+      input: text,
+    });
+    return response.data[0].embedding;
+  }
+
+  async embedBatch(texts: string[]): Promise<number[][]> {
+    const response = await this.client.embeddings.create({
+      model: this.model,
+      input: texts,
+    });
+    return response.data.map((item) => item.embedding);
+  }
+}
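For context only (not part of the diff), here is a minimal usage sketch of the new embedder. The endpoint and apiVersion values are placeholders; passing apiVersion through modelProperties relies on the constructor spreading ...rest into the AzureOpenAI client, as the file above does.

import { AzureOpenAIEmbedder } from "./embeddings/azure"; // path as used inside the package

async function demo() {
  // Hypothetical values; apiKey and modelProperties.endpoint are required by the constructor check above.
  const embedder = new AzureOpenAIEmbedder({
    apiKey: process.env.AZURE_OPENAI_API_KEY!,
    model: "text-embedding-3-small",
    modelProperties: {
      endpoint: "https://my-resource.openai.azure.com",
      apiVersion: "2024-02-01", // assumed pass-through option for the AzureOpenAI client
    },
  });

  const single = await embedder.embed("hello world");        // number[]
  const batch = await embedder.embedBatch(["alpha", "beta"]); // number[][]
  console.log(single.length, batch.length);
}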
@@ -5,6 +5,7 @@ export * from "./embeddings/base";
 export * from "./embeddings/openai";
 export * from "./embeddings/ollama";
 export * from "./embeddings/google";
+export * from "./embeddings/azure";
 export * from "./embeddings/langchain";
 export * from "./llms/base";
 export * from "./llms/openai";
@@ -16,6 +16,7 @@ export interface EmbeddingConfig {
   apiKey?: string;
   model?: string | any;
   url?: string;
+  modelProperties?: Record<string, any>;
 }

 export interface VectorStoreConfig {
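To illustrate the widened type (illustrative values only; only the endpoint key is required by the Azure embedder above, and anything else in modelProperties is forwarded to the client):

import { EmbeddingConfig } from "./types";

// Example config for the Azure embedder; values are placeholders.
const azureEmbeddingConfig: EmbeddingConfig = {
  apiKey: process.env.AZURE_OPENAI_API_KEY,
  model: "text-embedding-3-small",
  modelProperties: {
    endpoint: "https://my-resource.openai.azure.com", // consumed by AzureOpenAIEmbedder
    apiVersion: "2024-02-01",                         // forwarded to the AzureOpenAI client
  },
};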
@@ -112,6 +113,7 @@ export const MemoryConfigSchema = z.object({
   embedder: z.object({
     provider: z.string(),
     config: z.object({
+      modelProperties: z.record(z.string(), z.any()).optional(),
       apiKey: z.string().optional(),
       model: z.union([z.string(), z.any()]).optional(),
     }),
@@ -26,6 +26,7 @@ import { HistoryManager } from "../storage/base";
 import { GoogleEmbedder } from "../embeddings/google";
 import { GoogleLLM } from "../llms/google";
 import { AzureOpenAILLM } from "../llms/azure";
+import { AzureOpenAIEmbedder } from "../embeddings/azure";
 import { LangchainLLM } from "../llms/langchain";
 import { LangchainEmbedder } from "../embeddings/langchain";
 import { LangchainVectorStore } from "../vector_stores/langchain";
@@ -39,6 +40,8 @@ export class EmbedderFactory {
         return new OllamaEmbedder(config);
       case "google":
         return new GoogleEmbedder(config);
+      case "azure_openai":
+        return new AzureOpenAIEmbedder(config);
       case "langchain":
         return new LangchainEmbedder(config);
       default:
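A sketch of reaching the new case through the factory: the "azure_openai" provider string and the config shape come from the diff, but the static create(provider, config) signature and the import path are assumptions, since the hunk shows only the switch body.

import { EmbedderFactory } from "./utils/factory"; // assumed path

// Hypothetical wiring: the provider string selects the new AzureOpenAIEmbedder branch.
const embedder = EmbedderFactory.create("azure_openai", {
  apiKey: process.env.AZURE_OPENAI_API_KEY,
  model: "text-embedding-3-small",
  modelProperties: {
    endpoint: "https://my-resource.openai.azure.com",
    apiVersion: "2024-02-01",
  },
});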
@@ -103,12 +103,16 @@ export class SupabaseDB implements VectorStore {
     try {
       // Verify table exists and vector operations work by attempting a test insert
       const testVector = Array(1536).fill(0);

       // First try to delete any existing test vector
       try {
         await this.client.from(this.tableName).delete().eq("id", "test_vector");
-      } catch (error) {
-        console.warn("No test vector to delete, safe to ignore.");
+      } catch {
+        // Ignore delete errors - table might not exist yet
       }
-      const { error: testError } = await this.client
+
+      // Try to insert the test vector
+      const { error: insertError } = await this.client
         .from(this.tableName)
         .insert({
           id: "test_vector",
@@ -117,8 +121,9 @@ export class SupabaseDB implements VectorStore {
         })
         .select();

-      if (testError) {
-        console.error("Test insert error:", testError);
+      // If we get a duplicate key error, that's actually fine - it means the table exists
+      if (insertError && insertError.code !== "23505") {
+        console.error("Test insert error:", insertError);
         throw new Error(
           `Vector operations failed. Please ensure:
           1. The vector extension is enabled
@@ -178,8 +183,12 @@ See the SQL migration instructions in the code comments.`,
         );
       }

-      // Clean up test vector
-      await this.client.from(this.tableName).delete().eq("id", "test_vector");
+      // Clean up test vector - ignore errors here too
+      try {
+        await this.client.from(this.tableName).delete().eq("id", "test_vector");
+      } catch {
+        // Ignore delete errors
+      }

       console.log("Connected to Supabase successfully");
     } catch (error) {