Update Vercel AI SDK to support tool calls (#2383)

This commit is contained in:
Saket Aryan
2025-03-26 10:30:44 +05:30
committed by GitHub
parent 366d263e0b
commit 9d0300f774
28 changed files with 763 additions and 803 deletions

View File

@@ -0,0 +1,61 @@
import dotenv from "dotenv";
dotenv.config();
import { retrieveMemories } from "../../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../../config/test-config";
import { createAnthropic } from "@ai-sdk/anthropic";
/**
 * Integration tests for the Anthropic provider: verifies that memories
 * retrieved via `retrieveMemories` can be injected as the `system` prompt
 * when generating text through the Vercel AI SDK.
 *
 * Requires ANTHROPIC_API_KEY in the environment (loaded via dotenv above).
 */
describe("ANTHROPIC Integration Tests", () => {
  const { userId } = testConfig;
  // Network round-trips to the provider and the memory store can be slow.
  jest.setTimeout(30000);

  // Derive the precise provider type from the factory instead of using `any`,
  // so calls like anthropic("model-id") stay type-checked.
  let anthropic: ReturnType<typeof createAnthropic>;

  beforeEach(() => {
    anthropic = createAnthropic({
      apiKey: process.env.ANTHROPIC_API_KEY,
    });
  });

  it("should retrieve memories and generate text using ANTHROPIC provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore -- `messages` is a LanguageModelV1Prompt, which differs
      // from the CoreMessage[] shape generateText declares; verified to work
      // at runtime.
      model: anthropic("claude-3-haiku-20240307"),
      messages: messages,
      // Fall back to a placeholder so the system prompt is never empty.
      system: memories.length > 0 ? memories : "No Memories Found"
    });

    // The provider should return a non-empty string completion.
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using ANTHROPIC provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore -- see note above on the prompt/message type mismatch.
      model: anthropic("claude-3-haiku-20240307"),
      prompt: prompt,
      system: memories.length > 0 ? memories : "No Memories Found"
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});

View File

@@ -0,0 +1,60 @@
import dotenv from "dotenv";
dotenv.config();
import { retrieveMemories } from "../../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../../config/test-config";
import { createCohere } from "@ai-sdk/cohere";
/**
 * Integration tests for the Cohere provider: verifies that memories
 * retrieved via `retrieveMemories` can be injected as the `system` prompt
 * when generating text through the Vercel AI SDK.
 *
 * Requires COHERE_API_KEY in the environment (loaded via dotenv above).
 */
describe("COHERE Integration Tests", () => {
  const { userId } = testConfig;
  // Network round-trips to the provider and the memory store can be slow.
  jest.setTimeout(30000);

  // Derive the precise provider type from the factory instead of using `any`,
  // so calls like cohere("model-id") stay type-checked.
  let cohere: ReturnType<typeof createCohere>;

  beforeEach(() => {
    cohere = createCohere({
      apiKey: process.env.COHERE_API_KEY,
    });
  });

  it("should retrieve memories and generate text using COHERE provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore -- `messages` is a LanguageModelV1Prompt, which differs
      // from the CoreMessage[] shape generateText declares; verified to work
      // at runtime.
      model: cohere("command-r-plus"),
      messages: messages,
      system: memories,
    });

    // The provider should return a non-empty string completion.
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using COHERE provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore -- see note above on the prompt/message type mismatch.
      model: cohere("command-r-plus"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});

View File

@@ -0,0 +1,61 @@
import dotenv from "dotenv";
dotenv.config();
import { retrieveMemories } from "../../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../../config/test-config";
import { createGroq } from "@ai-sdk/groq";
/**
 * Integration tests for the Groq provider: verifies that memories
 * retrieved via `retrieveMemories` can be injected as the `system` prompt
 * when generating text through the Vercel AI SDK.
 *
 * Requires GROQ_API_KEY in the environment (loaded via dotenv above).
 */
describe("GROQ Integration Tests", () => {
  const { userId } = testConfig;
  // Network round-trips to the provider and the memory store can be slow.
  jest.setTimeout(30000);

  // Derive the precise provider type from the factory instead of using `any`,
  // so calls like groq("model-id") stay type-checked.
  let groq: ReturnType<typeof createGroq>;

  beforeEach(() => {
    groq = createGroq({
      apiKey: process.env.GROQ_API_KEY,
    });
  });

  it("should retrieve memories and generate text using GROQ provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore -- `messages` is a LanguageModelV1Prompt, which differs
      // from the CoreMessage[] shape generateText declares; verified to work
      // at runtime.
      model: groq("llama3-8b-8192"),
      messages: messages,
      system: memories,
    });

    // The provider should return a non-empty string completion.
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using GROQ provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      // @ts-ignore -- see note above on the prompt/message type mismatch.
      model: groq("llama3-8b-8192"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});

View File

@@ -0,0 +1,58 @@
import dotenv from "dotenv";
dotenv.config();
import { retrieveMemories } from "../../src";
import { generateText, LanguageModelV1Prompt } from "ai";
import { testConfig } from "../../config/test-config";
import { createOpenAI } from "@ai-sdk/openai";
/**
 * Integration tests for the OpenAI provider: verifies that memories
 * retrieved via `retrieveMemories` can be injected as the `system` prompt
 * when generating text through the Vercel AI SDK.
 *
 * Requires OPENAI_API_KEY in the environment (loaded via dotenv above).
 */
describe("OPENAI Integration Tests", () => {
  const { userId } = testConfig;
  // Network round-trips to the provider and the memory store can be slow.
  jest.setTimeout(30000);

  // Derive the precise provider type from the factory instead of using `any`,
  // so calls like openai("model-id") stay type-checked.
  let openai: ReturnType<typeof createOpenAI>;

  beforeEach(() => {
    openai = createOpenAI({
      apiKey: process.env.OPENAI_API_KEY,
    });
  });

  it("should retrieve memories and generate text using OpenAI provider", async () => {
    const messages: LanguageModelV1Prompt = [
      {
        role: "user",
        content: [
          { type: "text", text: "Suggest me a good car to buy." },
          { type: "text", text: " Write only the car name and it's color." },
        ],
      },
    ];

    // Retrieve memories based on previous messages
    const memories = await retrieveMemories(messages, { user_id: userId });

    const { text } = await generateText({
      model: openai("gpt-4-turbo"),
      messages: messages,
      system: memories,
    });

    // The provider should return a non-empty string completion.
    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });

  it("should generate text using openai provider with memories", async () => {
    const prompt = "Suggest me a good car to buy.";
    const memories = await retrieveMemories(prompt, { user_id: userId });

    const { text } = await generateText({
      model: openai("gpt-4-turbo"),
      prompt: prompt,
      system: memories
    });

    expect(typeof text).toBe('string');
    expect(text.length).toBeGreaterThan(0);
  });
});