// app/api/chat/route.ts
import { convertToModelMessages, streamText, UIMessage } from 'ai';
import { createStreamableUI, createStreamableValue } from '@ai-sdk/rsc';
import { createMCPClient } from '@ai-sdk/mcp';
import { createOpenAI } from '@ai-sdk/openai';
// Route segment config: allow this handler to run for up to 120 s
// (needed because tool-calling streams can be long-lived).
export const maxDuration = 120;
// Base URL of the LiteLLM gateway (internal service hostname + port).
const baseURL = 'http://litellm-gateway:3333';
// NOTE(review): hard-coded credential checked into source — this should be
// read from an environment variable / secret store; confirm before shipping.
const apiKey = 'apikey';
export async function POST(req: Request) {
try {
const { messages }: { messages: UIMessage[] } = await req.json();
const gateway = createOpenAI({
baseURL: `${baseURL}/v1`,
apiKey: apiKey,
});
const mcpClient = await createMCPClient({
transport: {
type: 'http',
url: `${baseURL}/mcp`,
headers: {
'Authorization': `Bearer ${apiKey}`
},
},
name: "tools"
});
const tools = await mcpClient.tools();
const result = streamText({
model: gateway('litellm/gemini-2.0-flash'),
tools: tools,
system: `You are a helpful assistant. If the user asks for information you cannot answer directly, call the appropriate tool.`,
messages: await convertToModelMessages(messages),
});
return result.toUIMessageStreamResponse();
} catch (error) {
console.error("Stream error:", error);
return new Response("Internal Server Error", { status: 500 });
}
}
When I check the Network tab in the browser dev tools, I see the following error:
Error: NS_BASE_STREAM_CLOSED
I also tested this logic using Python with smolagents, and it works correctly when calling tools via the MCP Gateway.