The streamed response message doesn't work — no text ever reaches the client. If I remove the `bindTools` call, it works perfectly.
// File llama.ts
import { ChatOllama } from "@langchain/ollama";
// Shared ChatOllama instance pointed at the local llama3.2 model;
// imported by the API route(s) so a single configuration is reused.
const llamaModel = new ChatOllama({ model: "llama3.2:latest" });

export default llamaModel;
// File route.ts
import llama from "@/models/llama";
import {
  AIMessage,
  BaseMessage,
  HumanMessage,
  ToolMessage,
} from "@langchain/core/messages";
import { tool } from "@langchain/core/tools";
import { LangChainAdapter, type Message } from "ai";
import { NextResponse } from "next/server";
import { z } from "zod";
export async function POST(req: Request) {
const { messages } = await req.json();
try {
const response = await llama
.bindTools([
tool(
async (input) => {
console.log(input);
return input.a * input.b;
},
{
name: "multiply",
description: "Multiplies a and b.",
schema: z.object({
a: z.number(),
b: z.number(),
}),
}
),
])
.stream(
messages.map((message: Message) =>
message.role == "user"
? new HumanMessage(message.content)
: new AIMessage(message.content)
)
);
return LangChainAdapter.toDataStreamResponse(response);
} catch (error: any) {
console.log(error);
return NextResponse.json({ error: error.message }, { status: 500 });
}
}
// Client-side useChat hook configuration
// Wire the chat UI to the /api/bot endpoint; each lifecycle callback
// simply logs its payload for debugging.
const { handleSubmit, input, handleInputChange, messages } = useChat({
  api: "/api/bot",
  onError: (error) => console.log(error),
  onResponse: (response) => console.log(response),
  onFinish: (message) => console.log(message),
});