fix: Custom tool call parsing for anthropic

bracesproul
2025-03-07 16:57:30 -08:00
parent ecbcab7d24
commit c0cef943a6
6 changed files with 2286 additions and 4270 deletions


@@ -7,10 +7,12 @@ import { ChatOpenAI } from "@langchain/openai";
import { tripPlannerGraph } from "./trip-planner";
import { formatMessages } from "./utils/format-messages";
import { graph as openCodeGraph } from "./open-code";
import { graph as orderPizzaGraph } from "./pizza-orderer";
const allToolDescriptions = `- stockbroker: can fetch the price of a ticker, purchase/sell a ticker, or get the user's portfolio
- tripPlanner: helps the user plan their trip. it can suggest restaurants, and places to stay in any given location.
- openCode: can write code for the user. call this tool when the user asks you to write code`;
- openCode: can write code for the user. call this tool when the user asks you to write code
- orderPizza: can order a pizza for the user`;
async function router(
state: GenerativeUIState,
@@ -21,7 +23,7 @@ ${allToolDescriptions}
`;
const routerSchema = z.object({
route: z
.enum(["stockbroker", "tripPlanner", "openCode", "generalInput"])
.enum(["stockbroker", "tripPlanner", "openCode", "orderPizza", "generalInput"])
.describe(routerDescription),
});
const routerTool = {
@@ -75,7 +77,7 @@ Please pick the proper route based on the most recent message, in the context of
function handleRoute(
state: GenerativeUIState,
): "stockbroker" | "tripPlanner" | "openCode" | "generalInput" {
): "stockbroker" | "tripPlanner" | "openCode" | "orderPizza" | "generalInput" {
return state.next;
}
@@ -107,18 +109,21 @@ const builder = new StateGraph(GenerativeUIAnnotation)
.addNode("stockbroker", stockbrokerGraph)
.addNode("tripPlanner", tripPlannerGraph)
.addNode("openCode", openCodeGraph)
.addNode("orderPizza", orderPizzaGraph)
.addNode("generalInput", handleGeneralInput)
.addConditionalEdges("router", handleRoute, [
"stockbroker",
"tripPlanner",
"openCode",
"orderPizza",
"generalInput",
])
.addEdge(START, "router")
.addEdge("stockbroker", END)
.addEdge("tripPlanner", END)
.addEdge("openCode", END)
.addEdge("orderPizza", END)
.addEdge("generalInput", END);
export const graph = builder.compile();
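
With orderPizza wired into the router enum, the conditional edges, and the terminal edges, a pizza request should now leave the generalInput fallback and flow through the new subgraph. A minimal invocation sketch follows; the import path and the assumption that the shared `messages` channel accepts standard LangChain messages are mine, not the commit's.

import { HumanMessage } from "@langchain/core/messages";
import { graph } from "./agent"; // hypothetical path to the compiled graph above

async function main() {
  const result = await graph.invoke({
    messages: [
      new HumanMessage(
        "Order me a large pepperoni pizza from Dominos in San Francisco.",
      ),
    ],
  });

  // The router should set `next` to "orderPizza"; the final state then holds
  // the subgraph's structured tool calls plus their synthetic ToolMessage results.
  console.log(result.messages.at(-1)?.content);
}

main().catch(console.error);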


@@ -0,0 +1,85 @@
import { ChatAnthropic } from "@langchain/anthropic";
import { Annotation, END, START, StateGraph } from "@langchain/langgraph";
import { GenerativeUIAnnotation } from "../types";
import { z } from "zod";
import { AIMessage, ToolMessage } from "@langchain/langgraph-sdk";
import { v4 as uuidv4 } from "uuid";
const PizzaOrdererAnnotation = Annotation.Root({
  messages: GenerativeUIAnnotation.spec.messages,
})

async function sleep(ms = 5000) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

const workflow = new StateGraph(PizzaOrdererAnnotation)
  .addNode("findStore", async (state) => {
    const findShopSchema = z.object({
      location: z.string().describe("The location the user is in. E.g. 'San Francisco' or 'New York'"),
      pizza_company: z.string().optional().describe("The name of the pizza company. E.g. 'Dominos' or 'Papa John's'. Optional, if not defined it will search for all pizza shops"),
    }).describe("The schema for finding a pizza shop for the user")
    const model = new ChatAnthropic({ model: "claude-3-5-sonnet-latest", temperature: 0 }).withStructuredOutput(findShopSchema, {
      name: "find_pizza_shop",
      includeRaw: true,
    })
    const response = await model.invoke([
      {
        role: "system",
        content: "You are a helpful AI assistant, tasked with extracting information from the conversation between you, and the user, in order to find a pizza shop for them."
      },
      ...state.messages,
    ])
    await sleep();
    const toolResponse: ToolMessage = {
      type: "tool",
      id: uuidv4(),
      content: "I've found a pizza shop at 1119 19th St, San Francisco, CA 94107. The phone number for the shop is 415-555-1234.",
      tool_call_id: (response.raw as unknown as AIMessage).tool_calls?.[0].id ?? "",
    }
    return {
      messages: [response.raw, toolResponse]
    }
  })
  .addNode("orderPizza", async (state) => {
    await sleep(1500);
    const placeOrderSchema = z.object({
      address: z.string().describe("The address of the store to order the pizza from"),
      phone_number: z.string().describe("The phone number of the store to order the pizza from"),
      order: z.string().describe("The full pizza order for the user"),
    }).describe("The schema for ordering a pizza for the user")
    const model = new ChatAnthropic({ model: "claude-3-5-sonnet-latest", temperature: 0 }).withStructuredOutput(placeOrderSchema, {
      name: "place_pizza_order",
      includeRaw: true,
    })
    const response = await model.invoke([
      {
        role: "system",
        content: "You are a helpful AI assistant, tasked with placing an order for a pizza for the user."
      },
      ...state.messages,
    ])
    const toolResponse: ToolMessage = {
      type: "tool",
      id: uuidv4(),
      content: "Pizza order successfully placed.",
      tool_call_id: (response.raw as unknown as AIMessage).tool_calls?.[0].id ?? "",
    }
    return {
      messages: [response.raw, toolResponse]
    }
  })
  .addEdge(START, "findStore")
  .addEdge("findStore", "orderPizza")
  .addEdge("orderPizza", END)

export const graph = workflow.compile()
graph.name = "Order Pizza Graph";
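
Both nodes call withStructuredOutput(..., { includeRaw: true }) and keep the raw AIMessage so its tool-call id can be echoed back on a hand-written ToolMessage; Anthropic expects every tool_use block to be answered by a tool_result with the matching id on the following turn. A standalone sketch of that pattern (the schema, prompt text, and names below are illustrative, not taken from the commit):

import { ChatAnthropic } from "@langchain/anthropic";
import { AIMessage } from "@langchain/core/messages";
import { z } from "zod";

// Illustrative schema only — not the one defined in this file.
const lookupSchema = z
  .object({
    location: z.string().describe("Where to look for a pizza shop"),
  })
  .describe("Arguments for a pizza shop lookup");

async function main() {
  const model = new ChatAnthropic({
    model: "claude-3-5-sonnet-latest",
    temperature: 0,
  }).withStructuredOutput(lookupSchema, {
    name: "find_pizza_shop",
    includeRaw: true,
  });

  const result = await model.invoke("Find a pizza shop near downtown San Francisco.");

  // `parsed` is the validated object; `raw` is the underlying AIMessage whose
  // tool_calls[0].id must be reused as tool_call_id on the ToolMessage that
  // answers it, so the next Anthropic turn sees a matching tool_result.
  console.log(result.parsed.location);
  console.log((result.raw as AIMessage).tool_calls?.[0]?.id);
}

main().catch(console.error);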


@@ -13,7 +13,7 @@ export const GenerativeUIAnnotation = Annotation.Root({
>({ default: () => [], reducer: uiMessageReducer }),
timestamp: Annotation<number>,
next: Annotation<
"stockbroker" | "tripPlanner" | "openCode" | "generalInput"
"stockbroker" | "tripPlanner" | "openCode" | "orderPizza" | "generalInput"
>(),
});