// Chat thread UI for a LangGraph-backed assistant: URL-driven thread state,
// a sticky-to-bottom message list, and a streaming-aware composer.
//
// NOTE(review): this file appears to have lost its JSX element tags during a
// copy/paste or extraction step -- several `return (...)` bodies below contain
// only the expression children / attribute fragments of the original elements.
// The surrounding TypeScript logic is intact. The markup should be restored
// from version control; the stripped spans are flagged inline below.
import { v4 as uuidv4 } from "uuid";
import { ReactNode, useEffect, useRef } from "react";
import { cn } from "@/lib/utils";
import { useStreamContext } from "@/providers/Stream";
import { useState, FormEvent } from "react";
import { Input } from "../ui/input";
import { Button } from "../ui/button";
import { Checkpoint, Message } from "@langchain/langgraph-sdk";
import { AssistantMessage, AssistantMessageLoading } from "./messages/ai";
import { HumanMessage } from "./messages/human";
import {
  DO_NOT_RENDER_ID_PREFIX,
  ensureToolCallsHaveResponses,
} from "@/lib/ensure-tool-responses";
import { LangGraphLogoSVG } from "../icons/langgraph";
import { TooltipIconButton } from "./tooltip-icon-button";
import { SquarePen } from "lucide-react";
import { StringParam, useQueryParam } from "use-query-params";
import { StickToBottom, useStickToBottomContext } from "use-stick-to-bottom";

// "Start a new thread" control. Renders nothing while no thread is active;
// otherwise clearing the `threadId` query param (setThreadId(null)) resets
// the UI to a fresh thread.
function NewThread() {
  // Thread identity is persisted in the URL query string, so it survives
  // reloads and is shareable.
  const [threadId, setThreadId] = useQueryParam("threadId", StringParam);
  // No active thread -> nothing to reset.
  if (!threadId) return null;
  return (
    // NOTE(review): element tags stripped here -- only the onClick fragment
    // `setThreadId(null)} >` survives. Presumably a TooltipIconButton (with a
    // SquarePen icon, given the imports) whose click clears the threadId.
    // TODO: restore the markup from version control.
    setThreadId(null)} >
  );
}

// Layout helper for use-stick-to-bottom: a scrollable content region that
// auto-follows the newest message, plus a footer (the composer) kept below it.
function StickyToBottomContent(props: {
  content: ReactNode;
  footer?: ReactNode;
  className?: string;
  contentClassName?: string;
}) {
  // Supplies the scroll/content refs the StickToBottom container needs.
  const context = useStickToBottomContext();
  return (
    // NOTE(review): wrapper element tags stripped; `context` refs and the
    // className props were presumably spread onto the missing elements here.
    {props.content}
    {props.footer}
  );
}

// Main chat surface: renders the message history, tracks streaming state, and
// hosts the message composer.
export function Thread() {
  const [input, setInput] = useState("");
  const [firstTokenReceived, setFirstTokenReceived] = useState(false);
  const stream = useStreamContext();
  const messages = stream.messages;
  const isLoading = stream.isLoading;
  // Last observed messages.length; used to detect a newly arrived AI message.
  const prevMessageLength = useRef(0);

  // TODO: this should be part of the useStream hook
  // Flip `firstTokenReceived` once a new trailing AI message appears, so the
  // loading placeholder rendered below can be swapped out.
  useEffect(() => {
    if (
      messages.length !== prevMessageLength.current &&
      messages?.length &&
      messages[messages.length - 1].type === "ai"
    ) {
      setFirstTokenReceived(true);
      prevMessageLength.current = messages.length;
    }
  }, [messages]);

  // Composer submit: append the human message (plus synthetic responses for
  // any dangling tool calls) and start a new streaming run.
  const handleSubmit = (e: FormEvent) => {
    e.preventDefault();
    // Ignore empty input, and don't allow a second submit mid-stream.
    if (!input.trim() || isLoading) return;
    setFirstTokenReceived(false);

    const newHumanMessage: Message = {
      id: uuidv4(),
      type: "human",
      content: input,
    };

    // Unanswered tool calls must receive responses before a new human turn.
    const toolMessages = ensureToolCallsHaveResponses(stream.messages);
    stream.submit(
      { messages: [...toolMessages, newHumanMessage] },
      {
        streamMode: ["values"],
        // Show the new messages immediately, before the server echoes them.
        optimisticValues: (prev) => ({
          ...prev,
          messages: [
            ...(prev.messages ?? []),
            ...toolMessages,
            newHumanMessage,
          ],
        }),
      },
    );

    setInput("");
  };

  // Re-run the graph from a prior checkpoint (regenerate an AI answer).
  const handleRegenerate = (
    parentCheckpoint: Checkpoint | null | undefined,
  ) => {
    // Do this so the loading state is correct
    prevMessageLength.current = prevMessageLength.current - 1;
    setFirstTokenReceived(false);
    stream.submit(undefined, {
      checkpoint: parentCheckpoint,
      streamMode: ["values"],
    });
  };

  const chatStarted = isLoading || messages.length > 0;
  // Hide internal bookkeeping messages (e.g. synthetic tool responses).
  const renderMessages = messages.filter(
    (m) => !m.id?.startsWith(DO_NOT_RENDER_ID_PREFIX),
  );

  return (
    // NOTE(review): the remainder of the original JSX tree (StickyToBottom
    // wrapper, header with logo + NewThread, the message-list `content` prop,
    // and the composer form in `footer`) has been stripped; only expression
    // children and attribute fragments remain below. Restore from VCS.
    {chatStarted && (

      LangGraph Chat

    )} {renderMessages.map((message, index) => message.type === "human" ? ( ) : ( ), )} {isLoading && !firstTokenReceived && ( )} } footer={
    {!chatStarted && (

      LangGraph Chat

    )}
    // NOTE(review): composer input fragment -- its change handler
    // `setInput(e.target.value)}` lost its opening `onChange={(e) =>` along
    // with the <Input> tag itself.
    setInput(e.target.value)} placeholder="Type your message..." className="px-4 py-6 border-none bg-transparent shadow-none ring-0 outline-none focus:outline-none focus:ring-0" />
    } />
  );
}