From 303548d94e003d359b3a5fc83b0545f51e8132a4 Mon Sep 17 00:00:00 2001
From: kexsh <769014005@qq.com>
Date: Sat, 30 Aug 2025 23:35:08 +0800
Subject: [PATCH] Got the Tongyi Qianwen (Qwen) model working
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 langgraph.json        |  3 ++-
 requirements.txt      |  1 +
 src/agent/my_graph.py | 53 +++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 56 insertions(+), 1 deletion(-)
 create mode 100644 requirements.txt
 create mode 100644 src/agent/my_graph.py

diff --git a/langgraph.json b/langgraph.json
index 9c4966e..a6edda8 100644
--- a/langgraph.json
+++ b/langgraph.json
@@ -1,7 +1,8 @@
 {
   "dependencies": ["."],
   "graphs": {
-    "agent": "./src/agent/graph.py:graph"
+    "agent": "./src/agent/graph.py:graph",
+    "my_agent": "./src/agent/my_graph.py:graph"
   },
   "env": ".env",
   "image_distro": "wolfi"
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..e7f08b2
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1 @@
+langchain
\ No newline at end of file
diff --git a/src/agent/my_graph.py b/src/agent/my_graph.py
new file mode 100644
index 0000000..3e849c6
--- /dev/null
+++ b/src/agent/my_graph.py
@@ -0,0 +1,53 @@
+from typing import Annotated
+
+from langchain.chat_models import init_chat_model
+from langchain_core.tools import tool
+from langchain_tavily import TavilySearch
+from langchain_core.messages import BaseMessage
+from typing_extensions import TypedDict
+
+from langgraph.checkpoint.memory import InMemorySaver
+from langgraph.graph import StateGraph
+from langgraph.graph.message import add_messages
+from langgraph.prebuilt import ToolNode, tools_condition
+from langchain_community.chat_models.tongyi import ChatTongyi
+
+llm = ChatTongyi(
+    model="qwen-max",  # qwen-max is used here as an example; swap in another model name as needed. Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
+    streaming=True,
+    # other params...
+)
+
+
+class State(TypedDict):
+    messages: Annotated[list, add_messages]
+
+graph_builder = StateGraph(State)
+
+# tool = TavilySearch(max_results=2)
+
+@tool
+def get_weather(location: str) -> str:
+    """Look up the weather for the given city."""
+    return f"The weather in {location} is 20 degrees Celsius."
+
+tools = [get_weather]
+llm_with_tools = llm.bind_tools(tools)
+
+def chatbot(state: State):
+    return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+graph_builder.add_node("chatbot", chatbot)
+
+tool_node = ToolNode(tools=tools)
+graph_builder.add_node("tools", tool_node)
+
+graph_builder.add_conditional_edges(
+    "chatbot",
+    tools_condition,
+)
+graph_builder.add_edge("tools", "chatbot")
+graph_builder.set_entry_point("chatbot")
+# memory = InMemorySaver()
+# graph = graph_builder.compile(checkpointer=memory)
+graph = graph_builder.compile()
\ No newline at end of file
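
Quick local sanity check (a minimal sketch, not part of the patch): it assumes the project is installed so that agent.my_graph is importable (langgraph.json declares "dependencies": ["."]), that the dashscope package is available, and that DASHSCOPE_API_KEY is set via .env so ChatTongyi can authenticate.

    from agent.my_graph import graph

    # Ask something the get_weather tool can answer; tools_condition routes the
    # model's tool call to the "tools" node, then back to "chatbot" for the reply.
    result = graph.invoke(
        {"messages": [{"role": "user", "content": "What is the weather in Hangzhou?"}]}
    )
    print(result["messages"][-1].content)

The same compiled graph is also exposed to the LangGraph server/CLI under the "my_agent" name registered in langgraph.json above.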