diff --git a/langgraph.json b/langgraph.json index a6edda8..e56d67e 100644 --- a/langgraph.json +++ b/langgraph.json @@ -2,7 +2,7 @@ "dependencies": ["."], "graphs": { "agent": "./src/agent/graph.py:graph", - "my_agent": "./src/agent/my_graph.py:graph" + "my_agent": "./src/agent/my_graph.py:make_graph" }, "env": ".env", "image_distro": "wolfi" diff --git a/src/agent/my_graph.py b/src/agent/my_graph.py index 3e849c6..b9a2689 100644 --- a/src/agent/my_graph.py +++ b/src/agent/my_graph.py @@ -1,9 +1,11 @@ -from typing import Annotated +from typing import Annotated, Any from langchain.chat_models import init_chat_model from langchain_core.tools import tool +from langchain_mcp_adapters.client import MultiServerMCPClient from langchain_tavily import TavilySearch from langchain_core.messages import BaseMessage +from langgraph.graph.state import CompiledStateGraph from typing_extensions import TypedDict from langgraph.checkpoint.memory import InMemorySaver @@ -12,6 +14,8 @@ from langgraph.graph.message import add_messages from langgraph.prebuilt import ToolNode, tools_condition from langchain_community.chat_models.tongyi import ChatTongyi +from src.mcp.mcp_tools import get_client + llm = ChatTongyi( model="qwen-max", # 此处以qwen-max为例,您可按需更换模型名称。模型列表:https://help.aliyun.com/zh/model-studio/getting-started/models streaming=True, @@ -19,35 +23,45 @@ llm = ChatTongyi( ) -class State(TypedDict): - messages: Annotated[list, add_messages] - -graph_builder = StateGraph(State) - -# tool = TavilySearch(max_results=2) - @tool def get_wheather(location: str) -> str: """输入城市查询天气""" return f"The weather in {location} is 20 degrees Celsius." 
-tools = [get_wheather] -llm_with_tools = llm.bind_tools(tools) +class State(TypedDict): +    messages: Annotated[list, add_messages] -def chatbot(state: State): -    return {"messages": [llm_with_tools.invoke(state["messages"])]} -graph_builder.add_node("chatbot", chatbot) -tool_node = ToolNode(tools=[tool]) -graph_builder.add_node("tools", tool_node) +async def make_graph() -> CompiledStateGraph[Any, Any, Any, Any]: +    graph_builder = StateGraph(State) -graph_builder.add_conditional_edges( -    "chatbot", -    tools_condition, -) -graph_builder.add_edge("tools", "chatbot") -graph_builder.set_entry_point("chatbot") -# memory = InMemorySaver() -# graph = graph_builder.compile(checkpointer=memory) -graph = graph_builder.compile() \ No newline at end of file +    # tool = TavilySearch(max_results=2) + +    client = get_client() + +    mcp_tools = await client.get_tools() + +    tools = [get_wheather] +    tools.extend(mcp_tools) +    llm_with_tools = llm.bind_tools(tools) + +    def chatbot(state: State): +        return {"messages": [llm_with_tools.invoke(state["messages"])]} + +    graph_builder.add_node("chatbot", chatbot) + +    tool_node = ToolNode(tools=tools) +    graph_builder.add_node("tools", tool_node) + +    graph_builder.add_conditional_edges( +        "chatbot", +        tools_condition, +    ) +    graph_builder.add_edge("tools", "chatbot") +    graph_builder.set_entry_point("chatbot") +    # memory = InMemorySaver() +    # graph = graph_builder.compile(checkpointer=memory) +    graph = graph_builder.compile() + +    return graph \ No newline at end of file diff --git a/src/agt20250902/__init__.py b/src/agt20250902/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/agt20250902/graph.py b/src/agt20250902/graph.py new file mode 100644 index 0000000..e69de29 diff --git a/src/mcp/__init__.py b/src/mcp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/mcp/mcp_servers.json b/src/mcp/mcp_servers.json new file mode 100644 index 0000000..fe2eb83 --- /dev/null +++ b/src/mcp/mcp_servers.json @@ -0,0 +1,9
@@ +{ +  "math": { +    "command": "python", +    "args": [ +      "/home/kexsh/src/knightutils/test/mcp/math_server.py" +    ], +    "transport": "stdio" +  } +} \ No newline at end of file diff --git a/src/mcp/mcp_tools.py b/src/mcp/mcp_tools.py new file mode 100644 index 0000000..74390e7 --- /dev/null +++ b/src/mcp/mcp_tools.py @@ -0,0 +1,12 @@ +import json +from pathlib import Path + +from langchain_mcp_adapters.client import MultiServerMCPClient + + +def get_client() -> MultiServerMCPClient: +    """Build an MCP client from the server config that sits next to this module.""" +    config = Path(__file__).with_name("mcp_servers.json") +    with config.open(encoding="utf-8") as f: +        servers = json.load(f) +    return MultiServerMCPClient(servers)