langgraph.json
@@ -2,7 +2,7 @@
   "dependencies": ["."],
   "graphs": {
     "agent": "./src/agent/graph.py:graph",
-    "my_agent": "./src/agent/my_graph.py:graph"
+    "my_agent": "./src/agent/my_graph.py:make_graph"
   },
   "env": ".env",
   "image_distro": "wolfi"
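With this change the "my_agent" entry no longer points at a module-level compiled graph but at make_graph, a factory function added later in this commit; as I understand the LangGraph CLI/API server, such an entry is imported and called (and awaited when async) to build the graph at startup, which is what allows the MCP tools to be fetched asynchronously before the graph is compiled.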
src/agent/my_graph.py
@@ -1,9 +1,11 @@
-from typing import Annotated
+from typing import Annotated, Any
 
 from langchain.chat_models import init_chat_model
 from langchain_core.tools import tool
+from langchain_mcp_adapters.client import MultiServerMCPClient
 from langchain_tavily import TavilySearch
 from langchain_core.messages import BaseMessage
+from langgraph.graph.state import CompiledStateGraph
 from typing_extensions import TypedDict
 
 from langgraph.checkpoint.memory import InMemorySaver
@@ -12,6 +14,8 @@ from langgraph.graph.message import add_messages
 from langgraph.prebuilt import ToolNode, tools_condition
 from langchain_community.chat_models.tongyi import ChatTongyi
 
+from src.mcp.mcp_tools import get_client
+
 llm = ChatTongyi(
     model="qwen-max",  # qwen-max is used here as an example; change the model name as needed. Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
     streaming=True,
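Note that ChatTongyi is the DashScope (Alibaba Cloud Model Studio) integration from langchain_community; to my knowledge it reads its credentials from the DASHSCOPE_API_KEY environment variable, so that key would need to be present in the .env file referenced by langgraph.json.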
@@ -19,35 +23,45 @@ llm = ChatTongyi(
 )
 
 
-class State(TypedDict):
-    messages: Annotated[list, add_messages]
 
-graph_builder = StateGraph(State)
-
-# tool = TavilySearch(max_results=2)
 
 @tool
 def get_wheather(location: str) -> str:
     """Query the weather for a given city."""
     return f"The weather in {location} is 20 degrees Celsius."
 
-tools = [get_wheather]
-llm_with_tools = llm.bind_tools(tools)
+class State(TypedDict):
+    messages: Annotated[list, add_messages]
 
-def chatbot(state: State):
-    return {"messages": [llm_with_tools.invoke(state["messages"])]}
-
-graph_builder.add_node("chatbot", chatbot)
-
-tool_node = ToolNode(tools=[tool])
-graph_builder.add_node("tools", tool_node)
+async def make_graph() -> CompiledStateGraph[Any, Any, Any, Any]:
+    graph_builder = StateGraph(State)
 
-graph_builder.add_conditional_edges(
-    "chatbot",
-    tools_condition,
-)
-graph_builder.add_edge("tools", "chatbot")
-graph_builder.set_entry_point("chatbot")
-# memory = InMemorySaver()
-# graph = graph_builder.compile(checkpointer=memory)
-graph = graph_builder.compile()
+    # tool = TavilySearch(max_results=2)
+
+    client = get_client()
+
+    mcp_tools = await client.get_tools()
+
+    tools = [get_wheather]
+    tools.extend(mcp_tools)
+    llm_with_tools = llm.bind_tools(tools)
+
+    def chatbot(state: State):
+        return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+    graph_builder.add_node("chatbot", chatbot)
+
+    tool_node = ToolNode(tools=[tool])
+    graph_builder.add_node("tools", tool_node)
+
+    graph_builder.add_conditional_edges(
+        "chatbot",
+        tools_condition,
+    )
+    graph_builder.add_edge("tools", "chatbot")
+    graph_builder.set_entry_point("chatbot")
+    # memory = InMemorySaver()
+    # graph = graph_builder.compile(checkpointer=memory)
+    graph = graph_builder.compile()
+
+    return graph
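For reference, a minimal way to exercise the new async factory locally; this is a sketch, not part of the commit, and it assumes the module is importable as src.agent.my_graph (per the langgraph.json entry) with a DashScope key configured:

    # Hypothetical local smoke test for make_graph().
    import asyncio

    from src.agent.my_graph import make_graph


    async def main() -> None:
        graph = await make_graph()
        # Caveat: inside make_graph(), ToolNode(tools=[tool]) binds the imported
        # @tool decorator (the TavilySearch line is commented out); it likely
        # needs ToolNode(tools=tools) for the weather/MCP tools to be callable.
        result = await graph.ainvoke(
            {"messages": [{"role": "user", "content": "What is the weather in Hangzhou?"}]}
        )
        print(result["messages"][-1].content)


    asyncio.run(main())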
src/agt20250902/__init__.py (new, empty file)
src/agt20250902/graph.py (new, empty file)
src/mcp/__init__.py (new, empty file)

src/mcp/mcp_servers.json (new file)
@@ -0,0 +1,9 @@
+{
+  "math": {
+    "command": "python",
+    "args": [
+      "/home/kexsh/src/knightutils/test/mcp/math_server.py"
+    ],
+    "transport": "stdio"
+  }
+}
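This config launches a stdio MCP server from an absolute path outside the repository. For context, a server at that path would typically look something like the following FastMCP sketch; it is purely illustrative, since the real math_server.py is not part of this commit:

    # Illustrative stdio MCP "math" server, matching the shape the config expects.
    from mcp.server.fastmcp import FastMCP

    mcp = FastMCP("Math")


    @mcp.tool()
    def add(a: int, b: int) -> int:
        """Add two numbers."""
        return a + b


    @mcp.tool()
    def multiply(a: int, b: int) -> int:
        """Multiply two numbers."""
        return a * b


    if __name__ == "__main__":
        mcp.run(transport="stdio")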
src/mcp/mcp_tools.py (new file)
@@ -0,0 +1,9 @@
+from langchain_mcp_adapters.client import MultiServerMCPClient
+import json
+
+def get_client() -> MultiServerMCPClient:
+    mcp_tools = json.load(open("mcp_tools.json"))
+
+    client = MultiServerMCPClient(mcp_tools)
+
+    return client
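One thing to flag: get_client() opens "mcp_tools.json" relative to the process working directory, but the file added in this commit is src/mcp/mcp_servers.json, so the loader would likely fail at runtime. A more robust variant might resolve the committed file next to the module; this is a hypothetical rewrite, not what the commit contains:

    # Hypothetical hardening of get_client(): load the committed mcp_servers.json
    # from the same directory as this module instead of a CWD-relative filename.
    import json
    from pathlib import Path

    from langchain_mcp_adapters.client import MultiServerMCPClient

    _CONFIG_PATH = Path(__file__).with_name("mcp_servers.json")


    def get_client() -> MultiServerMCPClient:
        with _CONFIG_PATH.open() as f:
            servers = json.load(f)
        return MultiServerMCPClient(servers)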