Compare commits

8 Commits

Author SHA1 Message Date
b6c4d1d048 运行OK
Some checks failed
Integration Tests / Integration Tests (ubuntu-latest, 3.11) (push) Has been cancelled
Integration Tests / Integration Tests (ubuntu-latest, 3.12) (push) Has been cancelled
2025-11-01 23:11:56 +08:00
81e64e3fb6 Merge branch 'kexsh/main/my' of http://192.168.0.4:3000/kexsh/graphtest into kexsh/main/my 2025-11-01 11:01:13 +08:00
c2e6a84e06 依赖 2025-11-01 10:59:52 +08:00
2f210d7ccf 解决配置文件找不到的问题;
Some checks failed
Integration Tests / Integration Tests (ubuntu-latest, 3.11) (push) Has been cancelled
Integration Tests / Integration Tests (ubuntu-latest, 3.12) (push) Has been cancelled
解决block的问题
2025-11-01 10:41:07 +08:00
6c7574cd88 commit
Some checks failed
Integration Tests / Integration Tests (ubuntu-latest, 3.11) (push) Has been cancelled
Integration Tests / Integration Tests (ubuntu-latest, 3.12) (push) Has been cancelled
2025-10-25 21:17:16 +08:00
303548d94e 能调通千问大模型了 2025-08-30 23:35:08 +08:00
cc67975d27 api key 2025-08-29 00:17:07 +08:00
0c8bfcf67b ignore .idea 2025-08-29 00:13:50 +08:00
12 changed files with 122 additions and 1 deletions

9
.env Normal file
View File

@@ -0,0 +1,9 @@
# (可选但推荐)设置您想要发送 traces 的项目名称,例如 'my-awesome-project'
# To separate your traces from other application
LANGSMITH_PROJECT=new-agent
# Add API keys for connecting to LLM providers, data sources, and other integrations here
# SECURITY: a real LangSmith API key was committed here — revoke/rotate it and
# load the key from the environment or an untracked secrets file instead.
LANGSMITH_API_KEY=<REDACTED-ROTATE-THIS-KEY>
# Enable LangChain tracing (v2) so runs are sent to LangSmith; usually no other change is needed
LANGCHAIN_TRACING_V2=true

2
.gitignore vendored
View File

@@ -162,3 +162,5 @@ cython_debug/
#.idea/ #.idea/
uv.lock uv.lock
.langgraph_api/ .langgraph_api/
.idea

View File

@@ -39,6 +39,7 @@ LANGSMITH_API_KEY=lsv2...
```shell ```shell
langgraph dev langgraph dev
langgraph dev --allow-blocking
``` ```
For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/). For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/).

View File

@@ -1,7 +1,8 @@
{ {
"dependencies": ["."], "dependencies": ["."],
"graphs": { "graphs": {
"agent": "./src/agent/graph.py:graph" "agent": "./src/agent/graph.py:graph",
"my_agent": "./src/agent/my_graph.py:make_graph"
}, },
"env": ".env", "env": ".env",
"image_distro": "wolfi" "image_distro": "wolfi"

6
requirements.txt Normal file
View File

@@ -0,0 +1,6 @@
langchain
langgraph
langchain_mcp_adapters
langchain_tavily
langchain_community
dashscope

68
src/agent/my_graph.py Normal file
View File

@@ -0,0 +1,68 @@
from typing import Annotated, Any
from langchain.chat_models import init_chat_model
from langchain_core.tools import tool
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_tavily import TavilySearch
from langchain_core.messages import BaseMessage
from langgraph.graph.state import CompiledStateGraph
from typing_extensions import TypedDict
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.graph import StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_community.chat_models.tongyi import ChatTongyi
from src.mcp.mcp_tools import get_client
# Chat model: Alibaba Tongyi (Qwen) served via the DashScope API.
llm = ChatTongyi(
    model="qwen-max", # qwen-max is used as an example; swap in any model name as needed — model list: https://help.aliyun.com/zh/model-studio/getting-started/models
    streaming=True,
    # other params...
)
# NOTE(review): "wheather" is a typo for "weather", but the function name is
# also the tool name the model sees and is referenced below in make_graph,
# so renaming it would be an interface change — left as-is.
@tool
def get_wheather(location: str) -> str:
    """Query the current weather for the given city."""
    # Stub implementation: always returns a fixed temperature.
    return f"The weather in {location} is 20 degrees Celsius."
class State(TypedDict):
    """Graph state shared by all nodes: the running conversation."""

    # `add_messages` makes state updates append to this list rather than replace it.
    messages: Annotated[list, add_messages]
async def make_graph() -> CompiledStateGraph[Any, Any, Any, Any]:
    """Build and compile the chat-agent graph.

    Combines the local ``get_wheather`` tool with the tools discovered from
    the configured MCP servers, binds them to the Tongyi chat model, and
    wires the standard chatbot -> tools -> chatbot loop.

    Returns:
        The compiled state graph, ready to be served by LangGraph.
    """
    graph_builder = StateGraph(State)

    # Gather tools: the local weather stub plus whatever the MCP servers expose.
    client = get_client()
    mcp_tools = await client.get_tools()
    tools = [get_wheather, *mcp_tools]
    llm_with_tools = llm.bind_tools(tools)

    async def chatbot(state: State):
        # Use the async `ainvoke` so the model call does not block the event
        # loop (the sync `invoke` forced running `langgraph dev` with
        # `--allow-blocking`).
        return {"messages": [await llm_with_tools.ainvoke(state["messages"])]}

    graph_builder.add_node("chatbot", chatbot)
    graph_builder.add_node("tools", ToolNode(tools=tools))
    # Route to "tools" when the last message carries tool calls, else finish.
    graph_builder.add_conditional_edges(
        "chatbot",
        tools_condition,
    )
    graph_builder.add_edge("tools", "chatbot")
    graph_builder.set_entry_point("chatbot")
    return graph_builder.compile()

View File

0
src/agt20250902/graph.py Normal file
View File

0
src/mcp/__init__.py Normal file
View File

View File

@@ -0,0 +1,9 @@
{
"math": {
"command": "python",
"args": [
"./src/mcp/server/math_server.py"
],
"transport": "stdio"
}
}

9
src/mcp/mcp_tools.py Normal file
View File

@@ -0,0 +1,9 @@
import json
from pathlib import Path

from langchain_mcp_adapters.client import MultiServerMCPClient
def get_client() -> MultiServerMCPClient:
    """Create an MCP client from the JSON server configuration.

    The config path is resolved relative to this file rather than the
    current working directory, so the servers are found no matter where
    the process was launched from. The file handle is closed promptly via
    a context manager (the original ``json.load(open(...))`` leaked it).
    """
    config_path = Path(__file__).resolve().parent / "config" / "mcp_servers.json"
    with config_path.open(encoding="utf-8") as fh:
        server_config = json.load(fh)
    return MultiServerMCPClient(server_config)

View File

@@ -0,0 +1,16 @@
# Minimal MCP server exposing basic math tools.
from mcp.server.fastmcp import FastMCP
# "Math" is the server name clients see when they connect.
mcp = FastMCP("Math")
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    return a + b
@mcp.tool()
def multiply(a: int, b: int) -> int:
    """Multiply two numbers"""
    return a * b
if __name__ == "__main__":
    # stdio transport: the client launches this script and speaks MCP over
    # stdin/stdout — matching the "command"/"args"/"transport" entry in
    # src/mcp/config/mcp_servers.json.
    mcp.run(transport="stdio")