Compare commits
8 Commits
main
...
kexsh/main
| Author | SHA1 | Date | |
|---|---|---|---|
| b6c4d1d048 | |||
| 81e64e3fb6 | |||
| c2e6a84e06 | |||
| 2f210d7ccf | |||
| 6c7574cd88 | |||
| 303548d94e | |||
| cc67975d27 | |||
| 0c8bfcf67b |
9
.env
Normal file
9
.env
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# (Optional but recommended) Set the project name you want to send traces to, e.g. 'my-awesome-project'
|
||||||
|
# To separate your traces from those of other applications
|
||||||
|
LANGSMITH_PROJECT=new-agent
|
||||||
|
|
||||||
|
# Add API keys for connecting to LLM providers, data sources, and other integrations here
|
||||||
|
# SECURITY: a live LangSmith API key was committed here — revoke it and keep real secrets out of version control
LANGSMITH_API_KEY=<your-langsmith-api-key>
|
||||||
|
|
||||||
|
# Enable LangChain tracing v2 (the default LangSmith tracing endpoint normally needs no change)
|
||||||
|
LANGCHAIN_TRACING_V2=true
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -162,3 +162,5 @@ cython_debug/
|
|||||||
#.idea/
|
#.idea/
|
||||||
uv.lock
|
uv.lock
|
||||||
.langgraph_api/
|
.langgraph_api/
|
||||||
|
|
||||||
|
.idea
|
||||||
@@ -39,6 +39,7 @@ LANGSMITH_API_KEY=lsv2...
|
|||||||
|
|
||||||
```shell
|
```shell
|
||||||
langgraph dev
|
langgraph dev
|
||||||
|
langgraph dev --allow-blocking
|
||||||
```
|
```
|
||||||
|
|
||||||
For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/).
|
For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/).
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
{
|
{
|
||||||
"dependencies": ["."],
|
"dependencies": ["."],
|
||||||
"graphs": {
|
"graphs": {
|
||||||
"agent": "./src/agent/graph.py:graph"
|
"agent": "./src/agent/graph.py:graph",
|
||||||
|
"my_agent": "./src/agent/my_graph.py:make_graph"
|
||||||
},
|
},
|
||||||
"env": ".env",
|
"env": ".env",
|
||||||
"image_distro": "wolfi"
|
"image_distro": "wolfi"
|
||||||
|
|||||||
6
requirements.txt
Normal file
6
requirements.txt
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
langchain
|
||||||
|
langgraph
|
||||||
|
langchain_mcp_adapters
|
||||||
|
langchain_tavily
|
||||||
|
langchain_community
|
||||||
|
dashscope
|
||||||
68
src/agent/my_graph.py
Normal file
68
src/agent/my_graph.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
from typing import Annotated, Any
|
||||||
|
|
||||||
|
from langchain.chat_models import init_chat_model
|
||||||
|
from langchain_core.tools import tool
|
||||||
|
from langchain_mcp_adapters.client import MultiServerMCPClient
|
||||||
|
from langchain_tavily import TavilySearch
|
||||||
|
from langchain_core.messages import BaseMessage
|
||||||
|
from langgraph.graph.state import CompiledStateGraph
|
||||||
|
from typing_extensions import TypedDict
|
||||||
|
|
||||||
|
from langgraph.checkpoint.memory import InMemorySaver
|
||||||
|
from langgraph.graph import StateGraph
|
||||||
|
from langgraph.graph.message import add_messages
|
||||||
|
from langgraph.prebuilt import ToolNode, tools_condition
|
||||||
|
from langchain_community.chat_models.tongyi import ChatTongyi
|
||||||
|
|
||||||
|
from src.mcp.mcp_tools import get_client
|
||||||
|
|
||||||
|
# Shared chat-model instance used by make_graph below (Alibaba Tongyi Qwen).
llm = ChatTongyi(
    # qwen-max is used as an example; swap the model name as needed.
    # Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
    model="qwen-max",
    streaming=True,
    # other params...
)
|
||||||
|
|
||||||
|
|
||||||
|
@tool
def get_wheather(location: str) -> str:
    # NOTE(review): "wheather" is a typo, but @tool derives the tool name from
    # the function name and make_graph registers it as-is, so renaming would
    # change the tool name the model calls — kept unchanged.
    # The Chinese docstring ("enter a city to look up its weather") doubles as
    # the tool description shown to the LLM, so it is preserved verbatim.
    """输入城市查询天气"""
    # Stub: always reports a fixed temperature regardless of location.
    return f"The weather in {location} is 20 degrees Celsius."
|
||||||
|
|
||||||
|
class State(TypedDict):
    """Graph state: the running conversation.

    ``add_messages`` tells LangGraph to merge node outputs by appending new
    messages to the existing list instead of replacing it.
    """

    # Full message history exchanged with the model.
    messages: Annotated[list, add_messages]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def make_graph() -> CompiledStateGraph[Any, Any, Any, Any]:
    """Assemble and compile the tool-calling chat graph.

    Combines the local weather stub with every tool exposed by the
    configured MCP servers, then wires the standard loop:
    chatbot -> tools (when the model requests a call) -> chatbot.

    Returns:
        The compiled state graph, ready to be served by LangGraph.
    """
    builder = StateGraph(State)

    # Gather tools: local stub first, then everything the MCP servers expose.
    mcp_client = get_client()
    remote_tools = await mcp_client.get_tools()
    all_tools = [get_wheather, *remote_tools]

    model = llm.bind_tools(all_tools)

    def chatbot(state: State):
        # One LLM turn; add_messages appends the reply to the history.
        return {"messages": [model.invoke(state["messages"])]}

    builder.add_node("chatbot", chatbot)
    builder.add_node("tools", ToolNode(tools=all_tools))

    # Route to "tools" when the last message carries tool calls, else end.
    builder.add_conditional_edges("chatbot", tools_condition)
    builder.add_edge("tools", "chatbot")
    builder.set_entry_point("chatbot")

    # Compiled without a checkpointer; persistence is left to the host runtime.
    return builder.compile()
|
||||||
0
src/agt20250902/__init__.py
Normal file
0
src/agt20250902/__init__.py
Normal file
0
src/agt20250902/graph.py
Normal file
0
src/agt20250902/graph.py
Normal file
0
src/mcp/__init__.py
Normal file
0
src/mcp/__init__.py
Normal file
9
src/mcp/config/mcp_servers.json
Normal file
9
src/mcp/config/mcp_servers.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"math": {
|
||||||
|
"command": "python",
|
||||||
|
"args": [
|
||||||
|
"./src/mcp/server/math_server.py"
|
||||||
|
],
|
||||||
|
"transport": "stdio"
|
||||||
|
}
|
||||||
|
}
|
||||||
9
src/mcp/mcp_tools.py
Normal file
9
src/mcp/mcp_tools.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from langchain_mcp_adapters.client import MultiServerMCPClient
|
||||||
|
import json
|
||||||
|
|
||||||
|
def get_client() -> MultiServerMCPClient:
    """Build a MultiServerMCPClient from the JSON server configuration.

    Returns:
        A client configured with every server listed in
        ``src/mcp/config/mcp_servers.json`` (path is relative to the
        current working directory).

    Raises:
        FileNotFoundError: if the configuration file is missing.
        json.JSONDecodeError: if the configuration is not valid JSON.
    """
    # Fix: the original used json.load(open(...)), which leaked the file
    # handle; a context manager guarantees the file is closed.
    with open("src/mcp/config/mcp_servers.json", encoding="utf-8") as config_file:
        server_config = json.load(config_file)

    return MultiServerMCPClient(server_config)
|
||||||
16
src/mcp/server/math_server.py
Normal file
16
src/mcp/server/math_server.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
from mcp.server.fastmcp import FastMCP
|
||||||
|
|
||||||
|
# MCP server instance; "Math" is the server name advertised to clients.
mcp = FastMCP("Math")
|
||||||
|
|
||||||
|
@mcp.tool()
def add(a: int, b: int) -> int:
    # Docstring doubles as the MCP tool description; kept verbatim.
    """Add two numbers"""
    return a + b
|
||||||
|
|
||||||
|
@mcp.tool()
def multiply(a: int, b: int) -> int:
    # Docstring doubles as the MCP tool description; kept verbatim.
    """Multiply two numbers"""
    return a * b
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Run as a standalone MCP server over stdio; mcp_servers.json launches
    # this script as a subprocess with transport "stdio".
    mcp.run(transport="stdio")
|
||||||
Reference in New Issue
Block a user