Got the Qwen (Tongyi Qianwen) large model call working
langgraph.json
@@ -1,7 +1,8 @@
 {
   "dependencies": ["."],
   "graphs": {
-    "agent": "./src/agent/graph.py:graph"
+    "agent": "./src/agent/graph.py:graph",
+    "my_agent": "./src/agent/my_graph.py:graph"
   },
   "env": ".env",
   "image_distro": "wolfi"
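With "my_agent" registered in langgraph.json, the graph can be exercised through a local LangGraph server. A minimal sketch, assuming `langgraph dev` is running on its default port and the langgraph-sdk package is installed; the URL and the prompt are placeholders:

import asyncio

from langgraph_sdk import get_client


async def main():
    # Connect to the local dev server (assumed default URL for `langgraph dev`).
    client = get_client(url="http://127.0.0.1:2024")

    # "my_agent" is the graph ID registered in langgraph.json above.
    async for chunk in client.runs.stream(
        None,  # threadless run, no persisted state
        "my_agent",
        input={"messages": [{"role": "user", "content": "What's the weather in Hangzhou?"}]},
        stream_mode="updates",
    ):
        print(chunk.event, chunk.data)


asyncio.run(main())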
requirements.txt (new file, 1 line)
@@ -0,0 +1 @@
langchain
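Note that my_graph.py below also imports from langchain_community, langchain_tavily, and langgraph, and ChatTongyi needs the dashscope client at runtime, so requirements.txt will likely need more than the single langchain entry. A sketch of a fuller list (the exact package set is an assumption, not part of the commit):

langchain
langchain-community
langchain-tavily
langgraph
dashscope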
src/agent/my_graph.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from typing import Annotated

from langchain.chat_models import init_chat_model
from langchain_core.tools import tool
from langchain_tavily import TavilySearch
from langchain_core.messages import BaseMessage
from typing_extensions import TypedDict

from langgraph.checkpoint.memory import InMemorySaver
from langgraph.graph import StateGraph
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_community.chat_models.tongyi import ChatTongyi

llm = ChatTongyi(
    model="qwen-max",  # qwen-max as an example; swap in any model from https://help.aliyun.com/zh/model-studio/getting-started/models
    streaming=True,
    # other params...
)


class State(TypedDict):
    messages: Annotated[list, add_messages]  # add_messages appends rather than overwrites

graph_builder = StateGraph(State)

# tool = TavilySearch(max_results=2)

@tool
def get_weather(location: str) -> str:
    """Look up the weather for a given city."""
    return f"The weather in {location} is 20 degrees Celsius."

tools = [get_weather]
llm_with_tools = llm.bind_tools(tools)

def chatbot(state: State):
    return {"messages": [llm_with_tools.invoke(state["messages"])]}

graph_builder.add_node("chatbot", chatbot)

tool_node = ToolNode(tools=tools)
graph_builder.add_node("tools", tool_node)

graph_builder.add_conditional_edges(
    "chatbot",
    tools_condition,
)
graph_builder.add_edge("tools", "chatbot")
graph_builder.set_entry_point("chatbot")
# memory = InMemorySaver()
# graph = graph_builder.compile(checkpointer=memory)
graph = graph_builder.compile()
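For reference, a minimal local smoke test of the compiled graph. This is a sketch, assuming DASHSCOPE_API_KEY is set in the environment for ChatTongyi and that src/ is on the import path so agent.my_graph resolves; the prompt is a placeholder:

from agent.my_graph import graph

# One turn: the chatbot node may route through the tools node (get_weather) before answering.
result = graph.invoke(
    {"messages": [{"role": "user", "content": "What's the weather in Hangzhou?"}]}
)
print(result["messages"][-1].content)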