save all
This commit is contained in:
11
test/langchain/dashscope/qwen.py
Normal file
11
test/langchain/dashscope/qwen.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.messages import HumanMessage


def main() -> None:
    """Stream a single chat completion from Alibaba's Qwen model via Tongyi.

    Sends the message "hi" and prints each streamed response chunk as it
    arrives.

    NOTE(review): assumes DashScope credentials are configured in the
    environment (e.g. DASHSCOPE_API_KEY) — confirm before running.
    """
    chat_llm = ChatTongyi(
        # qwen-max used as an example; replace with any model name from:
        # https://help.aliyun.com/zh/model-studio/getting-started/models
        model="qwen-max",
        streaming=True,
        # other params...
    )
    # streaming=True is already set on the constructor above, so the
    # original's redundant streaming=True kwarg on .stream() was dropped.
    for chunk in chat_llm.stream([HumanMessage(content="hi")]):
        print("chat resp:", chunk.content)


if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user