# Example: streaming chat completion with Tongyi (Qwen) via LangChain.
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.messages import HumanMessage

# Build a Tongyi (Alibaba Cloud Model Studio) chat model client.
# Requires the DASHSCOPE_API_KEY environment variable to be set — TODO confirm
# against your deployment.
chatLLM = ChatTongyi(
    # qwen-max is used as an example; swap in any supported model name.
    # Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
    model="qwen-max",
    streaming=True,
    # other params...
)

# .stream() yields message chunks as the server produces them; the redundant
# streaming=True kwarg is dropped — streaming is already enabled on the
# client, and .stream() streams regardless.
res = chatLLM.stream([HumanMessage(content="hi")])

# Each r is a message chunk; print its text content as it arrives.
for r in res:
    print("chat resp:", r.content)