This commit is contained in:
2025-08-27 22:22:18 +08:00
parent f6c7c65d6c
commit e5362b80e2
32 changed files with 914 additions and 0 deletions

View File

@@ -0,0 +1,14 @@
from langchain_openai import ChatOpenAI
import os

# Chat with a Qwen model through DashScope's OpenAI-compatible endpoint.
# Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
llm = ChatOpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    model="qwen-plus",  # qwen-plus is an example; swap in any listed model.
    # other params...
)

conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"},
]

reply = llm.invoke(conversation)
print(reply.model_dump_json())

View File

@@ -0,0 +1,11 @@
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.messages import HumanMessage

# Stream a chat completion from Tongyi (Qwen) via DashScope.
# Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
chatLLM = ChatTongyi(
    model="qwen-max",  # qwen-max is an example; swap in any listed model.
    streaming=True,
    # other params...
)

# FIX: the original also passed streaming=True to .stream(); that kwarg is
# redundant there — .stream() always streams, and streaming was already set
# on the model above.
for chunk in chatLLM.stream([HumanMessage(content="hi")]):
    print("chat resp:", chunk.content)

View File

@@ -0,0 +1,15 @@
import getpass
import os

# Ensure DEEPSEEK_API_KEY is set before initializing the model: reuse
# DASHSCOPE_API_KEY when available, otherwise prompt interactively.
# FIX: the original indexed os.environ['DASHSCOPE_API_KEY'] directly, which
# raised a bare KeyError when that variable was also unset, and left the
# `getpass` import unused.
if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = os.environ.get(
        "DASHSCOPE_API_KEY"
    ) or getpass.getpass("Enter API key for DeepSeek: ")

# Imported after the env setup, mirroring the original script's order.
from langchain.chat_models import init_chat_model

model = init_chat_model("deepseek-chat", model_provider="deepseek")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"},
]

response = model.invoke(messages)
print(response.model_dump_json())