save all
This commit is contained in:
14
test/langchain/compatible/qwen.py
Normal file
14
test/langchain/compatible/qwen.py
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Query Qwen through DashScope's OpenAI-compatible endpoint via LangChain."""
import os

from langchain_openai import ChatOpenAI

# Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
# qwen-plus is used here as an example; swap in any model from the list.
chatLLM = ChatOpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    model="qwen-plus",
    # other params...
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"},
]
response = chatLLM.invoke(messages)
print(response.model_dump_json())
|
||||
11
test/langchain/dashscope/qwen.py
Normal file
11
test/langchain/dashscope/qwen.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""Stream a chat completion from Qwen via LangChain's Tongyi integration."""
from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.messages import HumanMessage

# Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
# qwen-max is used here as an example; swap in any model from the list.
chatLLM = ChatTongyi(
    model="qwen-max",
    streaming=True,
    # other params...
)

# streaming=True is already configured on the client above; the original
# passed it a second time to .stream(), which is redundant.
res = chatLLM.stream([HumanMessage(content="hi")])
for r in res:
    print("chat resp:", r.content)
|
||||
15
test/langchain/deepseek.py
Normal file
15
test/langchain/deepseek.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Call a DeepSeek chat model through LangChain's init_chat_model helper."""
# NOTE: the original's `import getpass` was an unused leftover from the
# upstream example (which prompts for the key); it has been removed.
import os

# Fall back to the DashScope key when no dedicated DeepSeek key is set.
# NOTE(review): model_provider="deepseek" targets api.deepseek.com, where a
# DashScope key is unlikely to be accepted — confirm this fallback is intended.
if not os.environ.get("DEEPSEEK_API_KEY"):
    try:
        os.environ["DEEPSEEK_API_KEY"] = os.environ["DASHSCOPE_API_KEY"]
    except KeyError:
        # Fail with a clear message instead of an opaque KeyError.
        raise SystemExit(
            "Set DEEPSEEK_API_KEY or DASHSCOPE_API_KEY before running."
        ) from None

# Imported after the env var is set, preserving the original statement order.
from langchain.chat_models import init_chat_model

model = init_chat_model("deepseek-chat", model_provider="deepseek")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"},
]
response = model.invoke(messages)
print(response.model_dump_json())
|
||||
Reference in New Issue
Block a user