save all
15  test/langchain/compatible/deepseek.py  Normal file
@@ -0,0 +1,15 @@
from langchain_openai import ChatOpenAI
from langchain_deepseek import ChatDeepSeek
import os

# DeepSeek-style client pointed at the DashScope OpenAI-compatible endpoint.
chatLLM = ChatDeepSeek(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    model="deepseek-chat",  # change the model name as needed; model list: https://help.aliyun.com/zh/model-studio/getting-started/models
    # other params...
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"},  # "Who are you?"
]
response = chatLLM.invoke(messages)
print(response.model_dump_json())
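The ChatOpenAI import above is unused; it presumably marks the OpenAI-compatible route that was also being tried. A minimal sketch of that variant (not part of this commit), assuming the same DashScope compatible-mode endpoint and the DASHSCOPE_API_KEY environment variable:

from langchain_openai import ChatOpenAI
import os

# Hypothetical alternative: call the same DashScope compatible-mode endpoint
# through the generic OpenAI-compatible client instead of ChatDeepSeek.
chatLLM = ChatOpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    model="qwen-plus",  # any model from the DashScope model list
)
print(chatLLM.invoke("你是谁?").content)  # "Who are you?"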
15  test/langchain/qwen.py  Normal file
@@ -0,0 +1,15 @@
import getpass
import os

# Reuse the DashScope key as the DeepSeek key if the latter is not set.
if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = os.environ["DASHSCOPE_API_KEY"]

from langchain.chat_models import init_chat_model

model = init_chat_model("qwen-max", model_provider="deepseek")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"},  # "Who are you?"
]
response = model.invoke(messages)
print(response.model_dump_json())
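As a usage note (not in the commit), the model returned by init_chat_model supports streaming as well as invoke. A minimal sketch mirroring the configuration above, assuming DEEPSEEK_API_KEY is already set as in the script:

from langchain.chat_models import init_chat_model

# Stream the reply chunk by chunk instead of waiting for the full message.
model = init_chat_model("qwen-max", model_provider="deepseek")
for chunk in model.stream("你是谁?"):  # "Who are you?"
    print(chunk.content, end="", flush=True)
print()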