save all
This commit is contained in:
@@ -1,2 +1,10 @@
|
||||
openai
|
||||
dashscope
|
||||
dashscope
|
||||
langchain
|
||||
langchain-deepseek
|
||||
langchain-community
|
||||
|
||||
llama-index
|
||||
llama-index-core
|
||||
llama-index-llms-dashscope
|
||||
llama-index-indices-managed-dashscope
|
||||
15
test/langchain/compatible/deepseek.py
Normal file
15
test/langchain/compatible/deepseek.py
Normal file
@@ -0,0 +1,15 @@
"""Smoke test: call DashScope's OpenAI-compatible endpoint through
langchain-deepseek's ChatDeepSeek wrapper and print the raw response."""
from langchain_openai import ChatOpenAI  # NOTE(review): imported but never used — candidate for removal
from langchain_deepseek import ChatDeepSeek
import os

chatLLM = ChatDeepSeek(
    api_key=os.getenv("DASHSCOPE_API_KEY"),  # None if the env var is unset; the request will then fail at call time
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    # Model name sent to the compatible-mode endpoint. Swap as needed; see the
    # model list: https://help.aliyun.com/zh/model-studio/getting-started/models
    # (original comment said "qwen-plus as an example" but the value is deepseek-chat)
    model="deepseek-chat",
    # other params...
)
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"}]
response = chatLLM.invoke(messages)
print(response.model_dump_json())
15
test/langchain/qwen.py
Normal file
15
test/langchain/qwen.py
Normal file
@@ -0,0 +1,15 @@
"""Smoke test: initialize a chat model via langchain's init_chat_model,
reusing a DashScope API key as the DeepSeek key."""
import getpass  # NOTE(review): unused — likely leftover from the langchain quickstart template
import os

# Fall back to the DashScope key when DEEPSEEK_API_KEY is not set.
# Raises KeyError if DASHSCOPE_API_KEY is also missing — intentional fail-fast,
# but the error message will name the wrong variable.
if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = os.environ['DASHSCOPE_API_KEY']

from langchain.chat_models import init_chat_model

# NOTE(review): model_provider="deepseek" targets DeepSeek's own API, which does
# not serve "qwen-max" and will not accept a DashScope key — confirm this
# combination is intentional (a base_url override may be missing).
model = init_chat_model("qwen-max", model_provider="deepseek")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "你是谁?"}]
response = model.invoke(messages)
print(response.model_dump_json())
52
test/llamaindex/test_20250826_1.ipynb
Normal file
52
test/llamaindex/test_20250826_1.ipynb
Normal file
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "raw",
|
||||
"source": [
|
||||
"from llama_index.llms.dashscope import DashScope\n",
|
||||
"from llama_index.llms.openai import OpenAI\n",
|
||||
"\n",
|
||||
"llm = DashScope(model_name=\"qwen-max\") # 设置检索引擎生成回答时调用的大模型。"
|
||||
],
|
||||
"id": "a5d3b9e1d4e6588f"
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"id": "initial_id",
|
||||
"metadata": {
|
||||
"collapsed": true,
|
||||
"jupyter": {
|
||||
"is_executing": true
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"response = llm.complete(\"William Shakespeare is \")\n",
|
||||
"print(response)"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 2
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython2",
|
||||
"version": "2.7.6"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
41
test/llamaindex/test_20250826_1.py
Normal file
41
test/llamaindex/test_20250826_1.py
Normal file
@@ -0,0 +1,41 @@
from llama_index.core.base.llms.types import ChatMessage
from llama_index.llms.dashscope import DashScope
import asyncio

# Shared LLM handle used by every test below (DashScope-hosted qwen-max).
llm = DashScope(model_name="qwen-max")


def test1():
    # Synchronous one-shot completion.
    print(llm.complete("William Shakespeare is "))


async def test2():
    # Asynchronous counterpart of test1.
    result = await llm.acomplete("William Shakespeare is ")
    print(result)


def test3():
    # Streaming completion; prints each chunk object as it arrives.
    for part in llm.stream_complete("William Shakespeare is "):
        print(part)


def test4():
    # Streaming completion; prints only the incremental text deltas.
    stream = llm.stream_complete("William Shakespeare is ")
    for token in stream:
        print(token.delta, end="", flush=True)


def test5():
    # Chat-style call with an explicit system/user message list.
    dialogue = [
        ChatMessage(role="system", content="You are a helpful assistant."),
        ChatMessage(role="user", content="Tell me a joke."),
    ]
    print(llm.chat(dialogue))


if __name__ == '__main__':
    # test1()
    # asyncio.run(test2())
    # test3()
    # test4()
    test5()
44
test/llamaindex/test_20250826_2.ipynb
Normal file
44
test/llamaindex/test_20250826_2.ipynb
Normal file
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "raw",
|
||||
"source": "print(\"hello\")",
|
||||
"id": "a5d3b9e1d4e6588f"
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"id": "initial_id",
|
||||
"metadata": {
|
||||
"collapsed": true,
|
||||
"jupyter": {
|
||||
"is_executing": true
|
||||
}
|
||||
},
|
||||
"source": "",
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 2
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython2",
|
||||
"version": "2.7.6"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
Reference in New Issue
Block a user