save all
This commit is contained in:
52
test/llamaindex/test_20250826_1.ipynb
Normal file
52
test/llamaindex/test_20250826_1.ipynb
Normal file
@@ -0,0 +1,52 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "raw",
|
||||
"source": [
|
||||
"from llama_index.llms.dashscope import DashScope\n",
|
||||
"from llama_index.llms.openai import OpenAI\n",
|
||||
"\n",
|
||||
"llm = DashScope(model_name=\"qwen-max\") # 设置检索引擎生成回答时调用的大模型。"
|
||||
],
|
||||
"id": "a5d3b9e1d4e6588f"
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"id": "initial_id",
|
||||
"metadata": {
|
||||
"collapsed": true,
|
||||
"jupyter": {
|
||||
"is_executing": true
|
||||
}
|
||||
},
|
||||
"source": [
|
||||
"response = llm.complete(\"William Shakespeare is \")\n",
|
||||
"print(response)"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 2
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython2",
|
||||
"version": "2.7.6"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
41
test/llamaindex/test_20250826_1.py
Normal file
41
test/llamaindex/test_20250826_1.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from llama_index.core.base.llms.types import ChatMessage
|
||||
from llama_index.llms.dashscope import DashScope
|
||||
import asyncio
|
||||
|
||||
# Module-level LLM client shared by every test below.
llm = DashScope(model_name="qwen-max")  # Set the large model the retrieval engine calls when generating answers.
|
||||
|
||||
def test1():
    """Synchronous completion demo: continue a prompt and print the result."""
    result = llm.complete("William Shakespeare is ")
    print(result)
|
||||
|
||||
async def test2():
    """Async completion demo: await the same prompt via acomplete and print it."""
    answer = await llm.acomplete("William Shakespeare is ")
    print(answer)
|
||||
|
||||
|
||||
def test3():
    """Streaming completion demo: print each partial response as it arrives."""
    for piece in llm.stream_complete("William Shakespeare is "):
        print(piece)
|
||||
|
||||
def test4():
    """Streaming completion demo: emit token deltas on a single line."""
    stream = llm.stream_complete("William Shakespeare is ")
    for part in stream:
        # .delta is the newly generated fragment; flush so output appears live.
        print(part.delta, end="", flush=True)
|
||||
|
||||
|
||||
def test5():
    """Chat demo: send a system prompt plus one user turn and print the reply."""
    conversation = [
        ChatMessage(role="system", content="You are a helpful assistant."),
        ChatMessage(role="user", content="Tell me a joke."),
    ]
    reply = llm.chat(conversation)
    print(reply)
|
||||
|
||||
if __name__ == '__main__':
    # Manual scratch runner: uncomment exactly one scenario to exercise it.
    # test1()
    # asyncio.run(test2())
    # test3()
    # test4()
    test5()
|
||||
44
test/llamaindex/test_20250826_2.ipynb
Normal file
44
test/llamaindex/test_20250826_2.ipynb
Normal file
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "raw",
|
||||
"source": "print(\"hello\")",
|
||||
"id": "a5d3b9e1d4e6588f"
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"id": "initial_id",
|
||||
"metadata": {
|
||||
"collapsed": true,
|
||||
"jupyter": {
|
||||
"is_executing": true
|
||||
}
|
||||
},
|
||||
"source": "",
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 2
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython2",
|
||||
"version": "2.7.6"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
Reference in New Issue
Block a user