文章目录
1.同步stream流
python
import os

from langchain_community.chat_models import ChatTongyi

# NOTE(review): hard-coding an API key in source is unsafe — read it from the
# real environment instead; "sk-秘钥" is a placeholder.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"

llm = ChatTongyi(model="qwen-plus")

# stream() is the synchronous streaming API (the async variant is astream()).
# Each iteration yields one message chunk as the model generates tokens.
chunks = []
for chunk in llm.stream("天空是什么颜色?"):
    chunks.append(chunk)
    print(chunk.content, end="|", flush=True)
2.异步astream流
python
import os
import asyncio

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage

os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"  # placeholder key

llm = ChatTongyi(model="qwen-plus")
prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事")
output_parser = StrOutputParser()
# Compose prompt -> model -> parser into a single runnable chain.
chain = prompt | llm | output_parser


async def async_astream_chain():
    """Asynchronously stream the chain's output, printing each text chunk."""
    async for chunk in chain.astream({"topic": "干将"}):
        print(chunk, end="|", flush=True)


asyncio.run(async_astream_chain())
3.异步astream流json输出
python
import os
import asyncio

from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage

os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"  # placeholder key

llm = ChatTongyi(model="qwen-plus")
output_parser = JsonOutputParser()
# Model output is parsed as JSON; while streaming, the parser emits
# progressively more complete partial objects as chunks arrive.
chain = llm | output_parser


async def async_astream_chain():
    """Stream the model's JSON answer, printing each partial parse."""
    async for chunk in chain.astream(
        "以JSON格式输出法国、西班牙和日本的国家和人口列表"
    ):
        print(chunk, flush=True)


asyncio.run(async_astream_chain())
4.异步事件astream_events流
python
import os
import asyncio

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage

os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"  # placeholder key

llm = ChatTongyi(model="qwen-plus")


async def async_astream():
    """Collect and print every event emitted during one streamed model call.

    astream_events(version="v2") yields structured event dicts such as
    on_chat_model_start / on_chat_model_stream / on_chat_model_end
    (a captured sample dump follows this script).
    """
    events = []
    async for event in llm.astream_events("hello", version="v2"):
        events.append(event)
    print(events)


asyncio.run(async_astream())
"""
事件
[{
'event': 'on_chat_model_start',
'data': {
'input': 'hello'
},
'name': 'ChatTongyi',
'tags': [],
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = 'Hello', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = '! How', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = ' can I assist', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = ' you today?', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = ' 😊', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {
'finish_reason': 'stop',
'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
'token_usage': {
'input_tokens': 9,
'output_tokens': 11,
'total_tokens': 20,
'prompt_tokens_details': {
'cached_tokens': 0
}
}
}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'parent_ids': []
}, {
'event': 'on_chat_model_stream',
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'data': {
'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9', chunk_position = 'last')
},
'parent_ids': []
}, {
'event': 'on_chat_model_end',
'data': {
'output': AIMessageChunk(content = 'Hello! How can I assist you today? 😊', additional_kwargs = {}, response_metadata = {
'finish_reason': 'stop',
'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
'token_usage': {
'input_tokens': 9,
'output_tokens': 11,
'total_tokens': 20,
'prompt_tokens_details': {
'cached_tokens': 0
}
}
}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
},
'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
'name': 'ChatTongyi',
'tags': [],
'metadata': {
'ls_provider': 'tongyi',
'ls_model_type': 'chat',
'ls_model_name': 'qwen-plus'
},
'parent_ids': []
}]
"""
5.使用asyncio.gather并发执行多个协程(单线程事件循环内的异步并发,并非多线程)
await asyncio.gather(task1(), task2())
python
import os
import asyncio

from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage

os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"  # placeholder key

llm = ChatTongyi(model="qwen-plus")


async def _stream_story(topic: str) -> None:
    """Build a story chain for *topic* and stream its output to stdout."""
    prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事,100字")
    chain = prompt | llm | StrOutputParser()
    async for chunk in chain.astream({"topic": topic}):
        print(chunk, end="|", flush=True)


async def task1():
    await _stream_story("干将")


async def task2():
    await _stream_story("秦始皇")


async def main():
    # Sequential alternative:
    #   await task1()
    #   await task2()
    # gather() runs both coroutines concurrently on one event loop
    # (cooperative async concurrency, not OS threads).
    await asyncio.gather(task1(), task2())


asyncio.run(main())