【大模型】-LangChain--stream流式同步异步

文章目录

1.同步stream流

python 复制代码
import os
from langchain_community.chat_models import ChatTongyi

# DashScope credentials must be in the environment before the model is built.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# Synchronous streaming: llm.stream(...) yields AIMessageChunk objects one
# at a time (the asynchronous counterpart is llm.astream).
chunks = []
question = "天空是什么颜色?"
for piece in llm.stream(question):
    chunks.append(piece)
    # Print each token as it arrives, '|'-separated, without buffering.
    print(piece.content, end="|", flush=True)

2.异步astream流

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


# DashScope credentials must be in the environment before the model is built.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# LCEL pipeline: prompt -> model -> plain-string parser.
prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事")
output_parser = StrOutputParser()
chain = prompt | llm | output_parser


async def async_astream_chain():
    """Consume the chain's async stream, printing '|'-separated text chunks."""
    async for piece in chain.astream({"topic": "干将"}):
        print(piece, end="|", flush=True)


asyncio.run(async_astream_chain())

3.异步astream流json输出

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


# DashScope credentials must be in the environment before the model is built.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# JsonOutputParser re-parses the text accumulated so far on every chunk, so
# each streamed value is a progressively more complete JSON object.
output_parser = JsonOutputParser()
chain = llm | output_parser


async def async_astream_chain():
    """Stream partial JSON objects from the model as they are parsed."""
    request = "以JSON格式输出法国、西班牙和日本的国家和人口列表"
    async for partial in chain.astream(request):
        print(partial, flush=True)


asyncio.run(async_astream_chain())

4.异步事件astream_events流

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


# DashScope credentials must be in the environment before the model is built.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")


async def async_astream():
    """Collect and print every event emitted while the model streams a reply.

    astream_events (v2 schema) surfaces lifecycle events such as
    on_chat_model_start, on_chat_model_stream and on_chat_model_end.
    """
    collected = [event async for event in llm.astream_events("hello", version="v2")]
    print(collected)


asyncio.run(async_astream())


"""
事件
[{
	'event': 'on_chat_model_start',
	'data': {
		'input': 'hello'
	},
	'name': 'ChatTongyi',
	'tags': [],
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = 'Hello', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '! How', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' can I assist', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' you today?', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' 😊', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9', chunk_position = 'last')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_end',
	'data': {
		'output': AIMessageChunk(content = 'Hello! How can I assist you today? 😊', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}]
"""

5.异步并发(asyncio.gather,单线程事件循环,并非多线程)

await asyncio.gather(task1(), task2())

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


# DashScope credentials must be in the environment before the model is built.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")


async def _stream_story(topic):
    """Build a prompt | llm | parser chain and stream a story for *topic*.

    Shared helper: task1/task2 previously duplicated this chain setup.
    Prints each chunk '|'-separated as it arrives.
    """
    prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事,100字")
    output_parser = StrOutputParser()
    chain = prompt | llm | output_parser
    async for chunk in chain.astream({"topic": topic}):
        print(chunk, end="|", flush=True)


async def task1():
    """Stream a 100-character story about 干将."""
    await _stream_story("干将")


async def task2():
    """Stream a 100-character story about 秦始皇."""
    await _stream_story("秦始皇")


async def main():
    """Run both streaming tasks concurrently.

    NOTE: asyncio.gather interleaves the coroutines cooperatively on a
    single thread and event loop — this is async concurrency, not
    multithreading.
    """
    # Sequential alternative:
    # await task1()
    # await task2()

    # Concurrent: both streams' output interleaves.
    await asyncio.gather(task1(), task2())


asyncio.run(main())
相关推荐
杨云龙UP1 分钟前
从0到1快速学会Linux操作系统(基础),这一篇就够了!
linux·运维·服务器·学习·ubuntu·centos·ssh
Dovis(誓平步青云)4 分钟前
《Linux 信号入门:搞懂 “进程通信的紧急电话” 到底怎么用(初篇)》
linux·运维·服务器
华科易迅8 分钟前
Vue如何集成封装Axios
前端·javascript·vue.js
康一夏9 分钟前
Next.js 13变化有多大?
前端·react·nextjs
糖炒栗子03269 分钟前
前端项目标准环境搭建与启动
前端
不是az10 分钟前
CSS知识点记录
前端·javascript·css
爱分享的阿Q18 分钟前
GPT6-Spud-AGI前夜的豪赌
前端·easyui·agi
autumn20051 小时前
Flutter 框架跨平台鸿蒙开发 - 历史人物对话
服务器·flutter·华为·harmonyos
西西小飞龙1 小时前
Less/Sass Mixins vs. Extend
前端·less·sass
syjy21 小时前
(含下载)BeTheme WordPress主题使用教程
前端·wordpress·wordpress建站