【大模型】-LangChain--stream流式同步异步

文章目录

1.同步stream流

python 复制代码
import os
from langchain_community.chat_models import ChatTongyi

# The DashScope credential has to be in the environment before the
# client is constructed.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# stream() is the synchronous streaming entry point (astream() is the
# async twin). Keep every piece while echoing it as it arrives.
collected = []
for piece in llm.stream("天空是什么颜色?"):
    collected.append(piece)
    print(piece.content, end="|", flush=True)

2.异步astream流

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# LCEL pipeline: prompt template -> chat model -> plain-string parser.
chain = (
    ChatPromptTemplate.from_template("给我讲一个{topic}的故事")
    | llm
    | StrOutputParser()
)

async def async_astream_chain():
    """Asynchronously stream the chain, echoing each text fragment."""
    async for fragment in chain.astream({"topic": "干将"}):
        print(fragment, end="|", flush=True)

asyncio.run(async_astream_chain())

3.异步astream流json输出

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# Feed the raw model output straight into the JSON parser; while
# streaming, JsonOutputParser yields progressively more complete
# partial objects rather than text fragments.
chain = llm | JsonOutputParser()

async def async_astream_chain():
    """Stream the chain and print each partial JSON value as it grows."""
    query = "以JSON格式输出法国、西班牙和日本的国家和人口列表"
    async for partial in chain.astream(query):
        print(partial, flush=True)

asyncio.run(async_astream_chain())

4.异步事件astream_events流

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

async def async_astream():
    """Collect and dump the full event trace for one streamed reply."""
    # astream_events yields lifecycle events (on_chat_model_start /
    # on_chat_model_stream / on_chat_model_end); version="v2" selects
    # the current event schema.
    trace = []
    async for event in llm.astream_events("hello", version="v2"):
        trace.append(event)
    print(trace)


asyncio.run(async_astream())


"""
事件
[{
	'event': 'on_chat_model_start',
	'data': {
		'input': 'hello'
	},
	'name': 'ChatTongyi',
	'tags': [],
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = 'Hello', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '! How', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' can I assist', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' you today?', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' 😊', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9', chunk_position = 'last')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_end',
	'data': {
		'output': AIMessageChunk(content = 'Hello! How can I assist you today? 😊', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}]
"""

5.异步多线程

await asyncio.gather(task1(), task2())

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

async def _stream_story(topic):
    """Build a prompt | llm | parser chain and stream the story for *topic*.

    Shared helper: task1/task2 were verbatim copies differing only in
    the topic, so the chain construction lives here once.
    """
    prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事,100字")
    chain = prompt | llm | StrOutputParser()
    async for chunk in chain.astream({"topic": topic}):
        print(chunk, end="|", flush=True)

async def task1():
    """Stream a 100-character story about 干将."""
    await _stream_story("干将")

async def task2():
    """Stream a 100-character story about 秦始皇."""
    await _stream_story("秦始皇")

async def main():
    # Sequential alternative (one stream finishes before the next starts):
    # await task1()
    # await task2()

    # Run both streams concurrently; their output chunks interleave.
    await asyncio.gather(task1(), task2())

asyncio.run(main())
相关推荐
roman_日积跬步-终至千里2 小时前
【LangGraph4j】LangGraph4j 核心概念与图编排原理
java·服务器·数据库
野犬寒鸦2 小时前
从零起步学习并发编程 || 第六章:ReentrantLock与synchronized 的辨析及运用
java·服务器·数据库·后端·学习·算法
saber_andlibert3 小时前
TCMalloc底层实现
java·前端·网络
逍遥德3 小时前
如何学编程之01.理论篇.如何通过阅读代码来提高自己的编程能力?
前端·后端·程序人生·重构·软件构建·代码规范
冻感糕人~3 小时前
【珍藏必备】ReAct框架实战指南:从零开始构建AI智能体,让大模型学会思考与行动
java·前端·人工智能·react.js·大模型·就业·大模型学习
程序员agions3 小时前
2026年,“配置工程师“终于死绝了
前端·程序人生
alice--小文子3 小时前
cursor-mcp工具使用
java·服务器·前端
晚霞的不甘3 小时前
揭秘 CANN 内存管理:如何让大模型在小设备上“轻装上阵”?
前端·数据库·经验分享·flutter·3d
小迷糊的学习记录4 小时前
0.1 + 0.2 不等于 0.3
前端·javascript·面试
梦帮科技4 小时前
Node.js配置生成器CLI工具开发实战
前端·人工智能·windows·前端框架·node.js·json