【大模型】-LangChain--stream流式同步异步

文章目录

1.同步stream流

python 代码:
import os
from langchain_community.chat_models import ChatTongyi

# NOTE(review): the key is hard-coded for the tutorial; in real code read it
# from the existing environment instead of assigning it in source.
os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# Synchronous streaming: .stream() yields message chunks as the model
# produces them (the asynchronous counterpart is .astream()).
chunks = []
for chunk in llm.stream("天空是什么颜色?"):
    chunks.append(chunk)  # keep every chunk so the full reply can be rebuilt later
    print(chunk.content, end="|", flush=True)

2.异步astream流

python 代码:
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# LCEL pipeline: prompt -> chat model -> plain-string parser.
prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事")
output_parser = StrOutputParser()
chain = prompt | llm | output_parser

async def stream_story():
    """Asynchronously stream the chain's output, printing each text piece."""
    async for piece in chain.astream({"topic": "干将"}):
        print(piece, end="|", flush=True)

asyncio.run(stream_story())

3.异步astream流json输出

python 代码:
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser,JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# Model piped straight into a JSON parser: each streamed chunk is the
# partial JSON document parsed so far.
output_parser = JsonOutputParser()
chain = llm | output_parser

async def stream_json():
    """Stream the chain asynchronously and print each partial JSON state."""
    question = "以JSON格式输出法国、西班牙和日本的国家和人口列表"
    async for partial in chain.astream(question):
        print(partial, flush=True)

asyncio.run(stream_json())

4.异步事件astream_events流

python 代码:
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

async def async_astream():
    """Collect and print every event emitted by astream_events.

    With schema version "v2", astream_events yields lifecycle events such as
    on_chat_model_start / on_chat_model_stream / on_chat_model_end (see the
    sample dump below).
    """
    # Async comprehension replaces the manual append loop — same behavior,
    # idiomatic form; stray spaces inside the calls removed.
    events = [event async for event in llm.astream_events("hello", version="v2")]
    print(events)


asyncio.run(async_astream())


"""
事件
[{
	'event': 'on_chat_model_start',
	'data': {
		'input': 'hello'
	},
	'name': 'ChatTongyi',
	'tags': [],
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = 'Hello', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '! How', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' can I assist', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' you today?', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' 😊', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9', chunk_position = 'last')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_end',
	'data': {
		'output': AIMessageChunk(content = 'Hello! How can I assist you today? 😊', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}]
"""

5.异步并发(asyncio.gather,协程并发而非多线程)

await asyncio.gather(task1(), task2())

python 代码:
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

async def _stream_story(topic):
    """Stream a short story about *topic*, printing each chunk as it arrives."""
    prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事,100字")
    output_parser = StrOutputParser()
    chain = prompt | llm | output_parser
    async for chunk in chain.astream({"topic": topic}):
        print(chunk, end="|", flush=True)

async def task1():
    # task1/task2 were byte-identical except for the topic; the shared
    # logic now lives in _stream_story (DRY), wrappers kept for gather().
    await _stream_story("干将")

async def task2():
    await _stream_story("秦始皇")

async def main():
    # Sequential alternative:
    # await task1()
    # await task2()

    # Concurrent: both streams run interleaved on the event loop
    # (cooperative coroutine concurrency, not OS threads).
    await asyncio.gather(task1(), task2())

asyncio.run(main())
相关推荐
花归去2 分钟前
echarts 柱状图曲线图
开发语言·前端·javascript
喝拿铁写前端3 分钟前
当 AI 会写代码之后,我们应该怎么“管”它?
前端·人工智能
老前端的功夫6 分钟前
TypeScript 类型魔术:模板字面量类型的深层解密与工程实践
前端·javascript·ubuntu·架构·typescript·前端框架
HABuo28 分钟前
【Linux进程(四)】进程切换&环境变量深入剖析
linux·运维·服务器·c语言·c++·ubuntu·centos
Nan_Shu_61430 分钟前
学习: Threejs (2)
前端·javascript·学习
G_G#38 分钟前
纯前端js插件实现同一浏览器控制只允许打开一个标签,处理session变更问题
前端·javascript·浏览器标签页通信·只允许一个标签页
橘颂TA44 分钟前
【Linux】死锁四条件的底层逻辑:从锁冲突到 STL 组件的线程安全实践(Ⅵ)
linux·运维·服务器·c++·死锁
@大迁世界1 小时前
TypeScript 的本质并非类型,而是信任
开发语言·前端·javascript·typescript·ecmascript
GIS之路1 小时前
GDAL 实现矢量裁剪
前端·python·信息可视化
是一个Bug1 小时前
后端开发者视角的前端开发面试题清单(50道)
前端