【大模型】-LangChain--stream流式同步异步

文章目录

1.同步stream流

python 复制代码
import os
from langchain_community.chat_models import ChatTongyi

os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# Synchronous streaming demo: `stream` yields message chunks as the model
# produces them (the asynchronous counterpart is `astream`).  Each chunk is
# kept in a list and its text is echoed immediately, "|"-separated.
collected = []
for piece in llm.stream("天空是什么颜色?"):
    collected.append(piece)
    print(piece.content, end="|", flush=True)

2.异步astream流

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# Compose prompt -> model -> string parser with the LCEL pipe operator.
prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事")
output_parser = StrOutputParser()
chain = prompt | llm | output_parser


async def async_astream_chain():
    """Stream the chain asynchronously, echoing each text fragment as it arrives."""
    async for fragment in chain.astream({"topic": "干将"}):
        print(fragment, end="|", flush=True)


asyncio.run(async_astream_chain())

3.异步astream流json输出

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser, JsonOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")

# No prompt template here: the raw request string is fed straight to the
# model, and JsonOutputParser re-parses the partial JSON on every chunk.
output_parser = JsonOutputParser()
chain = llm | output_parser


async def async_astream_chain():
    """Stream the chain and print each progressively-completed JSON snapshot."""
    query = "以JSON格式输出法国、西班牙和日本的国家和人口列表"
    async for partial in chain.astream(query):
        print(partial, flush=True)


asyncio.run(async_astream_chain())

4.异步事件astream_events流

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")


async def async_astream():
    """Collect the full v2 event stream for one prompt, then dump it."""
    # astream_events emits lifecycle events (start / per-chunk stream / end)
    # rather than bare content chunks; gather them all before printing.
    events = [event async for event in llm.astream_events("hello", version="v2")]
    print(events)


asyncio.run(async_astream())


"""
事件
[{
	'event': 'on_chat_model_start',
	'data': {
		'input': 'hello'
	},
	'name': 'ChatTongyi',
	'tags': [],
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = 'Hello', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '! How', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' can I assist', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' you today?', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = ' 😊', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_stream',
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'data': {
		'chunk': AIMessageChunk(content = '', additional_kwargs = {}, response_metadata = {}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9', chunk_position = 'last')
	},
	'parent_ids': []
}, {
	'event': 'on_chat_model_end',
	'data': {
		'output': AIMessageChunk(content = 'Hello! How can I assist you today? 😊', additional_kwargs = {}, response_metadata = {
			'finish_reason': 'stop',
			'request_id': '3952154f-a9f5-4b39-a3c5-6eb8c85a6213',
			'token_usage': {
				'input_tokens': 9,
				'output_tokens': 11,
				'total_tokens': 20,
				'prompt_tokens_details': {
					'cached_tokens': 0
				}
			}
		}, id = 'lc_run--ef708116-49b9-4572-8860-9bcf04c304e9')
	},
	'run_id': 'ef708116-49b9-4572-8860-9bcf04c304e9',
	'name': 'ChatTongyi',
	'tags': [],
	'metadata': {
		'ls_provider': 'tongyi',
		'ls_model_type': 'chat',
		'ls_model_name': 'qwen-plus'
	},
	'parent_ids': []
}]
"""

5.异步并发（asyncio.gather，单线程事件循环并发，并非多线程）

await asyncio.gather(task1(), task2())

python 复制代码
import os
import asyncio
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate
from langchain_community.chat_models import ChatTongyi
from langchain_core.messages import SystemMessage


os.environ["DASHSCOPE_API_KEY"] = "sk-秘钥"
llm = ChatTongyi(model="qwen-plus")


async def _stream_story(topic):
    """Build the prompt | llm | parser chain and stream a story about *topic*.

    Factored out because task1/task2 previously duplicated the whole chain
    construction verbatim; only the topic differed.
    """
    prompt = ChatPromptTemplate.from_template("给我讲一个{topic}的故事,100字")
    chain = prompt | llm | StrOutputParser()
    async for chunk in chain.astream({"topic": topic}):
        print(chunk, end="|", flush=True)


async def task1():
    """Stream a 100-character story about 干将."""
    await _stream_story("干将")


async def task2():
    """Stream a 100-character story about 秦始皇."""
    await _stream_story("秦始皇")


async def main():
    # Sequential alternative (one finishes before the other starts):
    # await task1()
    # await task2()

    # Concurrent: both coroutines run on the same event loop and their
    # streamed output interleaves as chunks arrive.
    await asyncio.gather(task1(), task2())


asyncio.run(main())
相关推荐
用户69371750013849 小时前
Google 正在“收紧侧加载”:陌生 APK 安装或需等待 24 小时
android·前端
蓝帆傲亦9 小时前
Web 前端搜索文字高亮实现方法汇总
前端
用户69371750013849 小时前
Room 3.0:这次不是升级,是重来
android·前端·google
漫随流水10 小时前
旅游推荐系统(view.py)
前端·数据库·python·旅游
踩着两条虫11 小时前
VTJ.PRO 核心架构全公开!从设计稿到代码,揭秘AI智能体如何“听懂人话”
前端·vue.js·ai编程
爱吃土豆的马铃薯ㅤㅤㅤㅤㅤㅤㅤㅤㅤ12 小时前
Linux 查询某进程文件所在路径 命令
linux·运维·服务器
jzlhll12312 小时前
kotlin Flow first() last()总结
开发语言·前端·kotlin
蓝冰凌13 小时前
Vue 3 中 defineExpose 的行为【defineExpose暴露ref变量】详解:自动解包、响应性与实际使用
前端·javascript·vue.js
奔跑的呱呱牛13 小时前
generate-route-vue基于文件系统的 Vue Router 动态路由生成工具
前端·javascript·vue.js
05大叔13 小时前
网络基础知识 域名,JSON格式,AI基础
运维·服务器·网络