Table of Contents
- 1. Introduction: The Concept and Value of Coroutines
- 2. Generator Basics: The Birth of yield (Python 2.2)
  - 2.1 Introducing Generators
  - 2.2 Generator Expressions
- 3. Generators as Coroutines: Enhancing yield (Python 2.5)
  - 3.1 The send() Method and the Birth of Coroutines
  - 3.2 Exception Handling in Coroutines
- 4. The yield from Syntax (Python 3.3)
  - 4.1 Introducing yield from
  - 4.2 The Coroutine Delegation Pattern
- 5. The asyncio Framework and @asyncio.coroutine (Python 3.4)
  - 5.1 Introducing the asyncio Framework
  - 5.2 Asynchronous I/O Operations
- 6. The async/await Syntax (Python 3.5)
  - 6.1 Native Coroutines and async/await
  - 6.2 Asynchronous Iterators and Asynchronous Context Managers
- 7. Asynchronous Generators (Python 3.6)
  - 7.1 Introducing Asynchronous Generators
  - 7.2 Asynchronous Comprehensions
- 8. Modern Asynchronous Programming Patterns (Python 3.7+)
  - 8.1 Advanced Asynchronous Patterns
The Evolution of Python Coroutines: From yield to async/await
1. Introduction: The Concept and Value of Coroutines
A coroutine is a concurrency construct that is much lighter-weight than a thread: it lets a function suspend partway through its execution and resume later, instead of running to completion in one pass like an ordinary function. Before walking through the history of coroutines in Python, let's look at their core value:
```python
def coroutine_concept_demo():
"""演示协程的基本概念"""
# 传统函数:一次性执行完毕
def traditional_function():
result = []
for i in range(3):
result.append(i)
return result
# 生成器函数:可以暂停和恢复
def generator_function():
for i in range(3):
yield i # 暂停执行,返回一个值
print("传统函数 vs 生成器函数:")
print("=" * 40)
# 传统函数执行
print("传统函数执行:")
result = traditional_function()
print(f"结果: {result}")
# 生成器执行
print("\n生成器执行:")
gen = generator_function()
for value in gen:
print(f"生成值: {value}")
print("\n协程的核心优势:")
advantages = [
"轻量级: 不需要线程上下文切换开销",
"高效: 在单线程内实现并发",
"可控: 精确控制执行流程",
"可组合: 易于构建复杂的异步逻辑"
]
for advantage in advantages:
print(f"• {advantage}")
# 运行概念演示
coroutine_concept_demo()
```
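The demo above shows suspension and resumption, but not yet the concurrency payoff listed among the advantages. As a minimal preview of where this article is heading (the asyncio material is covered in detail in sections 5-8), the sketch below overlaps two simulated I/O waits in a single thread; the `fake_io` coroutine and the one-second delays are purely illustrative:
```python
import asyncio
import time

async def fake_io(name, delay):
    # Simulate a network call or disk read that yields control while waiting
    await asyncio.sleep(delay)
    return name

async def main():
    start = time.perf_counter()
    # Both "requests" wait concurrently, so the total time is ~1s rather than ~2s
    results = await asyncio.gather(fake_io("a", 1.0), fake_io("b", 1.0))
    print(results, f"elapsed: {time.perf_counter() - start:.1f}s")

asyncio.run(main())
```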
2. Generator Basics: The Birth of yield (Python 2.2)
2.1 Introducing Generators
Python 2.2 introduced generators (PEP 255). The yield keyword gave functions a basic ability to pause and resume:
```python
def generator_foundations():
"""演示Python 2.2的生成器基础"""
def simple_generator():
"""简单的生成器示例"""
print("开始执行")
yield 1
print("恢复执行")
yield 2
print("结束执行")
yield 3
def infinite_counter():
"""无限计数器生成器"""
count = 0
while True:
yield count
count += 1
def fibonacci_generator(limit):
"""斐波那契数列生成器"""
a, b = 0, 1
count = 0
while count < limit:
yield a
a, b = b, a + b
count += 1
print("Python 2.2 生成器基础:")
print("=" * 40)
# 基本生成器使用
print("1. 基本生成器:")
gen = simple_generator()
print(f"第一次next: {next(gen)}")
print(f"第二次next: {next(gen)}")
print(f"第三次next: {next(gen)}")
# 使用for循环
print("\n2. 使用for循环:")
for value in simple_generator():
print(f"值: {value}")
# 无限生成器
print("\n3. 无限生成器:")
counter = infinite_counter()
for i in range(5):
print(f"计数: {next(counter)}")
# 斐波那契生成器
print("\n4. 斐波那契数列:")
fib_gen = fibonacci_generator(10)
fib_numbers = list(fib_gen)
print(f"前10个斐波那契数: {fib_numbers}")
# 运行生成器基础演示
generator_foundations()
```
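One detail the demo above does not show is what happens when a generator runs out of values, and how its life cycle can be inspected. A small supplementary sketch, using `inspect.getgeneratorstate` (available since Python 3.2):
```python
from inspect import getgeneratorstate

def countdown(n):
    while n > 0:
        yield n
        n -= 1

gen = countdown(2)
print(getgeneratorstate(gen))  # GEN_CREATED: the body has not started yet
print(next(gen))               # 2
print(getgeneratorstate(gen))  # GEN_SUSPENDED: paused at the yield
print(next(gen))               # 1
try:
    next(gen)                  # nothing left to yield
except StopIteration:
    print("generator exhausted")
print(getgeneratorstate(gen))  # GEN_CLOSED
```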
2.2 Generator Expressions
Python 2.4 introduced generator expressions (PEP 289), a more concise way to create generators:
```python
def generator_expressions_demo():
"""演示生成器表达式"""
# 列表推导式 vs 生成器表达式
numbers = [1, 2, 3, 4, 5]
# 列表推导式 - 立即计算所有值
list_comp = [x * 2 for x in numbers]
# 生成器表达式 - 惰性计算
gen_exp = (x * 2 for x in numbers)
print("生成器表达式 vs 列表推导式:")
print("=" * 40)
print(f"列表推导式结果: {list_comp}")
print(f"生成器表达式结果: {gen_exp}")
print(f"从生成器获取值: {list(gen_exp)}")
# 内存使用对比
print("\n内存使用对比:")
import sys
large_range = range(1000000)
# 列表推导式 - 占用大量内存
list_memory = sys.getsizeof([x for x in large_range])
# 生成器表达式 - 占用很少内存
gen_memory = sys.getsizeof((x for x in large_range))
print(f"列表推导式内存: {list_memory} 字节")
print(f"生成器表达式内存: {gen_memory} 字节")
print(f"内存节省: {list_memory/gen_memory:.0f}倍")
# 实际应用示例
print("\n实际应用 - 文件处理:")
def process_large_file():
"""处理大文件的生成器"""
# 模拟处理大文件
lines = [f"行 {i}" for i in range(100)]
for line in lines:
# 模拟处理每行
processed_line = line.upper()
yield processed_line
# 使用生成器处理数据流
processed_lines = (line for line in process_large_file() if '行 5' in line)
print(f"过滤结果: {list(processed_lines)}")
# 运行生成器表达式演示
generator_expressions_demo()
```
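Generator expressions are at their best when fed straight into a consumer such as sum(), max(), or any(), so that no intermediate list is ever built. A tiny sketch:
```python
# Sum of one million squares without materializing a million-element list
total = sum(x * x for x in range(1_000_000))
print(total)

# Short-circuiting consumers stop pulling values as soon as the answer is known
has_large = any(x > 10 for x in range(1_000_000))
print(has_large)
```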
3. Generators as Coroutines: Enhancing yield (Python 2.5)
3.1 The send() Method and the Birth of Coroutines
Python 2.5 added the send(), throw(), and close() methods to generators (PEP 342), enabling two-way communication and turning generators into true coroutines:
```python
def generator_as_coroutine():
"""演示生成器作为协程使用"""
def simple_coroutine():
"""简单的协程示例"""
print("协程启动")
while True:
received = yield "等待输入" # 暂停并等待发送值
print(f"接收到: {received}")
def accumulator():
"""累加器协程"""
total = 0
while True:
value = yield total
if value is None:
break
total += value
return total # Python 3.3+ 支持return值
def grep(pattern):
"""grep模式匹配协程"""
print(f"查找模式: {pattern}")
while True:
line = yield
if pattern in line:
print(f"找到: {line}")
print("Python 2.5 生成器作为协程:")
print("=" * 40)
# 基本协程使用
print("1. 简单协程:")
coro = simple_coroutine()
print(f"初始化: {next(coro)}") # 启动协程
print(f"发送值: {coro.send('Hello')}")
print(f"发送值: {coro.send('World')}")
# 累加器协程
print("\n2. 累加器协程:")
acc = accumulator()
next(acc) # 启动
print(f"累加 10: {acc.send(10)}")
print(f"累加 20: {acc.send(20)}")
print(f"累加 5: {acc.send(5)}")
# grep协程
print("\n3. grep协程:")
search = grep("python")
next(search) # 启动
search.send("我喜欢编程")
search.send("python很强大")
search.send("Java也不错")
search.send("python协程很有趣")
# 协程生命周期
print("\n4. 协程生命周期:")
def lifecycle_coroutine():
print("协程创建")
try:
while True:
value = yield
print(f"处理: {value}")
except GeneratorExit:
print("协程正常结束")
except Exception as e:
print(f"协程异常: {e}")
life_coro = lifecycle_coroutine()
next(life_coro)
life_coro.send("数据1")
life_coro.send("数据2")
life_coro.close() # 正常关闭
# 运行协程演示
generator_as_coroutine()
```
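Having to call next() once before the first send() is easy to forget. A common convenience from this era is a small "priming" decorator that advances the generator to its first yield automatically; the sketch below shows the idea (the decorator name `coroutine` and the `running_average` example are illustrative, not part of the standard library):
```python
import functools

def coroutine(func):
    """Prime a generator-based coroutine: advance it to the first yield automatically."""
    @functools.wraps(func)
    def primer(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)                 # run up to the first yield so send() works immediately
        return gen
    return primer

@coroutine
def running_average():
    total, count, average = 0.0, 0, None
    while True:
        value = yield average     # receive a number, hand back the current average
        total += value
        count += 1
        average = total / count

avg = running_average()           # already primed by the decorator
print(avg.send(10))               # 10.0
print(avg.send(20))               # 15.0
print(avg.send(30))               # 20.0
```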
3.2 Exception Handling in Coroutines
```python
def coroutine_exception_handling():
"""演示协程的异常处理"""
def robust_coroutine():
"""健壮的协程示例"""
print("协程启动")
try:
while True:
try:
value = yield
print(f"处理值: {value}")
except ValueError as e:
print(f"处理ValueError: {e}")
except TypeError:
print("处理TypeError,跳过")
except GeneratorExit:
print("协程正常退出")
finally:
print("清理资源")
def calculator():
"""计算器协程"""
while True:
try:
expression = yield
result = eval(expression)
print(f"{expression} = {result}")
except Exception as e:
print(f"计算错误: {e}")
print("协程异常处理:")
print("=" * 40)
# 健壮协程测试
print("1. 健壮协程:")
robust = robust_coroutine()
next(robust)
robust.send("正常数据")
robust.throw(ValueError, "测试错误")
robust.send("继续处理")
robust.throw(TypeError)
robust.close()
# 计算器协程
print("\n2. 计算器协程:")
calc = calculator()
next(calc)
calc.send("2 + 3 * 4")
calc.send("10 / 2")
calc.send("10 / 0") # 除零错误
calc.send("'hello' + 5") # 类型错误
calc.send("2 ** 10")
calc.close()
# 运行异常处理演示
coroutine_exception_handling()
```
4. The yield from Syntax (Python 3.3)
4.1 Introducing yield from
Python 3.3 introduced the yield from syntax (PEP 380), which greatly simplifies generator delegation and coroutine composition:
```python
def yield_from_syntax():
"""演示yield from语法"""
def chain_generators():
"""链式生成器 - 传统方式"""
for i in range(3):
yield i
for char in 'abc':
yield char
def chain_with_yield_from():
"""使用yield from链式生成器"""
yield from range(3)
yield from 'abc'
def recursive_generator(n):
"""递归生成器"""
if n > 0:
yield n
yield from recursive_generator(n - 1)
def complex_data_processing():
"""复杂数据处理"""
data_sources = [
[1, 2, 3],
{'a', 'b', 'c'},
(x for x in range(4, 7))
]
for source in data_sources:
yield from source
print("Python 3.3 yield from语法:")
print("=" * 40)
# 基础对比
print("1. 传统方式 vs yield from:")
print(f"传统方式: {list(chain_generators())}")
print(f"yield from: {list(chain_with_yield_from())}")
# 递归生成器
print("\n2. 递归生成器:")
print(f"递归结果: {list(recursive_generator(5))}")
# 复杂数据处理
print("\n3. 复杂数据处理:")
print(f"处理结果: {list(complex_data_processing())}")
# yield from的返回值
print("\n4. yield from返回值:")
def subgenerator():
"""子生成器,返回最终结果"""
result = 0
while True:
value = yield
if value is None:
break
result += value
return result # 这个返回值会被yield from捕获
def delegating_generator():
"""委托生成器"""
total = yield from subgenerator()
return f"最终结果: {total}"  # 通过 StopIteration.value 返回给调用者
delegate = delegating_generator()
next(delegate) # 启动
delegate.send(10)
delegate.send(20)
delegate.send(30)
try:
delegate.send(None) # 结束子生成器
except StopIteration as e:
print(f"委托结果: {e.value}")
# 运行yield from演示
yield_from_syntax()
```
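To see what yield from is actually doing in the delegation example above, it helps to write out, in simplified form, the plumbing it replaces. The sketch below hand-codes the send() and StopIteration forwarding for a small summing subgenerator; PEP 380's real expansion also forwards throw() and close(), which are omitted here:
```python
def summer():
    total = 0
    while True:
        value = yield
        if value is None:
            return total          # becomes StopIteration.value
        total += value

# Roughly equivalent to:  total = yield from summer()
def manual_delegate():
    sub = summer()
    to_yield = next(sub)          # prime the subgenerator
    while True:
        sent = yield to_yield     # whatever the caller sends...
        try:
            to_yield = sub.send(sent)   # ...is forwarded to the subgenerator
        except StopIteration as exc:
            total = exc.value     # capture the subgenerator's return value
            break
    yield f"total = {total}"

d = manual_delegate()
next(d)                           # start the delegator
d.send(10)
d.send(20)
print(d.send(None))               # total = 30
```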
4.2 The Coroutine Delegation Pattern
```python
def coroutine_delegation_pattern():
"""演示协程委托模式"""
class CoroutineManager:
"""协程管理器"""
def __init__(self):
self.coroutines = []
def add_coroutine(self, coro):
"""添加协程"""
self.coroutines.append(coro)
def run(self):
"""运行所有协程"""
results = []
for coro in self.coroutines:
result = yield from coro
results.append(result)
return results
def data_processor(name, data):
"""数据处理协程"""
print(f"{name} 开始处理")
processed = []
for item in data:
# 模拟处理
processed.append(f"{name}_{item}")
yield # 让出控制权
print(f"{name} 处理完成")
return processed
def aggregator():
"""聚合器协程"""
manager = CoroutineManager()
# 添加多个数据处理协程
manager.add_coroutine(data_processor("A", [1, 2, 3]))
manager.add_coroutine(data_processor("B", [4, 5, 6]))
manager.add_coroutine(data_processor("C", [7, 8, 9]))
# 委托执行
results = yield from manager.run()
return results
print("协程委托模式:")
print("=" * 40)
def run_coroutine(coro):
"""运行协程的辅助函数"""
try:
while True:
next(coro)
except StopIteration as e:
return e.value
# 运行聚合器
agg = aggregator()
results = run_coroutine(agg)
print(f"聚合结果: {results}")
# 更复杂的委托示例
print("\n复杂委托示例:")
def worker(name, tasks):
"""工作协程"""
for i, task in enumerate(tasks):
print(f"{name} 执行任务 {i}: {task}")
yield # 模拟工作
yield f"{name}_结果_{i}"
return f"{name}_完成"
def supervisor():
"""监督协程"""
workers = [
worker("Worker1", ["任务A", "任务B"]),
worker("Worker2", ["任务C", "任务D", "任务E"]),
worker("Worker3", ["任务F"])
]
results = []
for worker_coro in workers:
result = yield from worker_coro
results.append(result)
return results
sup = supervisor()
final_results = run_coroutine(sup)
print(f"监督结果: {final_results}")
# 运行委托模式演示
coroutine_delegation_pattern()
```
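The delegation examples above still drive one coroutine to completion before starting the next. The step asyncio later takes is to interleave many coroutines; a few lines of scheduler can already sketch that with nothing but next(). The scheduler below is a toy illustration of the idea, not how asyncio is actually implemented:
```python
from collections import deque

def round_robin(*coroutines):
    """Toy cooperative scheduler: advance each generator-based coroutine in turn."""
    ready = deque(coroutines)
    while ready:
        coro = ready.popleft()
        try:
            next(coro)            # run until the next yield (the cooperative "pause point")
        except StopIteration:
            continue              # this coroutine has finished; drop it
        ready.append(coro)        # still alive, put it back at the end of the queue

def task(name, steps):
    for i in range(steps):
        print(f"{name} step {i}")
        yield                     # voluntarily hand control back to the scheduler

round_robin(task("A", 3), task("B", 2))
# Output interleaves: A step 0, B step 0, A step 1, B step 1, A step 2
```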
5. The asyncio Framework and @asyncio.coroutine (Python 3.4)
5.1 Introducing the asyncio Framework
Python 3.4 introduced the asyncio framework, bringing asynchronous programming into the standard library. Coroutines were still generator-based at this stage and were marked with the @asyncio.coroutine decorator; note that this decorator was deprecated in Python 3.8 and removed in 3.11, so the examples in this section only run on older interpreters:
```python
import asyncio
import time
def asyncio_introduction():
"""演示asyncio框架基础"""
@asyncio.coroutine
def simple_coroutine(name, delay):
"""简单的asyncio协程"""
print(f"{name} 开始,等待 {delay}秒")
yield from asyncio.sleep(delay)
print(f"{name} 完成")
return f"{name}_结果"
@asyncio.coroutine
def parallel_execution():
"""并行执行多个协程"""
# 创建多个任务
tasks = [
simple_coroutine("任务A", 2),
simple_coroutine("任务B", 1),
simple_coroutine("任务C", 3)
]
# 并行执行并等待所有完成
results = yield from asyncio.gather(*tasks)
return results
@asyncio.coroutine
def timeout_example():
"""超时处理示例"""
try:
# 设置超时
result = yield from asyncio.wait_for(
simple_coroutine("超时测试", 5),
timeout=2.0
)
return result
except asyncio.TimeoutError:
return "任务超时"
print("Python 3.4 asyncio框架:")
print("=" * 40)
# 事件循环基础
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# 运行简单协程
print("1. 简单协程执行:")
start_time = time.time()
result = loop.run_until_complete(simple_coroutine("测试", 1))
print(f"结果: {result}, 耗时: {time.time() - start_time:.2f}秒")
# 并行执行
print("\n2. 并行执行:")
start_time = time.time()
results = loop.run_until_complete(parallel_execution())
print(f"所有结果: {results}")
print(f"总耗时: {time.time() - start_time:.2f}秒")
# 超时处理
print("\n3. 超时处理:")
result = loop.run_until_complete(timeout_example())
print(f"超时测试结果: {result}")
finally:
loop.close()
# 运行asyncio演示
asyncio_introduction()
```
5.2 Asynchronous I/O Operations
```python
import asyncio
import time
import aiohttp  # 需要安装: pip install aiohttp;本节示例基于旧版 aiohttp(1.x)的 API
def async_io_operations():
"""演示异步I/O操作"""
@asyncio.coroutine
def fetch_url(session, url):
"""获取URL内容"""
print(f"开始获取: {url}")
try:
with aiohttp.Timeout(10):
response = yield from session.get(url)
content = yield from response.text()
print(f"完成获取: {url}, 长度: {len(content)}")
return content[:100] # 返回前100个字符
except Exception as e:
print(f"获取失败 {url}: {e}")
return None
@asyncio.coroutine
def fetch_multiple_urls():
"""获取多个URL"""
urls = [
'http://httpbin.org/delay/1',
'http://httpbin.org/delay/2',
'http://httpbin.org/json',
'http://httpbin.org/html'
]
with aiohttp.ClientSession() as session:
tasks = [fetch_url(session, url) for url in urls]
results = yield from asyncio.gather(*tasks)
return results
@asyncio.coroutine
def file_operations():
"""模拟异步文件操作"""
print("开始文件操作")
# 模拟多个文件操作
operations = [
("读取文件A", 1),
("写入文件B", 0.5),
("处理文件C", 2),
("备份文件D", 1.5)
]
tasks = []
for op_name, delay in operations:
@asyncio.coroutine
def file_op(name, wait_time):
yield from asyncio.sleep(wait_time)
return f"{name}完成"
tasks.append(file_op(op_name, delay))
results = yield from asyncio.gather(*tasks)
return results
print("异步I/O操作:")
print("=" * 40)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# 文件操作演示
print("1. 异步文件操作:")
start_time = time.time()
results = loop.run_until_complete(file_operations())
print(f"文件操作结果: {results}")
print(f"总耗时: {time.time() - start_time:.2f}秒")
# 网络请求演示(需要网络连接)
print("\n2. 异步网络请求:")
try:
start_time = time.time()
results = loop.run_until_complete(fetch_multiple_urls())
print(f"网络请求完成,获取了 {len(results)} 个结果")
print(f"总耗时: {time.time() - start_time:.2f}秒")
except Exception as e:
print(f"网络请求失败: {e}")
finally:
loop.close()
# 运行异步I/O演示
async_io_operations()
```
6. The async/await Syntax (Python 3.5)
6.1 Native Coroutines and async/await
Python 3.5 introduced native coroutines and the async/await syntax (PEP 492), making asynchronous code considerably clearer:
```python
def async_await_syntax():
"""演示async/await语法"""
# 传统asyncio协程
@asyncio.coroutine
def old_style_coroutine():
yield from asyncio.sleep(1)
return "传统风格"
# 原生协程
async def native_coroutine():
await asyncio.sleep(1)
return "原生风格"
async def comparison_demo():
"""对比演示"""
print("async/await vs @asyncio.coroutine:")
print("=" * 40)
# 运行传统协程
start_time = time.time()
result1 = await old_style_coroutine()
time1 = time.time() - start_time
# 运行原生协程
start_time = time.time()
result2 = await native_coroutine()
time2 = time.time() - start_time
print(f"传统风格: {result1}, 耗时: {time1:.2f}秒")
print(f"原生风格: {result2}, 耗时: {time2:.2f}秒")
async def complex_workflow():
"""复杂工作流示例"""
print("\n复杂工作流:")
async def step1():
print("步骤1: 开始")
await asyncio.sleep(1)
print("步骤1: 完成")
return "步骤1结果"
async def step2(data):
print(f"步骤2: 开始,使用 {data}")
await asyncio.sleep(0.5)
print("步骤2: 完成")
return f"步骤2处理({data})"
async def step3(data1, data2):
print(f"步骤3: 开始,使用 {data1} 和 {data2}")
await asyncio.sleep(0.8)
print("步骤3: 完成")
return f"最终结果({data1}, {data2})"
# 顺序执行但异步等待
result1 = await step1()
result2 = await step2(result1)
final_result = await step3(result1, result2)
return final_result
async def parallel_workflow():
"""并行工作流"""
print("\n并行工作流:")
async def worker(name, duration):
print(f"{name} 开始工作")
await asyncio.sleep(duration)
print(f"{name} 完成工作")
return f"{name}_结果"
# 并行执行多个worker
tasks = [
worker("Worker1", 2),
worker("Worker2", 1),
worker("Worker3", 1.5)
]
results = await asyncio.gather(*tasks)
return results
# 运行演示
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
# 对比演示
loop.run_until_complete(comparison_demo())
# 复杂工作流
result = loop.run_until_complete(complex_workflow())
print(f"复杂工作流结果: {result}")
# 并行工作流
results = loop.run_until_complete(parallel_workflow())
print(f"并行工作流结果: {results}")
finally:
loop.close()
# 运行async/await演示
async_await_syntax()
```
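One practical difference from generator-based coroutines is worth spelling out: calling an async def function does not run its body, and the resulting coroutine object is not an iterator, so it cannot be driven with next(); it must be awaited or handed to the event loop. A brief sketch (asyncio.run is used here for brevity, even though it only arrived in Python 3.7):
```python
import asyncio

async def greet():
    await asyncio.sleep(0)
    return "hello"

coro = greet()                 # the body has NOT run yet; this is just a coroutine object
print(type(coro).__name__)     # coroutine
# next(coro) would raise TypeError: 'coroutine' object is not an iterator
coro.close()                   # close the unused object to avoid a "never awaited" warning

print(asyncio.run(greet()))    # the event loop drives the coroutine: prints "hello"
```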
6.2 Asynchronous Iterators and Asynchronous Context Managers
```python
def async_iterators_context():
"""演示异步迭代器和异步上下文管理器"""
class AsyncCounter:
"""异步迭代器"""
def __init__(self, limit):
self.limit = limit
self.current = 0
def __aiter__(self):
return self
async def __anext__(self):
if self.current >= self.limit:
raise StopAsyncIteration
await asyncio.sleep(0.1) # 模拟异步操作
self.current += 1
return self.current - 1
class AsyncDatabaseConnection:
"""异步上下文管理器"""
async def __aenter__(self):
print("建立数据库连接")
await asyncio.sleep(0.5) # 模拟连接建立
self.connected = True
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
print("关闭数据库连接")
self.connected = False
await asyncio.sleep(0.2) # 模拟连接关闭
async def query(self, sql):
"""执行查询"""
if not self.connected:
raise RuntimeError("未连接数据库")
print(f"执行查询: {sql}")
await asyncio.sleep(0.3) # 模拟查询执行
return f"{sql}的结果"
async def async_iterator_demo():
"""异步迭代器演示"""
print("异步迭代器演示:")
print("=" * 30)
async for number in AsyncCounter(5):
print(f"获取到: {number}")
async def async_context_demo():
"""异步上下文管理器演示"""
print("\n异步上下文管理器演示:")
print("=" * 35)
async with AsyncDatabaseConnection() as db:
result1 = await db.query("SELECT * FROM users")
print(f"结果1: {result1}")
result2 = await db.query("SELECT COUNT(*) FROM products")
print(f"结果2: {result2}")
async def combined_demo():
"""组合使用演示"""
print("\n组合使用演示:")
print("=" * 25)
# 在异步上下文中使用异步迭代器
async with AsyncDatabaseConnection() as db:
async for i in AsyncCounter(3):
result = await db.query(f"查询{i}")
print(f"迭代{i}: {result}")
# 运行演示
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(async_iterator_demo())
loop.run_until_complete(async_context_demo())
loop.run_until_complete(combined_demo())
finally:
loop.close()
# 运行异步迭代器和上下文管理器演示
async_iterators_context()
```
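Writing __aenter__/__aexit__ by hand, as above, is sometimes more ceremony than needed. From Python 3.7 onward the standard library also offers contextlib.asynccontextmanager, which builds an async context manager from a single async generator. A minimal sketch, with a placeholder string standing in for a real connection:
```python
import asyncio
from contextlib import asynccontextmanager

@asynccontextmanager
async def database_connection():
    print("connect")
    await asyncio.sleep(0.1)          # simulate establishing the connection
    try:
        yield "fake-connection"       # value bound by "async with ... as conn"
    finally:
        print("disconnect")
        await asyncio.sleep(0.1)      # simulate tearing the connection down

async def main():
    async with database_connection() as conn:
        print("using", conn)

asyncio.run(main())
```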
7. Asynchronous Generators (Python 3.6)
7.1 Introducing Asynchronous Generators
Python 3.6 introduced asynchronous generators (PEP 525), allowing yield to be used inside async def functions:
```python
def async_generators():
"""演示异步生成器"""
async def async_counter(limit):
"""异步计数器生成器"""
for i in range(limit):
await asyncio.sleep(0.1) # 模拟异步操作
yield i
async def data_stream_processor():
"""数据流处理器"""
async for data in async_data_stream():
processed = await process_data(data)
yield processed
async def async_data_stream():
"""模拟异步数据流"""
data_chunks = ["chunk1", "chunk2", "chunk3", "chunk4"]
for chunk in data_chunks:
await asyncio.sleep(0.2)
yield chunk
async def process_data(data):
"""处理数据"""
await asyncio.sleep(0.1)
return f"处理后的{data}"
async def batch_processor(batch_size=2):
"""批处理器"""
batch = []
async for item in async_counter(10):
batch.append(item)
if len(batch) >= batch_size:
processed_batch = await process_batch(batch)
yield processed_batch
batch = []
# 处理剩余项目
if batch:
yield await process_batch(batch)
async def process_batch(batch):
"""处理批次"""
await asyncio.sleep(0.3)
return f"批次: {batch}"
async def async_generator_demo():
"""异步生成器演示"""
print("Python 3.6 异步生成器:")
print("=" * 35)
# 基本异步生成器
print("1. 基本异步生成器:")
async for value in async_counter(5):
print(f"生成值: {value}")
# 数据流处理
print("\n2. 数据流处理:")
async for processed in data_stream_processor():
print(f"处理结果: {processed}")
# 批处理
print("\n3. 批处理:")
async for batch_result in batch_processor(3):
print(f"批处理结果: {batch_result}")
# 运行演示
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(async_generator_demo())
finally:
loop.close()
# 运行异步生成器演示
async_generators()
```
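Async generators also carry cleanup semantics: asend(), athrow(), and aclose() mirror the generator methods from section 3, and a finally block inside the generator runs when it is closed. If a consumer breaks out of an async for early, it is good practice to close the generator explicitly, as this sketch does:
```python
import asyncio

async def ticker(interval, count):
    try:
        for i in range(count):
            await asyncio.sleep(interval)
            yield i
    finally:
        # Runs when the generator is exhausted or closed early
        print("ticker cleaned up")

async def main():
    agen = ticker(0.01, 100)
    async for tick in agen:
        print("tick", tick)
        if tick == 2:
            break                 # stop consuming early
    await agen.aclose()           # explicitly close it so the finally block runs now

asyncio.run(main())
```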
7.2 Asynchronous Comprehensions
```python
def async_comprehensions():
"""演示异步推导式"""
async def async_data_source():
"""异步数据源"""
for i in range(10):
await asyncio.sleep(0.05)
yield i
async def process_item(item):
"""处理项目"""
await asyncio.sleep(0.01)
return item * 2
async def async_comprehension_demo():
"""异步推导式演示"""
print("异步推导式:")
print("=" * 25)
# 异步列表推导式
print("1. 异步列表推导式:")
results = [item * 2 async for item in async_data_source()]
print(f"结果: {results}")
# 带条件的异步推导式
print("\n2. 带条件的异步推导式:")
filtered_results = [
await process_item(item)
async for item in async_data_source()
if item % 2 == 0
]
print(f"过滤结果: {filtered_results}")
# 异步集合推导式
print("\n3. 异步集合推导式:")
unique_results = {item % 3 async for item in async_data_source()}
print(f"唯一结果: {unique_results}")
# 异步字典推导式
print("\n4. 异步字典推导式:")
dict_results = {
item: await process_item(item)
async for item in async_data_source()
if item < 5
}
print(f"字典结果: {dict_results}")
# 运行演示
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(async_comprehension_demo())
finally:
loop.close()
# 运行异步推导式演示
async_comprehensions()
```
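A caveat worth noting: an async comprehension that awaits each item, as in the examples above, processes items one at a time. When the awaits are independent, building the coroutines first and passing them to asyncio.gather runs them concurrently. A small comparison sketch:
```python
import asyncio

async def fetch(i):
    await asyncio.sleep(0.1)      # simulate an independent I/O call
    return i * i

async def main():
    # Sequential: each await finishes before the next starts (~0.5s for 5 items)
    sequential = [await fetch(i) for i in range(5)]

    # Concurrent: create all coroutines first, then gather them (~0.1s total)
    concurrent = await asyncio.gather(*(fetch(i) for i in range(5)))

    print(sequential)
    print(concurrent)

asyncio.run(main())
```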
8. Modern Asynchronous Programming Patterns (Python 3.7+)
8.1 Advanced Asynchronous Patterns
```python
def modern_async_patterns():
"""演示现代异步编程模式"""
async def task_with_timeout():
"""带超时的任务"""
async def long_running_task():
await asyncio.sleep(5)
return "任务完成"
try:
# 设置3秒超时
result = await asyncio.wait_for(long_running_task(), timeout=3.0)
return result
except asyncio.TimeoutError:
return "任务超时"
async def task_with_retry():
"""带重试的任务"""
max_retries = 3
async def unreliable_task():
import random
if random.random() < 0.7: # 70%失败率
raise RuntimeError("任务失败")
return "任务成功"
for attempt in range(max_retries):
try:
result = await unreliable_task()
return f"第{attempt + 1}次尝试成功: {result}"
except RuntimeError as e:
print(f"第{attempt + 1}次尝试失败: {e}")
if attempt < max_retries - 1:
await asyncio.sleep(1) # 等待后重试
raise RuntimeError(f"所有{max_retries}次尝试都失败")
async def producer_consumer_pattern():
"""生产者-消费者模式"""
import asyncio
queue = asyncio.Queue(maxsize=3)
async def producer(name, items):
"""生产者"""
for item in items:
await asyncio.sleep(0.1) # 模拟生产时间
await queue.put(f"{name}_{item}")
print(f"生产者{name} 生产: {item}")
await queue.put(None) # 结束信号
async def consumer(name):
"""消费者"""
while True:
item = await queue.get()
if item is None:
await queue.put(None) # 把结束信号传递给其他消费者
break
print(f"消费者{name} 消费: {item}")
await asyncio.sleep(0.2) # 模拟处理时间
queue.task_done()
# 创建生产者和消费者
producers = [
producer("P1", [1, 2, 3]),
producer("P2", [4, 5, 6])
]
consumers = [consumer("C1"), consumer("C2")]
# 启动所有任务
all_tasks = producers + consumers
await asyncio.gather(*all_tasks)
async def modern_patterns_demo():
"""现代模式演示"""
print("现代异步编程模式:")
print("=" * 30)
# 超时模式
print("1. 超时模式:")
timeout_result = await task_with_timeout()
print(f"超时任务结果: {timeout_result}")
# 重试模式
print("\n2. 重试模式:")
try:
retry_result = await task_with_retry()
print(f"重试任务结果: {retry_result}")
except RuntimeError as e:
print(f"重试失败: {e}")
# 生产者-消费者模式
print("\n3. 生产者-消费者模式:")
await producer_consumer_pattern()
# 运行演示(Python 3.7+ 可以直接使用 asyncio.run,无需手动管理事件循环)
asyncio.run(modern_patterns_demo())
# 运行现代模式演示
modern_async_patterns()
```
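The patterns above still apply, but the day-to-day API has also moved on since Python 3.7: asyncio.run() replaces manual event-loop management, asyncio.create_task() schedules work immediately, and Python 3.11 adds asyncio.TaskGroup for structured concurrency. A brief sketch (the `job` coroutine is illustrative):
```python
import asyncio

async def job(name, delay):
    await asyncio.sleep(delay)
    return f"{name} done"

async def main():
    # create_task() (3.7+) starts the coroutine running in the background immediately
    t1 = asyncio.create_task(job("A", 0.2))
    t2 = asyncio.create_task(job("B", 0.1))
    print(await t1, await t2)

    # TaskGroup (3.11+) gives structured concurrency: if one task fails,
    # the others are cancelled and the error propagates out of the async-with block
    async with asyncio.TaskGroup() as tg:
        tasks = [tg.create_task(job(name, 0.1)) for name in ("C", "D")]
    print([t.result() for t in tasks])

# asyncio.run() (3.7+) creates the loop, runs main(), and cleans everything up afterwards
asyncio.run(main())
```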