Python crawler: multi-task asynchronous coroutines with asyncio, aiohttp, and aiofiles to download images (main.py)
"""=== coding: UTF8 ==="""
import asyncio
import aiohttp
import aiofiles
urls = ["https://img.lianzhixiu.com/uploads/allimg/202109/9999/d1eeaa0450.jpg",
"https://img.lianzhixiu.com/uploads/allimg/202109/9999/6747451f08.jpg",
"https://img.lianzhixiu.com/uploads/allimg/202108/9999/88abd53cc1.jpg"
]
async def aioDownload(url):
print("开始下载")
name = url.rsplit("/", 1)[1]
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
async with aiofiles.open(name, mode='wb') as f:
await f.write(await resp.content.read())
print("下载完成")
async def main():
tasks = []
for url in urls:
task = asyncio.create_task(aioDownload(url))
tasks.append(task)
await asyncio.wait(tasks)
"""
========================================
主函数功能测试
========================================
"""
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
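For comparison, here is a minimal alternative sketch (not from the original script) that reuses a single `aiohttp.ClientSession` for all requests and collects the tasks with `asyncio.gather` instead of `asyncio.wait`; the URL list and the file-naming scheme are assumed to be the same as above.

```python
import asyncio
import aiohttp
import aiofiles

urls = [
    "https://img.lianzhixiu.com/uploads/allimg/202109/9999/d1eeaa0450.jpg",
    "https://img.lianzhixiu.com/uploads/allimg/202109/9999/6747451f08.jpg",
    "https://img.lianzhixiu.com/uploads/allimg/202108/9999/88abd53cc1.jpg",
]


async def download(session, url):
    name = url.rsplit("/", 1)[1]              # last path segment as file name
    async with session.get(url) as resp:
        async with aiofiles.open(name, mode="wb") as f:
            await f.write(await resp.read())  # read the whole body, then write it


async def main():
    # One shared session keeps the connection pool alive across all downloads
    async with aiohttp.ClientSession() as session:
        await asyncio.gather(*(download(session, url) for url in urls))


if __name__ == "__main__":
    asyncio.run(main())
```

Sharing one session avoids re-doing the TCP/TLS handshake for every image, which matters once the URL list grows beyond a handful of files.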