
Python | asyncio.run and asyncio.gather

First, note that the main scenarios covered here are:

  • Multithreading / thread pools
  • Coroutines
  • Multiple different URL requests inside a single coroutine

Let's jump straight into an example.

import asyncio
import time
from concurrent.futures import ThreadPoolExecutor

import aiohttp


async def fetch(url):
    # Open a session, GET the URL, and return the response body as text.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.text()


async def baidu1():
    url = 'https://www.baidu.com'  # Replace with the URL you want to request
    content = await fetch(url)
    print("baidu 1")


async def baidu2():
    url = 'https://www.baidu.com'  # Replace with the URL you want to request
    content = await fetch(url)
    print("baidu 2")


async def deal():
    # Run both coroutines concurrently on the current event loop.
    tasks = [baidu1(), baidu2()]
    await asyncio.gather(*tasks)


def test():
    # asyncio.run() creates a fresh event loop, runs deal(), then closes the loop.
    asyncio.run(deal())


if __name__ == '__main__':
    # Each worker thread calls asyncio.run() and therefore gets its own event loop.
    pool = ThreadPoolExecutor(max_workers=12)
    pool.submit(test)
    pool.submit(test)
    time.sleep(10)  # Keep the main thread alive long enough for the requests to finish

Output

baidu 2
baidu 1
baidu 2
baidu 1
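
The two submissions interleave because each call to test() invokes asyncio.run(), which builds its own event loop inside its worker thread (asyncio.run() must not be called from a thread that already has a running loop). Within each loop, asyncio.gather() runs baidu1() and baidu2() concurrently, so the print order depends on which response arrives first. If you do not need the thread pool at all, the same two requests can be gathered in a single event loop. The following is a minimal sketch of that variant, reusing the same placeholder https://www.baidu.com URL, and it also shows that gather() returns results in the order the coroutines were passed in.

import asyncio

import aiohttp


async def fetch(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.text()


async def main():
    url = 'https://www.baidu.com'  # same placeholder URL as above
    # gather() preserves argument order: results[0] is from the first coroutine.
    results = await asyncio.gather(fetch(url), fetch(url))
    print(len(results[0]), len(results[1]))


if __name__ == '__main__':
    asyncio.run(main())  # one event loop, no extra threads needed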