>问题 - 我正在尝试使用 asyncio、async_timeout 和 aiohttp 向不同的 url 发出多个 get 请求。我只想在所有获取请求完成后、或在超时期限过后(以先发生者为准)再进行其余处理。如果所有任务没有在指定的 _DEFAULT_TIME_OUT 时间内完成,则只带着已经完成的那些 get 请求继续执行。
在下面的代码片段中,即使我的所有任务都已完成,我也总是要等满 _DEFAULT_TIME_OUT 时间。当任务全部完成时,如何显式提前结束超时等待?
async def get(self, session, url, attributes):
    """Fetch *url* with *session* and store the raw body in ``self.urls[url]``.

    The request is bounded by ``self._DEFAULT_TIME_OUT`` seconds; on timeout
    the entry is simply skipped so the caller can proceed with whatever
    responses did arrive.
    """
    timeout_period = self._DEFAULT_TIME_OUT  # fixed: stray ')' was a SyntaxError
    try:
        async with async_timeout.timeout(timeout_period):
            async with session.get(url) as response:
                # `async with` releases the response automatically, so the
                # explicit response.release() of the original is not needed.
                self.urls[url] = await response.content.read()
            print("---{}---".format(url))
            # Removed the unconditional `raise asyncio.TimeoutError()` — it
            # discarded every successful fetch into the except branch and is
            # why the coroutine always behaved as if it had timed out.
    except asyncio.TimeoutError:
        # Deliberate best-effort: a slow URL is dropped, not fatal.
        pass
async def http_request(self, even_loop):
    """Issue all GETs concurrently, then continue with the follow-up work."""
    async with aiohttp.ClientSession(loop=even_loop) as session:
        # NOTE(review): `all_urls` and `attributes` are not defined in this
        # snippet — presumably module-level or instance state; confirm.
        await asyncio.gather(
            *(self.get(session, url, attributes) for url in all_urls)
        )
        print("Do something else")
此示例演示如何使用async_timeout
并获取有关任务的信息:
import aiohttp
import asyncio
import async_timeout
async def get(url):
    """GET *url* and return its response body decoded as JSON."""
    async with aiohttp.ClientSession() as client:
        async with client.get(url) as resp:
            payload = await resp.json()
    return payload
async def main():
    """Start five staggered requests, wait up to *timeout* seconds, then
    report which tasks finished and which did not."""
    timeout = 3.5
    tasks = [
        asyncio.create_task(get(f'http://httpbin.org/delay/{delay}'))
        for delay in range(1, 6)
    ]
    try:
        # async_timeout >= 4 requires `async with`; the plain `with` form
        # of the original only worked on older releases.
        async with async_timeout.timeout(timeout):
            await asyncio.gather(*tasks)
    except asyncio.TimeoutError:
        # Expected when the slower tasks exceed the deadline.
        pass
    finally:
        # Dropped the unused enumerate() index of the original.
        for task in tasks:
            if task.done() and not task.cancelled():
                print(f'Task is finished: {task.result()["url"]}.')
            else:
                # fixed: an apostrophe inside a single-quoted f-string was a
                # SyntaxError — the message itself is unchanged.
                print(f"Task hasn't been finished.")


asyncio.run(main())
结果:
Task is finished: http://httpbin.org/delay/1.
Task is finished: http://httpbin.org/delay/2.
Task is finished: http://httpbin.org/delay/3.
Task hasn't been finished.
Task hasn't been finished.
您可以调整 timeout 变量和 delay
变量来验证一切是否按预期工作:
# Illustrative variant (substitute into main() above): with a 10-second
# timeout and three 1-second delays, every task completes before the deadline.
timeout = 10
tasks = [
asyncio.create_task(get(f'http://httpbin.org/delay/{delay}'))
for delay
in (1, 1, 1)
]
程序将在略多于 1 秒后结束,且所有任务都已完成。