import asyncio
import time

import aiohttp
start = time.time()
urls = [
    'http://127.0.0.1:5000/bobo',
    'http://127.0.0.1:5000/jay',
    'http://127.0.0.1:5000/tom',
]
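# NOTE: these endpoints assume a test web server running locally on port 5000
# (presumably one whose handlers deliberately respond slowly, so the speed-up
# from issuing the requests concurrently is visible).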
async def get_page(url):
    async with aiohttp.ClientSession() as session:
        # session.get()/session.post() take the usual request options
        # (see the sketches after this function):
        #   UA spoofing: headers=headers
        #   parameters:  params= (GET) / data= (POST)
        #   proxy:       proxy='http://ip:port'
        async with session.get(url) as response:
            # text() returns the response body as a string
            # read() returns the response body as bytes
            # json() returns the parsed JSON object
            # Note: reading the response is itself a coroutine, so it must be
            # awaited explicitly; otherwise you get no data back.
            page_text = await response.text()
            print(page_text)
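
# A minimal sketch of the request options mentioned above (headers, params,
# proxy). The header and parameter values and the proxy address are placeholder
# assumptions, not part of the original script; this helper is not called below.
async def get_page_with_options(url):
    headers = {'User-Agent': 'Mozilla/5.0'}  # UA spoofing (placeholder value)
    params = {'key': 'value'}                # hypothetical query parameters
    async with aiohttp.ClientSession() as session:
        async with session.get(
            url,
            headers=headers,
            params=params,
            # proxy='http://ip:port',  # fill in a real proxy before enabling
        ) as response:
            return await response.text()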
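
# A short sketch of the three response readers mentioned above. Each one is a
# coroutine, so each must be awaited; which reader is appropriate depends on
# the endpoint. This helper is illustrative only and is not called below.
async def read_response(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            text_data = await response.text()    # body decoded to str
            binary_data = await response.read()  # raw bytes
            # json_data = await response.json()  # only for JSON endpoints
            return text_data, binary_data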
tasks = []
for url in urls:
    c = get_page(url)
    task = asyncio.ensure_future(c)
    tasks.append(task)

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait(tasks))
end = time.time()
print('Total time:', end - start)
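
# Equivalent modern pattern (Python 3.7+): asyncio.run() plus asyncio.gather()
# replaces the manual ensure_future / get_event_loop / run_until_complete steps
# used above. Shown here as a sketch only; it is not executed in this script.
async def main():
    await asyncio.gather(*(get_page(url) for url in urls))

# asyncio.run(main())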