import time
import asyncio
import aiohttp

start = time.time()

# Target pages; aiohttp requires a full URL including the scheme,
# so the bare hostnames are prefixed with https://.
urls = [
    'https://www.baidu.com', 'https://www.csdn.com', 'https://www.bilibili.com'
]

async def get_page(url):
    # One session per request keeps the example simple; for many URLs,
    # sharing a single ClientSession would be more efficient.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            page_text = await response.text()
            print(page_text)

# Wrap each coroutine in a task so all three requests run concurrently.
tasks = []
for url in urls:
    c = get_page(url)
    task = asyncio.ensure_future(c)
    tasks.append(task)

loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.wait(tasks))

end = time.time()
print(end - start)
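The explicit loop management above is the pre-Python-3.7 style; on recent interpreters, calling asyncio.get_event_loop() outside a running loop is deprecated. A minimal sketch of the same concurrent fan-out using the modern asyncio.run / asyncio.gather entry point (reusing the urls list and get_page coroutine defined above):

import time
import asyncio

async def main():
    # gather schedules all the coroutines concurrently and
    # waits until every request has finished.
    await asyncio.gather(*(get_page(url) for url in urls))

start = time.time()
asyncio.run(main())
print(time.time() - start)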