The aiohttp error when accessing an HTTPS site through a proxy IP
Yesterday I ran into a really nasty pitfall: when using aiohttp with a proxy to access an HTTPS site, it kept raising aiohttp.client_exceptions.ClientConnectorError: Cannot connect to host baidu.com:443 ssl:default [参数错误。] (the bracketed part is the Windows "parameter error" message). I searched all over the web and came up with nothing, which is exactly why I am writing this pitfall up. I finally found a working fix on https://stackoverflow.com/.
The problem
import aiohttp
import asyncio
import threading
from tools import auto

# proxy=auto.proxies, proxy_auth=auto.proxy_auth: replace these with your own proxy settings

async def quest(url, headers):
    # Connector that skips SSL certificate verification
    con = aiohttp.TCPConnector(verify_ssl=False)
    async with aiohttp.ClientSession(connector=con) as sess:
        async with sess.get(url=url, headers=headers, proxy=auto.proxies, proxy_auth=auto.proxy_auth) as res:
            return await res.read()

def forever(loop):
    # Run the event loop forever in a background thread
    asyncio.set_event_loop(loop)
    loop.run_forever()

if __name__ == '__main__':
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:79.0) Gecko/20100101 Firefox/79.0'}
    url = 'https://baidu.com'
    loop = asyncio.new_event_loop()
    t = threading.Thread(target=forever, args=(loop,))
    t.setDaemon(True)
    t.start()
    ret = asyncio.run_coroutine_threadsafe(quest(url, headers), loop)
    print(ret.result())
Running this code produces the error above. I am using Python 3.8.2 and aiohttp 3.6.2.
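A note on the proxy settings: auto.proxies and auto.proxy_auth come from my own tools module, so that import will not work for anyone else. Roughly speaking, aiohttp just expects a proxy URL string for proxy and an aiohttp.BasicAuth object for proxy_auth; a minimal placeholder sketch (the host, port, and credentials below are made up) would be:

import aiohttp

# Placeholder values only; substitute your own proxy server and credentials.
proxies = 'http://127.0.0.1:8888'                   # passed as the proxy= argument
proxy_auth = aiohttp.BasicAuth('user', 'password')  # passed as the proxy_auth= argument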
The fix
import aiohttp
import asyncio
import threading
from tools import auto

asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())  # add this line

# proxy=auto.proxies, proxy_auth=auto.proxy_auth: replace these with your own proxy settings

async def quest(url, headers):
    con = aiohttp.TCPConnector(verify_ssl=False)
    # add trust_env=True on the session
    async with aiohttp.ClientSession(connector=con, trust_env=True) as sess:
        async with sess.get(url=url, headers=headers, proxy=auto.proxies, proxy_auth=auto.proxy_auth) as res:
            return await res.read()

def forever(loop):
    asyncio.set_event_loop(loop)
    loop.run_forever()

if __name__ == '__main__':
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:79.0) Gecko/20100101 Firefox/79.0'}
    url = 'https://baidu.com'
    loop = asyncio.new_event_loop()
    t = threading.Thread(target=forever, args=(loop,))
    t.setDaemon(True)
    t.start()
    ret = asyncio.run_coroutine_threadsafe(quest(url, headers), loop)
    print(ret.result())
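To see what that extra set_event_loop_policy line actually changes, here is a small check I ran separately (it is not part of the original script, just a sketch): on Windows, Python 3.8 switched the default policy to one that creates proactor loops, and WindowsSelectorEventLoopPolicy makes asyncio.new_event_loop() hand back a selector-based loop instead. The snippet only shows which loop class each policy produces; it does not explain why the proxy request fails on the proactor loop.

import asyncio

# With the Python 3.8 default policy on Windows, new loops are proactor-based.
loop = asyncio.new_event_loop()
print(type(loop).__name__)   # ProactorEventLoop on my machine
loop.close()

# After switching to the selector policy, new loops are selector-based.
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
loop = asyncio.new_event_loop()
print(type(loop).__name__)   # a SelectorEventLoop subclass
loop.close()

(The other added piece, trust_env=True, as far as I know just lets the session also pick up proxy settings from the HTTP_PROXY / HTTPS_PROXY environment variables and ~/.netrc.)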
The request now succeeds. As for exactly why this works, I still need to look into it; if any expert out there knows the underlying reason, please leave me a comment. Thanks a lot!