From 0adeaf8bf06572e6f0056b0395e0728fe5f19140 Mon Sep 17 00:00:00 2001
From: Oleg Broytman
Date: Mon, 19 Aug 2024 15:43:54 +0300
Subject: [PATCH] Feat(bkmk_raiohttp): Lower SSL cert validation strictness

---
 Robots/bkmk_raiohttp.py | 14 +++++++++++---
 doc/ANNOUNCE            |  2 +-
 doc/ChangeLog           |  2 +-
 setup.py                |  2 +-
 4 files changed, 14 insertions(+), 6 deletions(-)

diff --git a/Robots/bkmk_raiohttp.py b/Robots/bkmk_raiohttp.py
index b195ff2..fb377ae 100644
--- a/Robots/bkmk_raiohttp.py
+++ b/Robots/bkmk_raiohttp.py
@@ -13,9 +13,11 @@ __all__ = ['robot_aiohttp']
 
 from urllib.parse import urlsplit
 import asyncio
+import ssl
 
 from aiohttp_socks import ProxyConnector
 from python_socks import parse_proxy_url
+from urllib3.util.ssl_ import create_urllib3_context
 import aioftp
 import aiohttp
 import aiohttp.client_exceptions
@@ -84,15 +86,21 @@ async def _get_http(url, headers={}, proxy=None, timeout=60):
         )
         proxy = None
     timeout = aiohttp.ClientTimeout(connect=timeout, total=timeout)
+    ssl_context = create_urllib3_context(cert_reqs=0,
+                                         ciphers='ALL:@SECLEVEL=1')
     try:
         async with aiohttp.ClientSession(
             connector=connector, timeout=timeout
         ) as session:
             async with session.get(
-                    url, headers=headers, proxy=proxy,
-                    allow_redirects=False) as resp:
+                    url, headers=headers, proxy=proxy, allow_redirects=False,
+                    ssl_context=ssl_context) as resp:
                 return None, resp.status, resp.headers, await resp.read()
-    except (asyncio.TimeoutError, aiohttp.client_exceptions.ClientError) as e:
+    except (
+            ConnectionResetError,
+            asyncio.TimeoutError, ssl.SSLError,
+            aiohttp.client_exceptions.ClientError
+    ) as e:
         return e, None, None, None
 
 
diff --git a/doc/ANNOUNCE b/doc/ANNOUNCE
index c6981a1..61cb7af 100644
--- a/doc/ANNOUNCE
+++ b/doc/ANNOUNCE
@@ -11,7 +11,7 @@ Version 6.0.0 (2024-??-??)
 
   Robot based on aiohttp, processes multiple URLs in parallel.
 
-  Default list of robots is now multirequests,multiaio,curl.
+  Default list of robots is now multiaio,multirequests,curl.
 
   Make all robots async. Split check_bookmark() into sync and async variants.
 
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 915b337..5a2e4cd 100644
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -2,7 +2,7 @@ Version 6.0.0 (2024-??-??)
 
   Robot based on aiohttp, processes multiple URLs in parallel.
 
-  Default list of robots is now multirequests,multiaio,curl.
+  Default list of robots is now multiaio,multirequests,curl.
 
   Make all robots async. Split check_bookmark() into sync and async variants.
 
diff --git a/setup.py b/setup.py
index cba7fe2..a140ded 100755
--- a/setup.py
+++ b/setup.py
@@ -38,6 +38,6 @@ setup(
         'html': ['beautifulsoup4', 'lxml'],
         'requests': ['requests[socks]'],
         'curl': ['pycurl', 'certifi'],
-        'aiohttp': ['aiohttp>=2.3.2', 'aiohttp-socks', 'aioftp[socks]'],
+        'aiohttp': ['aiohttp>=3', 'aiohttp-socks', 'aioftp[socks]'],
     },
 )
-- 
2.39.5
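
For reference, the sketch below shows the relaxed-verification fetch in
isolation: it builds the same permissive SSL context the patch uses
(urllib3's create_urllib3_context() with cert_reqs=0, i.e. ssl.CERT_NONE,
and ciphers='ALL:@SECLEVEL=1') and hands it to a plain aiohttp GET. This is
a minimal illustration, not code from the repository: the fetch() helper and
the example URL are made up, proxy handling is omitted, and the context is
passed via aiohttp's ssl= keyword, which accepts an ssl.SSLContext object
(the patch passes the same object as ssl_context=).

#!/usr/bin/env python3
"""Minimal sketch: fetch one URL with certificate checks disabled."""

import asyncio
import ssl

import aiohttp
import aiohttp.client_exceptions
from urllib3.util.ssl_ import create_urllib3_context


async def fetch(url, timeout=60):
    # cert_reqs=0 is ssl.CERT_NONE: the peer certificate is not verified.
    # 'ALL:@SECLEVEL=1' re-enables ciphers and key sizes that OpenSSL's
    # default security level rejects, so old or misconfigured servers
    # still answer instead of failing the handshake.
    ssl_context = create_urllib3_context(cert_reqs=0,
                                         ciphers='ALL:@SECLEVEL=1')
    client_timeout = aiohttp.ClientTimeout(connect=timeout, total=timeout)
    try:
        async with aiohttp.ClientSession(timeout=client_timeout) as session:
            async with session.get(url, allow_redirects=False,
                                   ssl=ssl_context) as resp:
                return None, resp.status, resp.headers, await resp.read()
    except (
            ConnectionResetError,
            asyncio.TimeoutError, ssl.SSLError,
            aiohttp.client_exceptions.ClientError
    ) as e:
        # Report the error as data, mirroring the robot's
        # (error, status, headers, body) return convention.
        return e, None, None, None


if __name__ == '__main__':
    error, status, headers, body = asyncio.run(
        fetch('https://example.com/'))  # hypothetical URL
    print(error, status, len(body or b''))

The broadened except clause fits the same goal: with verification turned off,
handshakes against broken servers tend to surface as ssl.SSLError or
ConnectionResetError rather than aiohttp's ClientError, and returning the
exception object lets the caller record a check failure instead of propagating
an unhandled error.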