from urllib.parse import urlsplit
import asyncio
+import ssl
from aiohttp_socks import ProxyConnector
from python_socks import parse_proxy_url
+from urllib3.util.ssl_ import create_urllib3_context
import aioftp
import aiohttp
import aiohttp.client_exceptions
)
proxy = None
timeout = aiohttp.ClientTimeout(connect=timeout, total=timeout)
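+ # Lenient TLS: skip certificate verification (CERT_NONE) and allow
+ # old ciphers (SECLEVEL=1) so hosts with weak TLS can still be checked.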
+ ssl_context = create_urllib3_context(cert_reqs=ssl.CERT_NONE,
+                                      ciphers='ALL:@SECLEVEL=1')
try:
async with aiohttp.ClientSession(
connector=connector, timeout=timeout
) as session:
async with session.get(
- url, headers=headers, proxy=proxy,
- allow_redirects=False) as resp:
+ url, headers=headers, proxy=proxy, allow_redirects=False,
+ ssl=ssl_context) as resp:
return None, resp.status, resp.headers, await resp.read()
- except (asyncio.TimeoutError, aiohttp.client_exceptions.ClientError) as e:
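+ # Lenient TLS and raw sockets can raise errors that aiohttp
+ # does not wrap in ClientError, so catch them explicitly.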
+ except (
+ ConnectionResetError,
+ asyncio.TimeoutError, ssl.SSLError,
+ aiohttp.client_exceptions.ClientError
+ ) as e:
return e, None, None, None
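The connector passed to ClientSession above is created elsewhere in the robot; a minimal sketch of how it might be built with aiohttp-socks, assuming the proxy is given as a URL (the helper name is hypothetical):

# Hypothetical helper, not part of the patch: build the connector
# used by the ClientSession above from an optional proxy URL.
def make_connector(proxy_url=None):
    if proxy_url:
        # aiohttp-socks handles socks4://, socks5:// and http:// URLs.
        return ProxyConnector.from_url(proxy_url)
    return aiohttp.TCPConnector()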
Robot based on aiohttp, processes multiple URLs in parallel.
- Default list of robots is now multirequests,multiaio,curl.
+ Default list of robots is now multiaio,multirequests,curl.
Make all robots async.
Split check_bookmark() into sync and async variants.
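A minimal sketch of the sync/async split and the parallel processing described above; the function names and bodies are assumptions based on the changelog, not the project's actual code:

# Hypothetical names illustrating the changelog entries above.
async def check_bookmark_async(url):
    ...  # fetch and examine the URL with aiohttp as in the hunk above

async def check_bookmarks(urls):
    # The aiohttp robot checks many URLs in parallel on one event loop.
    return await asyncio.gather(*(check_bookmark_async(u) for u in urls))

def check_bookmark(url):
    # Synchronous variant for callers without a running event loop.
    return asyncio.run(check_bookmark_async(url))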
'html': ['beautifulsoup4', 'lxml'],
'requests': ['requests[socks]'],
'curl': ['pycurl', 'certifi'],
- 'aiohttp': ['aiohttp>=2.3.2', 'aiohttp-socks', 'aioftp[socks]'],
+ 'aiohttp': ['aiohttp>=3', 'aiohttp-socks', 'aioftp[socks]'],
},
)
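With the extras above, the aiohttp robot's dependencies can be pulled in via pip's extras syntax, e.g. pip install <package>[aiohttp], where <package> is the project's distribution name.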