From 386dc737739fab0af083fed233ac45d2782373e0 Mon Sep 17 00:00:00 2001
From: Oleg Broytman
Date: Tue, 5 Mar 2024 23:14:47 +0300
Subject: [PATCH] Feat(Robots/bkmk_rrequests): Ignore all problems with
 certificates

Drop SSL/TLS security to the lowest level. I want to get the pages at
any cost. Unmatched names, expired certificates, and small DH values
are less of a concern for me compared with DNS errors and connection
timeouts.
---
 Robots/bkmk_rrequests.py | 35 ++++++++++++++++++++++++++++++++---
 1 file changed, 32 insertions(+), 3 deletions(-)

diff --git a/Robots/bkmk_rrequests.py b/Robots/bkmk_rrequests.py
index c6a91af..4577b16 100644
--- a/Robots/bkmk_rrequests.py
+++ b/Robots/bkmk_rrequests.py
@@ -12,7 +12,10 @@
 __all__ = ['robot_requests']
 
 
 from urllib.parse import urlsplit
+import warnings
 
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.util.ssl_ import create_urllib3_context
 import requests
 import requests_ftp
@@ -71,16 +74,42 @@ class robot_requests(robot_base):
         return ''  # Alas, requests_ftp doesn't store welcome message
 
 
+# See https://lukasa.co.uk/2017/02/Configuring_TLS_With_Requests/
+
+class AllCiphersAdapter(HTTPAdapter):
+    """
+    A TransportAdapter that re-enables 3DES support in Requests.
+    """
+    def init_poolmanager(self, *args, **kwargs):
+        context = create_urllib3_context(cert_reqs=0,
+                                         ciphers='ALL:@SECLEVEL=1')
+        kwargs['ssl_context'] = context
+        return super(AllCiphersAdapter, self).init_poolmanager(*args, **kwargs)
+
+    def proxy_manager_for(self, *args, **kwargs):
+        context = create_urllib3_context(cert_reqs=0,
+                                         ciphers='ALL:@SECLEVEL=1')
+        kwargs['ssl_context'] = context
+        return super(AllCiphersAdapter, self).proxy_manager_for(
+            *args, **kwargs)
+
+
+warnings.filterwarnings('ignore', 'Unverified HTTPS request is being made')
+
+
 def request_get(url, headers, timeout, proxy):
     if proxy:
         proxies = {'http': proxy, 'https': proxy}
     else:
         proxies = None
 
+    s = requests.Session()
+    s.mount('https://', AllCiphersAdapter())
+
     try:
-        r = requests.Session().get(
-            url, headers=headers, timeout=timeout,
-            allow_redirects=False, proxies=proxies)
+        r = s.get(url, headers=headers, timeout=timeout,
+                  allow_redirects=False, proxies=proxies,
+                  verify=False)
     except requests.RequestException as e:
         return str(e), None
     else:
-- 
2.39.5
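
A minimal standalone sketch of the transport-adapter technique the patch
applies, runnable outside bkmk. The target URL is a placeholder, and the
comments on cert_reqs and SECLEVEL are my reading of the OpenSSL/urllib3
behavior, not part of the commit:

import warnings

import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.ssl_ import create_urllib3_context


class AllCiphersAdapter(HTTPAdapter):
    """Accept any certificate and re-enable legacy ciphers."""
    def init_poolmanager(self, *args, **kwargs):
        # cert_reqs=0 is ssl.CERT_NONE: certificate validation is skipped.
        # 'ALL:@SECLEVEL=1' lowers OpenSSL's security floor so small DH
        # keys and legacy ciphers such as 3DES are accepted again.
        kwargs['ssl_context'] = create_urllib3_context(
            cert_reqs=0, ciphers='ALL:@SECLEVEL=1')
        return super().init_poolmanager(*args, **kwargs)


# Silence urllib3's InsecureRequestWarning triggered by verify=False.
warnings.filterwarnings('ignore', 'Unverified HTTPS request is being made')

session = requests.Session()
session.mount('https://', AllCiphersAdapter())

# verify=False makes requests skip hostname and expiry checks as well.
r = session.get('https://expired.example.com/', timeout=30, verify=False)
print(r.status_code)

Mounting the adapter only on 'https://' leaves plain-HTTP traffic
untouched; the patch additionally overrides proxy_manager_for so the same
relaxed TLS context is used when fetching through a proxy.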