]> git.phdru.name Git - bookmarks_db.git/commitdiff
Feat(Robots/bkmk_rrequests): Ignore all problems with certificates
authorOleg Broytman <phd@phdru.name>
Tue, 5 Mar 2024 20:14:47 +0000 (23:14 +0300)
committerOleg Broytman <phd@phdru.name>
Tue, 5 Mar 2024 20:18:21 +0000 (23:18 +0300)
Drop SSL/TLS security to the lowest level.
I want to get the pages at all cost.
Unmatched names, expired certificates,
and small DH values are less of a concern for me
compared with DNS errors and connection timeouts.

Robots/bkmk_rrequests.py

index c6a91af3a962a3da3057c022b08edaaf96542ee3..4577b1608d4992816f8a7ac943c339ec61cca1ac 100644 (file)
@@ -12,7 +12,10 @@ __all__ = ['robot_requests']
 
 
 from urllib.parse import urlsplit
+import warnings
 
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.util.ssl_ import create_urllib3_context
 import requests
 import requests_ftp
 
@@ -71,16 +74,42 @@ class robot_requests(robot_base):
         return ''  # Alas, requests_ftp doesn't store welcome message
 
 
+# See https://lukasa.co.uk/2017/02/Configuring_TLS_With_Requests/
+
+class AllCiphersAdapter(HTTPAdapter):
+    """
+    A TransportAdapter that disables certificate verification
+    """
+    def init_poolmanager(self, *args, **kwargs):
+        context = create_urllib3_context(cert_reqs=0,
+                                         ciphers='ALL:@SECLEVEL=1')
+        kwargs['ssl_context'] = context
+        return super(AllCiphersAdapter, self).init_poolmanager(*args, **kwargs)
+
+    def proxy_manager_for(self, *args, **kwargs):
+        context = create_urllib3_context(cert_reqs=0,
+                                         ciphers='ALL:@SECLEVEL=1')
+        kwargs['ssl_context'] = context
+        return super(AllCiphersAdapter, self).proxy_manager_for(
+            *args, **kwargs)
+
+
+warnings.filterwarnings('ignore', 'Unverified HTTPS request is being made')
+
+
 def request_get(url, headers, timeout, proxy):
     if proxy:
         proxies = {'http': proxy, 'https': proxy}
     else:
         proxies = None
 
+    s = requests.Session()
+    s.mount('https://', AllCiphersAdapter())
+
     try:
-        r = requests.Session().get(
-            url, headers=headers, timeout=timeout,
-            allow_redirects=False, proxies=proxies)
+        r = s.get(url, headers=headers, timeout=timeout,
+                  allow_redirects=False, proxies=proxies,
+                  verify=False)
     except requests.RequestException as e:
         return str(e), None
     else: