git.phdru.name Git - bookmarks_db.git/commitdiff
Feat(Robots): Update X-User-Agent header
author: Oleg Broytman <phd@phdru.name>
Thu, 1 Aug 2024 10:03:16 +0000 (13:03 +0300)
committer: Oleg Broytman <phd@phdru.name>
Thu, 1 Aug 2024 16:02:21 +0000 (19:02 +0300)
Robots/bkmk_rcurl.py
Robots/bkmk_robot_base.py
Robots/bkmk_rrequests.py
Robots/bkmk_rurllib.py
Robots/bkmk_rurllib2.py
Robots/bkmk_rurllib_py3.py

index 61258562608acb51a6f6f714606012c53d1b35ac..96f208cc1d8e055519846240071e68d5f876d43e 100644 (file)
@@ -18,6 +18,9 @@ from Robots.bkmk_robot_base import robot_base, request_headers
 
 
 class robot_curl(robot_base):
+    def version_str(self):
+        return str(pycurl.version)
+
     def get(self, bookmark, url, accept_charset=False, use_proxy=False):
         if accept_charset and bookmark.charset:
             headers = request_headers.copy()
index 2d186acb0c211107d485f0e44e6919044f2ac7b1..9b989e5f70ee4d1035721677fbfc99cd759dd78a 100644 (file)
@@ -84,6 +84,13 @@ class robot_base(Robot):
         Robot.__init__(self, *args, **kw)
         socket.setdefaulttimeout(int(self.timeout))
 
+        global _x_user_agent
+        _x_user_agent = '%s %s' % (_x_user_agent, self.version_str())
+        request_headers['X-User-Agent'] = _x_user_agent
+
+    def version_str(self):
+        return self.__class__.__name__
+
     def check_url(self, bookmark):
         try:
             self.start = int(time.time())
index e27c4110b08e15034031fbab5716f63963c84cf7..638ed9129d07f3c92e2e7c7f026e4e12c1a995fc 100644 (file)
@@ -17,6 +17,7 @@ from requests.adapters import HTTPAdapter
 from requests.packages.urllib3.util.ssl_ import create_urllib3_context
 import requests
 import requests_ftp
+import urllib3
 
 from Robots.bkmk_robot_base import robot_base, request_headers
 
@@ -24,6 +25,9 @@ requests_ftp.monkeypatch_session()
 
 
 class robot_requests(robot_base):
+    def version_str(self):
+        return 'python-requests urllib3/%s' % urllib3.__version__
+
     def get(self, bookmark, url, accept_charset=False, use_proxy=False):
         if accept_charset and bookmark.charset:
             headers = request_headers.copy()
index 19d9fc177c3f839be641e20cb5c4ad03b0e5d915..2731ad593d28045b8e52372c2b3f03226794dcb6 100644 (file)
@@ -84,6 +84,9 @@ urllib.ftpwrapper = myftpwrapper
 
 
 class robot_urllib(robot_base):
+    def version_str(self):
+        return 'urllib'
+
     def get(self, bookmark, url, accept_charset=False, use_proxy=False):
         if use_proxy:
             opener.proxies = {'http': self.proxy, 'https': self.proxy}
index 5e604977f81cbab0fce2e6b828f8f8b62fc52957..13d2c3b06c76437de4ea02979c6b85995d98340b 100644 (file)
@@ -39,6 +39,9 @@ urllib2.install_opener(opener)
 
 
 class robot_urllib2(robot_base):
+    def version_str(self):
+        return 'urllib2'
+
     def get(self, bookmark, url, accept_charset=False, use_proxy=False):
         request = urllib2.Request(url)
         for h, v in request_headers.items():
index 3928768f23fc990dff2412eeb7352b48e7614165..79470463afcdbdd86ff11006f7d171af5f4ece8a 100644 (file)
@@ -85,6 +85,9 @@ urllib.request.ftpwrapper = myftpwrapper
 
 
 class robot_urllib_py3(robot_base):
+    def version_str(self):
+        return 'urllib'
+
     def get(self, bookmark, url, accept_charset=False, use_proxy=False):
         if use_proxy:
             opener.proxies = {'http': self.proxy, 'https': self.proxy}