From: Oleg Broytman
Date: Sun, 12 Nov 2023 11:24:49 +0000 (+0300)
Subject: Fix(Robots/bkmk_rurllib_py3.py): Restore opener
X-Git-Tag: 5.0.0~51
X-Git-Url: https://git.phdru.name/?a=commitdiff_plain;h=5e3685956769a46ded587d33d5cb69ca4d0bbe7a;p=bookmarks_db.git

Fix(Robots/bkmk_rurllib_py3.py): Restore opener

`urllib.request.urlcleanup()` clears opener.
---

diff --git a/Robots/bkmk_rurllib_py3.py b/Robots/bkmk_rurllib_py3.py
index b392887..796a6fc 100644
--- a/Robots/bkmk_rurllib_py3.py
+++ b/Robots/bkmk_rurllib_py3.py
@@ -54,23 +54,23 @@ class MyURLopener(urllib.request.URLopener):
         raise IOError(('http error', errcode, errmsg, headers))
 
 
-urllib.request._opener = MyURLopener()
+urllib.request._opener = opener = MyURLopener()
 
 # Fake headers to pretend this is a real browser
 _user_agent = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en)" \
     " Gecko/20001221 Firefox/2.0.0"
-urllib.request._opener.addheaders[0] = ('User-Agent', _user_agent)
+opener.addheaders[0] = ('User-Agent', _user_agent)
 _x_user_agent = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
     sys.version_info[0], sys.version_info[1],
     sys.version_info[2], urllib.request.__version__
 )
-urllib.request._opener.addheader('X-User-Agent', _x_user_agent)
-urllib.request._opener.addheader('Referer', '')
+opener.addheader('X-User-Agent', _x_user_agent)
+opener.addheader('Referer', '')
 
-urllib.request._opener.addheader('Accept', '*/*')
-urllib.request._opener.addheader('Accept-Language', 'ru,en')
-urllib.request._opener.addheader('Cache-Control', 'max-age=300')
-urllib.request._opener.addheader('Connection', 'close')
+opener.addheader('Accept', '*/*')
+opener.addheader('Accept-Language', 'ru,en')
+opener.addheader('Cache-Control', 'max-age=300')
+opener.addheader('Connection', 'close')
 
 
 urllib_ftpwrapper = urllib.request.ftpwrapper
@@ -91,16 +91,16 @@ class robot_urllib_py3(robot_base):
     def get(self, bookmark, url, accept_charset=False):
         try:
             # Set fake referer to the base URL
-            urllib.request._opener.addheaders[2] = ('Referer', url)
+            opener.addheaders[2] = ('Referer', url)
 
             if accept_charset and bookmark.charset:
-                urllib.request._opener.addheader('Accept-Charset', bookmark.charset)
+                opener.addheader('Accept-Charset', bookmark.charset)
             try:
                 fname, headers = urllib.request.urlretrieve(url)
             finally:
                 if accept_charset and bookmark.charset:
                     # Remove Accept-Charset
-                    del urllib.request._opener.addheaders[-1]
+                    del opener.addheaders[-1]
 
             infile = open(fname, 'rt')
             content = infile.read()
@@ -125,7 +125,7 @@ class robot_urllib_py3(robot_base):
 
     def get_ftp_welcome(self):
         global ftpcache_key
-        _welcome = urllib.request._opener.ftpcache[ftpcache_key].ftp.welcome
+        _welcome = opener.ftpcache[ftpcache_key].ftp.welcome
         # I am assuming there are no duplicate ftp URLs in db.
         # If there are - ftpcache_key in next line is invalid.
         ftpcache_key = None
@@ -134,3 +134,4 @@ class robot_urllib_py3(robot_base):
     def finish_check_url(self, bookmark):
         robot_base.finish_check_url(self, bookmark)
         urllib.request.urlcleanup()
+        urllib.request._opener = opener
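
A minimal sketch of the behaviour the patch works around, assuming (as the
commit message states) that urllib.request.urlcleanup() drops the
module-level _opener. MyOpener and the sample header value are hypothetical
stand-ins, not part of the commit; the real module uses MyURLopener.

import urllib.request


class MyOpener(urllib.request.URLopener):
    # Hypothetical stand-in for MyURLopener; URLopener is the legacy
    # opener class the module subclasses (deprecated since Python 3.3).
    pass


# Install the opener and keep a separate reference, as the patch does.
urllib.request._opener = opener = MyOpener()
opener.addheader('X-User-Agent', 'example')  # illustrative header only

# urlcleanup() removes urlretrieve() temp files and, per the commit
# message, also clears the module-level opener.
urllib.request.urlcleanup()
print(urllib.request._opener)  # expected: None -- custom headers are gone

# Re-installing the saved reference restores the opener (and its headers)
# for subsequent requests.
urllib.request._opener = opener

Keeping the module-level name opener is what lets finish_check_url() put the
opener back after every urlcleanup() call in the last hunk above.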