X-Git-Url: https://git.phdru.name/?a=blobdiff_plain;f=Robots%2Fbkmk_rurllib.py;h=1b3182ea04de146e44fe0ac219599a30517ca34e;hb=8a33ee42d7f8edff076734eba8c1718da8127b22;hp=3ef47eba975ac4d475306915ec862701d0f33c05;hpb=9faa13f6f8199790cf01533e857c593520559649;p=bookmarks_db.git diff --git a/Robots/bkmk_rurllib.py b/Robots/bkmk_rurllib.py index 3ef47eb..1b3182e 100644 --- a/Robots/bkmk_rurllib.py +++ b/Robots/bkmk_rurllib.py @@ -11,8 +11,9 @@ __license__ = "GNU GPL" __all__ = ['robot_urllib'] -import sys, os -import time, urllib +import sys +import urllib + from Robots.bkmk_robot_base import robot_base, get_error @@ -43,11 +44,12 @@ class MyURLopener(urllib.URLopener): # Error 401 -- authentication required def http_error_401(self, url, fp, errcode, errmsg, headers, data=None): - raise IOError(('http error', errcode, "Authentication required ", headers)) + raise IOError( + ('http error', errcode, "Authentication required ", headers)) def http_error_default(self, url, fp, errcode, errmsg, headers): if fp: - void = fp.read() + fp.read() fp.close() raise IOError(('http error', errcode, errmsg, headers)) @@ -55,10 +57,13 @@ class MyURLopener(urllib.URLopener): urllib._urlopener = MyURLopener() # Fake headers to pretend this is a real browser -_user_agent = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0" +_user_agent = ("Mozilla/5.0 (X11; U; Linux 2.6 i686; en)" +" Gecko/20001221 Firefox/2.0.0") urllib._urlopener.addheaders[0] = ('User-Agent', _user_agent) _x_user_agent = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % ( - sys.version_info[0], sys.version_info[1], sys.version_info[2], urllib.__version__) + sys.version_info[0], sys.version_info[1], + sys.version_info[2], urllib.__version__ +) urllib._urlopener.addheader('X-User-Agent', _x_user_agent) urllib._urlopener.addheader('Referer', '') @@ -78,6 +83,7 @@ class myftpwrapper(urllib_ftpwrapper): global ftpcache_key ftpcache_key = (user, host, port, '/'.join(dirs)) + urllib.ftpwrapper = myftpwrapper @@ 
-93,7 +99,8 @@ class robot_urllib(robot_base): fname, headers = urllib.urlretrieve(url) finally: if accept_charset and bookmark.charset: - del urllib._urlopener.addheaders[-1] # Remove Accept-Charset + # Remove Accept-Charset + del urllib._urlopener.addheaders[-1] infile = open(fname, 'rb') content = infile.read() @@ -107,7 +114,8 @@ class robot_urllib(robot_base): except IOError as e: if (e[0] == "http error") and (e[1] == -1): error = None - bookmark.no_error = "The server did not return any header - it is not an error, actually" + bookmark.no_error = ("The server did not return any header - " + "it is not an error, actually") self.log(' no headers: %s' % bookmark.no_error) else: error = get_error(e)