X-Git-Url: https://git.phdru.name/?a=blobdiff_plain;f=Robots%2Fbkmk_rurllib.py;h=b41722fade3af41f245429e11689c3ffae55bb58;hb=71900f3630cb51580964038b78100d60e3671981;hp=f0a614e3799e1ccae9750bd57fbbf5c4f99ccd90;hpb=9d989c8c9ebd7f13e9f0fdc95fa139c99ed83387;p=bookmarks_db.git

diff --git a/Robots/bkmk_rurllib.py b/Robots/bkmk_rurllib.py
index f0a614e..b41722f 100644
--- a/Robots/bkmk_rurllib.py
+++ b/Robots/bkmk_rurllib.py
@@ -5,7 +5,7 @@ This file is a part of Bookmarks database and Internet robot.
 """
 
 __author__ = "Oleg Broytman <phd@phdru.name>"
-__copyright__ = "Copyright (C) 2000-2014 PhiloSoft Design"
+__copyright__ = "Copyright (C) 2000-2017 PhiloSoft Design"
 __license__ = "GNU GPL"
 
 __all__ = ['robot_urllib']
@@ -13,12 +13,18 @@ __all__ = ['robot_urllib']
 
 import sys, os
 import time, urllib
-from Robots.bkmk_robot_base import robot_base, RedirectException, get_error
+from Robots.bkmk_robot_base import robot_base, get_error
 
 
+class RedirectException(Exception):
+    def __init__(self, errcode, newurl):
+        Exception.__init__(self)
+        self.errcode = errcode
+        self.newurl = newurl
+
 class MyURLopener(urllib.URLopener):
-    # Error 302 -- relocated (temporarily)
-    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
+    # Error 301 -- relocated (permanently)
+    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
         if headers.has_key('location'):
             newurl = headers['location']
         elif headers.has_key('uri'):
@@ -27,36 +33,38 @@ class MyURLopener(urllib.URLopener):
             newurl = "Nowhere"
         raise RedirectException(errcode, newurl)
 
-    # Error 301 -- also relocated (permanently)
-    http_error_301 = http_error_302
-    # Error 307 -- also relocated (temporary)
-    http_error_307 = http_error_302
+    # Error 302 -- relocated (temporarily)
+    http_error_302 = http_error_301
+    # Error 303 -- relocated (see other)
+    http_error_303 = http_error_301
+    # Error 307 -- relocated (temporarily)
+    http_error_307 = http_error_301
 
     # Error 401 -- authentication required
     def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
-        raise IOError, ('http error', errcode, "Authentication required ", headers)
+        raise IOError(('http error', errcode, "Authentication required ", headers))
 
     def http_error_default(self, url, fp, errcode, errmsg, headers):
         if fp:
            void = fp.read()
            fp.close()
-        raise IOError, ('http error', errcode, errmsg, headers)
+        raise IOError(('http error', errcode, errmsg, headers))
 
 
 urllib._urlopener = MyURLopener()
 
 # Fake headers to pretend this is a real browser
-_version = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0"
-urllib._urlopener.addheaders[0] = ('User-Agent', _version)
-_version = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
+_user_agent = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0"
+urllib._urlopener.addheaders[0] = ('User-Agent', _user_agent)
+_x_user_agent = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
     sys.version_info[0], sys.version_info[1], sys.version_info[2],
     urllib.__version__)
-urllib._urlopener.addheader('X-User-Agent', _version)
+urllib._urlopener.addheader('X-User-Agent', _x_user_agent)
 urllib._urlopener.addheader('Referer', '')
-urllib._urlopener.addheader('Connection', 'close')
 urllib._urlopener.addheader('Accept', '*/*')
 urllib._urlopener.addheader('Accept-Language', 'ru,en')
 urllib._urlopener.addheader('Cache-Control', 'max-age=300')
+urllib._urlopener.addheader('Connection', 'close')
 
 
 urllib_ftpwrapper = urllib.ftpwrapper
@@ -72,32 +80,38 @@ urllib.ftpwrapper = myftpwrapper
 
 
 class robot_urllib(robot_base):
-    def urlretrieve(self, bookmark, url, accept_charset=False):
+    def get(self, bookmark, url, accept_charset=False):
         try:
             # Set fake referer to the base URL
             urllib._urlopener.addheaders[2] = ('Referer', url)
 
             if accept_charset and bookmark.charset:
                 urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
-            fname, headers = urllib.urlretrieve(url)
-            if accept_charset and bookmark.charset:
-                del urllib._urlopener.addheaders[-1]
+            try:
+                fname, headers = urllib.urlretrieve(url)
+            finally:
+                if accept_charset and bookmark.charset:
+                    del urllib._urlopener.addheaders[-1]  # Remove Accept-Charset
 
             infile = open(fname, 'rb')
             content = infile.read()
             infile.close()
 
-            return headers, content
+            return None, None, None, headers, content
+
+        except RedirectException, e:
+            return None, e.errcode, e.newurl, None, None
 
-        except IOError, msg:
-            if (msg[0] == "http error") and (msg[1] == -1):
+        except IOError, e:
+            if (e[0] == "http error") and (e[1] == -1):
+                error = None
                 bookmark.no_error = "The server did not return any header - it is not an error, actually"
                 self.log('   no headers: %s' % bookmark.no_error)
             else:
-                bookmark.error = get_error(msg)
-                self.log('   Error: %s' % bookmark.error)
+                error = get_error(e)
+                self.log('   Error: %s' % error)
 
-            return None, None
+            return error, None, None, None, None
 
     def get_ftp_welcome(self):
         global ftpcache_key
@@ -106,5 +120,6 @@ class robot_urllib(robot_base):
         # If there are - ftpcache_key in prev line is invalid.
         return _welcome
 
-    def cleanup(self):
+    def finish_check_url(self, bookmark):
+        robot_base.finish_check_url(self, bookmark)
         urllib.urlcleanup()
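Note on the new calling convention (not part of the diff above): get(), which replaces urlretrieve(), returns a 5-tuple (error, redirect_code, redirect_url, headers, content) instead of the old (headers, content) pair, and MyURLopener now raises RedirectException on 301/302/303/307 so that get() reports the redirect instead of letting urllib follow it. The sketch below is a minimal, hypothetical illustration of how a caller might unpack that tuple; the names check_url_example and bookmark.href are assumptions made for the example, not code taken from bookmarks_db.

    # Hypothetical caller of robot_urllib.get() -- illustrative sketch only.
    def check_url_example(robot, bookmark):
        error, redirect_code, redirect_url, headers, content = robot.get(
            bookmark, bookmark.href, accept_charset=True)
        if error:                      # IOError reported by get()
            bookmark.error = error
        elif redirect_code:            # RedirectException turned into (code, new URL)
            print "Redirect %s to %s" % (redirect_code, redirect_url)
        elif content is not None:      # successful fetch
            print "Got %d bytes" % len(content)
        robot.finish_check_url(bookmark)  # per the diff, also runs urllib.urlcleanup()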