X-Git-Url: https://git.phdru.name/?a=blobdiff_plain;f=Robots%2Fbkmk_rsimple.py;h=f18dface219f183fd23bdc69980ef4848c710684;hb=7ebd86416b89bf67bb97f7680b87660f8cdc0cf7;hp=7a21c4fd1a1e72faf1397df7a853194dab30f696;hpb=89976fb4bb0376958ea17cfe337ecb872221eeb1;p=bookmarks_db.git

diff --git a/Robots/bkmk_rsimple.py b/Robots/bkmk_rsimple.py
index 7a21c4f..f18dfac 100644
--- a/Robots/bkmk_rsimple.py
+++ b/Robots/bkmk_rsimple.py
@@ -1,9 +1,20 @@
 """
-   Simple, strightforward robot; guaranteed to has problems with timeouts :)
+   Simple, strightforward robot
 
-   Written by BroytMann, Mar 2000 - Mar 2004. Copyright (C) 2000-2004 PhiloSoft Design
+   Written by Oleg BroytMann. Copyright (C) 2000-2007 PhiloSoft Design.
 """
 
+import sys, os
+import time, urllib
+from base64 import b64encode
+from urlparse import urljoin
+
+from m_lib.net.www.util import parse_time
+from m_lib.md5wrapper import md5wrapper
+
+from bkmk_objects import Robot
+from parse_html import parse_html
+
 
 class RedirectException(Exception):
     reloc_dict = {
@@ -13,12 +24,7 @@ class RedirectException(Exception):
     }
     def __init__(self, errcode, newurl):
         Exception.__init__(self, "(%s) to %s" % (self.reloc_dict[errcode], newurl))
-
-
-import string, os
-import time, urllib
-from m_lib.net.www.util import parse_time
-from m_lib.md5wrapper import md5wrapper
+        self.url = newurl
 
 
 class MyURLopener(urllib.URLopener):
@@ -39,23 +45,38 @@ class MyURLopener(urllib.URLopener):
     def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
         raise IOError, ('http error', errcode, "Authentication required ", headers)
 
+    def http_error_default(self, url, fp, errcode, errmsg, headers):
+        if fp:
+            void = fp.read()
+            fp.close()
+        raise IOError, ('http error', errcode, errmsg, headers)
+
 
 urllib._urlopener = MyURLopener()
 
-# Some sites allow only Mozilla-compatible browsers; way to stop robots?
-server_version = "Mozilla/3.0 (compatible; Python-urllib/%s)" % urllib.__version__
-urllib._urlopener.addheaders[0] = ('User-agent', server_version)
+# Fake headers to pretend this is a real browser
+_version = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0"
+urllib._urlopener.addheaders[0] = ('User-Agent', _version)
+_version = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
+    sys.version_info[0], sys.version_info[1], sys.version_info[2], urllib.__version__)
+urllib._urlopener.addheader('X-User-Agent', _version)
+urllib._urlopener.addheader('Referer', '')
+
+urllib._urlopener.addheader('Connection', 'close')
+urllib._urlopener.addheader('Accept', '*/*')
+urllib._urlopener.addheader('Accept-Language', 'ru,en')
+urllib._urlopener.addheader('Cache-Control', 'max-age=300')
 
 
 def get_error(msg):
-    if type(msg) == type(""):
+    if isinstance(msg, str):
        return msg
 
     else:
        s = []
        for i in msg:
-           s.append("'%s'" % string.join(string.split(str(i), "\n"), "\\n"))
-       return "(%s)" % string.join(s)
+           s.append("'%s'" % str(i).replace('\n', "\\n"))
+       return "(%s)" % ' '.join(s)
 
 
 urllib_ftpwrapper = urllib.ftpwrapper
@@ -65,7 +86,7 @@ class myftpwrapper(urllib_ftpwrapper):
     def __init__(self, user, passwd, host, port, dirs):
         urllib_ftpwrapper.__init__(self, user, passwd, host, port, dirs)
         global ftpcache_key
-        ftpcache_key = (user, host, port, string.join(dirs, '/'))
+        ftpcache_key = (user, host, port, '/'.join(dirs))
 
 
 urllib.ftpwrapper = myftpwrapper
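The header setup in the hunk above relies on two pieces of Python 2's urllib.URLopener: the addheaders list, whose first entry is the ('User-Agent', ...) pair the opener installs by default, and addheader(), which appends extra (name, value) pairs that are sent with every later request. A minimal sketch of the same technique follows; the opener instance and the header values are illustrative, not part of the patch.

    import urllib

    opener = urllib.URLopener()
    # Entry 0 is the default ('User-agent', ...) pair; replacing it changes
    # the agent string used for all requests made through this opener.
    opener.addheaders[0] = ('User-Agent', 'Mozilla/5.0 (compatible; example)')
    # addheader() appends additional (name, value) pairs to the same list.
    opener.addheader('Accept-Language', 'ru,en')
    opener.addheader('X-User-Agent', 'bookmarks_db example')

    # Any later fetch through this opener carries the headers configured above:
    # fname, headers = opener.retrieve('http://example.com/', '/tmp/page.html')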
@@ -77,21 +98,29 @@ def get_welcome():
     return _welcome
 
 
-from bkmk_objects import Robot
-from parse_html import parse_html
+icons = {} # Icon cache; maps URL to a tuple (content type, data)
+           # or None if there is no icon.
 
 
 class robot_simple(Robot):
-    def check_url(self, bookmark, url_type, url_rest):
+    def check_url(self, bookmark):
         if not self.tempfname:
             self.tempfname = bookmark.tempfname
 
         try:
             try:
                 self.start = int(time.time())
+                bookmark.icon = None
+
+                url_type, url_rest = urllib.splittype(bookmark.href)
                 url_host, url_path = urllib.splithost(url_rest)
                 url_path, url_tag = urllib.splittag(url_path)
+                # Set fake referer to the root of the site
+                urllib._urlopener.addheaders[2] = ('Referer', "%s://%s%s" % (url_type, url_host, url_path))
+
+                if bookmark.charset: urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
                 fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path), self.tempfname)
+                if bookmark.charset: del urllib._urlopener.addheaders[-1]
 
                 size = 0
                 last_modified = None
@@ -132,15 +161,13 @@ class robot_simple(Robot):
                             content_type, charset = content_type.split(';')
                             content_type = content_type.strip()
                             charset = charset.split('=')[1].strip()
-                            if self.log: self.log(" HTTP charset : %s" % charset)
+                            self.log(" HTTP charset : %s" % charset)
                         except (ValueError, IndexError):
                             charset = None
-                            if self.log: self.log(" no charset in Content-Type header")
+                            self.log(" no charset in Content-Type header")
                         if content_type == "text/html":
                             parser = parse_html(fname, charset, self.log)
-                            title = parser.title.replace('\r', '').replace('\n', ' ').strip()
-                            bookmark.real_title = parser.unescape(title)
-                            if self.log: self.log(" final title : %s" % bookmark.real_title)
+                            bookmark.real_title = parser.title
                             if parser.refresh:
                                 refresh = parser.refresh
                                 try:
@@ -148,13 +175,62 @@ class robot_simple(Robot):
                                 except IndexError:
                                     url = "self"
                                 try:
-                                    timeout = int(refresh.split(';')[0])
+                                    timeout = float(refresh.split(';')[0])
                                 except (IndexError, ValueError):
-                                    timeout = None
-                                if timeout is None:
                                     raise RedirectException("html", "Bad redirect to %s (%s)" % (url, refresh))
                                 else:
-                                    raise RedirectException("html", "%s (%d sec)" % (url, timeout))
+                                    try:
+                                        timeout = int(refresh.split(';')[0])
+                                    except ValueError:
+                                        pass # float timeout
+                                    raise RedirectException("html", "%s (%s sec)" % (url, timeout))
+
+                            # Get favicon.ico
+                            icon = parser.icon
+                            if not icon:
+                                icon = "/favicon.ico"
+                            icon = urljoin("%s://%s%s" % (url_type, url_host, url_path), icon)
+                            self.log(" looking for icon at: %s" % icon)
+                            if icon in icons:
+                                if icons[icon]:
+                                    content_type, bookmark.icon = icons[icon]
+                                    self.log(" cached icon: %s" % content_type)
+                                else:
+                                    self.log(" cached icon: no icon")
+                            else:
+                                try:
+                                    _icon = icon
+                                    for i in range(8):
+                                        try:
+                                            fname, headers = urllib.urlretrieve(_icon)
+                                        except RedirectException, e:
+                                            _icon = e.url
+                                            self.log(" redirect to : %s" % _icon)
+                                        else:
+                                            break
+                                    else:
+                                        raise IOError("Too many redirects")
+                                except:
+                                    etype, emsg, tb = sys.exc_info()
+                                    self.log(" no icon : %s %s" % (etype, emsg))
+                                    etype = None
+                                    emsg = None
+                                    tb = None
+                                    icons[icon] = None
+                                else:
+                                    content_type = headers["Content-Type"]
+                                    if content_type.startswith("image/"):
+                                        icon_file = open(fname, "rb")
+                                        icon = icon_file.read()
+                                        icon_file.close()
+                                        bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon))
+                                        self.log(" got icon : %s" % content_type)
+                                        icons[icon] = (content_type, bookmark.icon)
+                                    else:
+                                        self.log(" no icon : bad content type '%s'" % content_type)
+                                        icons[icon] = None
+                                    os.remove(fname)
+
                     except KeyError:
                         pass
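The bookmark.icon value built in the hunk above is a data: URI, so the icon bytes travel inside the bookmark record itself rather than as a separate file. A small sketch of that encoding, assuming the same Python 2 environment as the patch; the content type and file name are examples only.

    from base64 import b64encode

    def make_data_uri(content_type, data):
        # RFC 2397-style data URI with a base64-encoded payload, e.g.
        # "data:image/x-icon;base64,AAABAAEAEBA..."
        return "data:%s;base64,%s" % (content_type, b64encode(data))

    # icon_bytes = open('favicon.ico', 'rb').read()   # hypothetical local file
    # print make_data_uri('image/x-icon', icon_bytes)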
@@ -173,6 +249,11 @@ class robot_simple(Robot):
             except KeyboardInterrupt:
                 return 0
 
+            except:
+                import traceback
+                traceback.print_exc()
+                bookmark.error = "Exception!"
+
         finally:
             self.finish_check_url(bookmark)
 
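The favicon fetch above also shows the bounded redirect loop: urlretrieve() is retried up to eight times, each retry following the URL stored on the RedirectException (the self.url attribute added in this patch), and Python's for/else clause turns loop exhaustion into an error. A standalone sketch of that pattern, assuming the RedirectException class defined in the patch and a hypothetical fetch callable:

    def fetch_with_redirects(fetch, url, limit=8):
        # 'fetch' stands in for urllib.urlretrieve: it either returns a result
        # or raises RedirectException (the class defined in the patch above).
        for i in range(limit):
            try:
                result = fetch(url)
            except RedirectException, e:
                url = e.url            # follow the redirect and try again
            else:
                break                  # success, stop retrying
        else:
            # for/else: runs only when no break happened, i.e. every attempt
            # ended in yet another redirect
            raise IOError("Too many redirects")
        return result

Capping the retries keeps a server that redirects in a circle from hanging the whole bookmark check.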