Cache icons.
diff --git a/Robots/bkmk_rsimple.py b/Robots/bkmk_rsimple.py
index c3311bc475c203c38de354fffb657a324175ef55..96135824be74d83867458e0b0af73caf2df5687a 100644
--- a/Robots/bkmk_rsimple.py
+++ b/Robots/bkmk_rsimple.py
@@ -4,14 +4,17 @@
    Written by Oleg BroytMann. Copyright (C) 2000-2007 PhiloSoft Design.
 """
 
-
-import sys, os, string
+import sys, os
 import time, urllib
 from base64 import b64encode
 from urlparse import urljoin
+
 from m_lib.net.www.util import parse_time
 from m_lib.md5wrapper import md5wrapper
 
+from bkmk_objects import Robot
+from parse_html import parse_html
+
 
 class RedirectException(Exception):
    reloc_dict = {
@@ -46,28 +49,28 @@ class MyURLopener(urllib.URLopener):
 urllib._urlopener = MyURLopener()
 
 # Fake headers to pretend this is a real browser
-_version = "Links (2.1; Linux 2.6 i686; 80x25)"
+_version = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0"
 urllib._urlopener.addheaders[0] = ('User-Agent', _version)
-_version = "bookmarks_db (Python-urllib/%s)" % urllib.__version__
+_version = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
+   sys.version_info[0], sys.version_info[1], sys.version_info[2], urllib.__version__)
 urllib._urlopener.addheader('X-User-Agent', _version)
+urllib._urlopener.addheader('Referer', '')
 
 urllib._urlopener.addheader('Connection', 'close')
-urllib._urlopener.addheader('Content-Length', '0')
 urllib._urlopener.addheader('Accept', '*/*')
 urllib._urlopener.addheader('Accept-Language', 'ru,en')
 urllib._urlopener.addheader('Cache-Control', 'max-age=300')
-urllib._urlopener.addheader('Referer', 'http://www.yahoo.com/')
 
 
 def get_error(msg):
-   if type(msg) == type(""):
+   if isinstance(msg, str):
       return msg
 
    else:
       s = []
       for i in msg:
-         s.append("'%s'" % string.join(string.split(str(i), "\n"), "\\n"))
-      return "(%s)" % string.join(s)
+         s.append("'%s'" % str(i).replace('\n', "\\n"))
+      return "(%s)" % ' '.join(s)
 
 
 urllib_ftpwrapper = urllib.ftpwrapper
@@ -77,7 +80,7 @@ class myftpwrapper(urllib_ftpwrapper):
    def __init__(self, user, passwd, host, port, dirs):
       urllib_ftpwrapper.__init__(self, user, passwd, host, port, dirs)
       global ftpcache_key
-      ftpcache_key = (user, host, port, string.join(dirs, '/'))
+      ftpcache_key = (user, host, port, tuple(dirs))
 
 urllib.ftpwrapper = myftpwrapper
 
@@ -89,8 +92,8 @@ def get_welcome():
    return _welcome
 
 
-from bkmk_objects import Robot
-from parse_html import parse_html
+icons = {} # Icon cache; maps URL to a tuple (content type, data)
+           # or None if there is no icon.
 
 class robot_simple(Robot):
    def check_url(self, bookmark):
@@ -106,6 +109,9 @@ class robot_simple(Robot):
             url_host, url_path = urllib.splithost(url_rest)
             url_path, url_tag  = urllib.splittag(url_path)
 
+            # Set fake referer to the root of the site
+            urllib._urlopener.addheaders[2] = ('Referer', "%s://%s%s" % (url_type, url_host, url_path))
+
             if bookmark.charset: urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
             fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path), self.tempfname)
             if bookmark.charset: del urllib._urlopener.addheaders[-1]
@@ -149,10 +155,10 @@ class robot_simple(Robot):
                      content_type, charset = content_type.split(';')
                      content_type = content_type.strip()
                      charset = charset.split('=')[1].strip()
-                     if self.log: self.log("   HTTP charset   : %s" % charset)
+                     self.log("   HTTP charset   : %s" % charset)
                   except (ValueError, IndexError):
                      charset = None
-                     if self.log: self.log("   no charset in Content-Type header")
+                     self.log("   no charset in Content-Type header")
                   if content_type == "text/html":
                      parser = parse_html(fname, charset, self.log)
                      bookmark.real_title = parser.title
@@ -178,35 +184,46 @@ class robot_simple(Robot):
                      if not icon:
                         icon = "/favicon.ico"
                      icon = urljoin("%s://%s%s" % (url_type, url_host, url_path), icon)
-                     if self.log: self.log("   looking for icon at : %s" % icon)
-                     try:
-                        for i in range(8):
-                           try:
-                              fname, headers = urllib.urlretrieve(icon)
-                           except RedirectException, e:
-                              icon = e.url
-                              if self.log: self.log("       redirect to : %s" % icon)
-                           else:
-                              break
+                     self.log("   looking for icon at: %s" % icon)
+                     if icon in icons:
+                        if icons[icon]:
+                           content_type, bookmark.icon = icons[icon]
+                           self.log("       cached icon: %s" % content_type)
                         else:
-                           raise IOError("Too many redirects")
-                     except:
-                        etype, emsg, tb = sys.exc_info()
-                        if self.log: self.log("   no icon        : %s %s" % (etype, emsg))
-                        etype = None
-                        emsg = None
-                        tb = None
+                           self.log("       cached icon: no icon")
                      else:
-                        content_type = headers["Content-Type"]
-                        if content_type.startswith("image/"):
-                           icon_file = open(fname, "rb")
-                           icon = icon_file.read()
-                           icon_file.close()
-                           bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon))
-                           if self.log: self.log("   got icon       : %s" % content_type)
+                        try:
+                           _icon = icon
+                           for i in range(8):
+                              try:
+                                 fname, headers = urllib.urlretrieve(_icon)
+                              except RedirectException, e:
+                                 _icon = e.url
+                                 self.log("       redirect to : %s" % _icon)
+                              else:
+                                 break
+                           else:
+                              raise IOError("Too many redirects")
+                        except:
+                           etype, emsg, tb = sys.exc_info()
+                           self.log("   no icon        : %s %s" % (etype, emsg))
+                           etype = None
+                           emsg = None
+                           tb = None
+                           icons[icon] = None
                         else:
-                           if self.log: self.log("   no icon        : bad content type '%s'" % content_type)
-                        os.remove(fname)
+                           content_type = headers["Content-Type"]
+                           if content_type.startswith("image/"):
+                              icon_file = open(fname, "rb")
+                              icon = icon_file.read()
+                              icon_file.close()
+                              bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon))
+                              self.log("   got icon       : %s" % content_type)
+                              icons[icon] = (content_type, bookmark.icon)
+                           else:
+                              self.log("   no icon        : bad content type '%s'" % content_type)
+                              icons[icon] = None
+                           os.remove(fname)
 
                except KeyError:
                   pass
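
For reference, here is a minimal, self-contained sketch of the caching pattern this commit introduces. It is not part of the diff: the helper name fetch_icon and the use of Python 3's urllib.request are assumptions made only for illustration. The idea matches the icons dict added above: each favicon URL maps to a (content type, data URI) pair, or to None when the fetch failed or returned a non-image, so every icon URL is requested at most once per run.

# Hypothetical sketch of the icon cache (Python 3); not the module's actual code.
from base64 import b64encode
from urllib.request import urlopen

icons = {}  # icon URL -> (content type, "data:..." URI), or None if there is no usable icon

def fetch_icon(icon_url):
    if icon_url in icons:  # cache hit: a stored icon or a remembered failure
        return icons[icon_url]
    try:
        response = urlopen(icon_url)
        content_type = response.headers.get("Content-Type", "")
        data = response.read()
    except OSError:
        icons[icon_url] = None  # remember the failure so this URL is not retried
        return None
    if not content_type.startswith("image/"):
        icons[icon_url] = None  # a non-image content type counts as "no icon", as in the robot
        return None
    data_uri = "data:%s;base64,%s" % (content_type, b64encode(data).decode("ascii"))
    icons[icon_url] = (content_type, data_uri)
    return icons[icon_url]

With this shape, something like fetch_icon(urljoin(page_url, "/favicon.ico")) would hit the network only for the first bookmark on a given site; later bookmarks pointing at the same icon URL reuse the cached data URI or the cached failure.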