Change parse_html to parse strings, not files

diff --git a/Robots/bkmk_rsimple.py b/Robots/bkmk_rsimple.py
index ea2decec8b82ee78ebb6d3dedc2cd5ee5b5dd435..2c4df9e278f99b186daa869720f9437c81f7cefa 100644
--- a/Robots/bkmk_rsimple.py
+++ b/Robots/bkmk_rsimple.py
@@ -1,9 +1,16 @@
-"""
-   Simple, strightforward robot
+"""Simple, strightforward robot
+
+This file is a part of Bookmarks database and Internet robot.
 
-   Written by Oleg BroytMann. Copyright (C) 2000-2007 PhiloSoft Design.
 """
 
+__author__ = "Oleg Broytman <phd@phdru.name>"
+__copyright__ = "Copyright (C) 2000-2014 PhiloSoft Design"
+__license__ = "GNU GPL"
+
+__all__ = ['robot_simple', 'get_error']
+
+
 import sys, os
 import time, urllib
 from base64 import b64encode
@@ -13,13 +20,15 @@ from m_lib.net.www.util import parse_time
 from m_lib.md5wrapper import md5wrapper
 
 from bkmk_objects import Robot
-from parse_html import parse_html
+from parse_html import parse_filename
 
 
 class RedirectException(Exception):
    reloc_dict = {
       301: "perm.",
-      302: "temp.",
+      302: "temp2.",
+      303: "temp3.",
+      307: "temp7.",
       "html": "html"
    }
    def __init__(self, errcode, newurl):
@@ -40,11 +49,19 @@ class MyURLopener(urllib.URLopener):
 
    # Error 301 -- also relocated (permanently)
    http_error_301 = http_error_302
+   # Error 307 -- also relocated (temporarily)
+   http_error_307 = http_error_302
 
    # Error 401 -- authentication required
    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None): 
       raise IOError, ('http error', errcode, "Authentication required ", headers)
 
+   def http_error_default(self, url, fp, errcode, errmsg, headers):
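+      # Any other HTTP error: drain and close the response, then report it as an IOError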
+      if fp:
+         void = fp.read()
+         fp.close()
+      raise IOError, ('http error', errcode, errmsg, headers)
+
 
 urllib._urlopener = MyURLopener()
 
@@ -97,163 +114,184 @@ icons = {} # Icon cache; maps URL to a tuple (content type, data)
 
 class robot_simple(Robot):
    def check_url(self, bookmark):
-      if not self.tempfname:
-         self.tempfname = bookmark.tempfname
-
+      fname = None
       try:
-         try:
-            self.start = int(time.time())
-            bookmark.icon = None
+         self.start = int(time.time())
+         bookmark.icon = None
 
-            url_type, url_rest = urllib.splittype(bookmark.href)
-            url_host, url_path = urllib.splithost(url_rest)
-            url_path, url_tag  = urllib.splittag(url_path)
+         url_type, url_rest = urllib.splittype(bookmark.href)
+         url_host, url_path = urllib.splithost(url_rest)
+         url_path, url_tag  = urllib.splittag(url_path)
 
-            # Set fake referer to the root of the site
-            urllib._urlopener.addheaders[2] = ('Referer', "%s://%s%s" % (url_type, url_host, url_path))
+         # Set fake referer to the root of the site
+         urllib._urlopener.addheaders[2] = ('Referer', "%s://%s%s" % (url_type, url_host, url_path))
 
-            if bookmark.charset: urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
-            fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path), self.tempfname)
-            if bookmark.charset: del urllib._urlopener.addheaders[-1]
+         if bookmark.charset: urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
+         fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path))
+         if bookmark.charset: del urllib._urlopener.addheaders[-1]
 
-            size = 0
-            last_modified = None
+         size = 0
+         last_modified = None
 
-            if headers:
-               try:
-                  size = headers["Content-Length"]
-               except KeyError:
-                  pass
+         if headers:
+            try:
+               size = headers["Content-Length"]
+            except KeyError:
+               pass
 
-               try:
-                  last_modified = headers["Last-Modified"]
-               except KeyError:
-                  pass
-
-               if last_modified:
-                  last_modified = parse_time(last_modified)
+            try:
+               last_modified = headers["Last-Modified"]
+            except KeyError:
+               pass
 
             if last_modified:
-               last_modified = str(int(last_modified))
-            else:
-               last_modified = bookmark.last_visit
+               last_modified = parse_time(last_modified)
 
-            bookmark.size = size
-            bookmark.last_modified = last_modified
+         if last_modified:
+            last_modified = str(int(last_modified))
+         else:
+            last_modified = bookmark.last_visit
+
+         bookmark.size = size
+         bookmark.last_modified = last_modified
 
-            md5 = md5wrapper()
-            if urllib._urlopener.type == "ftp": # Pass welcome message through MD5
-               md5.update(get_welcome())
+         md5 = md5wrapper()
+         if urllib._urlopener.type == "ftp": # Pass welcome message through MD5
+            md5.update(get_welcome())
 
-            md5.md5file(self.tempfname)
-            bookmark.md5 = str(md5)
+         md5.md5file(fname)
+         bookmark.md5 = str(md5)
 
-            if headers:
+         if headers:
+            try:
+               content_type = headers["Content-Type"]
+               self.log("   Content-Type: %s" % content_type)
                try:
-                  content_type = headers["Content-Type"]
-                  try:
-                     content_type, charset = content_type.split(';')
-                     content_type = content_type.strip()
-                     charset = charset.split('=')[1].strip()
-                     self.log("   HTTP charset   : %s" % charset)
-                  except (ValueError, IndexError):
-                     charset = None
-                     self.log("   no charset in Content-Type header")
-                  if content_type == "text/html":
-                     parser = parse_html(fname, charset, self.log)
-                     bookmark.real_title = parser.title
-                     if parser.refresh:
-                        refresh = parser.refresh
-                        try:
-                           url = refresh.split('=', 1)[1]
-                        except IndexError:
-                           url = "self"
-                        try:
-                           timeout = float(refresh.split(';')[0])
-                        except (IndexError, ValueError):
-                           raise RedirectException("html", "Bad redirect to %s (%s)" % (url, refresh))
-                        else:
+                  # extract charset from "text/html; foo; charset=UTF-8, bar; baz;"
+                  content_type, charset = content_type.split(';', 1)
+                  content_type = content_type.strip()
+                  charset = charset.split('=')[1].strip().split(',')[0]
+                  self.log("   HTTP charset   : %s" % charset)
+               except (ValueError, IndexError):
+                  charset = None
+                  self.log("   no charset in Content-Type header")
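+               # Recognize both text/html and application/xhtml+xml as parseable HTML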
+               for ctype in ("text/html", "application/xhtml+xml"):
+                  if content_type.startswith(ctype):
+                     html = True
+                     break
+               else:
+                  html = False
+               if html:
+                  parser = parse_filename(fname, charset, self.log)
+                  if parser:
+                     bookmark.real_title = parser.title
+                     icon = parser.icon
+                  else:
+                     icon = None
+                  if not icon:
+                     icon = "/favicon.ico"
+                  icon = urljoin("%s://%s%s" % (url_type, url_host, url_path), icon)
+                  self.log("   looking for icon at: %s" % icon)
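+                  # Reuse a previously fetched icon (or a recorded failure) from the cache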
+                  if icon in icons:
+                     if icons[icon]:
+                        bookmark.icon_href = icon
+                        content_type, bookmark.icon = icons[icon]
+                        self.log("   cached icon: %s" % content_type)
+                     else:
+                        self.log("   cached icon: no icon")
+                  else:
+                     try:
+                        _icon = icon
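+                        # Follow at most 8 redirects while fetching the icon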
+                        for i in range(8):
                            try:
-                              timeout = int(refresh.split(';')[0])
-                           except ValueError:
-                              pass # float timeout
-                           raise RedirectException("html", "%s (%s sec)" % (url, timeout))
-
-                     # Get favicon.ico
-                     icon = parser.icon
-                     if not icon:
-                        icon = "/favicon.ico"
-                     icon = urljoin("%s://%s%s" % (url_type, url_host, url_path), icon)
-                     self.log("   looking for icon at: %s" % icon)
-                     if icon in icons:
-                        if icons[icon]:
-                           content_type, bookmark.icon = icons[icon]
-                           self.log("       cached icon: %s" % content_type)
+                              icon_fname, headers = urllib.urlretrieve(_icon)
+                           except RedirectException, e:
+                              _icon = e.url
+                              self.log("   redirect to : %s" % _icon)
+                           else:
+                              break
                         else:
-                           self.log("       cached icon: no icon")
+                           raise IOError("Too many redirects")
+                     except:
+                        etype, emsg, tb = sys.exc_info()
+                        self.log("   no icon        : %s %s" % (etype, emsg))
+                        etype = emsg = tb = None
+                        icons[icon] = None
                      else:
-                        try:
-                           _icon = icon
-                           for i in range(8):
-                              try:
-                                 fname, headers = urllib.urlretrieve(_icon)
-                              except RedirectException, e:
-                                 _icon = e.url
-                                 self.log("       redirect to : %s" % _icon)
-                              else:
-                                 break
-                           else:
-                              raise IOError("Too many redirects")
-                        except:
-                           etype, emsg, tb = sys.exc_info()
-                           self.log("   no icon        : %s %s" % (etype, emsg))
-                           etype = None
-                           emsg = None
-                           tb = None
-                           icons[icon] = None
+                        content_type = headers["Content-Type"]
+                        if content_type.startswith("application/") \
+                              or content_type.startswith("image/") \
+                              or content_type.startswith("text/plain"):
+                           icon_file = open(icon_fname, "rb")
+                           icon_data = icon_file.read()
+                           icon_file.close()
+                           bookmark.icon_href = icon
+                           self.log("   got icon       : %s" % content_type)
+                           if content_type.startswith("application/") \
+                                 or content_type.startswith("text/plain"):
+                              self.log("   non-image content type, assume x-icon")
+                              content_type = 'image/x-icon'
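+                           # Inline the icon as a base64-encoded data: URI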
+                           bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon_data))
+                           icons[icon] = (content_type, bookmark.icon)
                         else:
-                           content_type = headers["Content-Type"]
-                           if content_type.startswith("image/"):
-                              icon_file = open(fname, "rb")
-                              icon = icon_file.read()
-                              icon_file.close()
-                              bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon))
-                              self.log("   got icon       : %s" % content_type)
-                              icons[icon] = (content_type, bookmark.icon)
-                           else:
-                              self.log("   no icon        : bad content type '%s'" % content_type)
-                              icons[icon] = None
-                           os.remove(fname)
-
-               except KeyError:
-                  pass
-
-         except IOError, msg:
-            if (msg[0] == "http error") and (msg[1] == -1):
-               bookmark.no_error = "The server did not return any header - it is not an error, actually"
-            else:
-               bookmark.error = get_error(msg)
-
-         except EOFError:
-            bookmark.error = "Unexpected EOF (FTP server closed connection)"
-
-         except RedirectException, msg:
-            bookmark.moved = str(msg)
-
-         except KeyboardInterrupt:
-            return 0
+                           self.log("   no icon        : bad content type '%s'" % content_type)
+                           icons[icon] = None
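+                  # Report a <meta http-equiv="refresh"> as an "html" redirect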
+                  if parser and parser.refresh:
+                     refresh = parser.refresh
+                     try:
+                        url = refresh.split('=', 1)[1]
+                     except IndexError:
+                        url = "self"
+                     try:
+                        timeout = float(refresh.split(';')[0])
+                     except (IndexError, ValueError):
+                        raise RedirectException("html", "Bad redirect to %s (%s)" % (url, refresh))
+                     else:
+                        try:
+                           timeout = int(refresh.split(';')[0])
+                        except ValueError:
+                           pass # float timeout
+                        raise RedirectException("html", "%s (%s sec)" % (url, timeout))
+
+            except KeyError, key:
+               self.log("   no header: %s" % key)
+
+      except IOError, msg:
+         if (msg[0] == "http error") and (msg[1] == -1):
+            bookmark.no_error = "The server did not return any header - it is not an error, actually"
+            self.log('   no headers: %s' % bookmark.no_error)
+         else:
+            bookmark.error = get_error(msg)
+            self.log('   Error: %s' % bookmark.error)
+
+      except EOFError:
+         bookmark.error = "Unexpected EOF (FTP server closed connection)"
+         self.log('   EOF: %s' % bookmark.error)
+
+      except RedirectException, msg:
+         bookmark.moved = str(msg)
+         self.log('   Moved: %s' % bookmark.moved)
+
+      except KeyboardInterrupt:
+         self.log("Keyboard interrupt (^C)")
+         return 0
+
+      except:
+         import traceback
+         traceback.print_exc()
+         bookmark.error = "Exception!"
+         self.log('   Exception: %s' % bookmark.error)
 
       finally:
-         self.finish_check_url(bookmark)
+         self.finish_check_url(bookmark, fname)
 
       # Tested
       return 1
 
-
-   def finish_check_url(self, bookmark):
+   def finish_check_url(self, bookmark, fname=None):
       # Calculate these attributes even in case of an error
-      if os.path.exists(self.tempfname):
-         size = str(os.path.getsize(self.tempfname))
+      if fname and os.path.exists(fname):
+         size = str(os.path.getsize(fname))
          if size[-1] == 'L':
             size = size[:-1]
          bookmark.size = size
@@ -263,3 +301,4 @@ class robot_simple(Robot):
 
       now = int(time.time())
       bookmark.test_time = str(now - start)
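+      # Remove the temporary files created by urllib.urlretrieve()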
+      urllib.urlcleanup()