Change parse_html to parse strings, not files
[bookmarks_db.git] / Robots / bkmk_rsimple.py
index f481d66e1bce396d46387571211d010eeea40a52..2c4df9e278f99b186daa869720f9437c81f7cefa 100644 (file)
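The change below retargets the robot at a string-based parser API: check_url() now imports and calls parse_filename() instead of parse_html(), and tolerates it returning None (see the "if parser:" check further down). The parse_html module itself is not part of this diff, so the following is only a hypothetical sketch of what such a wrapper could look like; everything except the parse_filename/parse_html names and signature is an assumption.

# Hypothetical sketch, not part of this diff: parse_html() now takes the
# document text, and parse_filename() reads the temporary file, decodes it
# and delegates, returning None when the page cannot be parsed.
def parse_filename(filename, charset=None, log=None):
   infile = open(filename, 'rb')
   try:
      html_text = infile.read()
   finally:
      infile.close()
   if charset:
      try:
         html_text = html_text.decode(charset)
      except (LookupError, UnicodeDecodeError):
         if log:
            log("   cannot decode %s using charset %s" % (filename, charset))
         return None
   return parse_html(html_text, charset, log)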
@@ -1,9 +1,16 @@
-"""
-   Simple, strightforward robot
+"""Simple, strightforward robot
+
+This file is a part of Bookmarks database and Internet robot.
 
-   Written by Oleg BroytMann. Copyright (C) 2000-2008 PhiloSoft Design.
 """
 
+__author__ = "Oleg Broytman <phd@phdru.name>"
+__copyright__ = "Copyright (C) 2000-2014 PhiloSoft Design"
+__license__ = "GNU GPL"
+
+__all__ = ['robot_simple', 'get_error']
+
+
 import sys, os
 import time, urllib
 from base64 import b64encode
@@ -13,13 +20,15 @@ from m_lib.net.www.util import parse_time
 from m_lib.md5wrapper import md5wrapper
 
 from bkmk_objects import Robot
-from parse_html import parse_html
+from parse_html import parse_filename
 
 
 class RedirectException(Exception):
    reloc_dict = {
       301: "perm.",
-      302: "temp.",
+      302: "temp2.",
+      303: "temp3.",
+      307: "temp7.",
       "html": "html"
    }
    def __init__(self, errcode, newurl):
@@ -40,6 +49,8 @@ class MyURLopener(urllib.URLopener):
 
    # Error 301 -- also relocated (permanently)
    http_error_301 = http_error_302
+   # Error 307 -- also relocated (temporarily)
+   http_error_307 = http_error_302
 
    # Error 401 -- authentication required
    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None): 
@@ -103,9 +114,7 @@ icons = {} # Icon cache; maps URL to a tuple (content type, data)
 
 class robot_simple(Robot):
    def check_url(self, bookmark):
-      if not self.tempfname:
-         self.tempfname = bookmark.tempfname
-
+      fname = None
       try:
          self.start = int(time.time())
          bookmark.icon = None
@@ -118,7 +127,7 @@ class robot_simple(Robot):
          urllib._urlopener.addheaders[2] = ('Referer', "%s://%s%s" % (url_type, url_host, url_path))
 
          if bookmark.charset: urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
-         fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path), self.tempfname)
+         fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path))
          if bookmark.charset: del urllib._urlopener.addheaders[-1]
 
          size = 0
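With the explicit self.tempfname gone, urlretrieve() is called without a target path, so urllib allocates its own temporary file and returns that file's name; it is this name that later feeds md5file() and finish_check_url(). A minimal illustration of the standard Python 2 urllib behaviour (the URL is only an example):

import urllib
fname, headers = urllib.urlretrieve("http://example.com/")
print fname             # a urllib-managed temporary file, e.g. "/tmp/tmp..."
print headers.gettype() # content type of the response, e.g. "text/html"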
@@ -150,12 +159,13 @@ class robot_simple(Robot):
          if urllib._urlopener.type == "ftp": # Pass welcome message through MD5
             md5.update(get_welcome())
 
-         md5.md5file(self.tempfname)
+         md5.md5file(fname)
          bookmark.md5 = str(md5)
 
          if headers:
             try:
                content_type = headers["Content-Type"]
+               self.log("   Content-Type: %s" % content_type)
                try:
                   # extract charset from "text/html; foo; charset=UTF-8, bar; baz;"
                   content_type, charset = content_type.split(';', 1)
@@ -165,47 +175,39 @@ class robot_simple(Robot):
                except (ValueError, IndexError):
                   charset = None
                   self.log("   no charset in Content-Type header")
-               if content_type in ("text/html", "application/xhtml+xml"):
-                  parser = parse_html(fname, charset, self.log)
-                  bookmark.real_title = parser.title
-                  if parser.refresh:
-                     refresh = parser.refresh
-                     try:
-                        url = refresh.split('=', 1)[1]
-                     except IndexError:
-                        url = "self"
-                     try:
-                        timeout = float(refresh.split(';')[0])
-                     except (IndexError, ValueError):
-                        raise RedirectException("html", "Bad redirect to %s (%s)" % (url, refresh))
-                     else:
-                        try:
-                           timeout = int(refresh.split(';')[0])
-                        except ValueError:
-                           pass # float timeout
-                        raise RedirectException("html", "%s (%s sec)" % (url, timeout))
-
-                  # Get favicon.ico
-                  icon = parser.icon
+               for ctype in ("text/html", "application/xhtml+xml"):
+                  if content_type.startswith(ctype):
+                      html = True
+                      break
+               else:
+                  html = False
+               if html:
+                  parser = parse_filename(fname, charset, self.log)
+                  if parser:
+                      bookmark.real_title = parser.title
+                      icon = parser.icon
+                  else:
+                     icon = None
                   if not icon:
                      icon = "/favicon.ico"
                   icon = urljoin("%s://%s%s" % (url_type, url_host, url_path), icon)
                   self.log("   looking for icon at: %s" % icon)
                   if icon in icons:
                      if icons[icon]:
+                        bookmark.icon_href = icon
                         content_type, bookmark.icon = icons[icon]
-                        self.log("       cached icon: %s" % content_type)
+                        self.log("   cached icon: %s" % content_type)
                      else:
-                        self.log("       cached icon: no icon")
+                        self.log("   cached icon: no icon")
                   else:
                      try:
                         _icon = icon
                         for i in range(8):
                            try:
-                              fname, headers = urllib.urlretrieve(_icon)
+                              icon_fname, headers = urllib.urlretrieve(_icon)
                            except RedirectException, e:
                               _icon = e.url
-                              self.log("       redirect to : %s" % _icon)
+                              self.log("   redirect to : %s" % _icon)
                            else:
                               break
                         else:
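The for/else loop above replaces the old exact-match test with a prefix test: html becomes True as soon as content_type starts with one of the two HTML types, and the else branch, which only runs when the loop finishes without a break, sets it to False. For example:

# content_type has already had its ";charset=..." part stripped above
content_type = "application/xhtml+xml"   # startswith() matches -> html = True
content_type = "image/png"               # loop ends without break -> html = False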
@@ -213,58 +215,83 @@ class robot_simple(Robot):
                      except:
                         etype, emsg, tb = sys.exc_info()
                         self.log("   no icon        : %s %s" % (etype, emsg))
-                        etype = None
-                        emsg = None
-                        tb = None
+                        etype = emsg = tb = None
                         icons[icon] = None
                      else:
                         content_type = headers["Content-Type"]
-                        if content_type.startswith("image/"):
-                           icon_file = open(fname, "rb")
-                           icon = icon_file.read()
+                        if content_type.startswith("application/") \
+                              or content_type.startswith("image/") \
+                              or content_type.startswith("text/plain"):
+                           icon_file = open(icon_fname, "rb")
+                           icon_data = icon_file.read()
                            icon_file.close()
-                           bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon))
+                           bookmark.icon_href = icon
                            self.log("   got icon       : %s" % content_type)
+                           if content_type.startswith("application/") \
+                                 or content_type.startswith("text/plain"):
+                              self.log("   non-image content type, assume x-icon")
+                              content_type = 'image/x-icon'
+                           bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon_data))
                            icons[icon] = (content_type, bookmark.icon)
                         else:
                            self.log("   no icon        : bad content type '%s'" % content_type)
                            icons[icon] = None
-                        os.remove(fname)
+                  if parser and parser.refresh:
+                     refresh = parser.refresh
+                     try:
+                        url = refresh.split('=', 1)[1]
+                     except IndexError:
+                        url = "self"
+                     try:
+                        timeout = float(refresh.split(';')[0])
+                     except (IndexError, ValueError):
+                        raise RedirectException("html", "Bad redirect to %s (%s)" % (url, refresh))
+                     else:
+                        try:
+                           timeout = int(refresh.split(';')[0])
+                        except ValueError:
+                           pass # float timeout
+                        raise RedirectException("html", "%s (%s sec)" % (url, timeout))
 
-            except KeyError:
-               pass
+            except KeyError, key:
+               self.log("   no header: %s" % key)
 
       except IOError, msg:
          if (msg[0] == "http error") and (msg[1] == -1):
             bookmark.no_error = "The server did not return any header - it is not an error, actually"
+            self.log('   no headers: %s' % bookmark.no_error)
          else:
             bookmark.error = get_error(msg)
+            self.log('   Error: %s' % bookmark.error)
 
       except EOFError:
          bookmark.error = "Unexpected EOF (FTP server closed connection)"
+         self.log('   EOF: %s' % bookmark.error)
 
       except RedirectException, msg:
          bookmark.moved = str(msg)
+         self.log('   Moved: %s' % bookmark.moved)
 
       except KeyboardInterrupt:
+         self.log("Keyboard interrupt (^C)")
          return 0
 
       except:
          import traceback
          traceback.print_exc()
          bookmark.error = "Exception!"
+         self.log('   Exception: %s' % bookmark.error)
 
       finally:
-         self.finish_check_url(bookmark)
+         self.finish_check_url(bookmark, fname)
 
       # Tested
       return 1
 
-
-   def finish_check_url(self, bookmark):
+   def finish_check_url(self, bookmark, fname=None):
       # Calculate these attributes even in case of an error
-      if os.path.exists(self.tempfname):
-         size = str(os.path.getsize(self.tempfname))
+      if fname and os.path.exists(fname):
+         size = str(os.path.getsize(fname))
          if size[-1] == 'L':
             size = size[:-1]
          bookmark.size = size
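The meta-refresh handling moved after the icon lookup, but it still splits the refresh value the same way; for a typical value (the URL is just an example) the pieces come out as:

refresh = "3; url=http://example.com/new"
refresh.split('=', 1)[1]   # -> "http://example.com/new"  (redirect target)
refresh.split(';')[0]      # -> "3", so timeout = 3
# result: RedirectException("html", "http://example.com/new (3 sec)")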
@@ -274,3 +301,4 @@ class robot_simple(Robot):
 
       now = int(time.time())
       bookmark.test_time = str(now - start)
+      urllib.urlcleanup()
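The new urlcleanup() call is the counterpart of dropping the explicit temporary file: in Python 2, every file created by a bare urlretrieve() stays in urllib's internal cache until urlcleanup() (or interpreter shutdown) deletes it. Note the order inside finish_check_url():

# sketch of the order of operations in finish_check_url() above
size = str(os.path.getsize(fname))   # the temporary file still exists here
urllib.urlcleanup()                  # after this the cached temporary files are gone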