"""Simple, straightforward robot
3 This file is a part of Bookmarks database and Internet robot.
6 __author__ = "Oleg Broytman <phd@phdru.name>"
7 __copyright__ = "Copyright (C) 2000-2012 PhiloSoft Design"
8 __license__ = "GNU GPL"
10 __all__ = ['robot_simple', 'get_error']
15 from base64 import b64encode
16 from urlparse import urljoin
18 from m_lib.net.www.util import parse_time
19 from m_lib.md5wrapper import md5wrapper
21 from bkmk_objects import Robot
22 from parse_html import parse_html
class RedirectException(Exception):
    """Raised when the server answers with an HTTP redirect (301/302/307)
    or an HTML meta-refresh, so the caller can record where the URL moved.

    NOTE(review): reloc_dict (maps an error code to a human-readable
    relocation name) is defined outside this view -- confirm it covers
    every errcode passed in, including the "html" pseudo-code.
    """
    def __init__(self, errcode, newurl):
        # Message looks like "(moved temporarily) to http://..." -- TODO confirm
        # the exact wording stored in reloc_dict.
        Exception.__init__(self, "(%s) to %s" % (self.reloc_dict[errcode], newurl))
class MyURLopener(urllib.URLopener):
    """URL opener that raises exceptions for redirects and HTTP errors
    instead of silently following or ignoring them, so the robot can
    record the outcome on the bookmark.
    """

    # Error 302 -- relocated (temporarily)
    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        # Prefer the standard Location header, fall back to the older URI
        # header.  NOTE(review): the else-branch (no header at all) is
        # outside this view -- confirm newurl is always bound before use.
        if headers.has_key('location'):
            newurl = headers['location']
        elif headers.has_key('uri'):
            newurl = headers['uri']
        raise RedirectException(errcode, newurl)

    # Error 301 -- also relocated (permanently)
    http_error_301 = http_error_302
    # Error 307 -- also relocated (temporary)
    http_error_307 = http_error_302

    # Error 401 -- authentication required
    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
        raise IOError, ('http error', errcode, "Authentication required ", headers)

    # Any other HTTP error becomes an IOError the robot catches later.
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        raise IOError, ('http error', errcode, errmsg, headers)
# Install our opener globally so every urllib.urlretrieve() call below
# goes through MyURLopener and raises on redirects/errors.
urllib._urlopener = MyURLopener()

# Fake headers to pretend this is a real browser
_version = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0"
# Slot [0] is the default User-Agent header -- replace it in place.
urllib._urlopener.addheaders[0] = ('User-Agent', _version)
_version = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
    sys.version_info[0], sys.version_info[1], sys.version_info[2], urllib.__version__)
# Be honest in a secondary header about who we really are.
urllib._urlopener.addheader('X-User-Agent', _version)
# Placeholder Referer; check_url() overwrites addheaders[2] per request.
urllib._urlopener.addheader('Referer', '')
urllib._urlopener.addheader('Connection', 'close')
urllib._urlopener.addheader('Accept', '*/*')
urllib._urlopener.addheader('Accept-Language', 'ru,en')
urllib._urlopener.addheader('Cache-Control', 'max-age=300')
    # NOTE(review): fragment of get_error(msg) -- the def line and the
    # surrounding branch/loop structure are outside this view.  A plain
    # string message passes through; otherwise each element of the error
    # tuple is quoted with newlines escaped and the parts are joined
    # into a single "(...)" string.
    if isinstance(msg, str):
            s.append("'%s'" % str(i).replace('\n', "\\n"))
    return "(%s)" % ' '.join(s)
# Keep a reference to the original wrapper so the subclass can delegate.
urllib_ftpwrapper = urllib.ftpwrapper


class myftpwrapper(urllib_ftpwrapper):
    """ftpwrapper that records the key under which urllib caches the FTP
    connection, so get_welcome() can later fetch the server's welcome
    banner from urllib._urlopener.ftpcache.
    """
    def __init__(self, user, passwd, host, port, dirs):
        urllib_ftpwrapper.__init__(self, user, passwd, host, port, dirs)
        # NOTE(review): a `global ftpcache_key` declaration appears to be
        # outside this view -- without it this assignment would be local.
        ftpcache_key = (user, host, port, '/'.join(dirs))


# Monkey-patch urllib to use the recording wrapper for all FTP URLs.
urllib.ftpwrapper = myftpwrapper
    # NOTE(review): fragment of get_welcome() -- the def line is outside
    # this view.  Looks up the cached FTP connection recorded by
    # myftpwrapper and returns its welcome banner.
    _welcome = urllib._urlopener.ftpcache[ftpcache_key].ftp.welcome
    ftpcache_key = None # I am assuming there are no duplicate ftp URLs in db.
                        # If there are - ftpcache_key in prev line is invalid.
# Module-level favicon cache shared across all checked bookmarks.
icons = {} # Icon cache; maps URL to a tuple (content type, data)
           # or None if there is no icon.
class robot_simple(Robot):
    """Simple single-threaded robot: download a bookmark's URL with
    urllib and record size, modification time, MD5, real title, favicon
    and redirect information on the bookmark object.

    NOTE(review): this view of the file is gapped -- try/except/else
    scaffolding and a number of statements are missing between the
    visible lines, so the method bodies below are fragments; comments
    mark where invisible structure is assumed.
    """

    def check_url(self, bookmark):
        # Remember when this check started (whole seconds since epoch);
        # finish_check_url() presumably uses it for test_time -- confirm.
        self.start = int(time.time())

        # Split the bookmark URL into scheme, host and path; strip the
        # #fragment so it is not sent to the server.
        url_type, url_rest = urllib.splittype(bookmark.href)
        url_host, url_path = urllib.splithost(url_rest)
        url_path, url_tag = urllib.splittag(url_path)

        # Set fake referer to the root of the site
        urllib._urlopener.addheaders[2] = ('Referer', "%s://%s%s" % (url_type, url_host, url_path))

        # Advertise the bookmark's charset for this request only, then
        # remove the header again so it does not leak to other URLs.
        if bookmark.charset: urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
        fname, headers = urllib.urlretrieve("%s://%s%s" % (url_type, url_host, url_path))
        if bookmark.charset: del urllib._urlopener.addheaders[-1]

        # Size as reported by the server.
        size = headers["Content-Length"]

        # Convert Last-Modified into a unix-timestamp string; the branch
        # that falls back to last_visit is only partially visible here.
        last_modified = headers["Last-Modified"]
        last_modified = parse_time(last_modified)
        last_modified = str(int(last_modified))
        last_modified = bookmark.last_visit
        bookmark.last_modified = last_modified

        # NOTE(review): md5 is created on lines outside this view.
        if urllib._urlopener.type == "ftp": # Pass welcome message through MD5
            md5.update(get_welcome())
        bookmark.md5 = str(md5)

        content_type = headers["Content-Type"]
        self.log(" Content-Type: %s" % content_type)
        # extract charset from "text/html; foo; charset=UTF-8, bar; baz;"
        # (the opening try: for this parse is outside this view)
            content_type, charset = content_type.split(';', 1)
            content_type = content_type.strip()
            charset = charset.split('=')[1].strip().split(',')[0]
            self.log(" HTTP charset : %s" % charset)
        except (ValueError, IndexError):
            self.log(" no charset in Content-Type header")

        # Only HTML-like documents are parsed for a real title.
        for ctype in ("text/html", "application/xhtml+xml"):
            if content_type.startswith(ctype):
                # (more of the loop body is outside this view)
                parser = parse_html(fname, charset, self.log)
                bookmark.real_title = parser.title

        # Try the site's favicon, consulting the module-level icon cache.
        icon = "/favicon.ico"
        icon = urljoin("%s://%s%s" % (url_type, url_host, url_path), icon)
        self.log(" looking for icon at: %s" % icon)
        # Cache hit: reuse the stored (content type, data) pair.
        # NOTE(review): the `icon in icons` test and its else-branch are
        # outside this view.
        bookmark.icon_href = icon
        content_type, bookmark.icon = icons[icon]
        self.log(" cached icon: %s" % content_type)
        # Cached negative result (icons[icon] is None):
        self.log(" cached icon: no icon")

        # Cache miss: download the icon, following redirects by hand.
        # (the redirect-retry loop and its try: are outside this view)
                icon_fname, headers = urllib.urlretrieve(_icon)
            except RedirectException, e:
                self.log(" redirect to : %s" % _icon)
        # Give up after too many icon redirects.
        raise IOError("Too many redirects")
        # Any other failure means "no icon"; drop the traceback
        # references to avoid keeping frames alive.
        etype, emsg, tb = sys.exc_info()
        self.log(" no icon : %s %s" % (etype, emsg))
        etype = emsg = tb = None

        # Accept images plus content types servers commonly mislabel
        # icons with; anything else is rejected below.
        content_type = headers["Content-Type"]
        if content_type.startswith("application/") \
           or content_type.startswith("image/") \
           or content_type.startswith("text/plain"):
            icon_file = open(icon_fname, "rb")
            icon_data = icon_file.read()
            bookmark.icon_href = icon
            self.log(" got icon : %s" % content_type)
            if content_type.startswith("application/") \
               or content_type.startswith("text/plain"):
                self.log(" non-image content type, assume x-icon")
                content_type = 'image/x-icon'
            # Store the icon inline as a data: URI and remember it.
            bookmark.icon = "data:%s;base64,%s" % (content_type, b64encode(icon_data))
            icons[icon] = (content_type, bookmark.icon)
        # Rejected content type -> record that there is no usable icon.
        self.log(" no icon : bad content type '%s'" % content_type)

        # A <meta http-equiv="refresh"> counts as a redirect too.
        if parser and parser.refresh:
            refresh = parser.refresh
            url = refresh.split('=', 1)[1]
            # (the try: around the timeout parse is outside this view)
                timeout = float(refresh.split(';')[0])
            except (IndexError, ValueError):
                raise RedirectException("html", "Bad redirect to %s (%s)" % (url, refresh))
            timeout = int(refresh.split(';')[0])
            raise RedirectException("html", "%s (%s sec)" % (url, timeout))

        # --- error handling for the whole check; the outer try: and the
        # --- `except IOError, msg:` line are outside this view.
        except KeyError, key:
            self.log(" no header: %s" % key)
            # A server returning no headers at all is deliberately not
            # treated as an error (msg comes from an IOError handler).
            if (msg[0] == "http error") and (msg[1] == -1):
                bookmark.no_error = "The server did not return any header - it is not an error, actually"
                self.log(' no headers: %s' % bookmark.no_error)
            bookmark.error = get_error(msg)
            self.log(' Error: %s' % bookmark.error)
            # EOFError branch: FTP server dropped the connection.
            bookmark.error = "Unexpected EOF (FTP server closed connection)"
            self.log(' EOF: %s' % bookmark.error)
        except RedirectException, msg:
            # Not an error: remember where the bookmark moved to.
            bookmark.moved = str(msg)
            self.log(' Moved: %s' % bookmark.moved)
        except KeyboardInterrupt:
            self.log("Keyboard interrupt (^C)")
            # Catch-all branch: log the traceback and flag the bookmark.
            traceback.print_exc()
            bookmark.error = "Exception!"
            self.log(' Exception: %s' % bookmark.error)
        # Always record timing/size attributes, even after an error.
        self.finish_check_url(bookmark, fname)

    def finish_check_url(self, bookmark, fname=None):
        # Calculate these attributes even in case of an error
        if fname and os.path.exists(fname):
            size = str(os.path.getsize(fname))
        # Record when the test ran and how long it took.
        # NOTE(review): the lines binding the local `start` (presumably
        # from self.start) are outside this view -- confirm.
        bookmark.last_tested = str(start)
        now = int(time.time())
        bookmark.test_time = str(now - start)