"""Robot based on urllib2

This file is a part of Bookmarks database and Internet robot.
"""
7 __author__ = "Oleg Broytman <phd@phdru.name>"
8 __copyright__ = "Copyright (C) 2014 PhiloSoft Design"
9 __license__ = "GNU GPL"
11 __all__ = ['robot_urllib2']
17 from Robots.bkmk_robot_base import robot_base
# Build a bare OpenerDirector with an explicit handler list rather than
# urllib2.build_opener().  Note that HTTPRedirectHandler is deliberately
# absent: 30x responses surface as HTTPError so robot_urllib2.get() can
# report the redirect to its caller instead of following it silently.
opener = urllib2.OpenerDirector()
default_classes = [urllib2.HTTPHandler, urllib2.HTTPDefaultErrorHandler,
                   urllib2.FTPHandler, urllib2.HTTPErrorProcessor]
# HTTPS support is optional in Python 2 — only available when the
# interpreter was built with SSL; probe httplib before adding the handler.
if hasattr(httplib, 'HTTPS'):
    default_classes.insert(0, urllib2.HTTPSHandler)
for klass in default_classes:
    opener.add_handler(klass())

# Install as the process-wide default so urllib2.urlopen() uses it.
urllib2.install_opener(opener)
# Headers that make the robot look like an ordinary desktop browser;
# the X-User-Agent header still identifies the real client honestly.
_user_agent = "Mozilla/5.0 (X11; Linux i686; rv:30.0) Gecko/20100101 Firefox/30.0"
_x_user_agent = "bookmarks_db (Python %d.%d.%d; urllib2)" % tuple(sys.version_info[:3])
37 class robot_urllib2(robot_base):
38 def get(self, bookmark, url, accept_charset=False):
39 request = urllib2.Request(url)
40 request.add_header('Accept', '*/*')
41 if accept_charset and bookmark.charset:
42 request.add_header('Accept-Charset', bookmark.charset)
43 request.add_header('Accept-Language', 'ru,en')
44 request.add_header('Cache-Control', 'max-age=300')
45 request.add_header('Connection', 'close')
46 request.add_header('Referer', url)
47 request.add_header('User-agent', _user_agent)
48 request.add_header('X-User-Agent', _x_user_agent)
51 response = urllib2.urlopen(request)
52 except urllib2.HTTPError, e:
53 if e.code in (301, 302, 303, 307):
54 return None, e.code, e.hdrs['Location'], None, None
56 return "HTTP Error %s: %s" % (e.code, e.msg), None, None, None, None
57 except urllib2.URLError, e:
58 return "URL Error: %s" % e.reason, None, None, None, None
60 return None, None, None, response.info(), response.read()
62 def get_ftp_welcome(self):