"""Simple, straightforward robot based on urllib
3 This file is a part of Bookmarks database and Internet robot.
7 __author__ = "Oleg Broytman <phd@phdru.name>"
8 __copyright__ = "Copyright (C) 2000-2014 PhiloSoft Design"
9 __license__ = "GNU GPL"
11 __all__ = ['robot_urllib']
16 from Robots.bkmk_robot_base import robot_base, RedirectException, get_error
class MyURLopener(urllib.URLopener):
    """URLopener that surfaces redirects and auth failures to the robot.

    Redirect responses (301/302/303/307) are turned into RedirectException
    so the robot can record the new URL itself instead of silently
    following it; 401 and all other HTTP errors raise IOError with the
    ('http error', code, message, headers) argument tuple that
    robot_urllib.get() inspects.
    """

    # Error 301 -- relocated (permanently)
    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        # 'in' instead of the deprecated headers.has_key().
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            # A redirect without any target URL: report it as a plain
            # HTTP error instead of crashing with NameError on an
            # unbound 'newurl'.
            raise IOError('http error', errcode, errmsg, headers)
        raise RedirectException(errcode, newurl)

    # Error 302 -- relocated (temporarily)
    http_error_302 = http_error_301
    # Error 303 -- relocated (see other)
    http_error_303 = http_error_301
    # Error 307 -- relocated (temporarily)
    http_error_307 = http_error_301

    # Error 401 -- authentication required
    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
        # raise Exc(args...) is equivalent to the old "raise Exc, (args...)"
        # (a tuple after "raise Exc," is used as the argument list) and
        # also parses under Python 3.
        raise IOError('http error', errcode, "Authentication required ", headers)

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        raise IOError('http error', errcode, errmsg, headers)
urllib._urlopener = MyURLopener()

# Masquerade as a real browser; identify ourselves honestly in X-User-Agent.
_version = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en) Gecko/20001221 Firefox/2.0.0"
urllib._urlopener.addheaders[0] = ('User-Agent', _version)
_version = "bookmarks_db (Python %d.%d.%d; urllib/%s)" % (
    sys.version_info[0], sys.version_info[1], sys.version_info[2], urllib.__version__)
urllib._urlopener.addheader('X-User-Agent', _version)

# NOTE: the order of these calls matters -- robot_urllib.get() overwrites
# addheaders[2] ('Referer', added first below) with the page's base URL.
for _header, _value in (
    ('Referer', ''),
    ('Connection', 'close'),
    ('Accept', '*/*'),
    ('Accept-Language', 'ru,en'),
    ('Cache-Control', 'max-age=300'),
):
    urllib._urlopener.addheader(_header, _value)
del _header, _value  # keep the module namespace as before
urllib_ftpwrapper = urllib.ftpwrapper

# Cache key of the most recent FTP connection in urllib._urlopener.ftpcache;
# set by myftpwrapper.__init__, consumed by robot_urllib.get_ftp_welcome().
ftpcache_key = None


class myftpwrapper(urllib_ftpwrapper):
    """ftpwrapper that records under which key urllib cached the connection."""

    def __init__(self, user, passwd, host, port, dirs):
        urllib_ftpwrapper.__init__(self, user, passwd, host, port, dirs)
        # Fix: without this 'global' declaration the assignment below only
        # created a dead local, and the module-level key read by
        # get_ftp_welcome() was never set.
        global ftpcache_key
        ftpcache_key = (user, host, port, '/'.join(dirs))


urllib.ftpwrapper = myftpwrapper
76 class robot_urllib(robot_base):
def get(self, bookmark, url, accept_charset=False):
    """Fetch *url* and return a (error, headers, content) triple.

    On success error is None; on failure headers and content are None.
    A server that returns no headers at all is treated as success:
    bookmark.no_error is set and error is None.

    NOTE(review): the except-arm below was reconstructed -- the paste
    elides the 'except IOError' line, but the use of 'msg' and the
    second return prove it was there; verify against the repository.
    """
    try:
        # Set fake referer to the base URL
        urllib._urlopener.addheaders[2] = ('Referer', url)

        if accept_charset and bookmark.charset:
            urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
        try:
            fname, headers = urllib.urlretrieve(url)
        finally:
            # Remove Accept-Charset even if urlretrieve() raised, so the
            # extra header does not leak into subsequent requests.
            if accept_charset and bookmark.charset:
                del urllib._urlopener.addheaders[-1]

        infile = open(fname, 'rb')
        try:
            content = infile.read()
        finally:
            infile.close()  # do not leak the file descriptor

        return None, headers, content

    except IOError as msg:
        # MyURLopener raises IOError('http error', code, message, headers).
        if (msg[0] == "http error") and (msg[1] == -1):
            # The server sent no headers; record it but report success.
            error = None
            bookmark.no_error = "The server did not return any header - it is not an error, actually"
            self.log(' no headers: %s' % bookmark.no_error)
        else:
            error = get_error(msg)
            self.log(' Error: %s' % error)

        return error, None, None
107 def get_ftp_welcome(self):
109 _welcome = urllib._urlopener.ftpcache[ftpcache_key].ftp.welcome
110 ftpcache_key = None # I am assuming there are no duplicate ftp URLs in db.
# If there are, the ftpcache_key recorded in the previous line is invalid.