def urlretrieve(self, bookmark, url, accept_charset=False):
    """Fetch *url* and return a ``(headers, content, error)`` triple.

    On success returns ``(headers, content, None)``.  On IOError returns
    ``(None, None, error)``; *error* is None for the special "server sent
    no headers" HTTP case, which is recorded on ``bookmark.no_error``
    instead of being treated as a failure.
    """
    try:
        # Set fake referer to the base URL.  Slot 2 of the shared
        # opener's default headers is reserved for Referer here.
        urllib._urlopener.addheaders[2] = ('Referer', url)

        if accept_charset and bookmark.charset:
            urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
        try:
            fname, headers = urllib.urlretrieve(url)
        finally:
            # Always drop the temporary Accept-Charset header, even when
            # retrieval raises, so it cannot leak into later requests.
            if accept_charset and bookmark.charset:
                del urllib._urlopener.addheaders[-1]

        # 'with' guarantees the temp file handle is closed even if read() fails.
        with open(fname, 'rb') as infile:
            content = infile.read()

        return headers, content, None

    except IOError as msg:
        # urllib raises IOError('http error', errcode, ...); errcode -1
        # means the server replied without any header at all.
        # Use .args - indexing the exception object is a deprecated idiom.
        if (msg.args[0] == "http error") and (msg.args[1] == -1):
            error = None
            bookmark.no_error = "The server did not return any header - it is not an error, actually"
            self.log(' no headers: %s' % bookmark.no_error)
        else:
            error = get_error(msg)
            self.log(' Error: %s' % error)

        return None, None, error
-
def get_ftp_welcome(self):
    """Return the welcome banner of the cached FTP connection."""
    global ftpcache_key
    welcome = urllib._urlopener.ftpcache[ftpcache_key].ftp.welcome
    # Invalidate the key: we assume there are no duplicate ftp URLs in
    # the db.  If there are, the lookup above already used a stale key.
    ftpcache_key = None
    return welcome
-
def cleanup(self):
    # Remove the temporary files created by earlier urllib.urlretrieve calls.
    urllib.urlcleanup()
def get(self, bookmark, url, accept_charset=False):
    """Fetch *url*; return ``(error, redirect_code, redirect_url, headers, content)``.

    Exactly one "branch" of the 5-tuple is populated:
      - success:  ``(None, None, None, headers, content)``
      - redirect: ``(None, errcode, newurl, None, None)``
      - error:    ``(error, None, None, None, None)``; *error* is None for
        the special "server sent no headers" case, which is recorded on
        ``bookmark.no_error`` rather than treated as a failure.
    """
    try:
        # Set fake referer to the base URL.  Slot 2 of the shared
        # opener's default headers is reserved for Referer here.
        urllib._urlopener.addheaders[2] = ('Referer', url)

        if accept_charset and bookmark.charset:
            urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
        try:
            fname, headers = urllib.urlretrieve(url)
        finally:
            if accept_charset and bookmark.charset:
                del urllib._urlopener.addheaders[-1]  # Remove Accept-Charset

        # 'with' guarantees the temp file handle is closed even if read() fails.
        with open(fname, 'rb') as infile:
            content = infile.read()

        return None, None, None, headers, content

    except RedirectException as e:
        return None, e.errcode, e.newurl, None, None

    except IOError as e:
        # urllib raises IOError('http error', errcode, ...); errcode -1
        # means the server replied without any header at all.
        # Use .args - indexing the exception object directly (e[0]) is a
        # deprecated idiom that was removed in Python 3.
        if (e.args[0] == "http error") and (e.args[1] == -1):
            error = None
            bookmark.no_error = "The server did not return any header - it is not an error, actually"
            self.log(' no headers: %s' % bookmark.no_error)
        else:
            error = get_error(e)
            self.log(' Error: %s' % error)

        return error, None, None, None, None
+
def get_ftp_welcome(self):
    """Return the welcome banner of the cached FTP connection."""
    global ftpcache_key
    welcome = urllib._urlopener.ftpcache[ftpcache_key].ftp.welcome
    # Invalidate the key: we assume there are no duplicate ftp URLs in
    # the db.  If there are, the lookup above already used a stale key.
    ftpcache_key = None
    return welcome
+
def finish_check_url(self, bookmark):
    # Run the base-class bookkeeping for this URL first, then delete the
    # temporary files urllib.urlretrieve created while checking it.
    robot_base.finish_check_url(self, bookmark)
    urllib.urlcleanup()