class robot_urllib(robot_base):
- def urlretrieve(self, bookmark, url, accept_charset=False):
+ def get(self, bookmark, url, accept_charset=False):
try:
# Set fake referer to the base URL
urllib._urlopener.addheaders[2] = ('Referer', url)
if accept_charset and bookmark.charset:
urllib._urlopener.addheader('Accept-Charset', bookmark.charset)
- fname, headers = urllib.urlretrieve(url)
- if accept_charset and bookmark.charset:
- del urllib._urlopener.addheaders[-1]
+ # try/finally guarantees the temporary Accept-Charset header added above
+ # is removed even when the fetch raises, so it cannot leak into later requests
+ try:
+ fname, headers = urllib.urlretrieve(url)
+ finally:
+ if accept_charset and bookmark.charset:
+ del urllib._urlopener.addheaders[-1] # Remove Accept-Charset
infile = open(fname, 'rb')
content = infile.read()
infile.close()
- return headers, content, None
+ # New return convention: (error, headers, content); error is None on success
+ return None, headers, content
except IOError, msg:
if (msg[0] == "http error") and (msg[1] == -1):
error = get_error(msg)
self.log(' Error: %s' % error)
- return None, None, error
+ return error, None, None
def get_ftp_welcome(self):
global ftpcache_key