bookmark.icon = None
if bookmark.charset:
- headers = request_headers.copy()
- headers['Accept-Charset'] = bookmark.charset
+ req_headers = request_headers.copy()
+ req_headers['Accept-Charset'] = bookmark.charset
else:
- headers = request_headers
+ req_headers = request_headers
- error, http_status_code, redirect_to, headers, content = \
- await self.get_url(bookmark.href, headers)
+ error, http_status_code, redirect_to, resp_headers, content = \
+ await self.get_url(bookmark.href, req_headers)
if error is not None:
bookmark.error = error
size = 0
last_modified = None
- if headers:
+ if resp_headers:
try:
- size = headers["Content-Length"]
+ size = resp_headers["Content-Length"]
except KeyError:
pass
try:
- last_modified = headers["Last-Modified"]
+ last_modified = resp_headers["Last-Modified"]
except KeyError:
pass
bookmark.last_modified = last_modified
charset = None
- if headers:
+ if resp_headers:
try:
- content_type = headers["Content-Type"]
+ content_type = resp_headers["Content-Type"]
self.log(" Content-Type : %s" % content_type)
if content_type is None:
if b'html' in content.lower():
finally:
self.finish_check_url(bookmark)
- async def get_url(self, url, headers):
+ async def get_url(self, url, req_headers):
split_results = urlsplit(url)
url_proto = split_results.scheme
url_host = split_results.hostname
if use_proxy and url_host in self.proxy_ok:
self.log(' Immediately trying with the proxy')
- error, http_status_code, redirect_to, headers, content = \
- await self.get(url, headers, use_proxy=True)
+ error, http_status_code, redirect_to, resp_headers, content = \
+ await self.get(url, req_headers, use_proxy=True)
else:
- error, http_status_code, redirect_to, headers, content = \
- await self.get(url, headers)
+ error, http_status_code, redirect_to, resp_headers, content = \
+ await self.get(url, req_headers)
if error is not None and (
not url_host.startswith('localhost') and
not url_host.startswith('127.')
self.log(' Error : %s' % error)
if use_proxy and http_status_code != 404:
self.log(' Retrying with the proxy...')
- error, http_status_code, redirect_to, headers, content = \
- await self.get(url, headers, use_proxy=True)
+ error, http_status_code, redirect_to, resp_headers, content = \
+ await self.get(url, req_headers, use_proxy=True)
if error is None:
self.proxy_ok.add(url_host)
if (error is not None) or (
return error, http_status_code, None, None, None
if http_status_code and (http_status_code >= 300):
return None, http_status_code, redirect_to, None, None
- return None, None, None, headers, content
+ return None, None, None, resp_headers, content
def set_redirect(self, bookmark, errcode, newurl):
bookmark.moved = moved = "(%s) to %s" % (_reloc_dict[errcode], newurl)
def version_str(self):
return 'aiohttp/%s' % aiohttp.__version__
- async def get(self, url, headers, use_proxy=False):
+ async def get(self, url, req_headers, use_proxy=False):
if url.startswith('ftp://'):
error, body = await _get_ftp(
url, timeout=self.ftp_timeout,
proxy = None
error, status, resp_headers, body = await _get_http(
- url, headers=headers, proxy=proxy,
+ url, req_headers=req_headers, proxy=proxy,
timeout=self.timeout,
)
if error is not None or (status and status >= 400):
return '' # We don't store welcome message yet
-async def _get_http(url, headers={}, proxy=None, timeout=60):
+async def _get_http(url, req_headers=None, proxy=None, timeout=60):
connector = None
if proxy and proxy.startswith('socks5'):
if proxy.startswith('socks5h://'):
connector=connector, timeout=timeout
) as session:
async with session.get(
- url, headers=headers, proxy=proxy, allow_redirects=False,
+ url, headers=req_headers,
+ proxy=proxy, allow_redirects=False,
ssl_context=ssl_context) as resp:
return None, resp.status, resp.headers, await resp.read()
except (
def version_str(self):
return str(pycurl.version)
- async def get(self, url, headers, use_proxy=False):
- headers = ['%s: %s' % (k, v) for k, v in headers.items()]
+ async def get(self, url, req_headers, use_proxy=False):
+ req_headers = ['%s: %s' % (k, v) for k, v in req_headers.items()]
curl = pycurl.Curl()
- self.headers = {}
+ self.resp_headers = {}
self.body = b''
# Do not follow redirects
curl.setopt(pycurl.WRITEFUNCTION, self.body_callback)
curl.setopt(pycurl.HTTPGET, 1)
- curl.setopt(pycurl.HTTPHEADER, headers)
+ curl.setopt(pycurl.HTTPHEADER, req_headers)
try:
url.encode('ascii')
except UnicodeEncodeError:
if status >= 400:
return "Error %d" % status, status, None, None, None
if status >= 300:
- return None, status, self.headers['Location'], None, None
- return None, None, None, self.headers, self.body
+ return None, status, self.resp_headers['Location'], None, None
+ return None, None, None, self.resp_headers, self.body
def header_callback(self, data):
for encoding in 'ascii', 'latin1', 'utf-8':
return
if ':' in data:
key, value = data.split(':', 1)
- self.headers[key.title()] = value.strip()
+ self.resp_headers[key.title()] = value.strip()
def body_callback(self, data):
self.body += data
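For reference, a minimal standalone sketch of the same request/response header split with pycurl; the fetch_with_curl helper and its names are illustrative only, not part of this change:

    import pycurl
    from io import BytesIO

    def fetch_with_curl(url, req_headers):
        # Collect response headers separately from the request headers we
        # send, mirroring the resp_headers attribute used by header_callback.
        resp_headers = {}
        body = BytesIO()

        def header_callback(line):
            line = line.decode('latin1')
            if ':' in line:
                key, value = line.split(':', 1)
                resp_headers[key.title()] = value.strip()

        curl = pycurl.Curl()
        curl.setopt(pycurl.URL, url)
        curl.setopt(pycurl.HTTPHEADER,
                    ['%s: %s' % (k, v) for k, v in req_headers.items()])
        curl.setopt(pycurl.HEADERFUNCTION, header_callback)
        curl.setopt(pycurl.WRITEDATA, body)
        curl.perform()
        status = curl.getinfo(pycurl.RESPONSE_CODE)
        curl.close()
        return status, resp_headers, body.getvalue()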
def version_str(self):
return 'python-requests urllib3/%s' % urllib3.__version__
- async def get(self, url, headers, use_proxy=False):
+ async def get(self, url, req_headers, use_proxy=False):
if url.startswith('ftp://'):
error, welcome, body = _get_ftp(url, self.timeout)
if error is not None:
error = r = None
try:
- r = s.get(url, headers=headers, timeout=self.timeout,
+ r = s.get(url, headers=req_headers, timeout=self.timeout,
allow_redirects=False, proxies=proxies,
verify=False)
except requests.RequestException as e:
current_href.set(bookmark.href)
await self.check_bookmark_async(bookmark)
- async def get_url(self, url, headers):
+ async def get_url(self, url, req_headers):
if url not in self.logs:
self.logs[url] = []
current_href.set(url)
- return await super(multi_async_mixin, self).get_url(url, headers)
+ return await super(multi_async_mixin, self).get_url(url, req_headers)
def wait(self):
self.loop.run_until_complete(self.wait_async())
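For reference, a minimal standalone sketch of how the req_headers / resp_headers naming plays out with aiohttp; the fetch helper and its accept_charset argument are illustrative only, not part of this change:

    import asyncio
    import aiohttp

    async def fetch(url, accept_charset=None):
        # Build the request headers separately from the response headers,
        # as in the req_headers / resp_headers split above.
        req_headers = {'Accept': '*/*'}
        if accept_charset:
            req_headers['Accept-Charset'] = accept_charset
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers=req_headers,
                                   allow_redirects=False) as resp:
                resp_headers = resp.headers
                body = await resp.read()
        return resp.status, resp_headers, body

    # Example:
    # status, resp_headers, body = asyncio.run(fetch('https://example.com'))
    # print(status, resp_headers.get('Content-Type'), len(body))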