proxy_error = set()
timeout = 60
- ftp_timeout = 60
report_checked = True
def __init__(self, *args, **kw):
-     for attr in 'timeout', 'ftp_timeout':
+     for attr in ['timeout']:
        value = getattr(self, attr)
        if not isinstance(value, int):
            value = int(value)
            setattr(self, attr, value)  # store the coerced int back on the instance
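The coercion above matters because timeout may be overridden with a
string (e.g. read verbatim from a configuration file).  A minimal sketch
of the idea, using an illustrative Robot class rather than the project's
actual one:

    class Robot:
        timeout = 60

        def __init__(self, *args, **kw):
            for attr in ['timeout']:
                value = getattr(self, attr)
                if not isinstance(value, int):
                    setattr(self, attr, int(value))

    class ConfiguredRobot(Robot):
        timeout = '30'  # e.g. taken as-is from an ini file

    assert ConfiguredRobot().timeout == 30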
async def get(self, url, req_headers, use_proxy=False):
    if url.startswith('ftp://'):
        error, body = await _get_ftp(
-             url, timeout=self.ftp_timeout,
+             url, timeout=self.timeout,
        )
        if error is not None:
            error = str(error)
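The _get_ftp helper itself is not shown here; a hypothetical stand-in
built on ftplib, assuming only the (error, body) return contract used
above (the project's real helper may differ):

    import asyncio
    import io
    from ftplib import FTP, all_errors
    from urllib.parse import urlsplit

    async def _get_ftp(url, timeout=60):
        # Hypothetical stand-in for the helper used above.
        def fetch():
            parts = urlsplit(url)
            buf = io.BytesIO()
            with FTP(parts.hostname, timeout=timeout) as ftp:
                ftp.login()  # anonymous login
                ftp.retrbinary('RETR %s' % parts.path, buf.write)
            return buf.getvalue()
        try:
            body = await asyncio.to_thread(fetch)  # don't block the event loop
        except all_errors as error:
            return error, None
        return None, body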
curl.setopt(pycurl.SSL_VERIFYPEER, 0)
curl.setopt(curl.CAINFO, certifi.where())
# Set timeouts to avoid hanging too long
- if url.startswith('ftp://'):
-     timeout = robot.ftp_timeout
- else:
-     timeout = robot.timeout
- curl.setopt(pycurl.CONNECTTIMEOUT, timeout)
- curl.setopt(pycurl.TIMEOUT, timeout)
+ curl.setopt(pycurl.CONNECTTIMEOUT, robot.timeout)
+ curl.setopt(pycurl.TIMEOUT, robot.timeout)
# Parse Last-Modified
curl.setopt(pycurl.OPT_FILETIME, 1)
continue
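For context, the same pycurl options in a self-contained fetch; the URL
and the 60-second value are illustrative, not taken from the project:

    from io import BytesIO

    import certifi
    import pycurl

    buf = BytesIO()
    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, 'https://phdru.name/')  # illustrative URL
    curl.setopt(pycurl.WRITEDATA, buf)
    curl.setopt(pycurl.CAINFO, certifi.where())
    curl.setopt(pycurl.CONNECTTIMEOUT, 60)  # seconds to establish the connection
    curl.setopt(pycurl.TIMEOUT, 60)         # seconds for the whole transfer
    curl.setopt(pycurl.OPT_FILETIME, 1)     # request the document date
    curl.perform()
    # INFO_FILETIME is a unix timestamp parsed from Last-Modified, or -1
    last_modified = curl.getinfo(pycurl.INFO_FILETIME)
    curl.close()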
# Setup timeout watch
- if url.startswith('ftp://'):
-     timeout = self.ftp_timeout
- else:
-     timeout = self.timeout
- timeoutCall = reactor.callLater(timeout, d.cancel)
+ timeoutCall = reactor.callLater(self.timeout, d.cancel)
d.addBoth(self.cancelTimeout, timeoutCall)
d.addCallback(self.cbRequest, queue, timeoutCall)
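This is the stock Twisted recipe for bounding a request: callLater
schedules d.cancel(), and the addBoth handler disarms the timer as soon
as the Deferred fires either way.  A sketch of cancelTimeout under that
assumption:

    def cancelTimeout(self, passthrough, timeoutCall):
        # Disarm the timer if the Deferred fired before the timeout
        if timeoutCall.active():
            timeoutCall.cancel()
        return passthrough  # pass the result/failure down the chain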
Default list of robots is still multirequests,aio.
+ Robots: Removed ftp_timeout; FTP requests now use the common timeout.
+
WHERE TO GET
Home page: https://phdru.name/Software/Python/#bookmarks_db
Default list of robots is still multirequests,aio.
+ Robots: Removed ftp_timeout; FTP requests now use the common timeout.
+
Version 6.1.0 (2024-09-08)
Combine aiohttp with multiaio; the combined robot is named just aio.