__all__ = ['robot_curl']
-from queue import Queue
from time import sleep
-import concurrent.futures
-import threading
import certifi
import pycurl
-from Robots.base import robot_base, encode_url
-from Robots.concurrent_futures import concurrent_futures
+from Robots.base import encode_url
+from Robots.concurrent_futures import cf_multithread
-class robot_curl(concurrent_futures):
- def __init__(self, *args, **kw):
- self.concurrent_class = concurrent.futures.ThreadPoolExecutor
- concurrent_futures.__init__(self, *args, **kw)
- #
- # Rename self.log, create one log_lines list per URL (thread)
- self.file_log = self.log
- del self.log
- self.local = threading.local()
-
- # Queue to pass requests to the main loop
- self.queue = Queue()
-
- # Main loop: run curl_multi
- self.executor.submit(self.curl_multi)
-
- def __getattr__(self, attr):
- if attr != 'log':
- raise AttributeError(attr)
- try:
- return self.local.log.append
- except AttributeError:
- return self.file_log
-
+class robot_curl(cf_multithread):
def version_str(self):
- return str(pycurl.version)
-
- def check_bkmk_task(self):
- return self.check_bkmk_thread
+ return super(cf_multithread, self).version_str() \
+ + '; ' + str(pycurl.version)
- def check_bkmk_thread(self, bookmark):
- self.local.log = log = []
- robot_base.check_bookmark(self, bookmark)
- return bookmark, log
+ def main_thread(self):
+ """Main loop: run curl_multi"""
- async def get(self, url, req_headers, use_proxy=False):
- # Queue to wait results
- queue = Queue(maxsize=1)
- self.queue.put((url, req_headers, use_proxy, queue))
- return queue.get()
-
- def curl_multi(self):
_curl_multi = pycurl.CurlMulti()
_curl_multi.wrappers = {}
if num_handles == 0:
sleep(1)
- def wait(self):
- super(robot_curl, self).wait()
- if not self.bookmarks and not self.pending:
- self.queue.put(None) # Signal to stop
-
def get_ftp_welcome(self):
return '' # We don't store welcome message yet
__all__ = ['concurrent_futures']
+from queue import Queue
import concurrent.futures
import os
+import threading
from bkmk_objects import copy_bkmk
from Robots.base import robot_base
def version_str(self):
return super(concurrent_futures, self).version_str() \
- + ' multi: concurrent.futures.' + self.concurrent_class_name
+ + '; multi: concurrent.futures.' + self.concurrent_class_name
def wait(self):
log = self.log
    def __init__(self, *args, **kw):
        # Process-pool flavour: one worker process per task.
        # concurrent_class must be assigned BEFORE the chained base
        # __init__ — presumably the base constructs self.executor from
        # it (cf_multithread follows the same order); confirm in base.
        self.concurrent_class = concurrent.futures.ProcessPoolExecutor
        concurrent_futures.__init__(self, *args, **kw)
+
+
class cf_multithread(concurrent_futures):
    """concurrent_futures flavour backed by a thread pool.

    Worker threads check bookmarks; actual network requests are handed
    over ``self.queue`` to a single long-running thread
    (``self.main_thread`` — expected to be supplied by a subclass, it is
    not defined here; TODO confirm).
    """

    def __init__(self, *args, **kw):
        self.concurrent_class = concurrent.futures.ThreadPoolExecutor
        concurrent_futures.__init__(self, *args, **kw)

        # Rename self.log, create one log_lines list per URL (thread):
        # keep the file-level logger under file_log and delete the
        # instance attribute so the __getattr__ hook below takes over
        # every subsequent ``self.log`` lookup.
        self.file_log = self.log
        del self.log
        self.local = threading.local()

        # Queue to pass requests to the main loop
        self.queue = Queue()

        # Start the main loop in one of the executor's threads.
        self.executor.submit(self.main_thread)

    def __getattr__(self, attr):
        # Only 'log' is virtualised; any other missing attribute is a
        # genuine error.
        if attr != 'log':
            raise AttributeError(attr)
        try:
            # Worker thread with a per-bookmark list: log by appending.
            return self.local.log.append
        except AttributeError:
            # No thread-local list set (e.g. outside a worker thread):
            # fall back to the file-level logger.
            return self.file_log

    def check_bkmk_task(self):
        # Callable submitted to the executor for each bookmark.
        return self.check_bkmk_thread

    def check_bkmk_thread(self, bookmark):
        # Collect this thread's log lines in a private list so the
        # caller can emit them without interleaving across threads.
        self.local.log = log = []
        robot_base.check_bookmark(self, bookmark)
        return bookmark, log

    async def get(self, url, req_headers, use_proxy=False):
        # Hand the request to the main thread and block this worker on
        # a private one-slot queue until the response is pushed back.
        # Queue to wait results
        queue = Queue(maxsize=1)
        self.queue.put((url, req_headers, use_proxy, queue))
        return queue.get()

    def wait(self):
        super(cf_multithread, self).wait()
        # Once no bookmarks remain (pending or in flight), tell the
        # main thread to shut down.
        if not self.bookmarks and not self.pending:
            self.queue.put(None)  # Signal to stop