class robot_curl(robot_base):
+    def version_str(self):
+        # pycurl.version is already a human-readable string,
+        # e.g. 'PycURL/7.45.2 libcurl/7.88.1 ...'; no str() needed.
+        return pycurl.version
+
    def get(self, bookmark, url, accept_charset=False, use_proxy=False):
        if accept_charset and bookmark.charset:
            headers = request_headers.copy()
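For reference, pycurl exposes its version as a ready-made string, so the override above needs no formatting of its own. A quick standalone check (the exact output depends on the installed pycurl/libcurl build):

    import pycurl

    # A single space-separated string, along the lines of
    # 'PycURL/7.45.2 libcurl/7.88.1 OpenSSL/3.0.11 zlib/1.2.13'
    print(pycurl.version)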
    def __init__(self, *args, **kw):
        Robot.__init__(self, *args, **kw)
        socket.setdefaulttimeout(int(self.timeout))
+        # Set the extension header once per robot; building it from the
+        # module-level default avoids growing _x_user_agent on every
+        # instantiation.
+        request_headers['X-User-Agent'] = \
+            '%s %s' % (_x_user_agent, self.version_str())
+
+    def version_str(self):
+        # Fallback; subclasses override this to report real library versions.
+        return self.__class__.__name__
+
    def check_url(self, bookmark):
        try:
            self.start = int(time.time())
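To make the control flow concrete, here is a minimal standalone sketch of the pattern the base class implements; request_headers, _x_user_agent and the class names mirror the diff, while the 'bookmarks_db/X.Y' tag and the pycurl string are placeholders:

    # Standalone sketch, not the project's actual module.
    request_headers = {}                 # headers sent with every request
    _x_user_agent = 'bookmarks_db/X.Y'   # placeholder program tag

    class robot_base:
        def __init__(self):
            # Combine the program tag with the robot's version string.
            request_headers['X-User-Agent'] = '%s %s' % (
                _x_user_agent, self.version_str())

        def version_str(self):
            return self.__class__.__name__   # fallback

    class robot_curl(robot_base):
        def version_str(self):
            return 'PycURL/7.x'              # stands in for pycurl.version

    robot_curl()
    print(request_headers['X-User-Agent'])   # bookmarks_db/X.Y PycURL/7.x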
from requests.packages.urllib3.util.ssl_ import create_urllib3_context
import requests
import requests_ftp
+import urllib3

from Robots.bkmk_robot_base import robot_base, request_headers

class robot_requests(robot_base):
+    def version_str(self):
+        # Report both libraries' versions, not just urllib3's.
+        return 'python-requests/%s urllib3/%s' % (
+            requests.__version__, urllib3.__version__)
+
    def get(self, bookmark, url, accept_charset=False, use_proxy=False):
        if accept_charset and bookmark.charset:
            headers = request_headers.copy()
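Both libraries export __version__, so the combined tag can be checked directly; the values printed below are illustrative and depend on the installed packages:

    import requests
    import urllib3

    # Prints something like 'python-requests/2.31.0 urllib3/2.0.7'.
    print('python-requests/%s urllib3/%s'
          % (requests.__version__, urllib3.__version__))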
class robot_urllib(robot_base):
+    def version_str(self):
+        return 'urllib'
+
    def get(self, bookmark, url, accept_charset=False, use_proxy=False):
        if use_proxy:
            opener.proxies = {'http': self.proxy, 'https': self.proxy}
class robot_urllib2(robot_base):
+    def version_str(self):
+        return 'urllib2'
+
    def get(self, bookmark, url, accept_charset=False, use_proxy=False):
        request = urllib2.Request(url)
        for h, v in request_headers.items():
class robot_urllib_py3(robot_base):
+    def version_str(self):
+        return 'urllib'
+
    def get(self, bookmark, url, accept_charset=False, use_proxy=False):
        if use_proxy:
            opener.proxies = {'http': self.proxy, 'https': self.proxy}
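One observation on the two urllib-based robots: robot_urllib and robot_urllib_py3 both report the bare tag 'urllib', so their X-User-Agent strings are indistinguishable. If telling them apart ever matters, the class-name fallback inherited from robot_base already yields distinct tags, as this standalone sketch shows:

    class robot_base:
        def version_str(self):
            return self.__class__.__name__

    class robot_urllib(robot_base):
        pass

    class robot_urllib_py3(robot_base):
        pass

    print(robot_urllib().version_str())      # robot_urllib
    print(robot_urllib_py3().version_str())  # robot_urllib_py3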