From b8ca67f11ce972fddaaf84e92658cd381a7882f8 Mon Sep 17 00:00:00 2001
From: Oleg Broytman
Date: Tue, 20 Aug 2024 20:28:15 +0300
Subject: [PATCH] Chore(Robots): Report "Checked: " but avoid duplicates

Robots that process multiple URLs in parallel report it themselves.
---
 Robots/base.py                | 4 ++++
 Robots/bkmk_rmultirequests.py | 1 +
 Robots/multi_mixin.py         | 1 +
 3 files changed, 6 insertions(+)

diff --git a/Robots/base.py b/Robots/base.py
index 3a15b56..d9551be 100644
--- a/Robots/base.py
+++ b/Robots/base.py
@@ -80,6 +80,8 @@ class robot_base(Robot):
     timeout = 60
     ftp_timeout = 60
 
+    report_checked = True
+
     def __init__(self, *args, **kw):
         for attr in 'timeout', 'ftp_timeout':
             value = getattr(self, attr)
@@ -94,6 +96,8 @@ class robot_base(Robot):
         return self.__class__.__name__
 
     def check_bookmark(self, bookmark):
+        if self.report_checked:
+            self.log('Checked: %s' % bookmark.href)
         return asyncio.run(self.check_bookmark_async(bookmark))
 
     async def check_bookmark_async(self, bookmark):
diff --git a/Robots/bkmk_rmultirequests.py b/Robots/bkmk_rmultirequests.py
index 0c7dbd4..97bc34e 100644
--- a/Robots/bkmk_rmultirequests.py
+++ b/Robots/bkmk_rmultirequests.py
@@ -91,5 +91,6 @@ def worker_check_bookmark(bookmark):
     log_lines = []
     robot = import_robot('requests')
     set_params(robot, robot_params)
+    robot.report_checked = False
     robot(log_lines.append).check_bookmark(bookmark)
     return bookmark, log_lines
diff --git a/Robots/multi_mixin.py b/Robots/multi_mixin.py
index ef4542c..b927195 100644
--- a/Robots/multi_mixin.py
+++ b/Robots/multi_mixin.py
@@ -13,6 +13,7 @@ __all__ = ['multi_mixin']
 
 class multi_mixin:
     max_urls = 10  # Small default
+    report_checked = False
 
     def __init__(self, *args, **kw):
         if isinstance(self.max_urls, str):
-- 
2.39.5
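
For context, here is a minimal standalone sketch (not part of the patch) of the
pattern the change implements: the base robot announces each checked URL, and a
mixin for parallel robots overrides the class attribute to suppress the report,
since those robots log per-URL progress themselves. The scaffolding below
(RobotBase, MultiMixin, the log callable) is hypothetical and only mirrors the
patched classes.

    class RobotBase:
        report_checked = True  # single-URL robots announce each check

        def __init__(self, log):
            self.log = log

        def check_bookmark(self, href):
            if self.report_checked:
                self.log('Checked: %s' % href)
            # ... the actual network check would run here ...


    class MultiMixin:
        # Parallel robots report per-URL progress themselves, so the
        # base-class "Checked:" line is suppressed to avoid duplicates.
        report_checked = False


    class MultiRobot(MultiMixin, RobotBase):
        pass


    if __name__ == '__main__':
        RobotBase(print).check_bookmark('https://example.com/')   # prints
        MultiRobot(print).check_bookmark('https://example.com/')  # silent

Because MultiMixin precedes RobotBase in the MRO, its report_checked = False
wins without touching the base class, which is the same mechanism the patch
relies on in Robots/multi_mixin.py; the worker in Robots/bkmk_rmultirequests.py
instead flips the attribute on the robot class at runtime.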