git.phdru.name Git - bookmarks_db.git/commitdiff
Chore(Robots): Report "Checked: <URL>" but avoid duplicates
author: Oleg Broytman <phd@phdru.name>
Tue, 20 Aug 2024 17:28:15 +0000 (20:28 +0300)
committer: Oleg Broytman <phd@phdru.name>
Sat, 7 Sep 2024 10:59:02 +0000 (13:59 +0300)
Robots that process multiple URLs in parallel report it themselves.

Robots/base.py
Robots/bkmk_rmultirequests.py
Robots/multi_mixin.py

index 3a15b56e9d7ba332636a190e4a97ed2dbb844751..d9551be25ed4dfb6785e97d39b75b8864f2c74d9 100644 (file)
@@ -80,6 +80,8 @@ class robot_base(Robot):
     timeout = 60
     ftp_timeout = 60
 
+    report_checked = True
+
     def __init__(self, *args, **kw):
         for attr in 'timeout', 'ftp_timeout':
             value = getattr(self, attr)
@@ -94,6 +96,8 @@ class robot_base(Robot):
         return self.__class__.__name__
 
     def check_bookmark(self, bookmark):
+        if self.report_checked:
+            self.log('Checked: %s' % bookmark.href)
         return asyncio.run(self.check_bookmark_async(bookmark))
 
     async def check_bookmark_async(self, bookmark):
index 0c7dbd46e9c16453ded0de855231858dde17358a..97bc34eaef9e143ef60caa136f68625a1bdb9db8 100644 (file)
@@ -91,5 +91,6 @@ def worker_check_bookmark(bookmark):
     log_lines = []
     robot = import_robot('requests')
     set_params(robot, robot_params)
+    robot.report_checked = False
     robot(log_lines.append).check_bookmark(bookmark)
     return bookmark, log_lines
index ef4542cdbb611a572f9af4d205f546685c2f58c0..b92719526bc74ab370c7a71830db739118b26195 100644 (file)
@@ -13,6 +13,7 @@ __all__ = ['multi_mixin']
 
 class multi_mixin:
     max_urls = 10  # Small default
+    report_checked = False
 
     def __init__(self, *args, **kw):
         if isinstance(self.max_urls, str):