git.phdru.name Git - bookmarks_db.git/commitdiff
Refactor(Robots): Rename "mix-ins" to "base classes"
author: Oleg Broytman <phd@phdru.name>
Wed, 5 Mar 2025 23:36:07 +0000 (02:36 +0300)
committer: Oleg Broytman <phd@phdru.name>
Wed, 5 Mar 2025 23:36:07 +0000 (02:36 +0300)
Robots/bkmk_raio.py
Robots/concurrent_futures.py
Robots/multi_base.py [moved from Robots/multi_mixin.py with 84% similarity]

index b6159dc64ef1e135a42816475a91076cf8ce8f27..bb45797f7699bec5e90d30a3f5ad3069e4e53df4 100644 (file)
@@ -5,7 +5,7 @@ This file is a part of Bookmarks database and Internet robot.
 """
 
 __author__ = "Oleg Broytman <phd@phdru.name>"
-__copyright__ = "Copyright (C) 2024 PhiloSoft Design"
+__copyright__ = "Copyright (C) 2024, 2025 PhiloSoft Design"
 __license__ = "GNU GPL"
 
 __all__ = ['robot_aio']
@@ -23,15 +23,15 @@ import aioftp
 import aiohttp
 import aiohttp.client_exceptions
 
-from Robots.multi_mixin import multi_mixin
+from Robots.multi_base import multi_base
 
 
 current_href = contextvars.ContextVar('current_href')
 
 
-class robot_aio(multi_mixin):
+class robot_aio(multi_base):
     def __init__(self, *args, **kw):
-        multi_mixin.__init__(self, *args, **kw)
+        multi_base.__init__(self, *args, **kw)
 
         # We need one event loop for the entire application
         # so that we can save pending tasks between calls to self.wait().
index 825379c5c0d895281894dfdb3e3fc001d8c3284b..53d852a5dbc158ef8dca291d313eff51ad95d30b 100644 (file)
@@ -1,4 +1,5 @@
-"""Mix-in for robots based on concurrent.futures, processes multiple URLs in parallel.
+"""Base classes for robots based on concurrent.futures to
+processes multiple URLs in parallel.
 
 This file is a part of Bookmarks database and Internet robot.
 
@@ -16,19 +17,19 @@ import os
 
 from bkmk_objects import copy_bkmk
 from robots import import_robot, set_params, robot_params
-from Robots.multi_mixin import multi_mixin
+from Robots.multi_base import multi_base
 
 
 cpu_count = os.cpu_count()
 
 
-class concurrent_futures(multi_mixin):
+class concurrent_futures(multi_base):
     # We're I/O-bound, not CPU-bound
     max_urls = 2*cpu_count if cpu_count else 10
 
     def __init__(self, *args, **kw):
         self.concurrent_class_name = self.concurrent_class.__name__
-        multi_mixin.__init__(self, *args, **kw)
+        multi_base.__init__(self, *args, **kw)
         self.executor = self.concurrent_class(max_workers=self.max_urls)
 
     def version_str(self):
similarity index 84%
rename from Robots/multi_mixin.py
rename to Robots/multi_base.py
index 128253ea49662b52143efae39419bb53be345622..d3f8750a9ef0ee19c2c72dc27981a252c9a25ad2 100644 (file)
@@ -1,20 +1,20 @@
-"""Mix-in for robots that process multiple URLs in parallel.
+"""Base class for robots that process multiple URLs in parallel.
 
 This file is a part of Bookmarks database and Internet robot.
 
 """
 
 __author__ = "Oleg Broytman <phd@phdru.name>"
-__copyright__ = "Copyright (C) 2024 PhiloSoft Design"
+__copyright__ = "Copyright (C) 2024, 2025 PhiloSoft Design"
 __license__ = "GNU GPL"
 
-__all__ = ['multi_mixin']
+__all__ = ['multi_base']
 
 
 from Robots.base import robot_base
 
 
-class multi_mixin(robot_base):
+class multi_base(robot_base):
     max_urls = 10  # Small default
     report_checked = False