From: Oleg Broytman
Date: Wed, 5 Mar 2025 23:36:07 +0000 (+0300)
Subject: Refactor(Robots): Rename "mix-ins" to "base classes"
X-Git-Tag: 6.4.0~7
X-Git-Url: https://git.phdru.name/?a=commitdiff_plain;h=6aaa6f163f10e63c19331d36d13efb0870e27d56;p=bookmarks_db.git

Refactor(Robots): Rename "mix-ins" to "base classes"
---

diff --git a/Robots/bkmk_raio.py b/Robots/bkmk_raio.py
index b6159dc..bb45797 100644
--- a/Robots/bkmk_raio.py
+++ b/Robots/bkmk_raio.py
@@ -5,7 +5,7 @@ This file is a part of Bookmarks database and Internet robot.
 """
 
 __author__ = "Oleg Broytman "
-__copyright__ = "Copyright (C) 2024 PhiloSoft Design"
+__copyright__ = "Copyright (C) 2024, 2025 PhiloSoft Design"
 __license__ = "GNU GPL"
 
 __all__ = ['robot_aio']
@@ -23,15 +23,15 @@ import aioftp
 import aiohttp
 import aiohttp.client_exceptions
 
-from Robots.multi_mixin import multi_mixin
+from Robots.multi_base import multi_base
 
 
 current_href = contextvars.ContextVar('current_href')
 
 
-class robot_aio(multi_mixin):
+class robot_aio(multi_base):
     def __init__(self, *args, **kw):
-        multi_mixin.__init__(self, *args, **kw)
+        multi_base.__init__(self, *args, **kw)
 
         # We need one event loop for the entire application
         # so that we can save pending tasks between calls to self.wait().
diff --git a/Robots/concurrent_futures.py b/Robots/concurrent_futures.py
index 825379c..53d852a 100644
--- a/Robots/concurrent_futures.py
+++ b/Robots/concurrent_futures.py
@@ -1,4 +1,5 @@
-"""Mix-in for robots based on concurrent.futures, processes multiple URLs in parallel.
+"""Base classes for robots based on concurrent.futures to
+processes multiple URLs in parallel.
 
 This file is a part of Bookmarks database and Internet robot.
 
@@ -16,19 +17,19 @@ import os
 
 from bkmk_objects import copy_bkmk
 from robots import import_robot, set_params, robot_params
-from Robots.multi_mixin import multi_mixin
+from Robots.multi_base import multi_base
 
 
 cpu_count = os.cpu_count()
 
 
-class concurrent_futures(multi_mixin):
+class concurrent_futures(multi_base):
     # We're I/O-bound, not CPU-bound
     max_urls = 2*cpu_count if cpu_count else 10
 
     def __init__(self, *args, **kw):
         self.concurrent_class_name = self.concurrent_class.__name__
-        multi_mixin.__init__(self, *args, **kw)
+        multi_base.__init__(self, *args, **kw)
         self.executor = self.concurrent_class(max_workers=self.max_urls)
 
     def version_str(self):
diff --git a/Robots/multi_mixin.py b/Robots/multi_base.py
similarity index 84%
rename from Robots/multi_mixin.py
rename to Robots/multi_base.py
index 128253e..d3f8750 100644
--- a/Robots/multi_mixin.py
+++ b/Robots/multi_base.py
@@ -1,20 +1,20 @@
-"""Mix-in for robots that process multiple URLs in parallel.
+"""Base class for robots that process multiple URLs in parallel.
 
 This file is a part of Bookmarks database and Internet robot.
 
 """
 
 __author__ = "Oleg Broytman "
-__copyright__ = "Copyright (C) 2024 PhiloSoft Design"
+__copyright__ = "Copyright (C) 2024, 2025 PhiloSoft Design"
 __license__ = "GNU GPL"
 
-__all__ = ['multi_mixin']
+__all__ = ['multi_base']
 
 
 from Robots.base import robot_base
 
 
-class multi_mixin(robot_base):
+class multi_base(robot_base):
     max_urls = 10  # Small default
 
     report_checked = False
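
The net effect of the rename is the inheritance chain robot_base -> multi_base
-> (robot_aio, concurrent_futures) in place of the old multi_mixin naming.
Below is a minimal runnable sketch of that chain; only the class names and the
max_urls / report_checked attributes are taken from the diff above, while the
log parameter and the demo at the bottom are illustrative placeholders, not
code from the repository:

    # Hypothetical, trimmed-down model of the hierarchy this commit renames:
    # robot_base -> multi_base -> robot_aio.  Everything except the class
    # names, max_urls and report_checked is a placeholder.

    class robot_base:
        """Stand-in for Robots.base.robot_base."""
        def __init__(self, log):
            self.log = log


    class multi_base(robot_base):  # formerly multi_mixin
        """Common base for robots that check multiple URLs in parallel."""
        max_urls = 10  # Small default
        report_checked = False


    class robot_aio(multi_base):
        """Stand-in for the aiohttp/asyncio robot in Robots/bkmk_raio.py."""
        def __init__(self, *args, **kw):
            multi_base.__init__(self, *args, **kw)


    if __name__ == '__main__':
        robot = robot_aio(log=print)
        print(robot.max_urls)        # 10, inherited from multi_base
        print(robot.report_checked)  # False

The explicit multi_base.__init__(self, ...) call in the sketch mirrors the
style visible in the diff, which uses direct base-class calls rather than
super().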