git.phdru.name Git - bookmarks_db.git/commitdiff
Feat(Robots): Robot based on `httpx.AsyncClient()`
author Oleg Broytman <phd@phdru.name>
Thu, 6 Mar 2025 17:18:21 +0000 (20:18 +0300)
committer Oleg Broytman <phd@phdru.name>
Thu, 6 Mar 2025 17:18:21 +0000 (20:18 +0300)
Robots/bkmk_rasynchttpx.py [new file with mode: 0644]
doc/ANNOUNCE
doc/ChangeLog
doc/TODO
robots.py

diff --git a/Robots/bkmk_rasynchttpx.py b/Robots/bkmk_rasynchttpx.py
new file mode 100644 (file)
index 0000000..19e5dec
--- /dev/null
@@ -0,0 +1,52 @@
+"""Robot based on httpx.AsyncCLient()
+
+This file is a part of Bookmarks database and Internet robot.
+
+"""
+
+__author__ = "Oleg Broytman <phd@phdru.name>"
+__copyright__ = "Copyright (C) 2025 PhiloSoft Design"
+__license__ = "GNU GPL"
+
+__all__ = ['robot_asynchttpx']
+
+
+import httpx
+import socksio
+
+from Robots.aio_base import aio_base
+from Robots.util import get_ftp
+
+
+class robot_asynchttpx(aio_base):
+    def version_str(self):
+        return 'python-httpx/%s' % httpx.__version__
+
+    async def get(self, url, req_headers, use_proxy=False):
+        if url.startswith('ftp://'):
+            error, welcome, body = get_ftp(url, self.timeout)
+            if error is not None:
+                return error, None, None, None
+            self.welcome = welcome
+            return None, None, None, body
+
+        if use_proxy:
+            proxy = self.proxy
+        else:
+            proxy = None
+
+        try:
+            async with httpx.AsyncClient(proxy=proxy, verify=False) as c:
+                r = await c.get(url, headers=req_headers,
+                                timeout=httpx.Timeout(self.timeout),
+                                follow_redirects=False)
+        except (httpx.RequestError, socksio.ProtocolError) as e:
+            error = str(e)
+            return error, None, None, None
+
+        return None, r.status_code, r.headers, r.content
+
+    def get_ftp_welcome(self):
+        welcome = self.welcome
+        self.welcome = ''
+        return welcome
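
For reference, a minimal standalone sketch of the same fetch pattern the new robot uses: one httpx.AsyncClient per request, redirects not followed, certificate verification disabled, and the result reported as an (error, status, headers, body) tuple. The URL, timeout and the no-proxy default below are illustrative, not part of the committed code.

    import asyncio

    import httpx


    async def fetch(url, req_headers=None, timeout=30, proxy=None):
        # Mirrors robot_asynchttpx.get(): a fresh AsyncClient per request,
        # no redirect following, TLS verification disabled.
        try:
            async with httpx.AsyncClient(proxy=proxy, verify=False) as c:
                r = await c.get(url, headers=req_headers,
                                timeout=httpx.Timeout(timeout),
                                follow_redirects=False)
        except httpx.RequestError as e:
            return str(e), None, None, None
        return None, r.status_code, r.headers, r.content


    if __name__ == '__main__':
        error, status, headers, body = asyncio.run(
            fetch('https://example.com/'))
        print(error if error else status)
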
diff --git a/doc/ANNOUNCE b/doc/ANNOUNCE
index 612401b65b7aba27cf277fce73158cd649e32276..7a7370091f53ecfe8769f0f5a4a94708283fb833 100644 (file)
--- a/doc/ANNOUNCE
+++ b/doc/ANNOUNCE
@@ -11,6 +11,8 @@ Version 6.4.0 (2025-??-??)
 
    Robots based on curl-cffi.
 
+   Robot based on httpx.AsyncClient().
+
 Version 6.3.0 (2025-03-02)
 
    Robots based on pycurl.
diff --git a/doc/ChangeLog b/doc/ChangeLog
index 176f6f0d6e77213fe43f0fce52ebcaeb4746d465..49d820feb6f5249692174e13e3c7d2752db8bb06 100644 (file)
--- a/doc/ChangeLog
+++ b/doc/ChangeLog
@@ -2,6 +2,8 @@ Version 6.4.0 (2025-??-??)
 
    Robots based on curl-cffi.
 
+   Robot based on httpx.AsyncClient().
+
 Version 6.3.0 (2025-03-02)
 
    Robots based on pycurl.
diff --git a/doc/TODO b/doc/TODO
index 5e9a6d87ace8c65174e35555b33fdff4752ce4c1..bdc06753734b46d781105e72636efd380eeac331 100644 (file)
--- a/doc/TODO
+++ b/doc/TODO
@@ -1,5 +1,3 @@
-Async httpx.
-
 Async curl-cffi.
 
 Twisted.
diff --git a/robots.py b/robots.py
index ea293c671b3e09a264ad93adfbbdd989c004e812..6f2c5a28c65604219322c76b75bc5065369e443d 100644 (file)
--- a/robots.py
+++ b/robots.py
@@ -16,7 +16,7 @@ from bkmk_objects import parse_params, set_params
 
 robot_names, robot_params = parse_params(
     environ.get("BKMK_ROBOT",
-                "multicurlcffi,multirequests,multihttpx,curlcffi,aio"))
+                "multicurlcffi,multirequests,multihttpx,aio"))
 
 
 def import_robot(robot_name):
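
Given the naming convention visible above (Robots/bkmk_r<name>.py defining class robot_<name>, with names read from the BKMK_ROBOT environment variable), the new robot is presumably selected by the name 'asynchttpx'. The snippet below is an assumption based on that convention; the body of import_robot() and any front-end script are not shown in this diff.

    import os

    # Assumption: the name 'asynchttpx' maps to Robots/bkmk_rasynchttpx.py
    # and its class robot_asynchttpx, per the bkmk_r<name> convention.
    # Set the variable before robots.py reads BKMK_ROBOT at import time.
    os.environ['BKMK_ROBOT'] = 'asynchttpx'

    import robots
    robot_class = robots.import_robot('asynchttpx')  # signature shown in the diff above
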