Refactor(Robots): Refactor request headers
[bookmarks_db.git] / Robots / bkmk_robot_base.py
index 724391853f57f4d7912f579e261d403de35ccb47..df33a26bd43162d92252e04df933bcb378abaffe 100644
@@ -24,6 +24,22 @@ from bkmk_objects import Robot
 from parse_html import parse_html
 
 
+# Fake headers to pretend this is a real browser
+_user_agent = "Mozilla/5.0 (X11; U; Linux 2.6 i686; en)" \
+    " Gecko/20001221 Firefox/2.0.0"
+_x_user_agent = "bookmarks_db (Python %d.%d.%d)" % sys.version_info[:3]
+
+request_headers = {
+    'Accept': '*/*',
+    'Accept-Language': 'ru,en',
+    'Cache-Control': 'max-age=300',
+    'Connection': 'close',
+    'Referer': '/',
+    'User-Agent': _user_agent,
+    'X-User-Agent': _x_user_agent,
+}
+
+
 reloc_dict = {
   301: "perm1.",
   302: "temp2.",
@@ -135,8 +151,13 @@ class robot_base(Robot):
                             break
                     content_stripped = content.strip()
                     if content_stripped and charset:
-                        content_stripped = content_stripped.decode(
-                            charset, 'replace')
+                        try:
+                            content_stripped = content_stripped.decode(
+                                charset, 'replace')
+                        except LookupError:
+                            charset = None
+                            self.log("   unknown charset "
+                                     "in Content-Type header")
                     if content_stripped and is_html:
                         parser = parse_html(
                             content_stripped, charset, self.log)
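The try/except around decode() matters because the codec lookup happens before the 'replace' error handler is applied: decoding with a charset name Python does not recognize raises LookupError regardless of the errors argument. A small standalone sketch of that failure mode (illustrative only, not taken from the repository):

    data = b"<html><body>hello</body></html>"

    try:
        text = data.decode("no-such-charset", "replace")
    except LookupError:
        # A server advertised a bogus charset in its Content-Type header;
        # mirror the diff: drop the charset and keep the raw content.
        text = None

    print(text)  # None -- decoding was skipped

With this change an unrecognized charset is logged and charset is reset to None, so parse_html() receives the undecoded content and a None charset instead of the LookupError escaping this block.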