git.phdru.name Git - m_librarian.git/blobdiff - m_librarian/inp.py
Feat(ml-import): Display tty progress bar during import
[m_librarian.git] / m_librarian / inp.py
index 09cd42be2151a5e7b11dda19673cbada15578721..76ab97be9883c4cd240ae725acc3300acf897e98 100644 (file)
@@ -1,22 +1,23 @@
 
-__all__ = ['import_inpx']
-
 import os
 from zipfile import ZipFile
-from sqlobject import sqlhub, SQLObjectNotFound
+from sqlobject import sqlhub
+from sqlobject.sqlbuilder import Select
 from .db import Author, Book, Extension, Genre, Language, \
     insert_name, insert_author
 
+__all__ = ['import_inpx']
+
 
 EOT = chr(4)  # INP field separator
 
 
 def split_line(line):
     parts = line.strip().split(EOT)
-    l = len(parts)
-    if l < 11:
+    _l = len(parts)
+    if _l < 11:
         raise ValueError('Unknown INP structure: "%s"' % line)
-    if l == 11:  # Standard structure
+    if _l == 11:  # Standard structure
         parts.append(None)  # Emulate lang
     else:  # New structure
         parts = parts[:12]
@@ -26,12 +27,6 @@ def split_line(line):
 def import_inp_line(archive, parts):
     authors, genres, title, series, ser_no, file, size, lib_id, deleted, \
         extension, date, language = parts
-    try:
-        Book.archive_file_idx.get(archive, file)
-    except SQLObjectNotFound:
-        pass
-    else:
-        return
     try:
         ser_no = int(ser_no)
     except ValueError:
@@ -71,20 +66,45 @@ def import_inp_line(archive, parts):
 
 
 def import_inp(archive, inp):
+    files = set()
+    connection = sqlhub.processConnection
+    for file, in connection.queryAll(connection.sqlrepr(
+            Select(Book.q.file, Book.q.archive == archive))):
+        files.add(file)
     for line in inp:
-        import_inp_line(archive, split_line(line))
+        line = line.decode('utf-8')
+        parts = split_line(line)
+        file = parts[5]
+        if file not in files:
+            files.add(file)
+            import_inp_line(archive, parts)
 
 
-def import_inpx(path):
+def import_inpx(path, pbar_cb=None):
     inpx = ZipFile(path)
+    if pbar_cb:
+        inp_count = 0
+        for name in inpx.namelist():
+            ext = os.path.splitext(name)[1]
+            if ext == '.inp':
+                inp_count += 1
+        pbar_cb.set_max(inp_count)
+    inp_count = 0
     for name in inpx.namelist():
         archive, ext = os.path.splitext(name)
         if ext != '.inp':
             continue
+        if pbar_cb:
+            inp_count += 1
+            pbar_cb.display(inp_count)
         inp = inpx.open(name)
         sqlhub.doInTransaction(import_inp, archive + '.zip', inp)
         inp.close()
     connection = sqlhub.processConnection
-    if connection.dbName in ('postgres', 'sqlite'):
+    if connection.dbName == 'postgres':
         for table in Author, Book, Extension, Genre, Language:
             connection.query("VACUUM %s" % table.sqlmeta.table)
+    elif connection.dbName == 'sqlite':
+        connection.query("VACUUM")
+    if pbar_cb:
+        pbar_cb.close()