From a7d5c190c7f46543373f20731dbb2b583e141dac Mon Sep 17 00:00:00 2001 From: Oliver Date: Sun, 7 Dec 2025 13:50:10 +0000 Subject: [PATCH] Update termux_setup/shrinkdb.py --- termux_setup/shrinkdb.py | 25 +++++++++++----------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/termux_setup/shrinkdb.py b/termux_setup/shrinkdb.py index cc7a68b..065283e 100644 --- a/termux_setup/shrinkdb.py +++ b/termux_setup/shrinkdb.py @@ -13,25 +13,26 @@ KEEP_TABLES = [ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', stream=sys.stdout) def shrink_db(src_path, dst_path): - # Ensure we don't try to read a half-written file if os.path.getsize(src_path) < 1024: return - tmp_path = dst_path + ".tmp" if os.path.exists(tmp_path): os.remove(tmp_path) try: con = sqlite3.connect(tmp_path) - con.execute("PRAGMA journal_mode=OFF; PRAGMA synchronous=0;") - con.execute(f"ATTACH DATABASE '{src_path}' AS src;") + con.execute("PRAGMA journal_mode=OFF") + con.execute("PRAGMA synchronous=0") + con.execute(f"ATTACH DATABASE '{src_path}' AS src") tables = [r[0] for r in con.execute("SELECT name FROM src.sqlite_master WHERE type='table'").fetchall()] for tbl in KEEP_TABLES: if tbl not in tables: continue - # Copy Schema & Data - con.execute(con.execute(f"SELECT sql FROM src.sqlite_master WHERE name='{tbl}'").fetchone()[0]) + # Copy Schema + sql = con.execute(f"SELECT sql FROM src.sqlite_master WHERE name='{tbl}'").fetchone()[0] + con.execute(sql) + # Copy Data if tbl == "XIAOMI_ACTIVITY_SAMPLE" and MAX_DAYS > 0: cutoff = int(time.time()) - (MAX_DAYS * 86400) con.execute(f"INSERT INTO {tbl} SELECT * FROM src.{tbl} WHERE TIMESTAMP >= {cutoff}") @@ -40,8 +41,6 @@ def shrink_db(src_path, dst_path): con.commit() con.close() - - # Atomic move to prevent partial sync os.rename(tmp_path, dst_path) logging.info(f"Exported: {os.path.basename(dst_path)} ({os.path.getsize(dst_path)>>10} KB)") @@ -52,30 +51,24 @@ def main(): 
os.makedirs(SYNC_DIR, exist_ok=True) logging.info(f"Watching {WATCH_DIR}") - - # Track files to avoid re-processing same file repeatedly processed_files = {} while True: try: - # Look for DB files (Gadgetbridge export is usually named 'Gadgetbridge' or 'Gadgetbridge.db') for f in os.listdir(WATCH_DIR): - if "Gadgetbridge" in f and not f.endswith(".wal") and not f.endswith(".shm"): + if "Gadgetbridge" in f and not f.endswith((".wal", ".shm", ".tmp")): path = os.path.join(WATCH_DIR, f) mtime = os.path.getmtime(path) - # Process if new or modified > 5 seconds ago (stable) if path not in processed_files or processed_files[path] != mtime: - # Debounce: Ensure file isn't actively being written - if (time.time() - mtime) > 5: + if (time.time() - mtime) > 5: logging.info(f"Processing {f}...") out_name = f"GB_Small_{int(time.time())}.db" shrink_db(path, os.path.join(SYNC_DIR, out_name)) processed_files[path] = mtime - time.sleep(10) except KeyboardInterrupt: break except Exception as e: logging.error(e) if __name__ == "__main__": main()