"""Shrink Gadgetbridge SQLite exports for cloud sync (Termux/Android).

Watches WATCH_DIR for full Gadgetbridge database exports and writes a
much smaller copy to SYNC_DIR containing only the tables in KEEP_TABLES,
with high-volume activity samples trimmed to the last MAX_DAYS days.
"""
import logging
import os
import sqlite3
import sys
import time

# --- Config ---
WATCH_DIR = "/storage/emulated/0/Documents/GB_Raw"   # where Gadgetbridge drops full exports
SYNC_DIR = "/storage/emulated/0/Documents/GB_Sync"   # where shrunken copies are written
MAX_DAYS = 90                                        # retention window for activity samples; <= 0 keeps all rows
KEEP_TABLES = [
    "USER", "DEVICE", "DEVICE_ATTRIBUTES", "BATTERY_LEVEL",
    "XIAOMI_ACTIVITY_SAMPLE", "XIAOMI_SLEEP_TIME_SAMPLE",
    "XIAOMI_DAILY_SUMMARY_SAMPLE", "MI_BAND_ACTIVITY_SAMPLE"
]

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', stream=sys.stdout)


def shrink_db(src_path, dst_path):
    """Copy the whitelisted tables from the SQLite DB at *src_path* into a
    fresh database at *dst_path*.

    Builds the output as ``dst_path + ".tmp"`` and renames it into place on
    success (same directory, so the rename is atomic and a sync client never
    sees a half-written file). Any failure is logged and the temp file is
    removed; the function never raises.
    """
    # Ensure we don't try to read a half-written file.
    if os.path.getsize(src_path) < 1024:
        return

    tmp_path = dst_path + ".tmp"
    if os.path.exists(tmp_path):
        os.remove(tmp_path)

    con = None
    try:
        con = sqlite3.connect(tmp_path)
        # Connection.execute() accepts only a SINGLE statement; the two
        # PRAGMAs need executescript(). (A single execute() of both raised
        # ProgrammingError and aborted every export.)
        con.executescript("PRAGMA journal_mode=OFF; PRAGMA synchronous=0;")
        # Bind the path as a parameter so quotes/odd characters in the
        # filename cannot break the SQL.
        con.execute("ATTACH DATABASE ? AS src", (src_path,))

        tables = {r[0] for r in con.execute(
            "SELECT name FROM src.sqlite_master WHERE type='table'")}

        for tbl in KEEP_TABLES:
            if tbl not in tables:
                continue

            # Recreate the table schema verbatim, then copy its rows.
            # NOTE: tbl is interpolated into SQL below, which is safe only
            # because it comes from the trusted KEEP_TABLES constant.
            schema_sql = con.execute(
                "SELECT sql FROM src.sqlite_master WHERE name=?", (tbl,)
            ).fetchone()[0]
            con.execute(schema_sql)

            if tbl == "XIAOMI_ACTIVITY_SAMPLE" and MAX_DAYS > 0:
                # Trim the (by far largest) sample table to the retention window.
                cutoff = int(time.time()) - MAX_DAYS * 86400
                con.execute(
                    f"INSERT INTO {tbl} SELECT * FROM src.{tbl} WHERE TIMESTAMP >= ?",
                    (cutoff,))
            else:
                con.execute(f"INSERT INTO {tbl} SELECT * FROM src.{tbl}")

        con.commit()
        con.close()
        con = None  # mark released so finally doesn't double-close

        # Atomic move to prevent partial sync.
        os.rename(tmp_path, dst_path)
        logging.info(f"Exported: {os.path.basename(dst_path)} ({os.path.getsize(dst_path)>>10} KB)")

    except Exception as e:
        logging.error(f"Failed {src_path}: {e}")
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
    finally:
        # Release the connection even when the copy failed mid-way
        # (the original leaked it on any exception).
        if con is not None:
            con.close()


def main():
    """Poll WATCH_DIR forever, shrinking each new or modified export."""
    os.makedirs(SYNC_DIR, exist_ok=True)
    logging.info(f"Watching {WATCH_DIR}")

    # path -> mtime of the version last processed, to avoid re-processing.
    processed_files = {}

    while True:
        try:
            # Gadgetbridge exports are usually named 'Gadgetbridge' or
            # 'Gadgetbridge.db'; skip SQLite WAL/SHM side files.
            for f in os.listdir(WATCH_DIR):
                if "Gadgetbridge" in f and not f.endswith((".wal", ".shm")):
                    path = os.path.join(WATCH_DIR, f)
                    mtime = os.path.getmtime(path)

                    # Process if new or modified, and only once the file has
                    # been stable for > 5 s (debounce against active writes).
                    if processed_files.get(path) != mtime and (time.time() - mtime) > 5:
                        logging.info(f"Processing {f}...")
                        out_name = f"GB_Small_{int(time.time())}.db"
                        shrink_db(path, os.path.join(SYNC_DIR, out_name))
                        processed_files[path] = mtime

            time.sleep(10)
        except KeyboardInterrupt:
            break
        except Exception as e:
            logging.error(e)
            # Sleep on the error path too, otherwise a persistent failure
            # (e.g. WATCH_DIR missing) busy-spins and floods the log.
            time.sleep(10)


if __name__ == "__main__":
    main()