Add termux_setup/shrinkdb.py
This commit is contained in:
parent
0b0839be86
commit
28060f5940
81
termux_setup/shrinkdb.py
Normal file
81
termux_setup/shrinkdb.py
Normal file
@ -0,0 +1,81 @@
|
||||
import sqlite3, os, time, logging, sys

# --- Config ---
# Directory Gadgetbridge exports its full database into (polled by main()).
WATCH_DIR = "/storage/emulated/0/Documents/GB_Raw"
# Directory the shrunken copies are written to (picked up by whatever syncs them).
SYNC_DIR = "/storage/emulated/0/Documents/GB_Sync"
# Only the last MAX_DAYS days of XIAOMI_ACTIVITY_SAMPLE rows are copied
# (a value <= 0 disables the cutoff and copies everything).
MAX_DAYS = 90
# Tables copied into the shrunken database; all other tables are dropped.
KEEP_TABLES = [
    "USER", "DEVICE", "DEVICE_ATTRIBUTES", "BATTERY_LEVEL",
    "XIAOMI_ACTIVITY_SAMPLE", "XIAOMI_SLEEP_TIME_SAMPLE",
    "XIAOMI_DAILY_SUMMARY_SAMPLE", "MI_BAND_ACTIVITY_SAMPLE"
]

# Log to stdout so output is visible in the terminal session running this script.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', stream=sys.stdout)
|
||||
|
||||
def shrink_db(src_path, dst_path, keep_tables=None, max_days=None):
    """Copy a whitelisted subset of tables from *src_path* into *dst_path*.

    Creates a fresh SQLite database at ``dst_path + ".tmp"``, attaches the
    source database, copies schema + rows for each table in *keep_tables*
    (trimming XIAOMI_ACTIVITY_SAMPLE to the last *max_days* days), then
    atomically renames the temp file into place so a sync client never
    sees a half-written output.

    Args:
        src_path: path to the full Gadgetbridge database export.
        dst_path: path the shrunken database is written to.
        keep_tables: tables to copy; defaults to module-level KEEP_TABLES.
        max_days: activity-sample retention window in days; <= 0 keeps all
            rows. Defaults to module-level MAX_DAYS.

    Errors are logged (not raised) and the temp file is cleaned up.
    """
    if keep_tables is None:
        keep_tables = KEEP_TABLES
    if max_days is None:
        max_days = MAX_DAYS

    # Ensure we don't try to read a half-written file.
    if os.path.getsize(src_path) < 1024:
        return

    tmp_path = dst_path + ".tmp"
    if os.path.exists(tmp_path):
        os.remove(tmp_path)

    con = None
    try:
        con = sqlite3.connect(tmp_path)
        # BUG FIX: Cursor.execute() accepts exactly ONE statement; the
        # original "PRAGMA a; PRAGMA b;" string raised ProgrammingError
        # on every call, so the function always failed into the except.
        con.execute("PRAGMA journal_mode=OFF")
        con.execute("PRAGMA synchronous=0")
        # Parameterized ATTACH: safe even if the path contains quotes.
        con.execute("ATTACH DATABASE ? AS src", (src_path,))

        src_tables = {r[0] for r in con.execute(
            "SELECT name FROM src.sqlite_master WHERE type='table'")}

        for tbl in keep_tables:
            if tbl not in src_tables:
                continue

            # Copy schema. sqlite_master.sql can be NULL for internal
            # objects; skip instead of crashing on None[0].
            row = con.execute(
                "SELECT sql FROM src.sqlite_master WHERE name=?", (tbl,)
            ).fetchone()
            if not row or not row[0]:
                continue
            con.execute(row[0])

            # Copy data. Table names cannot be bound parameters, so they
            # are double-quoted identifiers from the trusted whitelist.
            if tbl == "XIAOMI_ACTIVITY_SAMPLE" and max_days > 0:
                # NOTE(review): assumes TIMESTAMP is Unix seconds — matches
                # the epoch-seconds cutoff used here; confirm against schema.
                cutoff = int(time.time()) - (max_days * 86400)
                con.execute(
                    f'INSERT INTO "{tbl}" SELECT * FROM src."{tbl}" '
                    f'WHERE TIMESTAMP >= ?', (cutoff,))
            else:
                con.execute(f'INSERT INTO "{tbl}" SELECT * FROM src."{tbl}"')

        con.commit()
        con.close()
        con = None

        # Atomic move to prevent partial sync.
        os.rename(tmp_path, dst_path)
        logging.info(f"Exported: {os.path.basename(dst_path)} ({os.path.getsize(dst_path)>>10} KB)")

    except Exception as e:
        logging.error(f"Failed {src_path}: {e}")
        # Close before removing: the original leaked the connection, and
        # some platforms refuse to delete a file that is still open.
        if con is not None:
            con.close()
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
|
||||
|
||||
def _scan_once(processed_files):
    """Scan WATCH_DIR once and shrink every new or changed Gadgetbridge export.

    *processed_files* maps source path -> mtime of the last version handled,
    so an unchanged export is not re-processed on every polling pass.
    """
    # Gadgetbridge exports are usually named 'Gadgetbridge' or
    # 'Gadgetbridge.db'; skip SQLite -wal / -shm side files.
    for f in os.listdir(WATCH_DIR):
        if "Gadgetbridge" not in f or f.endswith((".wal", ".shm")):
            continue
        path = os.path.join(WATCH_DIR, f)
        mtime = os.path.getmtime(path)

        # Already handled this exact version.
        if processed_files.get(path) == mtime:
            continue
        # Debounce: only touch files idle for > 5 s, so we never read an
        # export that is still being written.
        if (time.time() - mtime) > 5:
            logging.info(f"Processing {f}...")
            out_name = f"GB_Small_{int(time.time())}.db"
            shrink_db(path, os.path.join(SYNC_DIR, out_name))
            processed_files[path] = mtime


def main():
    """Poll WATCH_DIR forever, writing shrunken databases into SYNC_DIR.

    Stops cleanly on Ctrl-C (KeyboardInterrupt); any other exception is
    logged and polling continues.
    """
    os.makedirs(SYNC_DIR, exist_ok=True)
    logging.info(f"Watching {WATCH_DIR}")

    processed_files = {}

    while True:
        try:
            _scan_once(processed_files)
            time.sleep(10)
        except KeyboardInterrupt:
            break
        except Exception as e:
            logging.error(e)
            # BUG FIX: the original slept inside the try only, so a
            # persistent error (e.g. missing WATCH_DIR) skipped the sleep
            # and spun in a tight loop spamming the log. Back off here too.
            time.sleep(10)
|
||||
|
||||
# Script entry point: start the watch-and-shrink loop.
if __name__ == "__main__":
    main()
|
||||
Loading…
Reference in New Issue
Block a user