Update termux_setup/shrinkdb.py
This commit is contained in:
parent
d20367d67b
commit
a7d5c190c7
@ -13,25 +13,26 @@ KEEP_TABLES = [
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', stream=sys.stdout)
|
||||
|
||||
def shrink_db(src_path, dst_path):
|
||||
# Ensure we don't try to read a half-written file
|
||||
if os.path.getsize(src_path) < 1024: return
|
||||
|
||||
tmp_path = dst_path + ".tmp"
|
||||
if os.path.exists(tmp_path): os.remove(tmp_path)
|
||||
|
||||
try:
|
||||
con = sqlite3.connect(tmp_path)
|
||||
con.execute("PRAGMA journal_mode=OFF; PRAGMA synchronous=0;")
|
||||
con.execute(f"ATTACH DATABASE '{src_path}' AS src;")
|
||||
con.execute("PRAGMA journal_mode=OFF")
|
||||
con.execute("PRAGMA synchronous=0")
|
||||
con.execute(f"ATTACH DATABASE '{src_path}' AS src")
|
||||
|
||||
tables = [r[0] for r in con.execute("SELECT name FROM src.sqlite_master WHERE type='table'").fetchall()]
|
||||
|
||||
for tbl in KEEP_TABLES:
|
||||
if tbl not in tables: continue
|
||||
|
||||
# Copy Schema & Data
|
||||
con.execute(con.execute(f"SELECT sql FROM src.sqlite_master WHERE name='{tbl}'").fetchone()[0])
|
||||
# Copy Schema
|
||||
sql = con.execute(f"SELECT sql FROM src.sqlite_master WHERE name='{tbl}'").fetchone()[0]
|
||||
con.execute(sql)
|
||||
|
||||
# Copy Data
|
||||
if tbl == "XIAOMI_ACTIVITY_SAMPLE" and MAX_DAYS > 0:
|
||||
cutoff = int(time.time()) - (MAX_DAYS * 86400)
|
||||
con.execute(f"INSERT INTO {tbl} SELECT * FROM src.{tbl} WHERE TIMESTAMP >= {cutoff}")
|
||||
@ -40,8 +41,6 @@ def shrink_db(src_path, dst_path):
|
||||
|
||||
con.commit()
|
||||
con.close()
|
||||
|
||||
# Atomic move to prevent partial sync
|
||||
os.rename(tmp_path, dst_path)
|
||||
logging.info(f"Exported: {os.path.basename(dst_path)} ({os.path.getsize(dst_path)>>10} KB)")
|
||||
|
||||
@ -52,30 +51,25 @@ def shrink_db(src_path, dst_path):
|
||||
def main():
    """Poll WATCH_DIR for Gadgetbridge database exports and shrink each new
    or updated file into SYNC_DIR via shrink_db().

    Runs forever (10 s poll interval) until interrupted with Ctrl-C.
    """
    os.makedirs(SYNC_DIR, exist_ok=True)
    logging.info(f"Watching {WATCH_DIR}")

    # Track path -> mtime so the same export isn't re-processed every poll.
    processed_files = {}

    while True:
        try:
            # Look for DB files (Gadgetbridge export is usually named 'Gadgetbridge'
            # or 'Gadgetbridge.db'); skip SQLite side files and in-progress temp output.
            for f in os.listdir(WATCH_DIR):
                if "Gadgetbridge" not in f or f.endswith((".wal", ".shm", ".tmp")):
                    continue
                path = os.path.join(WATCH_DIR, f)
                try:
                    mtime = os.path.getmtime(path)
                except FileNotFoundError:
                    # File disappeared between listdir() and stat() — skip this pass.
                    continue

                # Process only if the file is new or changed since the last pass.
                if processed_files.get(path) != mtime:
                    # Debounce: ensure the file isn't actively being written
                    # (mtime must be stable for > 5 seconds).
                    if (time.time() - mtime) > 5:
                        logging.info(f"Processing {f}...")
                        out_name = f"GB_Small_{int(time.time())}.db"
                        shrink_db(path, os.path.join(SYNC_DIR, out_name))
                        processed_files[path] = mtime

            time.sleep(10)
        except KeyboardInterrupt:
            break
        except Exception:
            # Log the full traceback, not just str(e), so failures inside
            # shrink_db (SQL errors, permission issues) are diagnosable.
            logging.exception("Watcher loop error")
|
||||
|
||||
# Entry point: start the watch loop only when executed as a script (not on import).
if __name__ == "__main__":
    main()
|
||||
EOF
|
||||
|
||||
Loading…
Reference in New Issue
Block a user