Update termux_setup/shrinkdb.py
commit a7d5c190c7 (parent d20367d67b)
```diff
@@ -13,25 +13,26 @@ KEEP_TABLES = [
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s', stream=sys.stdout)
 
 def shrink_db(src_path, dst_path):
-    # Ensure we don't try to read a half-written file
     if os.path.getsize(src_path) < 1024: return
 
     tmp_path = dst_path + ".tmp"
     if os.path.exists(tmp_path): os.remove(tmp_path)
 
     try:
         con = sqlite3.connect(tmp_path)
-        con.execute("PRAGMA journal_mode=OFF; PRAGMA synchronous=0;")
-        con.execute(f"ATTACH DATABASE '{src_path}' AS src;")
+        con.execute("PRAGMA journal_mode=OFF")
+        con.execute("PRAGMA synchronous=0")
+        con.execute(f"ATTACH DATABASE '{src_path}' AS src")
 
         tables = [r[0] for r in con.execute("SELECT name FROM src.sqlite_master WHERE type='table'").fetchall()]
 
         for tbl in KEEP_TABLES:
             if tbl not in tables: continue
 
-            # Copy Schema & Data
-            con.execute(con.execute(f"SELECT sql FROM src.sqlite_master WHERE name='{tbl}'").fetchone()[0])
-
+            # Copy Schema
+            sql = con.execute(f"SELECT sql FROM src.sqlite_master WHERE name='{tbl}'").fetchone()[0]
+            con.execute(sql)
+            # Copy Data
             if tbl == "XIAOMI_ACTIVITY_SAMPLE" and MAX_DAYS > 0:
                 cutoff = int(time.time()) - (MAX_DAYS * 86400)
                 con.execute(f"INSERT INTO {tbl} SELECT * FROM src.{tbl} WHERE TIMESTAMP >= {cutoff}")
```
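The PRAGMA split above is a real fix, not a style change: Python's `sqlite3` module accepts only one statement per `execute()` call and raises `sqlite3.ProgrammingError` when handed a multi-statement string, so the old combined line could never take effect. A minimal sketch of the two working alternatives:

```python
import sqlite3

con = sqlite3.connect(":memory:")

# One statement per execute() call, as the commit now does:
con.execute("PRAGMA journal_mode=OFF")
con.execute("PRAGMA synchronous=0")

# executescript() is the stdlib escape hatch for multi-statement strings:
con.executescript("PRAGMA journal_mode=OFF; PRAGMA synchronous=0;")
con.close()
```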
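One caveat this hunk leaves untouched: `src_path` and `tbl` are interpolated into the SQL with f-strings, which breaks if either ever contains a single quote. SQLite accepts a bound parameter for the ATTACH filename and for the `sqlite_master` name filter, so a hypothetical hardening could look like the sketch below (the helper name is illustrative, not from the script):

```python
import sqlite3

def fetch_table_schema(con: sqlite3.Connection, src_path: str, tbl: str) -> str:
    # Bound parameters keep odd characters in src_path or tbl from
    # breaking the SQL text; the returned CREATE statement still comes
    # straight from the source database.
    con.execute("ATTACH DATABASE ? AS src", (src_path,))
    cur = con.execute("SELECT sql FROM src.sqlite_master WHERE name = ?", (tbl,))
    return cur.fetchone()[0]
```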
```diff
@@ -40,8 +41,6 @@ def shrink_db(src_path, dst_path):
 
         con.commit()
         con.close()
-
-        # Atomic move to prevent partial sync
         os.rename(tmp_path, dst_path)
         logging.info(f"Exported: {os.path.basename(dst_path)} ({os.path.getsize(dst_path)>>10} KB)")
 
```
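The deleted `# Atomic move` comment described a property that still holds: `os.rename()` is atomic only when source and destination sit on the same filesystem, which is guaranteed here because `tmp_path` is just `dst_path + ".tmp"`. A small sketch of the same publish step using `os.replace()`, which behaves identically on POSIX but also overwrites an existing destination on Windows, where `os.rename()` would raise (the function name is illustrative):

```python
import os

def publish(tmp_path: str, dst_path: str) -> None:
    # Same-directory move: readers of dst_path see either the old file
    # or the complete new one, never a half-written database.
    os.replace(tmp_path, dst_path)
```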
```diff
@@ -52,30 +51,25 @@ def shrink_db(src_path, dst_path):
 def main():
     os.makedirs(SYNC_DIR, exist_ok=True)
     logging.info(f"Watching {WATCH_DIR}")
-
-    # Track files to avoid re-processing same file repeatedly
     processed_files = {}
 
     while True:
         try:
-            # Look for DB files (Gadgetbridge export is usually named 'Gadgetbridge' or 'Gadgetbridge.db')
             for f in os.listdir(WATCH_DIR):
-                if "Gadgetbridge" in f and not f.endswith(".wal") and not f.endswith(".shm"):
+                if "Gadgetbridge" in f and not f.endswith((".wal", ".shm", ".tmp")):
                     path = os.path.join(WATCH_DIR, f)
                     mtime = os.path.getmtime(path)
 
-                    # Process if new or modified > 5 seconds ago (stable)
                     if path not in processed_files or processed_files[path] != mtime:
-                        # Debounce: Ensure file isn't actively being written
                         if (time.time() - mtime) > 5:
                             logging.info(f"Processing {f}...")
                             out_name = f"GB_Small_{int(time.time())}.db"
                             shrink_db(path, os.path.join(SYNC_DIR, out_name))
                             processed_files[path] = mtime
 
             time.sleep(10)
         except KeyboardInterrupt: break
         except Exception as e: logging.error(e)
 
 if __name__ == "__main__":
     main()
```
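The changed filter relies on `str.endswith()` accepting a tuple of suffixes, so one call replaces the chained checks, and it now also skips `.tmp` files, presumably to ignore in-flight temporary copies in WATCH_DIR. For example:

```python
# True if any suffix in the tuple matches:
print("Gadgetbridge.db.tmp".endswith((".wal", ".shm", ".tmp")))  # True
print("Gadgetbridge.db".endswith((".wal", ".shm", ".tmp")))      # False
```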
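The surrounding loop's debounce is unchanged but worth a gloss: a file is handled only once per mtime, and only after its mtime has been quiet for more than 5 seconds, so a database still being synced is left alone. A condensed sketch of that pattern (names are illustrative, and unlike the script it records the mtime before the caller processes the file):

```python
import os
import time

def settled(path: str, seen: dict, quiet_secs: float = 5.0) -> bool:
    # Process a path only when (a) its mtime changed since we last handled
    # it and (b) the mtime has been quiet for quiet_secs, i.e. the writer
    # is presumably done.
    mtime = os.path.getmtime(path)
    if seen.get(path) == mtime:
        return False
    if time.time() - mtime <= quiet_secs:
        return False
    seen[path] = mtime
    return True
```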