From 661542b0565833ea540caca8902cbd1f766c8176 Mon Sep 17 00:00:00 2001
From: dgtlmoon
Date: Fri, 9 Apr 2021 04:49:50 +0200
Subject: [PATCH] Fix backup generation on relative paths (like when run outside docker, under venv, etc)

---
 backend/__init__.py | 23 ++++++++++++++---------
 changedetection.py  |  7 ++++---
 2 files changed, 18 insertions(+), 12 deletions(-)

diff --git a/backend/__init__.py b/backend/__init__.py
index 5574ba4f..c1125b67 100644
--- a/backend/__init__.py
+++ b/backend/__init__.py
@@ -436,6 +436,7 @@ def changedetection_app(config=None, datastore_o=None):
     # We're good but backups are even better!
     @app.route("/backup", methods=['GET'])
     def get_backup():
+
         import zipfile
         from pathlib import Path
 
@@ -444,26 +445,30 @@ def changedetection_app(config=None, datastore_o=None):
 
         # We only care about UUIDS from the current index file
         uuids = list(datastore.data['watching'].keys())
+        backup_filepath = os.path.join(app.config['datastore_path'], backupname)
 
-        with zipfile.ZipFile(os.path.join(app.config['datastore_path'], backupname), 'w',
+        with zipfile.ZipFile(backup_filepath, "w",
                              compression=zipfile.ZIP_DEFLATED,
-                             compresslevel=6) as zipObj:
+                             compresslevel=8) as zipObj:
 
             # Be sure we're written fresh
             datastore.sync_to_json()
 
+            os.chdir(app.config['datastore_path'])
+
             # Add the index
-            zipObj.write(os.path.join(app.config['datastore_path'], "url-watches.json"))
-            # Add any snapshot data we find
+            zipObj.write(os.path.join(app.config['datastore_path'], "url-watches.json"), arcname="url-watches.json")
+
+            # Add any snapshot data we find, use the full path to access the file, but make the file 'relative' in the Zip.
             for txt_file_path in Path(app.config['datastore_path']).rglob('*.txt'):
                 parent_p = txt_file_path.parent
                 if parent_p.name in uuids:
-                    zipObj.write(txt_file_path)
+                    zipObj.write(txt_file_path,
+                                 arcname=str(txt_file_path).replace(app.config['datastore_path'], ''),
+                                 compress_type=zipfile.ZIP_DEFLATED,
+                                 compresslevel=8)
 
-        return send_file(os.path.join(app.config['datastore_path'], backupname),
-                         as_attachment=True,
-                         mimetype="application/zip",
-                         attachment_filename=backupname)
+        return send_from_directory(app.config['datastore_path'], backupname)
 
     @app.route("/static/<string:group>/<string:filename>", methods=['GET'])
     def static_content(group, filename):
diff --git a/changedetection.py b/changedetection.py
index 3a112c9c..34ac116c 100644
--- a/changedetection.py
+++ b/changedetection.py
@@ -3,6 +3,7 @@
 # Launch as a eventlet.wsgi server instance.
 
 import getopt
+import os
 import sys
 
 import eventlet
@@ -15,7 +16,9 @@ from backend import store
 def main(argv):
     ssl_mode = False
     port = 5000
-    datastore_path = "./datastore"
+
+    # Must be absolute so that send_from_directory doesnt try to make it relative to backend/
+    datastore_path = os.path.join(os.getcwd(), "datastore")
 
     try:
         opts, args = getopt.getopt(argv, "sd:p:", "purge")
@@ -38,8 +41,6 @@ def main(argv):
         if opt == '-d':
             datastore_path = arg
 
-
-
     # threads can read from disk every x seconds right?
     # front end can just save
     # We just need to know which threads are looking at which UUIDs
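
For readers looking at the change outside the patch context, here is a minimal standalone sketch (not part of the commit) of the technique the backup route now relies on: files are read from their absolute on-disk locations, while `arcname` records a datastore-relative name inside the zip so the archive stays portable. The directory layout and backup file name below are illustrative assumptions.

```python
import os
import zipfile
from pathlib import Path

# Hypothetical absolute datastore path, mirroring changedetection.py after the patch.
datastore_path = os.path.join(os.getcwd(), "datastore")

backup_filepath = os.path.join(datastore_path, "changedetection-backup-example.zip")

with zipfile.ZipFile(backup_filepath, "w",
                     compression=zipfile.ZIP_DEFLATED,
                     compresslevel=8) as zip_obj:
    # The index is read from its absolute location but stored in the archive
    # under a plain relative name, so the backup unpacks cleanly anywhere.
    zip_obj.write(os.path.join(datastore_path, "url-watches.json"),
                  arcname="url-watches.json")

    # Snapshot files: access them via the full path, but strip the datastore
    # prefix from the name recorded inside the zip.
    for txt_file_path in Path(datastore_path).rglob('*.txt'):
        zip_obj.write(txt_file_path,
                      arcname=str(txt_file_path).replace(datastore_path, ''))
```

The finished archive is then served with Flask's `send_from_directory(directory, filename)`; per the comment added in `changedetection.py`, the datastore path must be absolute so that Flask does not resolve it relative to the `backend/` application directory, which is why the default switches from `"./datastore"` to a path built from `os.getcwd()`.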