import datetime
import glob
import threading

from flask import Blueprint, render_template, send_from_directory, flash, url_for, redirect, abort
import os

from changedetectionio.store import ChangeDetectionStore
from changedetectionio.flask_app import login_optionally_required
from loguru import logger

# "{}" is either the numeric timestamp of a backup or a glob/regex wildcard.
BACKUP_FILENAME_FORMAT = "changedetection-backup-{}.zip"


def create_backup(datastore_path, watches: dict):
    """Create a zip backup of the whole datastore.

    The archive contains the main index (url-watches.json), the flask app
    secret, every watch's data directory, plus two plain-text URL lists
    (with and without tags) so the watch list is easy to port elsewhere.

    Runs in a background thread (started by request_backup), so it must not
    rely on any Flask request context.

    :param datastore_path: absolute path of the datastore directory; the
        finished zip is written there so find_backups() can discover it.
    :param watches: mapping of watch UUID -> watch (dict-like, must expose
        'url', 'tags' and a watch_data_dir attribute).
    """
    logger.debug("Creating backup...")
    import zipfile
    from pathlib import Path

    # create a ZipFile object
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backupname = BACKUP_FILENAME_FORMAT.format(timestamp)
    backup_filepath = os.path.join(datastore_path, backupname)
    # Build under a temporary name so a half-written zip never matches the
    # BACKUP_FILENAME_FORMAT glob used by the listing/download endpoints.
    tmp_filepath = backup_filepath + ".tmp"

    with zipfile.ZipFile(tmp_filepath, "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=8) as zipObj:

        # Add the index
        zipObj.write(os.path.join(datastore_path, "url-watches.json"), arcname="url-watches.json")

        # Add the flask app secret
        zipObj.write(os.path.join(datastore_path, "secret.txt"), arcname="secret.txt")

        # Add any data in the watch data directory.
        for uuid, w in watches.items():
            for f in Path(w.watch_data_dir).glob('*'):
                zipObj.write(f,
                             # Use the full path to access the file, but make the file 'relative' in the Zip.
                             arcname=os.path.join(f.parts[-2], f.parts[-1]),
                             compress_type=zipfile.ZIP_DEFLATED,
                             compresslevel=8)

        # Create a list file with just the URLs, so it's easier to port somewhere else in the future
        list_file = "url-list.txt"
        with open(os.path.join(datastore_path, list_file), "w") as f:
            for uuid in watches:
                url = watches[uuid]["url"]
                f.write("{}\r\n".format(url))
        list_with_tags_file = "url-list-with-tags.txt"
        with open(os.path.join(datastore_path, list_with_tags_file), "w") as f:
            for uuid in watches:
                url = watches[uuid].get('url')
                tag = watches[uuid].get('tags', {})
                f.write("{} {}\r\n".format(url, tag))

        # Add the URL lists to the Zip as well
        zipObj.write(
            os.path.join(datastore_path, list_file),
            arcname=list_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )
        zipObj.write(
            os.path.join(datastore_path, list_with_tags_file),
            arcname=list_with_tags_file,
            compress_type=zipfile.ZIP_DEFLATED,
            compresslevel=8,
        )

    # Atomically publish the finished backup under its final name.
    os.rename(tmp_filepath, backup_filepath)


def construct_blueprint(datastore: ChangeDetectionStore):
    """Build the /backups blueprint: list, request, download and delete backups."""
    backups_blueprint = Blueprint('backups', __name__, template_folder="templates")
    # Live references to background zip threads; pruned on each new request.
    backup_threads = []

    def find_backups():
        # Return metadata for every finished backup zip, newest first.
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        backup_info = []

        for backup in backups:
            size = os.path.getsize(backup) / (1024 * 1024)  # size in MB for display
            creation_time = os.path.getctime(backup)
            backup_info.append({
                'filename': os.path.basename(backup),
                'filesize': f"{size:.2f}",
                'creation_time': creation_time
            })

        backup_info.sort(key=lambda x: x['creation_time'], reverse=True)
        return backup_info

    # NOTE: the route decorator must be the OUTERMOST (listed first) so that
    # the login_optionally_required wrapper is what actually gets registered
    # with Flask; with the decorators the other way around the unwrapped
    # function is registered and the auth check is silently bypassed.
    @backups_blueprint.route("/request-backup", methods=['GET'])
    @login_optionally_required
    def request_backup():
        # Drop finished threads so the list cannot grow without bound.
        backup_threads[:] = [t for t in backup_threads if t.is_alive()]
        if backup_threads:
            flash("A backup is already running, check back in a few minutes", "error")
            return redirect(url_for('backups.index'))

        if len(find_backups()) > int(os.getenv("MAX_NUMBER_BACKUPS", 100)):
            flash("Maximum number of backups reached, please remove some", "error")
            return redirect(url_for('backups.index'))

        # Be sure we're written fresh before the snapshot is taken
        datastore.sync_to_json()
        zip_thread = threading.Thread(target=create_backup,
                                      args=(datastore.datastore_path, datastore.data.get("watching")))
        zip_thread.start()
        backup_threads.append(zip_thread)
        flash("Backup building in background, check back in a few minutes.")

        return redirect(url_for('backups.index'))

    @backups_blueprint.route("/download/<filename>", methods=['GET'])
    @login_optionally_required
    def download_backup(filename):
        import re
        filename = filename.strip()

        # 'latest' is an alias for the newest finished backup.
        if filename == 'latest':
            backups = find_backups()
            if not backups:
                # Nothing has been built yet - don't IndexError on backups[0]
                abort(404)
            filename = backups[0]['filename']

        # Strict whitelist of the generated filename shape; this (together
        # with send_from_directory's own containment checks) prevents any
        # path traversal. Raw string: "\d" is an invalid str escape.
        backup_filename_regex = BACKUP_FILENAME_FORMAT.format(r"\d+").replace('.', r'\.')
        if not re.match(r"^" + backup_filename_regex + r"$", filename):
            abort(400)  # Bad Request if the filename doesn't match the pattern

        # send_from_directory needs the full absolute directory path
        datastore_dir = os.path.abspath(datastore.datastore_path)
        logger.debug(f"Backup download request for '{os.path.join(datastore_dir, filename)}'")
        return send_from_directory(datastore_dir, filename, as_attachment=True)

    @backups_blueprint.route("/", methods=['GET'])
    @login_optionally_required
    def index():
        # Overview page: list finished backups and whether one is in flight.
        backups = find_backups()
        output = render_template("overview.html",
                                 available_backups=backups,
                                 backup_running=any(thread.is_alive() for thread in backup_threads)
                                 )
        return output

    @backups_blueprint.route("/remove-backups", methods=['GET'])
    @login_optionally_required
    def remove_backups():
        # Delete every finished backup zip in the datastore directory.
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        for backup in backups:
            os.unlink(backup)

        flash("Backups were deleted.")

        return redirect(url_for('backups.index'))

    return backups_blueprint
{% extends 'base.html' %}
{% block content %}
    {% from '_helpers.html' import render_simple_field, render_field %}
    <div class="edit-form">
        <div class="box-wrap inner">
            <h4>Backups</h4>
            {% if backup_running %}
                <p>
                    <strong>A backup is running!</strong>
                </p>
            {% endif %}
            <p>
                Here you can download and request a new backup, when a backup is completed you will see it listed below.
            </p>
            <br>
            {% if available_backups %}
                <ul>
                    {% for backup in available_backups %}
                        <li>
                            <a href="{{ url_for('backups.download_backup', filename=backup['filename']) }}">{{ backup['filename'] }}</a>
                            {{ backup['filesize'] }} MB
                        </li>
                    {% endfor %}
                </ul>
            {% else %}
                <p>
                    <strong>No backups found.</strong>
                </p>
            {% endif %}
            <a class="pure-button pure-button-primary" href="{{ url_for('backups.request_backup') }}">Create backup</a>
            {% if available_backups %}
                <a class="pure-button button-small button-error" href="{{ url_for('backups.remove_backups') }}">Remove backups</a>
            {% endif %}
        </div>
    </div>
{% endblock %}
- arcname=os.path.join(f.parts[-2], f.parts[-1]), - compress_type=zipfile.ZIP_DEFLATED, - compresslevel=8) - - # Create a list file with just the URLs, so it's easier to port somewhere else in the future - list_file = "url-list.txt" - with open(os.path.join(datastore_o.datastore_path, list_file), "w") as f: - for uuid in datastore.data["watching"]: - url = datastore.data["watching"][uuid]["url"] - f.write("{}\r\n".format(url)) - list_with_tags_file = "url-list-with-tags.txt" - with open( - os.path.join(datastore_o.datastore_path, list_with_tags_file), "w" - ) as f: - for uuid in datastore.data["watching"]: - url = datastore.data["watching"][uuid].get('url') - tag = datastore.data["watching"][uuid].get('tags', {}) - f.write("{} {}\r\n".format(url, tag)) - - # Add it to the Zip - zipObj.write( - os.path.join(datastore_o.datastore_path, list_file), - arcname=list_file, - compress_type=zipfile.ZIP_DEFLATED, - compresslevel=8, - ) - zipObj.write( - os.path.join(datastore_o.datastore_path, list_with_tags_file), - arcname=list_with_tags_file, - compress_type=zipfile.ZIP_DEFLATED, - compresslevel=8, - ) - - # Send_from_directory needs to be the full absolute path - return send_from_directory(os.path.abspath(datastore_o.datastore_path), backupname, as_attachment=True) - @app.route("/static//", methods=['GET']) def static_content(group, filename): from flask import make_response @@ -1687,6 +1615,9 @@ def changedetection_app(config=None, datastore_o=None): import changedetectionio.blueprint.check_proxies as check_proxies app.register_blueprint(check_proxies.construct_blueprint(datastore=datastore), url_prefix='/check_proxy') + import changedetectionio.blueprint.backups as backups + app.register_blueprint(backups.construct_blueprint(datastore), url_prefix='/backups') + # @todo handle ctrl break ticker_thread = threading.Thread(target=ticker_thread_check_time_launch_checks).start() diff --git a/changedetectionio/templates/base.html b/changedetectionio/templates/base.html index 
5218eb0b..ac726343 100644 --- a/changedetectionio/templates/base.html +++ b/changedetectionio/templates/base.html @@ -70,7 +70,7 @@ IMPORT
  • - BACKUP + BACKUPS
  • {% else %}
  • diff --git a/changedetectionio/tests/test_backup.py b/changedetectionio/tests/test_backup.py index c6dfd956..1dffc005 100644 --- a/changedetectionio/tests/test_backup.py +++ b/changedetectionio/tests/test_backup.py @@ -26,8 +26,24 @@ def test_backup(client, live_server, measure_memory_usage): assert b"1 Imported" in res.data wait_for_all_checks(client) + # Launch the thread in the background to create the backup res = client.get( - url_for("get_backup"), + url_for("backups.request_backup"), + follow_redirects=True + ) + time.sleep(2) + + res = client.get( + url_for("backups.index"), + follow_redirects=True + ) + # Can see the download link to the backup + assert b'