Re #348 - Add test for backup, use proper datastore path

pull/350/head
dgtlmoon 3 years ago
parent 023951a10e
commit b33105d576

@@ -758,7 +758,8 @@ def changedetection_app(config=None, datastore_o=None):
         from pathlib import Path
         # Remove any existing backup file, for now we just keep one file
-        for previous_backup_filename in Path(app.config['datastore_path']).rglob('changedetection-backup-*.zip'):
+        for previous_backup_filename in Path(datastore_o.datastore_path).rglob('changedetection-backup-*.zip'):
             os.unlink(previous_backup_filename)
         # create a ZipFile object
@@ -766,7 +767,7 @@ def changedetection_app(config=None, datastore_o=None):
         # We only care about UUIDS from the current index file
         uuids = list(datastore.data['watching'].keys())
-        backup_filepath = os.path.join(app.config['datastore_path'], backupname)
+        backup_filepath = os.path.join(datastore_o.datastore_path, backupname)
         with zipfile.ZipFile(backup_filepath, "w",
                              compression=zipfile.ZIP_DEFLATED,
@@ -776,22 +777,22 @@ def changedetection_app(config=None, datastore_o=None):
             datastore.sync_to_json()
             # Add the index
-            zipObj.write(os.path.join(app.config['datastore_path'], "url-watches.json"), arcname="url-watches.json")
+            zipObj.write(os.path.join(datastore_o.datastore_path, "url-watches.json"), arcname="url-watches.json")
             # Add the flask app secret
-            zipObj.write(os.path.join(app.config['datastore_path'], "secret.txt"), arcname="secret.txt")
+            zipObj.write(os.path.join(datastore_o.datastore_path, "secret.txt"), arcname="secret.txt")
             # Add any snapshot data we find, use the full path to access the file, but make the file 'relative' in the Zip.
-            for txt_file_path in Path(app.config['datastore_path']).rglob('*.txt'):
+            for txt_file_path in Path(datastore_o.datastore_path).rglob('*.txt'):
                 parent_p = txt_file_path.parent
                 if parent_p.name in uuids:
                     zipObj.write(txt_file_path,
-                                 arcname=str(txt_file_path).replace(app.config['datastore_path'], ''),
+                                 arcname=str(txt_file_path).replace(datastore_o.datastore_path, ''),
                                  compress_type=zipfile.ZIP_DEFLATED,
                                  compresslevel=8)
             # Create a list file with just the URLs, so it's easier to port somewhere else in the future
-            list_file = os.path.join(app.config['datastore_path'], "url-list.txt")
+            list_file = os.path.join(datastore_o.datastore_path, "url-list.txt")
             with open(list_file, "w") as f:
                 for uuid in datastore.data['watching']:
                     url = datastore.data['watching'][uuid]['url']
@@ -803,7 +804,7 @@ def changedetection_app(config=None, datastore_o=None):
                          compress_type=zipfile.ZIP_DEFLATED,
                          compresslevel=8)
-        return send_from_directory(app.config['datastore_path'], backupname, as_attachment=True)
+        return send_from_directory(datastore_o.datastore_path, backupname, as_attachment=True)
     @app.route("/static/<string:group>/<string:filename>", methods=['GET'])
     def static_content(group, filename):

@@ -0,0 +1,25 @@
+#!/usr/bin/python3
+
+import time
+from flask import url_for
+from urllib.request import urlopen
+from . util import set_original_response, set_modified_response, live_server_setup
+
+
+def test_backup(client, live_server):
+    live_server_setup(live_server)
+
+    # Give the endpoint time to spin up
+    time.sleep(1)
+
+    res = client.get(
+        url_for("get_backup"),
+        follow_redirects=True
+    )
+
+    # Should get the right zip content type
+    assert res.content_type == "application/zip"
+
+    # Should be PK/ZIP stream
+    assert res.data.count(b'PK') >= 2
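
Note on the new test: counting b'PK' markers only confirms the response body looks like a ZIP stream, since every archive member record begins with a 'PK' signature. A stricter follow-up check (not part of this commit; a hedged sketch that assumes the same client fixture, route name and test helpers) could open the returned bytes with the standard-library zipfile module and verify that the watch index was actually packed:

# Hypothetical extra test, not in this commit: inspect the ZIP payload itself.
import io
import zipfile

from flask import url_for
from . util import live_server_setup


def test_backup_contains_index(client, live_server):
    live_server_setup(live_server)

    res = client.get(url_for("get_backup"), follow_redirects=True)

    # The Flask test client buffers the whole body, so it can be read as an in-memory ZIP.
    with zipfile.ZipFile(io.BytesIO(res.data)) as backup_zip:
        # The backup route writes the watch index under this fixed arcname.
        assert "url-watches.json" in backup_zip.namelist()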