diff --git a/Dockerfile b/Dockerfile
index 2e6452c6..73dae1b0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,6 +9,9 @@ WORKDIR /app
 
 ENV PYTHONUNBUFFERED=1
 
+# Attempt to store the triggered commit
+RUN echo "docker hub: $SOURCE_COMMIT" >/source.txt
+
 RUN [ ! -d "/datastore" ] && mkdir /datastore
 
 CMD [ "python", "./backend.py" ]
diff --git a/backend/backend.py b/backend/backend.py
index b29edea7..248c58b7 100644
--- a/backend/backend.py
+++ b/backend/backend.py
@@ -268,6 +268,10 @@ def get_backup():
     with zipfile.ZipFile(os.path.join("/datastore", backupname), 'w',
                          compression=zipfile.ZIP_DEFLATED,
                          compresslevel=6) as zipObj:
+
+        # Be sure we're written fresh
+        datastore.sync_to_json()
+
         # Add the index
         zipObj.write(os.path.join("/datastore", "url-watches.json"))
         # Add any snapshot data we find
diff --git a/backend/store.py b/backend/store.py
index fa399796..16ed08f2 100644
--- a/backend/store.py
+++ b/backend/store.py
@@ -1,7 +1,8 @@
 import json
 import uuid as uuid_builder
 import validators
-
+import os.path
+from os import path
 
 # Is there an existing library to ensure some data store (JSON etc) is in sync with CRUD methods?
 # Open a github issue if you know something :)
@@ -43,6 +44,11 @@ class ChangeDetectionStore:
             'history' : {} # Dict of timestamp and output stripped filename
         }
 
+        if path.isfile('/source.txt'):
+            with open('/source.txt') as f:
+                # Should be set in Dockerfile to look for /source.txt , this will give us the git commit #
+                # So when someone gives us a backup file to examine, we know exactly what code they were running.
+                self.__data['build_sha'] = f.read()
 
         try:
             with open('/datastore/url-watches.json') as json_file: