commit
273644d2d7
@ -1,18 +1,31 @@
|
||||
.git
|
||||
.github
|
||||
changedetectionio/processors/__pycache__
|
||||
changedetectionio/api/__pycache__
|
||||
changedetectionio/model/__pycache__
|
||||
changedetectionio/blueprint/price_data_follower/__pycache__
|
||||
changedetectionio/blueprint/tags/__pycache__
|
||||
changedetectionio/blueprint/__pycache__
|
||||
changedetectionio/blueprint/browser_steps/__pycache__
|
||||
changedetectionio/fetchers/__pycache__
|
||||
changedetectionio/tests/visualselector/__pycache__
|
||||
changedetectionio/tests/restock/__pycache__
|
||||
changedetectionio/tests/__pycache__
|
||||
changedetectionio/tests/fetchers/__pycache__
|
||||
changedetectionio/tests/unit/__pycache__
|
||||
changedetectionio/tests/proxy_list/__pycache__
|
||||
changedetectionio/__pycache__
|
||||
# Git
|
||||
.git/
|
||||
.gitignore
|
||||
|
||||
# GitHub
|
||||
.github/
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
**/__pycache__
|
||||
**/*.py[cod]
|
||||
|
||||
# Caches
|
||||
.mypy_cache/
|
||||
.pytest_cache/
|
||||
.ruff_cache/
|
||||
|
||||
# Distribution / packaging
|
||||
build/
|
||||
dist/
|
||||
*.egg-info*
|
||||
|
||||
# Virtual environment
|
||||
.env
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# IntelliJ IDEA
|
||||
.idea/
|
||||
|
||||
# Visual Studio
|
||||
.vscode/
|
||||
|
@ -1,14 +1,29 @@
|
||||
__pycache__
|
||||
.idea
|
||||
*.pyc
|
||||
datastore/url-watches.json
|
||||
datastore/*
|
||||
__pycache__
|
||||
.pytest_cache
|
||||
build
|
||||
dist
|
||||
venv
|
||||
test-datastore/*
|
||||
test-datastore
|
||||
# Byte-compiled / optimized / DLL files
|
||||
**/__pycache__
|
||||
**/*.py[cod]
|
||||
|
||||
# Caches
|
||||
.mypy_cache/
|
||||
.pytest_cache/
|
||||
.ruff_cache/
|
||||
|
||||
# Distribution / packaging
|
||||
build/
|
||||
dist/
|
||||
*.egg-info*
|
||||
|
||||
# Virtual environment
|
||||
.env
|
||||
.venv/
|
||||
venv/
|
||||
|
||||
# IDEs
|
||||
.idea
|
||||
.vscode/settings.json
|
||||
|
||||
# Datastore files
|
||||
datastore/
|
||||
test-datastore/
|
||||
|
||||
# Memory consumption log
|
||||
test-memory.log
|
||||
|
@ -0,0 +1,164 @@
|
||||
import datetime
|
||||
import glob
|
||||
import threading
|
||||
|
||||
from flask import Blueprint, render_template, send_from_directory, flash, url_for, redirect, abort
|
||||
import os
|
||||
|
||||
from changedetectionio.store import ChangeDetectionStore
|
||||
from changedetectionio.flask_app import login_optionally_required
|
||||
from loguru import logger
|
||||
|
||||
# Filename template for backup zips; "{}" is filled with a timestamp when creating,
# or with "*" / a regex when globbing / validating existing backup names.
BACKUP_FILENAME_FORMAT = "changedetection-backup-{}.zip"
|
||||
|
||||
|
||||
def create_backup(datastore_path, watches: dict):
    """Build a zip backup of the datastore (index, secret, per-watch data and URL lists).

    The archive is written under a temporary '.tmp' name and renamed to its final
    '.zip' name only when complete, so observers never see a half-written backup.

    Parameters:
        datastore_path: Directory containing url-watches.json, secret.txt and the
            per-watch data directories; the backup zip is written here too.
        watches (dict): uuid -> watch mapping; each watch exposes `watch_data_dir`
            and dict-style access to 'url' / 'tags'.
    """
    logger.debug("Creating backup...")
    import zipfile
    from pathlib import Path

    # Create the final and temporary paths once — the original code re-derived the
    # tmp name in several places (and the final rename's .replace was a no-op).
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    backupname = BACKUP_FILENAME_FORMAT.format(timestamp)
    backup_filepath = os.path.join(datastore_path, backupname)
    tmp_filepath = backup_filepath.replace('.zip', '.tmp')

    with zipfile.ZipFile(tmp_filepath, "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=8) as zipObj:

        # Add the index
        zipObj.write(os.path.join(datastore_path, "url-watches.json"), arcname="url-watches.json")

        # Add the flask app secret
        zipObj.write(os.path.join(datastore_path, "secret.txt"), arcname="secret.txt")

        # Add any data in the watch data directory.
        for uuid, w in watches.items():
            for f in Path(w.watch_data_dir).glob('*'):
                zipObj.write(f,
                             # Use the full path to access the file, but make the file 'relative' in the Zip.
                             arcname=os.path.join(f.parts[-2], f.parts[-1]),
                             compress_type=zipfile.ZIP_DEFLATED,
                             compresslevel=8)

        # Create a list file with just the URLs, so it's easier to port somewhere else in the future
        list_file = "url-list.txt"
        with open(os.path.join(datastore_path, list_file), "w") as f:
            for uuid in watches:
                url = watches[uuid]["url"]
                f.write("{}\r\n".format(url))

        # And a variant that includes each watch's tags
        list_with_tags_file = "url-list-with-tags.txt"
        with open(os.path.join(datastore_path, list_with_tags_file), "w") as f:
            for uuid in watches:
                url = watches[uuid].get('url')
                tag = watches[uuid].get('tags', {})
                f.write("{} {}\r\n".format(url, tag))

        # Add both list files to the zip (identical options, so one loop)
        for txt_file in (list_file, list_with_tags_file):
            zipObj.write(
                os.path.join(datastore_path, txt_file),
                arcname=txt_file,
                compress_type=zipfile.ZIP_DEFLATED,
                compresslevel=8,
            )

    # Now it's done, rename it so it shows up finally and its completed being written.
    os.rename(tmp_filepath, backup_filepath)
|
||||
|
||||
|
||||
def construct_blueprint(datastore: ChangeDetectionStore):
    """Build the 'backups' blueprint: request, list, download and delete backup zips.

    NOTE: on every view below the route decorator is listed FIRST (outermost) and
    @login_optionally_required beneath it. Decorators apply bottom-up, so with the
    previous order (@login_optionally_required above the route) Flask registered
    the *unwrapped* view function and the login check was silently bypassed.
    """
    backups_blueprint = Blueprint('backups', __name__, template_folder="templates")
    # Threads of backups currently being built; used to refuse concurrent backups.
    backup_threads = []

    @backups_blueprint.route("/request-backup", methods=['GET'])
    @login_optionally_required
    def request_backup():
        """Start a background thread that builds a new backup, unless one is running."""
        if any(thread.is_alive() for thread in backup_threads):
            flash("A backup is already running, check back in a few minutes", "error")
            return redirect(url_for('backups.index'))

        if len(find_backups()) > int(os.getenv("MAX_NUMBER_BACKUPS", 100)):
            flash("Maximum number of backups reached, please remove some", "error")
            return redirect(url_for('backups.index'))

        # Be sure we're written fresh
        datastore.sync_to_json()
        zip_thread = threading.Thread(target=create_backup, args=(datastore.datastore_path, datastore.data.get("watching")))
        zip_thread.start()
        backup_threads.append(zip_thread)
        flash("Backup building in background, check back in a few minutes.")

        return redirect(url_for('backups.index'))

    def find_backups():
        """Return [{'filename', 'filesize' (MB, 2dp string), 'creation_time'}], newest first."""
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        backup_info = []

        for backup in backups:
            size = os.path.getsize(backup) / (1024 * 1024)
            creation_time = os.path.getctime(backup)
            backup_info.append({
                'filename': os.path.basename(backup),
                'filesize': f"{size:.2f}",
                'creation_time': creation_time
            })

        backup_info.sort(key=lambda x: x['creation_time'], reverse=True)

        return backup_info

    @backups_blueprint.route("/download/<string:filename>", methods=['GET'])
    @login_optionally_required
    def download_backup(filename):
        """Serve one backup zip; 'latest' picks the newest. Rejects unexpected names."""
        import re
        filename = filename.strip()
        # Raw string: a plain "\d" literal is an invalid escape sequence in Python source.
        backup_filename_regex = BACKUP_FILENAME_FORMAT.format(r"\d+")

        # Belt-and-braces path traversal guard (send_from_directory also validates).
        full_path = os.path.join(os.path.abspath(datastore.datastore_path), filename)
        if not full_path.startswith(os.path.abspath(datastore.datastore_path)):
            abort(404)

        if filename == 'latest':
            backups = find_backups()
            if not backups:
                # Nothing to download yet - previously this raised IndexError
                abort(404)
            filename = backups[0]['filename']

        if not re.match(r"^" + backup_filename_regex + "$", filename):
            abort(400)  # Bad Request if the filename doesn't match the pattern

        logger.debug(f"Backup download request for '{full_path}'")
        return send_from_directory(os.path.abspath(datastore.datastore_path), filename, as_attachment=True)

    @backups_blueprint.route("/", methods=['GET'])
    @login_optionally_required
    def index():
        """Overview page: available backups plus whether one is currently building."""
        backups = find_backups()
        output = render_template("overview.html",
                                 available_backups=backups,
                                 backup_running=any(thread.is_alive() for thread in backup_threads)
                                 )

        return output

    @backups_blueprint.route("/remove-backups", methods=['GET'])
    @login_optionally_required
    def remove_backups():
        """Delete every backup zip found in the datastore directory."""
        backup_filepath = os.path.join(datastore.datastore_path, BACKUP_FILENAME_FORMAT.format("*"))
        backups = glob.glob(backup_filepath)
        for backup in backups:
            os.unlink(backup)

        flash("Backups were deleted.")

        return redirect(url_for('backups.index'))

    return backups_blueprint
|
@ -0,0 +1,36 @@
|
||||
{% extends 'base.html' %}
{% block content %}
{% from '_helpers.html' import render_simple_field, render_field %}
{# Backups overview: lists completed backup zips, offers create / download / remove actions.
   Context: available_backups (list of {filename, filesize, creation_time}), backup_running (bool). #}
<div class="edit-form">
    <div class="box-wrap inner">
        <h4>Backups</h4>
        {% if backup_running %}
            <p>
                <strong>A backup is running!</strong>
            </p>
        {% endif %}
        <p>
            Here you can download and request a new backup, when a backup is completed you will see it listed below.
        </p>
        <br>
        {% if available_backups %}
            <ul>
                {% for backup in available_backups %}
                    {# filesize is computed in megabytes server-side, so the unit is "MB" (not "Mb"/megabits) #}
                    <li><a href="{{ url_for('backups.download_backup', filename=backup["filename"]) }}">{{ backup["filename"] }}</a> {{ backup["filesize"] }} MB</li>
                {% endfor %}
            </ul>
        {% else %}
            <p>
                <strong>No backups found.</strong>
            </p>
        {% endif %}

        <a class="pure-button pure-button-primary" href="{{ url_for('backups.request_backup') }}">Create backup</a>
        {% if available_backups %}
            <a class="pure-button button-small button-error " href="{{ url_for('backups.remove_backups') }}">Remove backups</a>
        {% endif %}
    </div>
</div>


{% endblock %}
|
After Width: | Height: | Size: 5.9 KiB |
@ -0,0 +1,109 @@
|
||||
// Render the current wall-clock time in the given IANA timezone as an
// en-US string (long weekday, 2-digit date parts, 24h clock).
// Throws a RangeError when the timezone name is not recognised.
function getTimeInTimezone(timezone) {
    const formatter = new Intl.DateTimeFormat('en-US', {
        timeZone: timezone,
        weekday: 'long',
        year: 'numeric',
        hour12: false,
        month: '2-digit',
        day: '2-digit',
        hour: '2-digit',
        minute: '2-digit',
        second: '2-digit',
    });
    return formatter.format(new Date());
}
|
||||
|
||||
// Scheduler UI wiring (watch/global "time schedule limit" form).
// Depends on jQuery, a page-provided toggleVisibility() helper, and the
// form element ids generated by the server-side form library.
$(document).ready(function () {

    let exceedsLimit = false;
    const warning_text = $("#timespan-warning")
    // Matches both the watch-level and the global settings timezone input.
    const timezone_text_widget = $("input[id*='time_schedule_limit-timezone']")

    // Show/hide the day-limit rows depending on the scheduler "enabled" checkbox.
    toggleVisibility('#time_schedule_limit-enabled, #requests-time_schedule_limit-enabled', '#schedule-day-limits-wrapper', true)

    // Live clock: every 500ms render "now" in the chosen timezone; an invalid
    // timezone name makes getTimeInTimezone throw, which flags the input as error.
    setInterval(() => {
        let success = true;
        try {
            // Show the current local time according to either placeholder or entered TZ name
            if (timezone_text_widget.val().length) {
                $('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.val()));
            } else {
                // So maybe use what is in the placeholder (which will be the default settings)
                $('#local-time-in-tz').text(getTimeInTimezone(timezone_text_widget.attr('placeholder')));
            }
        } catch (error) {
            success = false;
            $('#local-time-in-tz').text("");
            console.error(timezone_text_widget.val())
        }

        $(timezone_text_widget).toggleClass('error', !success);

    }, 500);

    // Re-validate whenever any day-row control changes (delegated handler).
    $('#schedule-day-limits-wrapper').on('change click blur', 'input, checkbox, select', function() {

        let allOk = true;

        // Controls setting the warning that the time could overlap into the next day
        $("li.day-schedule").each(function () {
            const $schedule = $(this);
            const $checkbox = $schedule.find("input[type='checkbox']");

            if ($checkbox.is(":checked")) {
                const timeValue = $schedule.find("input[type='time']").val();
                // parseInt(...) || 0 treats an unparsable/empty selection as zero duration.
                const durationHours = parseInt($schedule.find("select[name*='-duration-hours']").val(), 10) || 0;
                const durationMinutes = parseInt($schedule.find("select[name*='-duration-minutes']").val(), 10) || 0;

                if (timeValue) {
                    const [startHours, startMinutes] = timeValue.split(":").map(Number);
                    const totalMinutes = (startHours * 60 + startMinutes) + (durationHours * 60 + durationMinutes);

                    // 1440 minutes = 24h; beyond that the window spills into the next day.
                    exceedsLimit = totalMinutes > 1440
                    if (exceedsLimit) {
                        allOk = false
                    }
                    // Set the row/day-of-week highlight
                    $schedule.toggleClass("warning", exceedsLimit);
                }
            } else {
                // Unchecked days never warn.
                $schedule.toggleClass("warning", false);
            }
        });

        warning_text.toggle(!allOk)
    });

    // Mark Saturday/Sunday tables so presets can address weekend rows separately.
    $('table[id*="time_schedule_limit-saturday"], table[id*="time_schedule_limit-sunday"]').addClass("weekend-day")

    // Presets [weekend] [business hours] etc
    $(document).on('click', '[data-template].set-schedule', function () {
        // Get the value of the 'data-template' attribute
        switch ($(this).attr('data-template')) {
            case 'business-hours':
                // Mon-Fri 09:00 for 8h, weekend days disabled.
                $('.day-schedule table:not(.weekend-day) input[type="time"]').val('09:00')
                $('.day-schedule table:not(.weekend-day) select[id*="-duration-hours"]').val('8');
                $('.day-schedule table:not(.weekend-day) select[id*="-duration-minutes"]').val('0');
                $('.day-schedule input[id*="-enabled"]').prop('checked', true);
                $('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', false);
                break;
            case 'weekend':
                // Sat+Sun full day, weekdays disabled.
                $('.day-schedule .weekend-day input[type="time"][id$="start-time"]').val('00:00')
                $('.day-schedule .weekend-day select[id*="-duration-hours"]').val('24');
                $('.day-schedule .weekend-day select[id*="-duration-minutes"]').val('0');
                $('.day-schedule input[id*="-enabled"]').prop('checked', false);
                $('.day-schedule .weekend-day input[id*="-enabled"]').prop('checked', true);
                break;
            case 'reset':
                // Every day enabled, full 24h window.
                $('.day-schedule input[type="time"]').val('00:00')
                $('.day-schedule select[id*="-duration-hours"]').val('24');
                $('.day-schedule select[id*="-duration-minutes"]').val('0');
                $('.day-schedule input[id*="-enabled"]').prop('checked', true);
                break;
        }
    });
});
|
@ -0,0 +1,179 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import time
|
||||
from datetime import datetime, timezone
|
||||
from zoneinfo import ZoneInfo
|
||||
from flask import url_for
|
||||
from .util import live_server_setup, wait_for_all_checks, extract_UUID_from_client
|
||||
|
||||
def test_setup(client, live_server):
    """One-time module setup: boot the shared live server used by the tests below."""
    live_server_setup(live_server)
|
||||
|
||||
def test_check_basic_scheduler_functionality(client, live_server, measure_memory_usage):
    """Watch-level scheduler: with every weekday disabled a save must not trigger a
    re-check; enabling the current weekday (in the schedule's timezone) must."""
    #live_server_setup(live_server)
    days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
    test_url = url_for('test_random_content_endpoint', _external=True)

    # We use "Pacific/Kiritimati" because it's the furthest +14 hours, so it might show up more interesting bugs
    # The rest of the actual functionality should be covered in the unit-test unit/test_scheduler.py
    #####################
    res = client.post(
        url_for("settings_page"),
        data={"application-empty_pages_are_a_change": "",
              "requests-time_between_check-seconds": 1,
              "application-timezone": "Pacific/Kiritimati",  # Most Forward Time Zone (UTC+14:00)
              'application-fetch_backend': "html_requests"},
        follow_redirects=True
    )

    assert b"Settings updated." in res.data

    res = client.get(url_for("settings_page"))
    assert b'Pacific/Kiritimati' in res.data

    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    uuid = extract_UUID_from_client(client)

    # Setup all the days of the weeks using XXX as the placeholder for monday/tuesday/etc

    tpl = {
        "time_schedule_limit-XXX-start_time": "00:00",
        "time_schedule_limit-XXX-duration-hours": 24,
        "time_schedule_limit-XXX-duration-minutes": 0,
        "time_schedule_limit-XXX-enabled": '',  # All days are turned off
        "time_schedule_limit-enabled": 'y',  # Scheduler is enabled, all days however are off.
    }

    scheduler_data = {}
    for day in days:
        for key, value in tpl.items():
            # Replace "XXX" with the current day in the key
            new_key = key.replace("XXX", day)
            scheduler_data[new_key] = value

    # Remember the last check time so we can detect whether saving triggered a re-check.
    last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked']
    data = {
        "url": test_url,
        "fetch_backend": "html_requests"
    }
    data.update(scheduler_data)

    res = client.post(
        url_for("edit_page", uuid="first"),
        data=data,
        follow_redirects=True
    )
    assert b"Updated watch." in res.data

    res = client.get(url_for("edit_page", uuid="first"))
    assert b"Pacific/Kiritimati" in res.data, "Should be Pacific/Kiritimati in placeholder data"

    # "Edit" should not trigger a check because it's not enabled in the schedule.
    time.sleep(2)
    assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check

    # Enabling "today" (as seen from Kiritimati) should let the next check run.
    kiritimati_time = datetime.now(timezone.utc).astimezone(ZoneInfo("Pacific/Kiritimati"))
    kiritimati_time_day_of_week = kiritimati_time.strftime("%A").lower()
    live_server.app.config['DATASTORE'].data['watching'][uuid]["time_schedule_limit"][kiritimati_time_day_of_week]["enabled"] = True
    time.sleep(3)
    assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != last_check

    # Cleanup everything
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
|
||||
|
||||
|
||||
def test_check_basic_global_scheduler_functionality(client, live_server, measure_memory_usage):
    """Global (settings-level) scheduler: the watch opts into the global schedule via
    'time_between_check_use_default'; with every weekday disabled no re-check happens,
    enabling the current weekday (in the schedule's timezone) makes the check run."""
    #live_server_setup(live_server)
    days = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday']
    test_url = url_for('test_random_content_endpoint', _external=True)

    res = client.post(
        url_for("import_page"),
        data={"urls": test_url},
        follow_redirects=True
    )

    assert b"1 Imported" in res.data
    wait_for_all_checks(client)
    uuid = extract_UUID_from_client(client)

    # Setup all the days of the weeks using XXX as the placeholder for monday/tuesday/etc

    tpl = {
        "requests-time_schedule_limit-XXX-start_time": "00:00",
        "requests-time_schedule_limit-XXX-duration-hours": 24,
        "requests-time_schedule_limit-XXX-duration-minutes": 0,
        "requests-time_schedule_limit-XXX-enabled": '',  # All days are turned off
        "requests-time_schedule_limit-enabled": 'y',  # Scheduler is enabled, all days however are off.
    }

    scheduler_data = {}
    for day in days:
        for key, value in tpl.items():
            # Replace "XXX" with the current day in the key
            new_key = key.replace("XXX", day)
            scheduler_data[new_key] = value

    data = {
        "application-empty_pages_are_a_change": "",
        "application-timezone": "Pacific/Kiritimati",  # Most Forward Time Zone (UTC+14:00)
        'application-fetch_backend': "html_requests",
        "requests-time_between_check-hours": 0,
        "requests-time_between_check-minutes": 0,
        "requests-time_between_check-seconds": 1,
    }
    data.update(scheduler_data)

    #####################
    res = client.post(
        url_for("settings_page"),
        data=data,
        follow_redirects=True
    )

    assert b"Settings updated." in res.data

    res = client.get(url_for("settings_page"))
    assert b'Pacific/Kiritimati' in res.data

    wait_for_all_checks(client)

    # UI Sanity check

    res = client.get(url_for("edit_page", uuid="first"))
    assert b"Pacific/Kiritimati" in res.data, "Should be Pacific/Kiritimati in placeholder data"

    #### HITTING SAVE SHOULD NOT TRIGGER A CHECK
    last_check = live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked']
    res = client.post(
        url_for("edit_page", uuid="first"),
        data={
            "url": test_url,
            "fetch_backend": "html_requests",
            "time_between_check_use_default": "y"},
        follow_redirects=True
    )
    assert b"Updated watch." in res.data
    time.sleep(2)
    assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] == last_check

    # Enabling "today" in Kiritimati time should make the system check that watch
    kiritimati_time = datetime.now(timezone.utc).astimezone(ZoneInfo("Pacific/Kiritimati"))
    kiritimati_time_day_of_week = kiritimati_time.strftime("%A").lower()
    live_server.app.config['DATASTORE'].data['settings']['requests']['time_schedule_limit'][kiritimati_time_day_of_week]["enabled"] = True

    time.sleep(3)
    assert live_server.app.config['DATASTORE'].data['watching'][uuid]['last_checked'] != last_check

    # Cleanup everything
    res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)
    assert b'Deleted' in res.data
|
@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# run from dir above changedetectionio/ dir
|
||||
# python3 -m unittest changedetectionio.tests.unit.test_jinja2_security
|
||||
|
||||
import unittest
|
||||
from datetime import datetime, timedelta
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
class TestScheduler(unittest.TestCase):
    """Unit tests for changedetectionio.time_handler.am_i_inside_time."""

    # UTC+14:00 (Line Islands, Kiribati) is the farthest ahead, always ahead of UTC.
    # UTC-12:00 (Baker Island, Howland Island) is the farthest behind, always one calendar day behind UTC.

    def test_timezone_basic_time_within_schedule(self):
        """A one-hour window starting at the top of the current hour must contain 'now'."""
        from changedetectionio import time_handler

        timezone_str = 'Europe/Berlin'
        debug_datetime = datetime.now(ZoneInfo(timezone_str))
        day_of_week = debug_datetime.strftime('%A')
        time_str = str(debug_datetime.hour)+':00'
        duration = 60  # minutes

        # The current time should always be within 60 minutes of [time_hour]:00
        result = time_handler.am_i_inside_time(day_of_week=day_of_week,
                                               time_str=time_str,
                                               timezone_str=timezone_str,
                                               duration=duration)

        self.assertEqual(result, True, f"{debug_datetime} is within time scheduler {day_of_week} {time_str} in {timezone_str} for {duration} minutes")

    def test_timezone_basic_time_outside_schedule(self):
        """The same clock time *yesterday* (a different weekday) must fall outside the schedule."""
        from changedetectionio import time_handler

        timezone_str = 'Europe/Berlin'
        # Shift one day into the past: yesterday's weekday with a 24h duration ends
        # exactly at the top of the current hour, so 'now' must not be covered.
        debug_datetime = datetime.now(ZoneInfo(timezone_str))+ timedelta(days=-1)
        day_of_week = debug_datetime.strftime('%A')
        time_str = str(debug_datetime.hour) + ':00'
        duration = 60*24  # minutes

        # A window anchored on yesterday's weekday should not cover the current moment
        result = time_handler.am_i_inside_time(day_of_week=day_of_week,
                                               time_str=time_str,
                                               timezone_str=timezone_str,
                                               duration=duration)

        self.assertNotEqual(result, True,
                            f"{debug_datetime} is NOT within time scheduler {day_of_week} {time_str} in {timezone_str} for {duration} minutes")
|
||||
|
||||
|
||||
# Allow running this module directly (see the usage note at the top of the file).
if __name__ == '__main__':
    unittest.main()
|
@ -0,0 +1,105 @@
|
||||
from datetime import timedelta, datetime
|
||||
from enum import IntEnum
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
|
||||
# Enumeration for days of the week, numbered exactly like datetime.weekday():
# Monday == 0 ... Sunday == 6. Built via the IntEnum functional API.
Weekday = IntEnum(
    'Weekday',
    ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'],
    start=0,
)
Weekday.__doc__ = """Enumeration for days of the week."""
|
||||
|
||||
|
||||
def am_i_inside_time(
        day_of_week: str,
        time_str: str,
        timezone_str: str,
        duration: int = 15,
) -> bool:
    """
    Determines if the current time falls within a specified time range.

    Parameters:
        day_of_week (str): The day of the week (e.g., 'Monday').
        time_str (str): The start time in 'HH:MM' format.
        timezone_str (str): The timezone identifier (e.g., 'Europe/Berlin').
        duration (int, optional): The duration of the time range in minutes. Default is 15.

    Returns:
        bool: True if the current time is within the time range, False otherwise.

    Raises:
        ValueError: If day_of_week, time_str or timezone_str cannot be parsed.
    """
    # Parse the target day of the week
    try:
        target_weekday = Weekday[day_of_week.capitalize()]
    except KeyError:
        raise ValueError(f"Invalid day_of_week: '{day_of_week}'. Must be a valid weekday name.")

    # Parse the start time
    try:
        target_time = datetime.strptime(time_str, '%H:%M').time()
    except ValueError:
        raise ValueError(f"Invalid time_str: '{time_str}'. Must be in 'HH:MM' format.")

    # Define the timezone
    try:
        tz = ZoneInfo(timezone_str)
    except Exception:
        raise ValueError(f"Invalid timezone_str: '{timezone_str}'. Must be a valid timezone identifier.")

    # Get the current time in the specified timezone
    now_tz = datetime.now(tz)

    # Check if the current day matches the target day or overlaps due to duration
    current_weekday = now_tz.weekday()
    # Start instant anchored on *today's* date; the branches below adjust it per day offset.
    start_datetime_tz = datetime.combine(now_tz.date(), target_time, tzinfo=tz)

    # Handle previous day's overlap: a window that started yesterday and ran longer
    # than until midnight may still cover 'now'.
    if target_weekday == (current_weekday - 1) % 7:
        # Calculate start and end times for the overlap from the previous day
        start_datetime_tz -= timedelta(days=1)
        end_datetime_tz = start_datetime_tz + timedelta(minutes=duration)
        if start_datetime_tz <= now_tz < end_datetime_tz:
            return True

    # Handle current day's range
    if target_weekday == current_weekday:
        end_datetime_tz = start_datetime_tz + timedelta(minutes=duration)
        if start_datetime_tz <= now_tz < end_datetime_tz:
            return True

    # Handle next day's overlap
    # NOTE(review): start_datetime_tz still carries *today's* date here although the target
    # day is tomorrow, so end_datetime_tz = today@start + duration. A window beginning
    # tomorrow cannot contain 'now', yet this condition can be satisfied when duration
    # exceeds 24h — confirm the intended semantics of this branch before relying on it.
    if target_weekday == (current_weekday + 1) % 7:
        end_datetime_tz = start_datetime_tz + timedelta(minutes=duration)
        if now_tz < start_datetime_tz and now_tz + timedelta(days=1) < end_datetime_tz:
            return True

    return False
|
||||
|
||||
|
||||
def is_within_schedule(time_schedule_limit, default_tz="UTC"):
    """Return True when 'now' falls inside today's configured schedule window.

    Parameters:
        time_schedule_limit (dict | None): Schedule config with 'enabled', optional
            'timezone', and one entry per lowercase weekday name holding
            {'enabled', 'start_time', 'duration': {'hours', 'minutes'}}.
        default_tz (str): Timezone used when the schedule does not name one.

    Returns:
        bool: True only when the schedule is enabled, today's entry is enabled,
        and the current moment is inside that entry's window. A missing or
        malformed day entry counts as "not scheduled" instead of raising
        (previously this crashed with AttributeError on a missing day).
    """
    if not time_schedule_limit or not time_schedule_limit.get('enabled'):
        return False

    # Get the timezone the time schedule is in, so we know what day it is there.
    # Strip once and use the stripped name everywhere (previously only the
    # ZoneInfo lookup was stripped, so a padded name crashed am_i_inside_time).
    tz_name = (time_schedule_limit.get('timezone') or default_tz).strip()

    now_day_name_in_tz = datetime.now(ZoneInfo(tz_name)).strftime('%A')
    selected_day_schedule = time_schedule_limit.get(now_day_name_in_tz.lower())
    # Missing or disabled day entry means "not scheduled to run today".
    if not selected_day_schedule or not selected_day_schedule.get('enabled'):
        return False

    duration = selected_day_schedule.get('duration') or {}
    selected_day_run_duration_m = int(duration.get('hours', 0)) * 60 + int(duration.get('minutes', 0))

    return am_i_inside_time(day_of_week=now_day_name_in_tz,
                            time_str=selected_day_schedule['start_time'],
                            timezone_str=tz_name,
                            duration=selected_day_run_duration_m)
|
After Width: | Height: | Size: 64 KiB |
Loading…
Reference in new issue