Merge branch 'master' into 2039-restock-use-itemprop

pull/2041/head
dgtlmoon committed 6 months ago (via GitHub)
commit c32173b280

@@ -88,7 +88,7 @@ jobs:
       - name: Build and push :dev
         id: docker_build
         if: ${{ github.ref }} == "refs/heads/master"
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           context: ./
           file: ./Dockerfile
@@ -106,7 +106,7 @@ jobs:
       - name: Build and push :tag
         id: docker_build_tag_release
         if: github.event_name == 'release' && startsWith(github.event.release.tag_name, '0.')
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           context: ./
           file: ./Dockerfile

@@ -51,7 +51,7 @@ jobs:
       # Check we can still build under alpine/musl
       - name: Test that the docker containers can build (musl via alpine check)
        id: docker_build_musl
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        with:
          context: ./
          file: ./.github/test/Dockerfile-alpine
@@ -59,7 +59,7 @@ jobs:
       - name: Test that the docker containers can build
        id: docker_build
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
        # https://github.com/docker/build-push-action#customizing
        with:
          context: ./

@@ -3,9 +3,9 @@
 # @NOTE! I would love to move to 3.11 but it breaks the async handler in changedetectionio/content_fetchers/puppeteer.py
 # If you know how to fix it, please do! and test it for both 3.10 and 3.11
-ARG PYTHON_VERSION=3.10
+ARG PYTHON_VERSION=3.11

-FROM python:${PYTHON_VERSION}-slim-bookworm as builder
+FROM python:${PYTHON_VERSION}-slim-bookworm AS builder

 # See `cryptography` pin comment in requirements.txt
 ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1
@@ -26,7 +26,8 @@ WORKDIR /install
 COPY requirements.txt /requirements.txt

-RUN pip install --target=/dependencies -r /requirements.txt
+# --extra-index-url https://www.piwheels.org/simple is for cryptography module to be prebuilt (or rustc etc needs to be installed)
+RUN pip install --extra-index-url https://www.piwheels.org/simple --target=/dependencies -r /requirements.txt

 # Playwright is an alternative to Selenium
 # Excluded this package from requirements.txt to prevent arm/v6 and arm/v7 builds from failing

@@ -2,7 +2,7 @@
 # Read more https://github.com/dgtlmoon/changedetection.io/wiki

-__version__ = '0.45.24'
+__version__ = '0.45.25'

 from changedetectionio.strtobool import strtobool
 from json.decoder import JSONDecodeError

@@ -171,23 +171,33 @@ class WatchSingleHistory(Resource):
         curl http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history/1677092977 -H"x-api-key:813031b16330fe25e3780cf0325daa45" -H "Content-Type: application/json"
         @apiName Get single snapshot content
         @apiGroup Watch History
+        @apiParam {String} [html] Optional, set to =1 to return the last HTML (only the last 2 snapshots are stored, use `latest` as the timestamp)
         @apiSuccess (200) {String} OK
         @apiSuccess (404) {String} ERR Not found
         """
         watch = self.datastore.data['watching'].get(uuid)
         if not watch:
-            abort(404, message='No watch exists with the UUID of {}'.format(uuid))
+            abort(404, message=f"No watch exists with the UUID of {uuid}")

         if not len(watch.history):
-            abort(404, message='Watch found but no history exists for the UUID {}'.format(uuid))
+            abort(404, message=f"Watch found but no history exists for the UUID {uuid}")

         if timestamp == 'latest':
             timestamp = list(watch.history.keys())[-1]

-        content = watch.get_history_snapshot(timestamp)
-        response = make_response(content, 200)
-        response.mimetype = "text/plain"
+        if request.args.get('html'):
+            content = watch.get_fetched_html(timestamp)
+            if content:
+                response = make_response(content, 200)
+                response.mimetype = "text/html"
+            else:
+                response = make_response("No content found", 404)
+                response.mimetype = "text/plain"
+        else:
+            content = watch.get_history_snapshot(timestamp)
+            response = make_response(content, 200)
+            response.mimetype = "text/plain"

         return response
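
A hedged sketch of how the new `html` query parameter can be exercised, reusing the placeholder host, UUID and API key from the docstring above (the `requests` usage here is illustrative, not part of this change):

```python
# Illustrative only - host, UUID and API key are the placeholders from the API docstring.
import requests

base = "http://localhost:5000/api/v1/watch/cc0cfffa-f449-477b-83ea-0caafd1dc091/history"
headers = {"x-api-key": "813031b16330fe25e3780cf0325daa45"}

# Plain-text snapshot (existing behaviour)
text = requests.get(f"{base}/latest", headers=headers)

# Raw HTML snapshot - only the last two are kept on disk, so 'latest' is the safe timestamp
html = requests.get(f"{base}/latest", params={"html": 1}, headers=headers)
print(html.status_code, html.headers.get("Content-Type"))  # 200 text/html, or 404 if pruned
```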

@@ -187,8 +187,10 @@ def construct_blueprint(datastore: ChangeDetectionStore):
             u = browsersteps_sessions[browsersteps_session_id]['browserstepper'].page.url
             if is_last_step and u:
                 (screenshot, xpath_data) = browsersteps_sessions[browsersteps_session_id]['browserstepper'].request_visualselector_data()
-                datastore.save_screenshot(watch_uuid=uuid, screenshot=screenshot)
-                datastore.save_xpath_data(watch_uuid=uuid, data=xpath_data)
+                watch = datastore.data['watching'].get(uuid)
+                if watch:
+                    watch.save_screenshot(screenshot=screenshot)
+                    watch.save_xpath_data(data=xpath_data)

         # if not this_session.page:
         #     cleanup_playwright_session()

@@ -63,7 +63,7 @@ xpath://body/div/span[contains(@class, 'example-class')]",
                 <ul>
                     <li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
                     {% if jq_support %}
-                    <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
+                    <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>. Prefixing with <code>jqraw:</code> outputs the results as text instead of a JSON list.</li>
                     {% else %}
                     <li>jq support not installed</li>
                     {% endif %}

@@ -30,14 +30,21 @@ function isItemInStock() {
         'dieser artikel ist bald wieder verfügbar',
         'dostępne wkrótce',
         'en rupture de stock',
-        'ist derzeit nicht auf lager',
+        'isn\'t in stock right now',
+        'isnt in stock right now',
+        'isn’t in stock right now',
         'item is no longer available',
         'let me know when it\'s available',
+        'mail me when available',
         'message if back in stock',
         'nachricht bei',
         'nicht auf lager',
+        'nicht lagernd',
         'nicht lieferbar',
+        'nicht verfügbar',
+        'nicht vorrätig',
         'nicht zur verfügung',
+        'nie znaleziono produktów',
         'niet beschikbaar',
         'niet leverbaar',
         'niet op voorraad',
@@ -48,6 +55,7 @@ function isItemInStock() {
         'not currently available',
         'not in stock',
         'notify me when available',
+        'notify me',
         'notify when available',
         'não estamos a aceitar encomendas',
         'out of stock',
@@ -62,12 +70,16 @@ function isItemInStock() {
         'this item is currently unavailable',
         'tickets unavailable',
         'tijdelijk uitverkocht',
+        'unavailable nearby',
         'unavailable tickets',
+        'vergriffen',
+        'vorbestellen',
         'vorbestellung ist bald möglich',
         'we couldn\'t find any products that match',
         'we do not currently have an estimate of when this product will be back in stock.',
         'we don\'t know when or if this item will be back in stock.',
         'we were not able to find a match',
+        'when this arrives in stock',
         'zur zeit nicht an lager',
         '品切れ',
         '已售',

@@ -182,6 +182,7 @@ visibleElementsArray.forEach(function (element) {
 // Inject the current one set in the include_filters, which may be a CSS rule
 // used for displaying the current one in VisualSelector, where its not one we generated.
 if (include_filters.length) {
+    let results;
     // Foreach filter, go and find it on the page and add it to the results so we can visualise it again
     for (const f of include_filters) {
         bbox = false;
@@ -197,10 +198,15 @@ if (include_filters.length) {
             if (f.startsWith('/') || f.startsWith('xpath')) {
                 var qry_f = f.replace(/xpath(:|\d:)/, '')
                 console.log("[xpath] Scanning for included filter " + qry_f)
-                q = document.evaluate(qry_f, document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue;
+                let xpathResult = document.evaluate(qry_f, document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null);
+                results = [];
+                for (let i = 0; i < xpathResult.snapshotLength; i++) {
+                    results.push(xpathResult.snapshotItem(i));
+                }
             } else {
-                console.log("[css] Scanning for included filter " + f)
-                q = document.querySelector(f);
+                console.log("[css] Scanning for included filter " + f);
+                results = document.querySelectorAll(f);
             }
         } catch (e) {
             // Maybe catch DOMException and alert?
@@ -208,11 +214,14 @@ if (include_filters.length) {
             console.log(e);
         }

-        if (q) {
+        if (results.length) {
+            // Iterate over the results
+            results.forEach(node => {
                 // Try to resolve //something/text() back to its /something so we can atleast get the bounding box
                 try {
-                    if (typeof q.nodeName == 'string' && q.nodeName === '#text') {
-                        q = q.parentElement
+                    if (typeof node.nodeName == 'string' && node.nodeName === '#text') {
+                        node = node.parentElement
                     }
                 } catch (e) {
                     console.log(e)
@@ -220,24 +229,19 @@ if (include_filters.length) {
                 }

                 // #1231 - IN the case XPath attribute filter is applied, we will have to traverse up and find the element.
-                if (typeof q.getBoundingClientRect == 'function') {
-                    bbox = q.getBoundingClientRect();
+                if (typeof node.getBoundingClientRect == 'function') {
+                    bbox = node.getBoundingClientRect();
                     console.log("xpath_element_scraper: Got filter element, scroll from top was " + scroll_y)
                 } else {
                     try {
                         // Try and see we can find its ownerElement
-                        bbox = q.ownerElement.getBoundingClientRect();
+                        bbox = node.ownerElement.getBoundingClientRect();
                         console.log("xpath_element_scraper: Got filter by ownerElement element, scroll from top was " + scroll_y)
                     } catch (e) {
                         console.log(e)
                         console.log("xpath_element_scraper: error looking up q.ownerElement")
                     }
                 }
-            }
-
-        if (!q) {
-            console.log("xpath_element_scraper: filter element " + f + " was not found");
-        }

         if (bbox && bbox['width'] > 0 && bbox['height'] > 0) {
             size_pos.push({
@@ -245,7 +249,10 @@ if (include_filters.length) {
                 width: parseInt(bbox['width']),
                 height: parseInt(bbox['height']),
                 left: parseInt(bbox['left']),
-                top: parseInt(bbox['top']) + scroll_y
+                top: parseInt(bbox['top']) + scroll_y,
+                highlight_as_custom_filter: true
             });
         }
+            });
     }
 }

@@ -731,7 +731,7 @@ def changedetection_app(config=None, datastore_o=None):
             datastore.data['watching'][uuid].update(extra_update_obj)

             if request.args.get('unpause_on_save'):
-                flash("Updated watch - unpaused!.")
+                flash("Updated watch - unpaused!")
             else:
                 flash("Updated watch.")

@@ -3,8 +3,6 @@ from bs4 import BeautifulSoup
 from inscriptis import get_text
 from jsonpath_ng.ext import parse
 from typing import List
-from inscriptis.css_profiles import CSS_PROFILES, HtmlElement
-from inscriptis.html_properties import Display
 from inscriptis.model.config import ParserConfig
 from xml.sax.saxutils import escape as xml_escape
 import json
@@ -196,12 +194,12 @@ def extract_element(find='title', html_content=''):
 #
 def _parse_json(json_data, json_filter):
-    if 'json:' in json_filter:
+    if json_filter.startswith("json:"):
         jsonpath_expression = parse(json_filter.replace('json:', ''))
         match = jsonpath_expression.find(json_data)
         return _get_stripped_text_from_json_match(match)

-    if 'jq:' in json_filter:
+    if json_filter.startswith("jq:") or json_filter.startswith("jqraw:"):

         try:
             import jq
@@ -209,11 +207,16 @@ def _parse_json(json_data, json_filter):
             # `jq` requires full compilation in windows and so isn't generally available
             raise Exception("jq not support not found")

-        jq_expression = jq.compile(json_filter.replace('jq:', ''))
-        match = jq_expression.input(json_data).all()
-        return _get_stripped_text_from_json_match(match)
+        if json_filter.startswith("jq:"):
+            jq_expression = jq.compile(json_filter.removeprefix("jq:"))
+            match = jq_expression.input(json_data).all()
+            return _get_stripped_text_from_json_match(match)
+        if json_filter.startswith("jqraw:"):
+            jq_expression = jq.compile(json_filter.removeprefix("jqraw:"))
+            match = jq_expression.input(json_data).all()
+            return '\n'.join(str(item) for item in match)

 def _get_stripped_text_from_json_match(match):
     s = []
     # More than one result, we will return it as a JSON list.
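
For reference, a minimal sketch of the behavioural difference between the two prefixes, assuming the optional `jq` package is installed (the sample data here is made up):

```python
import json
import jq  # optional dependency; not generally available on Windows builds

data = json.loads('{"items": [{"name": "a"}, {"name": "b"}]}')
matches = jq.compile(".items[].name").input(data).all()

# 'jq:'    -> results pass through _get_stripped_text_from_json_match(),
#             so multiple results come back re-serialised as a JSON list: ["a", "b"]
# 'jqraw:' -> results are rendered with str() and joined by newlines (plain text):
print('\n'.join(str(item) for item in matches))  # prints "a" then "b"
```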

@@ -169,6 +169,8 @@ class model(watch_base):

         if len(tmp_history):
             self.__newest_history_key = list(tmp_history.keys())[-1]
+        else:
+            self.__newest_history_key = None

         self.__history_n = len(tmp_history)
@@ -266,14 +268,9 @@ class model(watch_base):
     def save_history_text(self, contents, timestamp, snapshot_id):
         import brotli

+        logger.trace(f"{self.get('uuid')} - Updating history.txt with timestamp {timestamp}")
+
         self.ensure_data_dir_exists()

-        # Small hack so that we sleep just enough to allow 1 second between history snapshots
-        # this is because history.txt indexes/keys snapshots by epoch seconds and we dont want dupe keys
-        if self.__newest_history_key and int(timestamp) == int(self.__newest_history_key):
-            logger.warning(f"Timestamp {timestamp} already exists, waiting 1 seconds so we have a unique key in history.txt")
-            timestamp = str(int(timestamp) + 1)
-            time.sleep(1)
-
         threshold = int(os.getenv('SNAPSHOT_BROTLI_COMPRESSION_THRESHOLD', 1024))
         skip_brotli = strtobool(os.getenv('DISABLE_BROTLI_TEXT_SNAPSHOT', 'False'))
@@ -466,8 +463,42 @@ class model(watch_base):
             # None is set
             return False

+    def save_error_text(self, contents):
+        self.ensure_data_dir_exists()
+        target_path = os.path.join(self.watch_data_dir, "last-error.txt")
+        with open(target_path, 'w') as f:
+            f.write(contents)
+
+    def save_xpath_data(self, data, as_error=False):
+        import json
+
+        if as_error:
+            target_path = os.path.join(self.watch_data_dir, "elements-error.json")
+        else:
+            target_path = os.path.join(self.watch_data_dir, "elements.json")
+        self.ensure_data_dir_exists()
+        with open(target_path, 'w') as f:
+            f.write(json.dumps(data))
+            f.close()
+
+    # Save as PNG, PNG is larger but better for doing visual diff in the future
+    def save_screenshot(self, screenshot: bytes, as_error=False):
+        if as_error:
+            target_path = os.path.join(self.watch_data_dir, "last-error-screenshot.png")
+        else:
+            target_path = os.path.join(self.watch_data_dir, "last-screenshot.png")
+        self.ensure_data_dir_exists()
+        with open(target_path, 'wb') as f:
+            f.write(screenshot)
+            f.close()
+
-    def get_last_fetched_before_filters(self):
+    def get_last_fetched_text_before_filters(self):
         import brotli

         filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
@@ -482,12 +513,56 @@ class model(watch_base):
         with open(filepath, 'rb') as f:
             return(brotli.decompress(f.read()).decode('utf-8'))

-    def save_last_fetched_before_filters(self, contents):
+    def save_last_text_fetched_before_filters(self, contents):
         import brotli
         filepath = os.path.join(self.watch_data_dir, 'last-fetched.br')
         with open(filepath, 'wb') as f:
             f.write(brotli.compress(contents, mode=brotli.MODE_TEXT))

+    def save_last_fetched_html(self, timestamp, contents):
+        import brotli
+
+        self.ensure_data_dir_exists()
+        snapshot_fname = f"{timestamp}.html.br"
+        filepath = os.path.join(self.watch_data_dir, snapshot_fname)
+
+        with open(filepath, 'wb') as f:
+            contents = contents.encode('utf-8') if isinstance(contents, str) else contents
+            try:
+                f.write(brotli.compress(contents))
+            except Exception as e:
+                logger.warning(f"{self.get('uuid')} - Unable to compress snapshot, saving as raw data to {filepath}")
+                logger.warning(e)
+                f.write(contents)
+
+        self._prune_last_fetched_html_snapshots()
+
+    def get_fetched_html(self, timestamp):
+        import brotli
+
+        snapshot_fname = f"{timestamp}.html.br"
+        filepath = os.path.join(self.watch_data_dir, snapshot_fname)
+
+        if os.path.isfile(filepath):
+            with open(filepath, 'rb') as f:
+                return (brotli.decompress(f.read()).decode('utf-8'))
+
+        return False
+
+    def _prune_last_fetched_html_snapshots(self):
+
+        dates = list(self.history.keys())
+        dates.reverse()
+
+        for index, timestamp in enumerate(dates):
+            snapshot_fname = f"{timestamp}.html.br"
+            filepath = os.path.join(self.watch_data_dir, snapshot_fname)
+
+            # Keep only the first 2
+            if index > 1 and os.path.isfile(filepath):
+                os.remove(filepath)
+
     @property
     def get_browsersteps_available_screenshots(self):
         "For knowing which screenshots are available to show the user in BrowserSteps UI"

@@ -1,5 +1,6 @@
 from abc import abstractmethod
 from changedetectionio.strtobool import strtobool
+from changedetectionio.model import Watch
 from copy import deepcopy
 from loguru import logger
 import hashlib
@@ -138,7 +139,7 @@ class difference_detection_processor():
     # After init, call run_changedetection() which will do the actual change-detection

     @abstractmethod
-    def run_changedetection(self, uuid, skip_when_checksum_same=True):
+    def run_changedetection(self, watch: Watch, skip_when_checksum_same=True):
         update_obj = {'last_notification_error': False, 'last_error': False}
         some_data = 'xxxxx'
         update_obj["previous_md5"] = hashlib.md5(some_data.encode('utf-8')).hexdigest()

@@ -1,6 +1,5 @@
 from . import difference_detection_processor
 from ..model import Restock
-from copy import deepcopy
 from loguru import logger
 import hashlib
 import re
@@ -107,12 +106,7 @@ class perform_site_check(difference_detection_processor):
     screenshot = None
     xpath_data = None

-    def run_changedetection(self, uuid, skip_when_checksum_same=True):
-
-        # DeepCopy so we can be sure we don't accidently change anything by reference
-        watch = deepcopy(self.datastore.data['watching'].get(uuid))
-
+    def run_changedetection(self, watch, skip_when_checksum_same=True):
         if not watch:
             raise Exception("Watch no longer exists.")
@@ -157,7 +151,14 @@ class perform_site_check(difference_detection_processor):
             logger.debug(
                 f"Restock - using scraped browserdata - Watch UUID {uuid} restock check returned '{self.fetcher.instock_data}' from JS scraper.")

-        if not self.fetcher.instock_data:
+        # Main detection method
+        fetched_md5 = None
+        if self.fetcher.instock_data:
+            fetched_md5 = hashlib.md5(self.fetcher.instock_data.encode('utf-8')).hexdigest()
+            # 'Possibly in stock' comes from stock-not-in-stock.js when no string found above the fold.
+            update_obj["in_stock"] = True if self.fetcher.instock_data == 'Possibly in stock' else False
+            logger.debug(f"Watch UUID {watch.get('uuid')} restock check returned '{self.fetcher.instock_data}' from JS scraper.")
+        else:
             raise UnableToExtractRestockData(status_code=self.fetcher.status_code)

-        # Main detection method
@@ -165,7 +166,7 @@ class perform_site_check(difference_detection_processor):
         # The main thing that all this at the moment comes down to :)
         changed_detected = False
-        logger.debug(f"Watch UUID {uuid} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
+        logger.debug(f"Watch UUID {watch.get('uuid')} restock check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")

         # out of stock -> back in stock only?
         if watch.get('restock') and watch['restock'].get('in_stock') != update_obj['restock'].get('in_stock'):

@@ -10,18 +10,17 @@ from . import difference_detection_processor
 from ..html_tools import PERL_STYLE_REGEX, cdata_in_document_to_text
 from changedetectionio import html_tools, content_fetchers
 from changedetectionio.blueprint.price_data_follower import PRICE_DATA_TRACK_ACCEPT, PRICE_DATA_TRACK_REJECT
-import changedetectionio.content_fetchers
-from copy import deepcopy
 from loguru import logger

 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

 name = 'Webpage Text/HTML, JSON and PDF changes'
 description = 'Detects all text changes where possible'

-json_filter_prefixes = ['json:', 'jq:']
+json_filter_prefixes = ['json:', 'jq:', 'jqraw:']

 class FilterNotFoundInResponse(ValueError):
-    def __init__(self, msg):
+    def __init__(self, msg, screenshot=None):
+        self.screenshot = screenshot
         ValueError.__init__(self, msg)
@@ -34,14 +33,12 @@ class PDFToHTMLToolNotFound(ValueError):
 # (set_proxy_from_list)
 class perform_site_check(difference_detection_processor):

-    def run_changedetection(self, uuid, skip_when_checksum_same=True):
+    def run_changedetection(self, watch, skip_when_checksum_same=True):
         changed_detected = False
         html_content = ""
         screenshot = False  # as bytes
         stripped_text_from_html = ""

-        # DeepCopy so we can be sure we don't accidently change anything by reference
-        watch = deepcopy(self.datastore.data['watching'].get(uuid))
-
         if not watch:
             raise Exception("Watch no longer exists.")
@@ -116,12 +113,12 @@ class perform_site_check(difference_detection_processor):
         # Better would be if Watch.model could access the global data also
         # and then use getattr https://docs.python.org/3/reference/datamodel.html#object.__getitem__
         # https://realpython.com/inherit-python-dict/ instead of doing it procedurely
-        include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='include_filters')
+        include_filters_from_tags = self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='include_filters')

         # 1845 - remove duplicated filters in both group and watch include filter
         include_filters_rule = list(dict.fromkeys(watch.get('include_filters', []) + include_filters_from_tags))

-        subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=uuid, attr='subtractive_selectors'),
+        subtractive_selectors = [*self.datastore.get_tag_overrides_for_watch(uuid=watch.get('uuid'), attr='subtractive_selectors'),
                                  *watch.get("subtractive_selectors", []),
                                  *self.datastore.data["settings"]["application"].get("global_subtractive_selectors", [])
                                  ]
@@ -188,7 +185,7 @@ class perform_site_check(difference_detection_processor):
                                                                             append_pretty_line_formatting=not watch.is_source_type_url)

                 if not html_content.strip():
-                    raise FilterNotFoundInResponse(include_filters_rule)
+                    raise FilterNotFoundInResponse(msg=include_filters_rule, screenshot=self.fetcher.screenshot)

             if has_subtractive_selectors:
                 html_content = html_tools.element_removal(subtractive_selectors, html_content)
@@ -222,7 +219,7 @@ class perform_site_check(difference_detection_processor):
             from .. import diff
             # needs to not include (added) etc or it may get used twice
             # Replace the processed text with the preferred result
-            rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_before_filters(),
+            rendered_diff = diff.render_diff(previous_version_file_contents=watch.get_last_fetched_text_before_filters(),
                                              newest_version_file_contents=stripped_text_from_html,
                                              include_equal=False,  # not the same lines
                                              include_added=watch.get('filter_text_added', True),
@@ -231,7 +228,7 @@ class perform_site_check(difference_detection_processor):
                                              line_feed_sep="\n",
                                              include_change_type_prefix=False)

-            watch.save_last_fetched_before_filters(text_content_before_ignored_filter)
+            watch.save_last_text_fetched_before_filters(text_content_before_ignored_filter)

             if not rendered_diff and stripped_text_from_html:
                 # We had some content, but no differences were found
@@ -344,17 +341,17 @@ class perform_site_check(difference_detection_processor):
         if not watch['title'] or not len(watch['title']):
             update_obj['title'] = html_tools.extract_element(find='title', html_content=self.fetcher.content)

-        logger.debug(f"Watch UUID {uuid} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")
+        logger.debug(f"Watch UUID {watch.get('uuid')} content check - Previous MD5: {watch.get('previous_md5')}, Fetched MD5 {fetched_md5}")

         if changed_detected:
             if watch.get('check_unique_lines', False):
                 has_unique_lines = watch.lines_contain_something_unique_compared_to_history(lines=stripped_text_from_html.splitlines())
                 # One or more lines? unsure?
                 if not has_unique_lines:
-                    logger.debug(f"check_unique_lines: UUID {uuid} didnt have anything new setting change_detected=False")
+                    logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} didnt have anything new setting change_detected=False")
                     changed_detected = False
                 else:
-                    logger.debug(f"check_unique_lines: UUID {uuid} had unique content")
+                    logger.debug(f"check_unique_lines: UUID {watch.get('uuid')} had unique content")

         # Always record the new checksum
         update_obj["previous_md5"] = fetched_md5

@@ -1,14 +1,5 @@
 $(document).ready(function () {
-    // duplicate
-    var csrftoken = $('input[name=csrf_token]').val();
-    $.ajaxSetup({
-        beforeSend: function (xhr, settings) {
-            if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
-                xhr.setRequestHeader("X-CSRFToken", csrftoken)
-            }
-        }
-    })
     var browsersteps_session_id;
     var browser_interface_seconds_remaining = 0;
     var apply_buttons_disabled = false;

@@ -0,0 +1,10 @@
+$(document).ready(function () {
+    $.ajaxSetup({
+        beforeSend: function (xhr, settings) {
+            if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
+                xhr.setRequestHeader("X-CSRFToken", csrftoken)
+            }
+        }
+    })
+});
+

@@ -1,13 +1,4 @@
 $(document).ready(function () {
-    var csrftoken = $('input[name=csrf_token]').val();
-    $.ajaxSetup({
-        beforeSend: function (xhr, settings) {
-            if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
-                xhr.setRequestHeader("X-CSRFToken", csrftoken)
-            }
-        }
-    })
-
     $('.needs-localtime').each(function () {
         for (var option of this.options) {
             var dateObject = new Date(option.value * 1000);
@@ -48,6 +39,12 @@ $(document).ready(function () {
             $("#highlightSnippet").remove();
         }

+        // Listen for Escape key press
+        window.addEventListener('keydown', function (e) {
+            if (e.key === 'Escape') {
+                clean();
+            }
+        }, false);

         function dragTextHandler(event) {
             console.log('mouseupped');

@@ -13,16 +13,6 @@ $(document).ready(function() {
     $('#send-test-notification').click(function (e) {
         e.preventDefault();

-        // this can be global
-        var csrftoken = $('input[name=csrf_token]').val();
-
-        $.ajaxSetup({
-            beforeSend: function(xhr, settings) {
-                if (!/^(GET|HEAD|OPTIONS|TRACE)$/i.test(settings.type) && !this.crossDomain) {
-                    xhr.setRequestHeader("X-CSRFToken", csrftoken)
-                }
-            }
-        })

         data = {
             notification_body: $('#notification_body').val(),
             notification_format: $('#notification_format').val(),

@@ -2,250 +2,239 @@
 // All rights reserved.
 // yes - this is really a hack, if you are a front-ender and want to help, please get in touch!

-$(document).ready(function () {
-    var current_selected_i;
-    var state_clicked = false;
-
-    var c;
-
-    // greyed out fill context
-    var xctx;
-    // redline highlight context
-    var ctx;
-
-    var current_default_xpath = [];
-    var x_scale = 1;
-    var y_scale = 1;
-    var selector_image;
-    var selector_image_rect;
-    var selector_data;
-
-    $('#visualselector-tab').click(function () {
-        $("img#selector-background").off('load');
-        state_clicked = false;
-        current_selected_i = false;
-        bootstrap_visualselector();
-    });
-
-    $(document).on('keydown', function (event) {
-        if ($("img#selector-background").is(":visible")) {
-            if (event.key == "Escape") {
-                state_clicked = false;
-                ctx.clearRect(0, 0, c.width, c.height);
-            }
-        }
-    });
-
-    // For when the page loads
-    if (!window.location.hash || window.location.hash != '#visualselector') {
-        $("img#selector-background").attr('src', '');
-        return;
-    }
-
-    // Handle clearing button/link
-    $('#clear-selector').on('click', function (event) {
-        if (!state_clicked) {
-            alert('Oops, Nothing selected!');
-        }
-        state_clicked = false;
-        ctx.clearRect(0, 0, c.width, c.height);
-        xctx.clearRect(0, 0, c.width, c.height);
-        $("#include_filters").val('');
-    });
-
-    bootstrap_visualselector();
-
-    function bootstrap_visualselector() {
-        if (1) {
-            // bootstrap it, this will trigger everything else
-            $("img#selector-background").on("error", function () {
-                $('.fetching-update-notice').html("<strong>Ooops!</strong> The VisualSelector tool needs atleast one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.");
-                $('.fetching-update-notice').css('color','#bb0000');
-                $('#selector-current-xpath').hide();
-                $('#clear-selector').hide();
-            }).bind('load', function () {
-                console.log("Loaded background...");
-                c = document.getElementById("selector-canvas");
-                // greyed out fill context
-                xctx = c.getContext("2d");
-                // redline highlight context
-                ctx = c.getContext("2d");
-                if ($("#include_filters").val().trim().length) {
-                    current_default_xpath = $("#include_filters").val().split(/\r?\n/g);
-                } else {
-                    current_default_xpath = [];
-                }
-                fetch_data();
-                $('#selector-canvas').off("mousemove mousedown");
-                // screenshot_url defined in the edit.html template
-            }).attr("src", screenshot_url);
-        }
-        // Tell visualSelector that the image should update
-        var s = $("img#selector-background").attr('src') + "?" + new Date().getTime();
-        $("img#selector-background").attr('src', s)
-    }
-
-    // This is fired once the img src is loaded in bootstrap_visualselector()
-    function fetch_data() {
-        // Image is ready
-        $('.fetching-update-notice').html("Fetching element data..");
-
-        $.ajax({
-            url: watch_visual_selector_data_url,
-            context: document.body
-        }).done(function (data) {
-            $('.fetching-update-notice').html("Rendering..");
-            selector_data = data;
-            console.log("Reported browser width from backend: " + data['browser_width']);
-            state_clicked = false;
-            set_scale();
-            reflow_selector();
-            $('.fetching-update-notice').fadeOut();
-        });
-    }
-
-    function set_scale() {
-        // some things to check if the scaling doesnt work
-        // - that the widths/sizes really are about the actual screen size cat elements.json |grep -o width......|sort|uniq
-        $("#selector-wrapper").show();
-        selector_image = $("img#selector-background")[0];
-        selector_image_rect = selector_image.getBoundingClientRect();
-
-        // make the canvas the same size as the image
-        $('#selector-canvas').attr('height', selector_image_rect.height);
-        $('#selector-canvas').attr('width', selector_image_rect.width);
-        $('#selector-wrapper').attr('width', selector_image_rect.width);
-        x_scale = selector_image_rect.width / selector_data['browser_width'];
-        y_scale = selector_image_rect.height / selector_image.naturalHeight;
-        ctx.strokeStyle = 'rgba(255,0,0, 0.9)';
-        ctx.fillStyle = 'rgba(255,0,0, 0.1)';
-        ctx.lineWidth = 3;
-        console.log("scaling set x: " + x_scale + " by y:" + y_scale);
-        $("#selector-current-xpath").css('max-width', selector_image_rect.width);
-    }
-
-    function reflow_selector() {
-        $(window).resize(function () {
-            set_scale();
-            highlight_current_selected_i();
-        });
-        var selector_currnt_xpath_text = $("#selector-current-xpath span");
-
-        set_scale();
-
-        console.log(selector_data['size_pos'].length + " selectors found");
-
-        // highlight the default one if we can find it in the xPath list
-        // or the xpath matches the default one
-        found = false;
-        if (current_default_xpath.length) {
-            // Find the first one that matches
-            // @todo In the future paint all that match
-            for (const c of current_default_xpath) {
-                for (var i = selector_data['size_pos'].length; i !== 0; i--) {
-                    if (selector_data['size_pos'][i - 1].xpath.trim() === c.trim()) {
-                        console.log("highlighting " + c);
-                        current_selected_i = i - 1;
-                        highlight_current_selected_i();
-                        found = true;
-                        break;
-                    }
-                }
-                if (found) {
-                    break;
-                }
-            }
-            if (!found) {
-                alert("Unfortunately your existing CSS/xPath Filter was no longer found!");
-            }
-        }
-
-        $('#selector-canvas').bind('mousemove', function (e) {
-            if (state_clicked) {
-                return;
-            }
-            ctx.clearRect(0, 0, c.width, c.height);
-            current_selected_i = null;
-
-            // Add in offset
-            if ((typeof e.offsetX === "undefined" || typeof e.offsetY === "undefined") || (e.offsetX === 0 && e.offsetY === 0)) {
-                var targetOffset = $(e.target).offset();
-                e.offsetX = e.pageX - targetOffset.left;
-                e.offsetY = e.pageY - targetOffset.top;
-            }
-
-            // Reverse order - the most specific one should be deeper/"laster"
-            // Basically, find the most 'deepest'
-            var found = 0;
-            ctx.fillStyle = 'rgba(205,0,0,0.35)';
-            // Will be sorted by smallest width*height first
-            for (var i = 0; i <= selector_data['size_pos'].length; i++) {
-                // draw all of them? let them choose somehow?
-                var sel = selector_data['size_pos'][i];
-                // If we are in a bounding-box
-                if (e.offsetY > sel.top * y_scale && e.offsetY < sel.top * y_scale + sel.height * y_scale
-                    &&
-                    e.offsetX > sel.left * y_scale && e.offsetX < sel.left * y_scale + sel.width * y_scale
-                ) {
-                    // FOUND ONE
-                    set_current_selected_text(sel.xpath);
-                    ctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-                    ctx.fillRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-
-                    // no need to keep digging
-                    // @todo or, O to go out/up, I to go in
-                    // or double click to go up/out the selector?
-                    current_selected_i = i;
-                    found += 1;
-                    break;
-                }
-            }
-        }.debounce(5));
-
-        function set_current_selected_text(s) {
-            selector_currnt_xpath_text[0].innerHTML = s;
-        }
-
-        function highlight_current_selected_i() {
-            if (state_clicked) {
-                state_clicked = false;
-                xctx.clearRect(0, 0, c.width, c.height);
-                return;
-            }
-
-            var sel = selector_data['size_pos'][current_selected_i];
-            if (sel[0] == '/') {
-                // @todo - not sure just checking / is right
-                $("#include_filters").val('xpath:' + sel.xpath);
-            } else {
-                $("#include_filters").val(sel.xpath);
-            }
-            xctx.fillStyle = 'rgba(205,205,205,0.95)';
-            xctx.strokeStyle = 'rgba(225,0,0,0.9)';
-            xctx.lineWidth = 3;
-            xctx.fillRect(0, 0, c.width, c.height);
-            // Clear out what only should be seen (make a clear/clean spot)
-            xctx.clearRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-            xctx.strokeRect(sel.left * x_scale, sel.top * y_scale, sel.width * x_scale, sel.height * y_scale);
-            state_clicked = true;
-            set_current_selected_text(sel.xpath);
-        }
-
-        $('#selector-canvas').bind('mousedown', function (e) {
-            highlight_current_selected_i();
-        });
-    }
-});
+let runInClearMode = false;
+
+$(document).ready(() => {
+    let currentSelections = [];
+    let currentSelection = null;
+    let appendToList = false;
+    let c, xctx, ctx;
+    let xScale = 1, yScale = 1;
+    let selectorImage, selectorImageRect, selectorData;
+
+    // Global jQuery selectors with "Elem" appended
+    const $selectorCanvasElem = $('#selector-canvas');
+    const $includeFiltersElem = $("#include_filters");
+    const $selectorBackgroundElem = $("img#selector-background");
+    const $selectorCurrentXpathElem = $("#selector-current-xpath span");
+    const $fetchingUpdateNoticeElem = $('.fetching-update-notice');
+    const $selectorWrapperElem = $("#selector-wrapper");
+
+    // Color constants
+    const FILL_STYLE_HIGHLIGHT = 'rgba(205,0,0,0.35)';
+    const FILL_STYLE_GREYED_OUT = 'rgba(205,205,205,0.95)';
+    const STROKE_STYLE_HIGHLIGHT = 'rgba(255,0,0, 0.9)';
+    const FILL_STYLE_REDLINE = 'rgba(255,0,0, 0.1)';
+    const STROKE_STYLE_REDLINE = 'rgba(225,0,0,0.9)';
+
+    $('#visualselector-tab').click(() => {
+        $selectorBackgroundElem.off('load');
+        currentSelections = [];
+        bootstrapVisualSelector();
+    });
+
+    function clearReset() {
+        ctx.clearRect(0, 0, c.width, c.height);
+
+        if ($includeFiltersElem.val().length) {
+            alert("Existing filters under the 'Filters & Triggers' tab were cleared.");
+        }
+        $includeFiltersElem.val('');
+
+        currentSelections = [];
+
+        // Means we ignore the xpaths from the scraper marked as sel.highlight_as_custom_filter (it matched a previous selector)
+        runInClearMode = true;
+
+        highlightCurrentSelected();
+    }
+
+    function splitToList(v) {
+        return v.split('\n').map(line => line.trim()).filter(line => line.length > 0);
+    }
+
+    function sortScrapedElementsBySize() {
+        // Sort the currentSelections array by area (width * height) in descending order
+        selectorData['size_pos'].sort((a, b) => {
+            const areaA = a.width * a.height;
+            const areaB = b.width * b.height;
+            return areaB - areaA;
+        });
+    }
+
+    $(document).on('keydown keyup', (event) => {
+        if (event.code === 'ShiftLeft' || event.code === 'ShiftRight') {
+            appendToList = event.type === 'keydown';
+        }
+
+        if (event.type === 'keydown') {
+            if ($selectorBackgroundElem.is(":visible") && event.key === "Escape") {
+                clearReset();
+            }
+        }
+    });
+
+    $('#clear-selector').on('click', () => {
+        clearReset();
+    });
+
+    // So if they start switching between visualSelector and manual filters, stop it from rendering old filters
+    $('li.tab a').on('click', () => {
+        runInClearMode = true;
+    });
+
+    if (!window.location.hash || window.location.hash !== '#visualselector') {
+        $selectorBackgroundElem.attr('src', '');
+        return;
+    }
+
+    bootstrapVisualSelector();
+
+    function bootstrapVisualSelector() {
+        $selectorBackgroundElem
+            .on("error", () => {
+                $fetchingUpdateNoticeElem.html("<strong>Ooops!</strong> The VisualSelector tool needs at least one fetched page, please unpause the watch and/or wait for the watch to complete fetching and then reload this page.")
+                    .css('color', '#bb0000');
+                $('#selector-current-xpath, #clear-selector').hide();
+            })
+            .on('load', () => {
+                console.log("Loaded background...");
+                c = document.getElementById("selector-canvas");
+                // greyed out fill context
+                xctx = c.getContext("2d");
+                // redline highlight context
+                ctx = c.getContext("2d");
+                fetchData();
+                $selectorCanvasElem.off("mousemove mousedown");
+            })
+            .attr("src", screenshot_url);
+
+        let s = `${$selectorBackgroundElem.attr('src')}?${new Date().getTime()}`;
+        $selectorBackgroundElem.attr('src', s);
+    }
+
+    function fetchData() {
+        $fetchingUpdateNoticeElem.html("Fetching element data..");
+
+        $.ajax({
+            url: watch_visual_selector_data_url,
+            context: document.body
+        }).done((data) => {
+            $fetchingUpdateNoticeElem.html("Rendering..");
+            selectorData = data;
+            sortScrapedElementsBySize();
+            console.log("Reported browser width from backend: " + data['browser_width']);
+            setScale();
+            reflowSelector();
+            $fetchingUpdateNoticeElem.fadeOut();
+        });
+    }
+
+    function updateFiltersText() {
+        // Assuming currentSelections is already defined and contains the selections
+        let uniqueSelections = new Set(currentSelections.map(sel => (sel[0] === '/' ? `xpath:${sel.xpath}` : sel.xpath)));
+
+        // Convert the Set back to an array and join with newline characters
+        let textboxFilterText = Array.from(uniqueSelections).join("\n");
+        $includeFiltersElem.val(textboxFilterText);
+    }
+
+    function setScale() {
+        $selectorWrapperElem.show();
+        selectorImage = $selectorBackgroundElem[0];
+        selectorImageRect = selectorImage.getBoundingClientRect();
+
+        $selectorCanvasElem.attr({
+            'height': selectorImageRect.height,
+            'width': selectorImageRect.width
+        });
+        $selectorWrapperElem.attr('width', selectorImageRect.width);
+        $('#visual-selector-heading').css('max-width', selectorImageRect.width + "px")
+
+        xScale = selectorImageRect.width / selectorImage.naturalWidth;
+        yScale = selectorImageRect.height / selectorImage.naturalHeight;
+
+        ctx.strokeStyle = STROKE_STYLE_HIGHLIGHT;
+        ctx.fillStyle = FILL_STYLE_REDLINE;
+        ctx.lineWidth = 3;
+        console.log("Scaling set x: " + xScale + " by y:" + yScale);
+        $("#selector-current-xpath").css('max-width', selectorImageRect.width);
+    }
+
+    function reflowSelector() {
+        $(window).resize(() => {
+            setScale();
+            highlightCurrentSelected();
+        });
+
+        setScale();
+
+        console.log(selectorData['size_pos'].length + " selectors found");
+
+        let existingFilters = splitToList($includeFiltersElem.val());
+
+        selectorData['size_pos'].forEach(sel => {
+            if ((!runInClearMode && sel.highlight_as_custom_filter) || existingFilters.includes(sel.xpath)) {
+                console.log("highlighting " + c);
+                currentSelections.push(sel);
+            }
+        });
+
+        highlightCurrentSelected();
+        updateFiltersText();
+
+        $selectorCanvasElem.bind('mousemove', handleMouseMove.debounce(5));
+        $selectorCanvasElem.bind('mousedown', handleMouseDown.debounce(5));
+        $selectorCanvasElem.bind('mouseleave', highlightCurrentSelected.debounce(5));
+
+        function handleMouseMove(e) {
+            if (!e.offsetX && !e.offsetY) {
+                const targetOffset = $(e.target).offset();
+                e.offsetX = e.pageX - targetOffset.left;
+                e.offsetY = e.pageY - targetOffset.top;
+            }
+
+            ctx.fillStyle = FILL_STYLE_HIGHLIGHT;
+
+            selectorData['size_pos'].forEach(sel => {
+                if (e.offsetY > sel.top * yScale && e.offsetY < sel.top * yScale + sel.height * yScale &&
+                    e.offsetX > sel.left * yScale && e.offsetX < sel.left * yScale + sel.width * yScale) {
+                    setCurrentSelectedText(sel.xpath);
+                    drawHighlight(sel);
+                    currentSelections.push(sel);
+                    currentSelection = sel;
+                    highlightCurrentSelected();
+                    currentSelections.pop();
+                }
+            })
+        }
+
+        function setCurrentSelectedText(s) {
+            $selectorCurrentXpathElem[0].innerHTML = s;
+        }
+
+        function drawHighlight(sel) {
+            ctx.strokeRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
+            ctx.fillRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
+        }
+
+        function handleMouseDown() {
+            // If we are in 'appendToList' mode, grow the list, if not, just 1
+            currentSelections = appendToList ? [...currentSelections, currentSelection] : [currentSelection];
+            highlightCurrentSelected();
+            updateFiltersText();
+        }
+    }
+
+    function highlightCurrentSelected() {
+        xctx.fillStyle = FILL_STYLE_GREYED_OUT;
+        xctx.strokeStyle = STROKE_STYLE_REDLINE;
+        xctx.lineWidth = 3;
+        xctx.clearRect(0, 0, c.width, c.height);
+
+        currentSelections.forEach(sel => {
+            //xctx.clearRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
+            xctx.strokeRect(sel.left * xScale, sel.top * yScale, sel.width * xScale, sel.height * yScale);
+        });
+    }
+});

@@ -1,6 +1,8 @@
 #selector-wrapper {
     height: 100%;
+    text-align: center;
     max-height: 70vh;
     overflow-y: scroll;
     position: relative;

@@ -676,15 +676,26 @@ footer {
      and also iPads specifically.
      */
     .watch-table {
+        /* make headings work on mobile */
+        thead {
+            display: block;
+
+            tr {
+                th {
+                    display: inline-block;
+                }
+            }
+
+            .empty-cell {
+                display: none;
+            }
+        }
+
         /* Force table to not be like tables anymore */
-        thead,
         tbody,
-        th,
         td,
         tr {
             display: block;
         }

         .last-checked {
             >span {
@@ -707,13 +718,6 @@ footer {
             display: inline-block;
         }

-        /* Hide table headers (but not display: none;, for accessibility) */
-        thead tr {
-            position: absolute;
-            top: -9999px;
-            left: -9999px;
-        }
-
         .pure-table td,
         .pure-table th {
             border: none;
@@ -758,6 +762,7 @@ footer {
     thead {
         background-color: var(--color-background-table-thead);
         color: var(--color-text);
+        border-bottom: 1px solid var(--color-background-table-thead);
     }

     td,

@@ -866,14 +866,17 @@ footer {
      and also iPads specifically.
      */
   .watch-table {
+    /* make headings work on mobile */
     /* Force table to not be like tables anymore */
-    /* Force table to not be like tables anymore */
-    /* Hide table headers (but not display: none;, for accessibility) */ }
-  .watch-table thead,
-  .watch-table tbody,
-  .watch-table th,
-  .watch-table td,
-  .watch-table tr {
+    /* Force table to not be like tables anymore */ }
+  .watch-table thead {
+    display: block; }
+  .watch-table thead tr th {
+    display: inline-block; }
+  .watch-table thead .empty-cell {
+    display: none; }
+  .watch-table tbody td,
+  .watch-table tbody tr {
     display: block; }
   .watch-table .last-checked > span {
     vertical-align: middle; }
@@ -885,10 +888,6 @@ footer {
       content: "Last Changed "; }
   .watch-table td.inline {
     display: inline-block; }
-  .watch-table thead tr {
-    position: absolute;
-    top: -9999px;
-    left: -9999px; }
   .watch-table .pure-table td,
   .watch-table .pure-table th {
     border: none; }
@@ -915,7 +914,8 @@ footer {
     border-color: var(--color-border-table-cell); }
   .pure-table thead {
     background-color: var(--color-background-table-thead);
-    color: var(--color-text); }
+    color: var(--color-text);
+    border-bottom: 1px solid var(--color-background-table-thead); }
   .pure-table td,
   .pure-table th {
     border-left-color: var(--color-border-table-cell); }
@@ -1068,6 +1068,7 @@ ul {
 #selector-wrapper {
   height: 100%;
+  text-align: center;
   max-height: 70vh;
   overflow-y: scroll;
   position: relative; }

@ -163,7 +163,6 @@ class ChangeDetectionStore:
del (update_obj[dict_key]) del (update_obj[dict_key])
self.__data['watching'][uuid].update(update_obj) self.__data['watching'][uuid].update(update_obj)
self.needs_write = True self.needs_write = True
@property @property
@ -243,6 +242,15 @@ class ChangeDetectionStore:
def clear_watch_history(self, uuid): def clear_watch_history(self, uuid):
import pathlib import pathlib
from .model import Restock from .model import Restock
# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
unlink(item)
# Force the attr to recalculate
bump = self.__data['watching'][uuid].history
# Do this last because it will trigger a recheck due to last_checked being zero
self.__data['watching'][uuid].update({ self.__data['watching'][uuid].update({
'browser_steps_last_error_step' : None, 'browser_steps_last_error_step' : None,
'check_count': 0, 'check_count': 0,
@ -260,13 +268,6 @@ class ChangeDetectionStore:
'restock': Restock() 'restock': Restock()
}) })
# JSON Data, Screenshots, Textfiles (history index and snapshots), HTML in the future etc
for item in pathlib.Path(os.path.join(self.datastore_path, uuid)).rglob("*.*"):
unlink(item)
# Force the attr to recalculate
bump = self.__data['watching'][uuid].history
self.needs_write_urgent = True self.needs_write_urgent = True
def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True): def add_watch(self, url, tag='', extras=None, tag_uuids=None, write_to_disk_now=True):
@ -377,46 +378,6 @@ class ChangeDetectionStore:
return False return False
# Save as PNG, PNG is larger but better for doing visual diff in the future
def save_screenshot(self, watch_uuid, screenshot: bytes, as_error=False):
if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error-screenshot.png")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "last-screenshot.png")
self.data['watching'][watch_uuid].ensure_data_dir_exists()
with open(target_path, 'wb') as f:
f.write(screenshot)
f.close()
def save_error_text(self, watch_uuid, contents):
if not self.data['watching'].get(watch_uuid):
return
self.data['watching'][watch_uuid].ensure_data_dir_exists()
target_path = os.path.join(self.datastore_path, watch_uuid, "last-error.txt")
with open(target_path, 'w') as f:
f.write(contents)
def save_xpath_data(self, watch_uuid, data, as_error=False):
if not self.data['watching'].get(watch_uuid):
return
if as_error:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements-error.json")
else:
target_path = os.path.join(self.datastore_path, watch_uuid, "elements.json")
self.data['watching'][watch_uuid].ensure_data_dir_exists()
with open(target_path, 'w') as f:
f.write(json.dumps(data))
f.close()
def sync_to_json(self): def sync_to_json(self):
logger.info("Saving JSON..") logger.info("Saving JSON..")
try: try:
@@ -892,3 +853,8 @@ class ChangeDetectionStore:
             # Something custom here
             self.__data["watching"][uuid]['time_between_check_use_default'] = False
+
+    # Correctly set datatype for older installs where 'tag' was string and update_12 did not catch it
+    def update_16(self):
+        for uuid, watch in self.data['watching'].items():
+            if isinstance(watch.get('tags'), str):
+                self.data['watching'][uuid]['tags'] = []
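For context, the numbered update_N methods above are one-off schema migrations which the store applies in ascending order on startup; a minimal sketch of that discovery-and-apply pattern (the runner name and version bookkeeping here are assumptions for illustration):

import re

def run_pending_updates(store, last_applied: int) -> int:
    # Find every update_<n> method on the store and run the ones newer
    # than the last recorded schema version, lowest number first.
    available = sorted(
        int(m.group(1))
        for name in dir(store)
        if (m := re.match(r"update_(\d+)$", name))
    )
    for n in available:
        if n > last_applied:
            getattr(store, f"update_{n}")()
            last_applied = n
    return last_applied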

@@ -26,7 +26,11 @@
     <meta name="msapplication-TileColor" content="#da532c">
     <meta name="msapplication-config" content="favicons/browserconfig.xml">
     <meta name="theme-color" content="#ffffff">
+    <script>
+        const csrftoken="{{ csrf_token() }}";
+    </script>
     <script src="{{url_for('static_content', group='js', filename='jquery-3.6.0.min.js')}}"></script>
+    <script src="{{url_for('static_content', group='js', filename='csrf.js')}}" defer></script>
 </head>
 <body>

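The change above exposes the CSRF token to scripts as a page-global, and the new csrf.js (not shown in this diff) presumably replays it on AJAX requests. Assuming Flask-WTF's CSRFProtect, which also accepts the token in an X-CSRFToken request header, the equivalent exchange from Python would look roughly like this (host and endpoint are placeholders):

import re
import requests

session = requests.Session()
page = session.get("http://localhost:5000/")  # placeholder host

# Scrape the token that base.html now renders into the page
token = re.search(r'const csrftoken="([^"]+)"', page.text).group(1)

# Replay it on a state-changing request, as csrf.js presumably does for XHRs
session.post("http://localhost:5000/some-form-endpoint",  # placeholder endpoint
             headers={"X-CSRFToken": token},
             data={"example": "value"})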
@@ -290,7 +290,7 @@ xpath://body/div/span[contains(@class, 'example-class')]",
         <ul>
             <li>JSONPath: Prefix with <code>json:</code>, use <code>json:$</code> to force re-formatting if required, <a href="https://jsonpath.com/" target="new">test your JSONPath here</a>.</li>
             {% if jq_support %}
-            <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>.</li>
+            <li>jq: Prefix with <code>jq:</code> and <a href="https://jqplay.org/" target="new">test your jq here</a>. Using <a href="https://stedolan.github.io/jq/" target="new">jq</a> allows for complex filtering and processing of JSON data with built-in functions, regex, filtering, and more. See examples and documentation <a href="https://stedolan.github.io/jq/manual/" target="new">here</a>. Prefix with <code>jqraw:</code> to output the results as text instead of a JSON list.</li>
             {% else %}
             <li>jq support not installed</li>
             {% endif %}
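The practical difference is that jq: JSON-encodes its result, so a matched string keeps its quotes, while jqraw: returns the bare text, much like jq's own -r flag. A short illustration based on the unit tests added later in this commit:

from changedetectionio import html_tools

doc = '{"offers": {"priceCurrency": "AUD"}}'

# jq: returns a JSON-encoded result, quotes included
assert html_tools.extract_json_as_string(doc, 'jq:.offers.priceCurrency') == '"AUD"'

# jqraw: returns the raw text
assert html_tools.extract_json_as_string(doc, 'jqraw:.offers.priceCurrency') == 'AUD'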
@@ -424,9 +424,8 @@ Unavailable") }}
         <fieldset>
             <div class="pure-control-group">
             {% if visualselector_enabled %}
-                <span class="pure-form-message-inline">
-                    The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection &dash; after the <i>Browser Steps</i> has completed.<br>
-                    This tool is a helper to manage filters in the "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters & Triggers</a> tab.
+                <span class="pure-form-message-inline" id="visual-selector-heading">
+                    The Visual Selector tool lets you select the <i>text</i> elements that will be used for the change detection. It automatically fills in the filters in the "CSS/JSONPath/JQ/XPath Filters" box of the <a href="#filters-and-triggers">Filters & Triggers</a> tab. Use <strong>Shift+Click</strong> to select multiple items.
                 </span>
                 <div id="selector-header">

@@ -73,14 +73,14 @@
             {% set link_order = "desc" if sort_order == 'asc' else "asc" %}
             {% set arrow_span = "" %}
             <th><input style="vertical-align: middle" type="checkbox" id="check-all" > <a class="{{ 'active '+link_order if sort_attribute == 'date_created' else 'inactive' }}" href="{{url_for('index', sort='date_created', order=link_order, tag=active_tag_uuid)}}"># <span class='arrow {{link_order}}'></span></a></th>
-            <th></th>
+            <th class="empty-cell"></th>
             <th><a class="{{ 'active '+link_order if sort_attribute == 'label' else 'inactive' }}" href="{{url_for('index', sort='label', order=link_order, tag=active_tag_uuid)}}">Website <span class='arrow {{link_order}}'></span></a></th>
             {% if any_has_restock_price_processor %}
             <th>Restock &amp; Price</th>
             {% endif %}
             <th><a class="{{ 'active '+link_order if sort_attribute == 'last_checked' else 'inactive' }}" href="{{url_for('index', sort='last_checked', order=link_order, tag=active_tag_uuid)}}">Last Checked <span class='arrow {{link_order}}'></span></a></th>
             <th><a class="{{ 'active '+link_order if sort_attribute == 'last_changed' else 'inactive' }}" href="{{url_for('index', sort='last_changed', order=link_order, tag=active_tag_uuid)}}">Last Changed <span class='arrow {{link_order}}'></span></a></th>
-            <th></th>
+            <th class="empty-cell"></th>
             </tr>
             </thead>
             <tbody>

@@ -149,6 +149,15 @@ def test_api_simple(client, live_server):
         headers={'x-api-key': api_key},
     )
     assert b'which has this one new line' in res.data
+    assert b'<div id' not in res.data
+
+    # Fetch the HTML of the latest one
+    res = client.get(
+        url_for("watchsinglehistory", uuid=watch_uuid, timestamp='latest')+"?html=1",
+        headers={'x-api-key': api_key},
+    )
+    assert b'which has this one new line' in res.data
+    assert b'<div id' in res.data

     # Fetch the whole watch
     res = client.get(

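For reference, the same call made against a running instance rather than the test client, using the watch history API endpoint this test exercises (host, UUID and key below are placeholders):

import requests

BASE = "http://localhost:5000"  # placeholder host
UUID = "00000000-0000-0000-0000-000000000000"  # placeholder watch UUID

# Without ?html=1 the latest snapshot comes back as extracted text;
# with it, the stored HTML is returned instead.
res = requests.get(
    f"{BASE}/api/v1/watch/{UUID}/history/latest",
    params={"html": 1},
    headers={"x-api-key": "<your-api-key>"},
)
print(res.text)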
@@ -3,7 +3,8 @@

 import time
 from flask import url_for
 from urllib.request import urlopen
-from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI
+from .util import set_original_response, set_modified_response, live_server_setup, wait_for_all_checks, extract_rss_token_from_UI, \
+    extract_UUID_from_client

 sleep_time_for_fetch_thread = 3

@@ -141,6 +142,14 @@ def test_check_basic_change_detection_functionality(client, live_server):
     assert b'Mark all viewed' not in res.data
     assert b'unviewed' not in res.data

+    # #2458 "clear history" should make the Watch object update its status correctly when the first snapshot lands again
+    uuid = extract_UUID_from_client(client)
+    client.get(url_for("clear_watch_history", uuid=uuid))
+    client.get(url_for("form_watch_checknow"), follow_redirects=True)
+    wait_for_all_checks(client)
+    res = client.get(url_for("index"))
+    assert b'preview/' in res.data
+
     #
     # Cleanup everything
     res = client.get(url_for("form_delete", uuid="all"), follow_redirects=True)

@@ -5,15 +5,13 @@ import os
 import json
 import logging
 from flask import url_for
-from .util import live_server_setup
+from .util import live_server_setup, wait_for_all_checks
 from urllib.parse import urlparse, parse_qs


 def test_consistent_history(client, live_server):
     live_server_setup(live_server)

-    # Give the endpoint time to spin up
-    time.sleep(1)
-    r = range(1, 50)
+    r = range(1, 30)

     for one in r:
         test_url = url_for('test_endpoint', content_type="text/html", content=str(one), _external=True)

@@ -25,15 +23,8 @@ def test_consistent_history(client, live_server):
     assert b"1 Imported" in res.data

-    time.sleep(3)
-    while True:
-        res = client.get(url_for("index"))
-        logging.debug("Waiting for 'Checking now' to go away..")
-        if b'Checking now' not in res.data:
-            break
-        time.sleep(0.5)
-    time.sleep(3)
+    wait_for_all_checks(client)

     # Essentially just triggers the DB write/update
     res = client.post(
         url_for("settings_page"),

@@ -44,8 +35,9 @@ def test_consistent_history(client, live_server):
     )

     assert b"Settings updated." in res.data

-    time.sleep(3)
+    # Give it time to write it out
+    time.sleep(2)

     json_db_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, 'url-watches.json')

     json_obj = None

@@ -58,7 +50,7 @@ def test_consistent_history(client, live_server):
     # each one should have a history.txt containing just one line
     for w in json_obj['watching'].keys():
         history_txt_index_file = os.path.join(live_server.app.config['DATASTORE'].datastore_path, w, 'history.txt')
-        assert os.path.isfile(history_txt_index_file), "History.txt should exist where I expect it - {}".format(history_txt_index_file)
+        assert os.path.isfile(history_txt_index_file), f"History.txt should exist where I expect it at {history_txt_index_file}"

     # Same like in model.Watch
     with open(history_txt_index_file, "r") as f:

@@ -70,15 +62,15 @@ def test_consistent_history(client, live_server):
                                                                     w))
         # Find the snapshot one
         for fname in files_in_watch_dir:
-            if fname != 'history.txt':
+            if fname != 'history.txt' and 'html' not in fname:
                 # contents should match what we requested as content returned from the test url
                 with open(os.path.join(live_server.app.config['DATASTORE'].datastore_path, w, fname), 'r') as snapshot_f:
                     contents = snapshot_f.read()
                     watch_url = json_obj['watching'][w]['url']
                     u = urlparse(watch_url)
                     q = parse_qs(u[4])
                     assert q['content'][0] == contents.strip(), f"Snapshot file {fname} should contain {q['content'][0]}"

-        assert len(files_in_watch_dir) == 2, "Should be just two files in the dir, history.txt and the snapshot"
+        assert len(files_in_watch_dir) == 3, "Should be just three files in the dir, html.br snapshot, history.txt and the extracted text snapshot"
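The inline "Checking now" polling removed above is exactly what the shared util.wait_for_all_checks helper centralises; a rough sketch of such a helper, reconstructed from the deleted loop (the real implementation in tests/util.py may differ):

import time
from flask import url_for

def wait_for_all_checks(client, timeout=60):
    # Poll the index page until no watch is marked 'Checking now'
    start = time.time()
    while time.time() - start < timeout:
        res = client.get(url_for("index"))
        if b'Checking now' not in res.data:
            return
        time.sleep(0.5)
    raise AssertionError(f"Watches still being checked after {timeout}s")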

@@ -45,7 +45,6 @@ def test_highlight_ignore(client, live_server):
     )
-
     res = client.get(url_for("edit_page", uuid=uuid))

     # should be a regex now
     assert b'/oh\ yeah\ \d+/' in res.data

@@ -55,3 +54,7 @@ def test_highlight_ignore(client, live_server):

     # And it should register in the preview page
     res = client.get(url_for("preview_page", uuid=uuid))
     assert b'<div class="ignored">oh yeah 456' in res.data
+
+    # Should be in base.html
+    assert b'csrftoken' in res.data

@@ -41,20 +41,27 @@ and it can also be repeated
     from .. import html_tools

     # See that we can find the second <script> one, which is not broken, and matches our filter
-    text = html_tools.extract_json_as_string(content, "json:$.offers.price")
-    assert text == "23.5"
+    text = html_tools.extract_json_as_string(content, "json:$.offers.priceCurrency")
+    assert text == '"AUD"'
+
+    text = html_tools.extract_json_as_string('{"id":5}', "json:$.id")
+    assert text == "5"

     # also check for jq
     if jq_support:
-        text = html_tools.extract_json_as_string(content, "jq:.offers.price")
-        assert text == "23.5"
+        text = html_tools.extract_json_as_string(content, "jq:.offers.priceCurrency")
+        assert text == '"AUD"'

         text = html_tools.extract_json_as_string('{"id":5}', "jq:.id")
         assert text == "5"

-        text = html_tools.extract_json_as_string('{"id":5}', "json:$.id")
-        assert text == "5"
+        text = html_tools.extract_json_as_string(content, "jqraw:.offers.priceCurrency")
+        assert text == "AUD"
+
+        text = html_tools.extract_json_as_string('{"id":5}', "jqraw:.id")
+        assert text == "5"

     # When nothing at all is found, it should throw JSONNOTFound
     # Which is caught and shown to the user in the watch-overview table
     with pytest.raises(html_tools.JSONNotFound) as e_info:

@@ -64,6 +71,9 @@ and it can also be repeated
     with pytest.raises(html_tools.JSONNotFound) as e_info:
         html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "jq:.id")

+    with pytest.raises(html_tools.JSONNotFound) as e_info:
+        html_tools.extract_json_as_string('COMPLETE GIBBERISH, NO JSON!', "jqraw:.id")
+

 def test_unittest_inline_extract_body():
     content = """
@@ -291,6 +301,10 @@ def test_check_jq_filter(client, live_server):
     if jq_support:
         check_json_filter('jq:.boss.name', client, live_server)

+def test_check_jqraw_filter(client, live_server):
+    if jq_support:
+        check_json_filter('jqraw:.boss.name', client, live_server)
+

 def check_json_filter_bool_val(json_filter, client, live_server):
     set_original_response()
@@ -345,6 +359,10 @@ def test_check_jq_filter_bool_val(client, live_server):
     if jq_support:
         check_json_filter_bool_val("jq:.available", client, live_server)

+def test_check_jqraw_filter_bool_val(client, live_server):
+    if jq_support:
+        check_json_filter_bool_val("jqraw:.available", client, live_server)
+

 # Re #265 - Extended JSON selector test
 # Stuff to consider here
 # - Selector should be allowed to return empty when it doesnt match (people might wait for some condition)
@@ -492,3 +510,7 @@ def test_check_jsonpath_ext_filter(client, live_server):
 def test_check_jq_ext_filter(client, live_server):
     if jq_support:
         check_json_ext_filter('jq:.[] | select(.status | contains("Sold"))', client, live_server)
+
+def test_check_jqraw_ext_filter(client, live_server):
+    if jq_support:
+        check_json_ext_filter('jqraw:.[] | select(.status | contains("Sold"))', client, live_server)

@@ -1,11 +1,12 @@
+from . import content_fetchers
+from .processors.restock_diff import UnableToExtractRestockData
+from .processors.text_json_diff import FilterNotFoundInResponse
+from changedetectionio import html_tools
+from copy import deepcopy
 import os
-import threading
 import queue
+import threading
 import time
-
-from . import content_fetchers
-from changedetectionio import html_tools
-from .processors.text_json_diff import FilterNotFoundInResponse
-from .processors.restock_diff import UnableToExtractRestockData

 # A single update worker
 #
@@ -245,14 +246,18 @@ class update_worker(threading.Thread):
                     contents = b''
                     process_changedetection_results = True
                     update_obj = {}
-                    logger.info(f"Processing watch UUID {uuid} "
-                                f"Priority {queued_item_data.priority} "
-                                f"URL {self.datastore.data['watching'][uuid]['url']}")
+
+                    # Clear last errors (move to preflight func?)
+                    self.datastore.data['watching'][uuid]['browser_steps_last_error_step'] = None
+
+                    watch = self.datastore.data['watching'].get(uuid)
+
+                    logger.info(f"Processing watch UUID {uuid} Priority {queued_item_data.priority} URL {watch['url']}")
                     now = time.time()

                     try:
                         # Processor is what we are using for detecting the "Change"
-                        processor = self.datastore.data['watching'][uuid].get('processor', 'text_json_diff')
+                        processor = watch.get('processor', 'text_json_diff')
                         # if system...

                         # Abort processing when the content was the same as the last fetch
@@ -272,12 +277,10 @@ class update_worker(threading.Thread):
                             watch_uuid=uuid
                         )

-                        # Clear last errors (move to preflight func?)
-                        self.datastore.data['watching'][uuid]['browser_steps_last_error_step'] = None
-
                         update_handler.call_browser()

-                        changed_detected, update_obj, contents = update_handler.run_changedetection(uuid,
+                        changed_detected, update_obj, contents = update_handler.run_changedetection(
+                            watch=watch,
                             skip_when_checksum_same=skip_when_same_checksum,
                         )
@@ -309,7 +312,8 @@ class update_worker(threading.Thread):
                         })

                         if e.screenshot:
-                            self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot)
+                            watch.save_screenshot(screenshot=e.screenshot, as_error=True)
+
                         process_changedetection_results = False

                     except content_fetchers.exceptions.Non200ErrorCodeReceived as e:
@@ -325,11 +329,11 @@ class update_worker(threading.Thread):
                             err_text = "Error - Request returned a HTTP error code {}".format(str(e.status_code))

                         if e.screenshot:
-                            self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
+                            watch.save_screenshot(screenshot=e.screenshot, as_error=True)

                         if e.xpath_data:
-                            self.datastore.save_xpath_data(watch_uuid=uuid, data=e.xpath_data, as_error=True)
+                            watch.save_xpath_data(data=e.xpath_data, as_error=True)

                         if e.page_text:
-                            self.datastore.save_error_text(watch_uuid=uuid, contents=e.page_text)
+                            watch.save_error_text(contents=e.page_text)

                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})
                         process_changedetection_results = False
@@ -340,17 +344,19 @@ class update_worker(threading.Thread):
                         err_text = "Warning, no filters were found, no change detection ran - Did the page change layout? update your Visual Filter if necessary."
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text})

+                        if e.screenshot:
+                            watch.save_screenshot(screenshot=e.screenshot, as_error=True)
+
                         # Only when enabled, send the notification
-                        if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
-                            c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
+                        if watch.get('filter_failure_notification_send', False):
+                            c = watch.get('consecutive_filter_failures', 5)
                             c += 1
                             # Send notification if we reached the threshold?
                             threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
                                                                                            0)
                             logger.warning(f"Filter for {uuid} not found, consecutive_filter_failures: {c}")
                             if threshold > 0 and c >= threshold:
-                                if not self.datastore.data['watching'][uuid].get('notification_muted'):
+                                if not watch.get('notification_muted'):
                                     self.send_filter_failure_notification(uuid)
                                 c = 0
@@ -400,15 +406,15 @@ class update_worker(threading.Thread):
                             }
                         )

-                        if self.datastore.data['watching'][uuid].get('filter_failure_notification_send', False):
-                            c = self.datastore.data['watching'][uuid].get('consecutive_filter_failures', 5)
+                        if watch.get('filter_failure_notification_send', False):
+                            c = watch.get('consecutive_filter_failures', 5)
                             c += 1
                             # Send notification if we reached the threshold?
                             threshold = self.datastore.data['settings']['application'].get('filter_failure_notification_threshold_attempts',
                                                                                            0)
                             logger.error(f"Step for {uuid} not found, consecutive_filter_failures: {c}")
                             if threshold > 0 and c >= threshold:
-                                if not self.datastore.data['watching'][uuid].get('notification_muted'):
+                                if not watch.get('notification_muted'):
                                     self.send_step_failure_notification(watch_uuid=uuid, step_n=e.step_n)
                                 c = 0
@@ -430,7 +436,7 @@ class update_worker(threading.Thread):
                     except content_fetchers.exceptions.JSActionExceptions as e:
                         err_text = "Error running JS Actions - Page request - "+e.message
                         if e.screenshot:
-                            self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
+                            watch.save_screenshot(screenshot=e.screenshot, as_error=True)
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
                                                                            'last_check_status': e.status_code})
                         process_changedetection_results = False
@@ -440,7 +446,7 @@ class update_worker(threading.Thread):
                             err_text = "{} - {}".format(err_text, e.message)

                         if e.screenshot:
-                            self.datastore.save_screenshot(watch_uuid=uuid, screenshot=e.screenshot, as_error=True)
+                            watch.save_screenshot(screenshot=e.screenshot, as_error=True)

                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': err_text,
                                                                            'last_check_status': e.status_code,
@@ -467,8 +473,6 @@ class update_worker(threading.Thread):
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': "Exception: " + str(e)})
                         # Other serious error
                         process_changedetection_results = False
-                        # import traceback
-                        # print(traceback.format_exc())

                     else:
                         # Crash protection, the watch entry could have been removed by this point (during a slow chrome fetch etc)
@@ -476,7 +480,7 @@ class update_worker(threading.Thread):
                             continue

                         # Mark that we never had any failures
-                        if not self.datastore.data['watching'][uuid].get('ignore_status_codes'):
+                        if not watch.get('ignore_status_codes'):
                             update_obj['consecutive_filter_failures'] = 0

                         # Everything ran OK, clean off any previous error
@@ -484,25 +488,48 @@ class update_worker(threading.Thread):
                         self.cleanup_error_artifacts(uuid)

+                    if not self.datastore.data['watching'].get(uuid):
+                        continue
+
                     #
                     # Different exceptions mean that we may or may not want to bump the snapshot, trigger notifications etc
                     if process_changedetection_results:
+
+                        # Always save the screenshot if it's available
+                        if update_handler.screenshot:
+                            watch.save_screenshot(screenshot=update_handler.screenshot)
+
+                        if update_handler.xpath_data:
+                            watch.save_xpath_data(data=update_handler.xpath_data)
+
                         try:
-                            watch = self.datastore.data['watching'].get(uuid)
                             self.datastore.update_watch(uuid=uuid, update_obj=update_obj)

                             # Also save the snapshot on the first time checked
-                            if changed_detected or not watch['last_checked']:
+                            if changed_detected or not watch.get('last_checked'):
+                                timestamp = round(time.time())
+
+                                # Small hack so that we sleep just enough to allow 1 second between history snapshots,
+                                # because history.txt indexes/keys snapshots by epoch seconds and we don't want dupe keys
+                                if watch.newest_history_key and int(timestamp) == int(watch.newest_history_key):
+                                    logger.warning(
+                                        f"Timestamp {timestamp} already exists, waiting 1 second so we have a unique key in history.txt")
+                                    timestamp = str(int(timestamp) + 1)
+                                    time.sleep(1)
+
                                 watch.save_history_text(contents=contents,
-                                                        timestamp=str(round(time.time())),
+                                                        timestamp=timestamp,
                                                         snapshot_id=update_obj.get('previous_md5', 'none'))
+
+                                if update_handler.fetcher.content:
+                                    watch.save_last_fetched_html(contents=update_handler.fetcher.content, timestamp=timestamp)

                             # A change was detected
                             if changed_detected:
                                 # Notifications should only trigger on the second time (first time, we gather the initial snapshot)
                                 if watch.history_n >= 2:
                                     logger.info(f"Change detected in UUID {uuid} - {watch['url']}")
-                                    if not self.datastore.data['watching'][uuid].get('notification_muted'):
+                                    if not watch.get('notification_muted'):
                                         self.send_content_changed_notification(watch_uuid=uuid)
                                 else:
                                     logger.info(f"Change triggered in UUID {uuid} due to first history saving (no notifications sent) - {watch['url']}")
@@ -513,9 +540,9 @@ class update_worker(threading.Thread):
                             logger.critical(str(e))
                             self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})

                    if self.datastore.data['watching'].get(uuid):
                        # Always record that we at least tried
-                        count = self.datastore.data['watching'][uuid].get('check_count', 0) + 1
+                        count = watch.get('check_count', 0) + 1

                        # Record the 'server' header reply, can be used for actions in the future like cloudflare/akamai workarounds
                        try:

@@ -531,12 +558,6 @@ class update_worker(threading.Thread):
                             'check_count': count
                         })

-                        # Always save the screenshot if it's available
-                        if update_handler.screenshot:
-                            self.datastore.save_screenshot(watch_uuid=uuid, screenshot=update_handler.screenshot)
-
-                        if update_handler.xpath_data:
-                            self.datastore.save_xpath_data(watch_uuid=uuid, data=update_handler.xpath_data)
-
                     self.current_uuid = None  # Done
                     self.q.task_done()

@@ -69,9 +69,10 @@ services:
    #     If WEBDRIVER or PLAYWRIGHT are enabled, changedetection container depends on that
    #     and must wait before starting (substitute "browser-chrome" with "playwright-chrome" if the latter is used)
    #    depends_on:
-   #        browser-chrome:
+   #        playwright-chrome:
    #            condition: service_started

    #   Used for fetching pages via Playwright+Chrome where you need Javascript support.
    #   RECOMMENDED FOR FETCHING PAGES WITH CHROME
    #    playwright-chrome:

@@ -41,10 +41,8 @@ apprise~=1.8.0
 # use v1.x due to https://github.com/eclipse/paho.mqtt.python/issues/814
 paho-mqtt>=1.6.1,<2.0.0

-# This mainly affects some ARM builds, which unlike the other builds ignores "ARG CRYPTOGRAPHY_DONT_BUILD_RUST=1"
-# so without this pinning, the newer versions on ARM will forcefully try to build rust, which results in "rust compiler not found"
-# (introduced once apprise became a dep)
-cryptography~=3.4
+# Requires extra wheel for rPi
+cryptography~=42.0.8

 # Used for CSS filtering
 beautifulsoup4
