From 6020bdceeea78aa65392f1d002286ca52f7e843f Mon Sep 17 00:00:00 2001
From: dgtlmoon
Date: Tue, 16 Jan 2024 10:50:53 +0100
Subject: [PATCH] Loguru output tweaks

---
 changedetectionio/__init__.py            |  2 +-
 changedetectionio/model/Watch.py         |  2 +-
 changedetectionio/processors/__init__.py |  2 +-
 changedetectionio/store.py               |  4 ++--
 changedetectionio/update_worker.py       | 19 +++++++++++--------
 5 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/changedetectionio/__init__.py b/changedetectionio/__init__.py
index 46ae3ea7..5dbf208e 100644
--- a/changedetectionio/__init__.py
+++ b/changedetectionio/__init__.py
@@ -60,7 +60,7 @@ def main():
     try:
         opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:l:", "port")
     except getopt.GetoptError:
-        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level]')
+        print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level - TRACE, DEBUG(default), INFO, SUCCESS, WARNING, ERROR, CRITICAL]')
         sys.exit(2)
 
     create_datastore_dir = False
diff --git a/changedetectionio/model/Watch.py b/changedetectionio/model/Watch.py
index d019532c..576c6f64 100644
--- a/changedetectionio/model/Watch.py
+++ b/changedetectionio/model/Watch.py
@@ -211,7 +211,7 @@ class model(dict):
         # Read the history file as a dict
         fname = os.path.join(self.watch_data_dir, "history.txt")
         if os.path.isfile(fname):
-            logger.debug("Reading watch history index")
+            logger.debug(f"Reading watch history index for {self.get('uuid')}")
             with open(fname, "r") as f:
                 for i in f.readlines():
                     if ',' in i:
diff --git a/changedetectionio/processors/__init__.py b/changedetectionio/processors/__init__.py
index 3239e61d..ea8b9ffb 100644
--- a/changedetectionio/processors/__init__.py
+++ b/changedetectionio/processors/__init__.py
@@ -70,7 +70,7 @@ class difference_detection_processor():
         proxy_url = None
         if preferred_proxy_id:
             proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
-            logger.debug(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}")
+            logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}")
 
         # Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need.
         # When browser_connection_url is None, it method should default to working out whats the best defaults (os env vars etc)
diff --git a/changedetectionio/store.py b/changedetectionio/store.py
index 3868dfa7..4296515b 100644
--- a/changedetectionio/store.py
+++ b/changedetectionio/store.py
@@ -83,7 +83,7 @@ class ChangeDetectionStore:
                     for uuid, watch in self.__data['watching'].items():
                         watch['uuid']=uuid
                         self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
-                        logger.debug(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")
+                        logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")
 
         # First time ran, Create the datastore.
         except (FileNotFoundError):
@@ -616,7 +616,7 @@ class ChangeDetectionStore:
 
         for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
             if n == tag.get('title', '').lower().strip():
-                logger.error(f">>> Tag {name} already exists")
+                logger.warning(f"Tag '{name}' already exists, skipping creation.")
                 return uuid
 
         # Eventually almost everything todo with a watch will apply as a Tag
diff --git a/changedetectionio/update_worker.py b/changedetectionio/update_worker.py
index 17913910..538a8a32 100644
--- a/changedetectionio/update_worker.py
+++ b/changedetectionio/update_worker.py
@@ -220,7 +220,8 @@ class update_worker(threading.Thread):
 
     def run(self):
         from .processors import text_json_diff, restock_diff
-
+        now = time.time()
+
         while not self.app.config.exit.is_set():
             update_handler = None
 
@@ -232,13 +233,12 @@ class update_worker(threading.Thread):
             else:
                 uuid = queued_item_data.item.get('uuid')
                 self.current_uuid = uuid
-
                 if uuid in list(self.datastore.data['watching'].keys()) and self.datastore.data['watching'][uuid].get('url'):
                     changed_detected = False
                     contents = b''
                     process_changedetection_results = True
                     update_obj = {}
-                    logger.debug(f"> Processing UUID {uuid} "
+                    logger.info(f"Processing watch UUID {uuid} "
                                 f"Priority {queued_item_data.priority} "
                                 f"URL {self.datastore.data['watching'][uuid]['url']}")
                     now = time.time()
@@ -280,7 +280,8 @@ class update_worker(threading.Thread):
                         if not isinstance(contents, (bytes, bytearray)):
                             raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
                     except PermissionError as e:
-                        self.app.logger.error("File permission error updating", uuid, str(e))
+                        logger.critical(f"File permission error updating file, watch: {uuid}")
+                        logger.critical(str(e))
                         process_changedetection_results = False
                     except content_fetcher.ReplyWithContentButNoText as e:
                         # Totally fine, it's by choice - just continue on, nothing more to care about
@@ -428,11 +429,13 @@ class update_worker(threading.Thread):
                         process_changedetection_results = False
                     except UnableToExtractRestockData as e:
                         # Usually when fetcher.instock_data returns empty
-                        self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
+                        logger.error(f"Exception (UnableToExtractRestockData) reached processing watch UUID: {uuid}")
+                        logger.error(str(e))
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': f"Unable to extract restock data for this page unfortunately. (Got code {e.status_code} from server)"})
                         process_changedetection_results = False
                     except Exception as e:
-                        self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
+                        logger.error(f"Exception reached processing watch UUID: {uuid}")
+                        logger.error(str(e))
                         self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
                         # Other serious error
                         process_changedetection_results = False
@@ -478,9 +481,8 @@ class update_worker(threading.Thread):
 
                         except Exception as e:
                             # Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
-                            logger.critical("!!!! Exception in update_worker !!!")
+                            logger.critical("!!!! Exception in update_worker while processing process_changedetection_results !!!")
                             logger.critical(str(e))
-                            self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
                             self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
 
                     if self.datastore.data['watching'].get(uuid):
@@ -500,6 +502,7 @@ class update_worker(threading.Thread):
 
             self.current_uuid = None  # Done
             self.q.task_done()
+            logger.debug(f"Watch {uuid} done in {time.time()-now:.2f}s")
 
             # Give the CPU time to interrupt
             time.sleep(0.1)