Logging loguru output tweaks (#2112)

pull/2116/head
dgtlmoon 11 months ago committed by GitHub
parent 4be0fafa93
commit 399cdf0fbf

@@ -60,7 +60,7 @@ def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "6Ccsd:h:p:l:", "port")
except getopt.GetoptError:
print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level]')
print('backend.py -s SSL enable -h [host] -p [port] -d [datastore path] -l [debug level - TRACE, DEBUG(default), INFO, SUCCESS, WARNING, ERROR, CRITICAL]')
sys.exit(2)
create_datastore_dir = False

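The commit only touches the usage string here. For context, a minimal, hypothetical sketch of how a `-l` value can be handed to loguru (any of the listed level names is accepted by `logger.add()` once the default handler is removed); this is not the commit's exact wiring:

```python
import getopt
import sys

from loguru import logger

# Only the -l flag is parsed in this sketch; the real main() handles more options.
opts, _args = getopt.getopt(sys.argv[1:], "l:")

logger_level = "DEBUG"  # default, as the updated usage string documents
for opt, arg in opts:
    if opt == "-l":
        logger_level = arg.upper()

logger.remove()                             # drop loguru's default stderr handler
logger.add(sys.stderr, level=logger_level)  # TRACE/DEBUG/INFO/SUCCESS/WARNING/ERROR/CRITICAL
logger.info("Logger configured at level {}", logger_level)
```
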
@@ -211,7 +211,7 @@ class model(dict):
# Read the history file as a dict
fname = os.path.join(self.watch_data_dir, "history.txt")
if os.path.isfile(fname):
logger.debug("Reading watch history index")
logger.debug(f"Reading watch history index for {self.get('uuid')}")
with open(fname, "r") as f:
for i in f.readlines():
if ',' in i:

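For reference, loguru accepts either pre-built f-strings (the style this commit uses) or deferred brace-style arguments; a small sketch with a hypothetical uuid value:

```python
from loguru import logger

uuid = "a1b2c3"  # hypothetical watch UUID for illustration
history = {}

# f-string style (what the commit uses): Python builds the message before
# loguru sees it, so the interpolation always runs.
logger.debug(f"Reading watch history index for {uuid}")

# Brace style: loguru formats the arguments itself, and skips the work if
# no handler accepts DEBUG records.
logger.debug("Reading watch history index for {}", uuid)

# opt(lazy=True): arguments are callables, evaluated only when the record
# is actually emitted - handy when building the value is expensive.
logger.opt(lazy=True).debug("History entries so far: {}", lambda: len(history))
```
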
@@ -70,7 +70,7 @@ class difference_detection_processor():
proxy_url = None
if preferred_proxy_id:
proxy_url = self.datastore.proxy_list.get(preferred_proxy_id).get('url')
logger.debug(f"Using proxy Key: {preferred_proxy_id} as Proxy URL {proxy_url}")
logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}")
# Now call the fetcher (playwright/requests/etc) with arguments that only a fetcher would need.
# When browser_connection_url is None, the method should default to working out the best defaults (OS env vars etc)

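A self-contained sketch of the same selection logic with hypothetical proxy data; the extra None guard is added here for illustration only and is not part of this hunk:

```python
from loguru import logger

# Hypothetical stand-ins for the datastore's proxy list and the watch URL.
proxy_list = {"proxy-uk": {"url": "socks5://127.0.0.1:1080"}}
preferred_proxy_id = "proxy-uk"
url = "https://example.com"

proxy_url = None
if preferred_proxy_id:
    entry = proxy_list.get(preferred_proxy_id)
    if entry:  # guard added for the sketch: a missing key would otherwise raise AttributeError
        proxy_url = entry.get("url")
        logger.debug(f"Selected proxy key '{preferred_proxy_id}' as proxy URL '{proxy_url}' for {url}")
    else:
        logger.warning(f"Preferred proxy '{preferred_proxy_id}' not in proxy list for {url}")
```
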
@@ -83,7 +83,7 @@ class ChangeDetectionStore:
for uuid, watch in self.__data['watching'].items():
watch['uuid']=uuid
self.__data['watching'][uuid] = Watch.model(datastore_path=self.datastore_path, default=watch)
logger.debug(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")
logger.info(f"Watching: {uuid} {self.__data['watching'][uuid]['url']}")
# First time run, create the datastore.
except (FileNotFoundError):
@@ -616,7 +616,7 @@ class ChangeDetectionStore:
for uuid, tag in self.__data['settings']['application'].get('tags', {}).items():
if n == tag.get('title', '').lower().strip():
logger.error(f">>> Tag {name} already exists")
logger.warning(f"Tag '{name}' already exists, skipping creation.")
return uuid
# Eventually almost everything todo with a watch will apply as a Tag

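Sketch of the duplicate-tag check in isolation, with a hypothetical tags dict and an invented creation path; the point of the hunk is that a duplicate is an expected, recoverable condition, so WARNING suits it better than ERROR:

```python
import uuid as uuid_lib

from loguru import logger

# Hypothetical tags dict mirroring settings['application']['tags'].
tags = {"uuid-1": {"title": "Electronics"}}

def add_tag(name: str) -> str:
    n = name.lower().strip()
    for existing_uuid, tag in tags.items():
        if n == tag.get("title", "").lower().strip():
            # Expected condition, so WARNING rather than ERROR - which is what this hunk changes.
            logger.warning(f"Tag '{name}' already exists, skipping creation.")
            return existing_uuid
    new_uuid = str(uuid_lib.uuid4())
    tags[new_uuid] = {"title": name.strip()}
    logger.info(f"Created tag '{name}' with UUID {new_uuid}")
    return new_uuid

print(add_tag("electronics"))  # returns 'uuid-1' and logs a warning
```
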
@@ -220,6 +220,7 @@ class update_worker(threading.Thread):
def run(self):
from .processors import text_json_diff, restock_diff
now = time.time()
while not self.app.config.exit.is_set():
update_handler = None
@@ -232,13 +233,12 @@ class update_worker(threading.Thread):
else:
uuid = queued_item_data.item.get('uuid')
self.current_uuid = uuid
if uuid in list(self.datastore.data['watching'].keys()) and self.datastore.data['watching'][uuid].get('url'):
changed_detected = False
contents = b''
process_changedetection_results = True
update_obj = {}
logger.debug(f"> Processing UUID {uuid} "
logger.info(f"Processing watch UUID {uuid} "
f"Priority {queued_item_data.priority} "
f"URL {self.datastore.data['watching'][uuid]['url']}")
now = time.time()
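
Worth noting for the multi-line call above: adjacent f-string literals are concatenated at compile time, so the worker emits a single INFO record, not three. A standalone sketch with hypothetical values:

```python
from loguru import logger

uuid, priority, url = "a1b2c3", 1, "https://example.com"  # hypothetical values

# Adjacent f-string literals are joined into one message at compile time.
logger.info(f"Processing watch UUID {uuid} "
            f"Priority {priority} "
            f"URL {url}")
```
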
@@ -280,7 +280,8 @@ class update_worker(threading.Thread):
if not isinstance(contents, (bytes, bytearray)):
raise Exception("Error - returned data from the fetch handler SHOULD be bytes")
except PermissionError as e:
self.app.logger.error("File permission error updating", uuid, str(e))
logger.critical(f"File permission error updating file, watch: {uuid}")
logger.critical(str(e))
process_changedetection_results = False
except content_fetcher.ReplyWithContentButNoText as e:
# Totally fine, it's by choice - just continue on, nothing more to care about
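
The replaced call passed the uuid and the exception as extra positional arguments, which stdlib logging (behind Flask's app.logger) treats as %-format arguments; since the message has no placeholders, the record would typically fail to render. A sketch contrasting the two patterns with hypothetical values:

```python
import logging
import sys

from loguru import logger

uuid, err = "a1b2c3", "Permission denied: history.txt"  # hypothetical values

# Old pattern: extra positional args are %-format args, but the message has no
# placeholders, so formatting fails and logging reports an internal error.
logging.basicConfig(stream=sys.stderr)
logging.getLogger("app").error("File permission error updating", uuid, err)

# New pattern: loguru with explicit f-strings, one message per call.
logger.critical(f"File permission error updating file, watch: {uuid}")
logger.critical(err)
```
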
@@ -428,11 +429,13 @@ class update_worker(threading.Thread):
process_changedetection_results = False
except UnableToExtractRestockData as e:
# Usually when fetcher.instock_data returns empty
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
logger.error(f"Exception (UnableToExtractRestockData) reached processing watch UUID: {uuid}")
logger.error(str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': f"Unable to extract restock data for this page unfortunately. (Got code {e.status_code} from server)"})
process_changedetection_results = False
except Exception as e:
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
logger.error(f"Exception reached processing watch UUID: {uuid}")
logger.error(str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
# Other serious error
process_changedetection_results = False
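
Here the handler logs the message text only. As an aside not used by this commit, loguru's logger.exception() would also record the traceback in a single call; a sketch with a simulated failure:

```python
from loguru import logger

uuid = "a1b2c3"  # hypothetical watch UUID

def process_watch():
    raise ValueError("simulated fetch failure")  # stand-in for the real work

try:
    process_watch()
except Exception as e:
    # What the hunk does: two plain ERROR lines carrying the message only.
    logger.error(f"Exception reached processing watch UUID: {uuid}")
    logger.error(str(e))
    # Alternative: one call that also attaches the traceback to the record.
    logger.exception(f"Exception reached processing watch UUID: {uuid}")
```
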
@@ -478,9 +481,8 @@ class update_worker(threading.Thread):
except Exception as e:
# Catch everything possible here, so that if a worker crashes, we don't lose it until restart!
logger.critical("!!!! Exception in update_worker !!!")
logger.critical("!!!! Exception in update_worker while processing process_changedetection_results !!!")
logger.critical(str(e))
self.app.logger.error("Exception reached processing watch UUID: %s - %s", uuid, str(e))
self.datastore.update_watch(uuid=uuid, update_obj={'last_error': str(e)})
if self.datastore.data['watching'].get(uuid):
@@ -500,6 +502,7 @@ class update_worker(threading.Thread):
self.current_uuid = None # Done
self.q.task_done()
logger.debug(f"Watch {uuid} done in {time.time()-now:.2f}s")
# Give the CPU time to interrupt
time.sleep(0.1)

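The added timing line pairs with the now = time.time() captured at the top of the loop iteration; a standalone sketch of the same pattern with a hypothetical UUID:

```python
import time

from loguru import logger

uuid = "a1b2c3"  # hypothetical watch UUID

now = time.time()
time.sleep(0.25)  # stand-in for fetching and diffing the watch

# Same shape as the new per-watch timing line: elapsed seconds to two decimals.
logger.debug(f"Watch {uuid} done in {time.time()-now:.2f}s")
```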