diff --git a/VERSION b/VERSION index af1b6908..c339f4c7 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.15.1-develop63 +1.15.1-develop64 diff --git a/modules/anidb.py b/modules/anidb.py index 713e312f..0c0a4539 100644 --- a/modules/anidb.py +++ b/modules/anidb.py @@ -1,8 +1,8 @@ -import logging, time +import time from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["anidb_id", "anidb_relation", "anidb_popular", "anidb_tag"] base_url = "https://anidb.net" @@ -15,11 +15,18 @@ urls = { } class AniDB: - def __init__(self, config, params): + def __init__(self, config): self.config = config - self.username = params["username"] if params else None - self.password = params["password"] if params else None - if params and not self._login(self.username, self.password).xpath("//li[@class='sub-menu my']/@title"): + self.username = None + self.password = None + + def login(self, username, password): + self.username = username + self.password = password + logger.secret(self.username) + logger.secret(self.password) + data = {"show": "main", "xuser": self.username, "xpass": self.password, "xdoautologin": "on"} + if not self._request(urls["login"], data=data).xpath("//li[@class='sub-menu my']/@title"): raise Failed("AniDB Error: Login failed") def _request(self, url, language=None, data=None): @@ -30,14 +37,6 @@ class AniDB: else: return self.config.get_html(url, headers=util.header(language)) - def _login(self, username, password): - return self._request(urls["login"], data={ - "show": "main", - "xuser": username, - "xpass": password, - "xdoautologin": "on" - }) - def _popular(self, language): response = self._request(urls["popular"], language=language) return util.get_int_list(response.xpath("//td[@class='name anime']/a/@href"), "AniDB ID") diff --git a/modules/anilist.py b/modules/anilist.py index 0d08f5a0..ac571ced 100644 --- a/modules/anilist.py +++ b/modules/anilist.py @@ -1,8 +1,8 @@ 
-import logging, time +import time from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["anilist_id", "anilist_popular", "anilist_trending", "anilist_relations", "anilist_studio", "anilist_top_rated", "anilist_search"] pretty_names = {"score": "Average Score", "popular": "Popularity", "trending": "Trending"} @@ -264,7 +264,7 @@ class AniList: attr = key mod = "" message += f"\n\t{attr.replace('_', ' ').title()} {util.mod_displays[mod]} {value}" - util.print_multiline(message) + logger.info(message) anilist_ids = self._search(**data) logger.debug("") logger.debug(f"{len(anilist_ids)} AniList IDs Found: {anilist_ids}") diff --git a/modules/builder.py b/modules/builder.py index f6d60747..4530af9c 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1,4 +1,4 @@ -import logging, os, re, time +import os, re, time from datetime import datetime, timedelta from modules import anidb, anilist, flixpatrol, icheckmovies, imdb, letterboxd, mal, plex, radarr, sonarr, stevenlu, tautulli, tmdb, trakt, tvdb, mdblist, util from modules.util import Failed, ImageData, NotScheduled, NotScheduledRange @@ -8,7 +8,7 @@ from plexapi.exceptions import BadRequest, NotFound from plexapi.video import Movie, Show, Season, Episode from urllib.parse import quote -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger advance_new_agent = ["item_metadata_language", "item_use_original_title"] advance_show = ["item_episode_sorting", "item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"] @@ -367,7 +367,7 @@ class CollectionBuilder: if self.details["delete_not_scheduled"]: try: self.obj = self.library.get_playlist(self.name) if self.playlist else self.library.get_collection(self.name) - util.print_multiline(self.delete()) + logger.info(self.delete()) self.deleted = True suffix = f" and was deleted" except Failed: @@ -1318,7 +1318,7 @@ class 
CollectionBuilder: logger.debug("") for i, input_data in enumerate(ids, 1): input_id, id_type = input_data - util.print_return(f"Parsing ID {i}/{total_ids}") + logger.ghost(f"Parsing ID {i}/{total_ids}") rating_keys = [] if id_type == "ratingKey": rating_keys = int(input_id) @@ -1449,7 +1449,7 @@ class CollectionBuilder: items.append(item) except Failed as e: logger.error(e) - util.print_end() + logger.exorcise() if not items: return None name = self.obj.title if self.obj else self.name @@ -1665,7 +1665,7 @@ class CollectionBuilder: re.compile(reg) valid_regex.append(reg) except re.error: - util.print_stacktrace() + logger.stacktrace() err = f"{self.Type} Error: Regular Expression Invalid: {reg}" if validate: raise Failed(err) @@ -1768,7 +1768,7 @@ class CollectionBuilder: def add_to_collection(self): logger.info("") - util.separator(f"Adding to {self.name} {self.Type}", space=False, border=False) + logger.separator(f"Adding to {self.name} {self.Type}", space=False, border=False) logger.info("") name, collection_items = self.library.get_collection_name_and_items(self.obj if self.obj else self.name, self.smart_label_collection) total = len(self.added_items) @@ -1779,7 +1779,7 @@ class CollectionBuilder: for i, item in enumerate(self.added_items, 1): current_operation = "=" if item in collection_items else "+" number_text = f"{i}/{total}" - logger.info(util.adjust_space(f"{number_text:>{spacing}} | {name} {self.Type} | {current_operation} | {util.item_title(item)}")) + logger.info(f"{number_text:>{spacing}} | {name} {self.Type} | {current_operation} | {util.item_title(item)}") if item in collection_items: self.remove_item_map[item.ratingKey] = None amount_unchanged += 1 @@ -1803,7 +1803,7 @@ class CollectionBuilder: logger.info(f"Playlist: {self.name} created") elif self.playlist and playlist_adds: self.obj.addItems(playlist_adds) - util.print_end() + logger.exorcise() logger.info("") logger.info(f"{total} {self.collection_level.capitalize()}{'s' if total > 1 else 
''} Processed") return amount_added, amount_unchanged @@ -1814,7 +1814,7 @@ class CollectionBuilder: items = [item for _, item in self.remove_item_map.items() if item is not None] if items: logger.info("") - util.separator(f"Removed from {self.name} {self.Type}", space=False, border=False) + logger.separator(f"Removed from {self.name} {self.Type}", space=False, border=False) logger.info("") total = len(items) spacing = len(str(total)) * 2 + 1 @@ -1891,7 +1891,7 @@ class CollectionBuilder: def check_filters(self, item, display): if (self.filters or self.tmdb_filters) and not self.details["only_filter_missing"]: - util.print_return(f"Filtering {display} {item.title}") + logger.ghost(f"Filtering {display} {item.title}") if self.tmdb_filters and isinstance(item, (Movie, Show)): if item.ratingKey not in self.library.movie_rating_key_map and item.ratingKey not in self.library.show_rating_key_map: logger.warning(f"Filter Error: No {'TMDb' if self.library.is_movie else 'TVDb'} ID found for {item.title}") @@ -2003,7 +2003,7 @@ class CollectionBuilder: if (not list(set(filter_data) & set(attrs)) and modifier == "") \ or (list(set(filter_data) & set(attrs)) and modifier == ".not"): return False - util.print_return(f"Filtering {display} {item.title}") + logger.ghost(f"Filtering {display} {item.title}") return True def run_missing(self): @@ -2012,7 +2012,7 @@ class CollectionBuilder: if len(self.missing_movies) > 0: if self.details["show_missing"] is True: logger.info("") - util.separator(f"Missing Movies from Library: {self.name}", space=False, border=False) + logger.separator(f"Missing Movies from Library: {self.name}", space=False, border=False) logger.info("") missing_movies_with_names = [] for missing_id in self.missing_movies: @@ -2054,7 +2054,7 @@ class CollectionBuilder: if len(self.missing_shows) > 0 and self.library.is_show: if self.details["show_missing"] is True: logger.info("") - util.separator(f"Missing Shows from Library: {self.name}", space=False, border=False) 
+ logger.separator(f"Missing Shows from Library: {self.name}", space=False, border=False) logger.info("") missing_shows_with_names = [] for missing_id in self.missing_shows: @@ -2102,7 +2102,7 @@ class CollectionBuilder: self.items = self.library.get_collection_items(self.obj, self.smart_label_collection) elif not self.build_collection: logger.info("") - util.separator(f"Items Found for {self.name} {self.Type}", space=False, border=False) + logger.separator(f"Items Found for {self.name} {self.Type}", space=False, border=False) logger.info("") self.items = self.added_items if not self.items: @@ -2110,7 +2110,7 @@ class CollectionBuilder: def update_item_details(self): logger.info("") - util.separator(f"Updating Details of the Items in {self.name} {self.Type}", space=False, border=False) + logger.separator(f"Updating Details of the Items in {self.name} {self.Type}", space=False, border=False) logger.info("") overlay = None overlay_folder = None @@ -2250,7 +2250,7 @@ class CollectionBuilder: def update_details(self): logger.info("") - util.separator(f"Updating Details of {self.name} {self.Type}", space=False, border=False) + logger.separator(f"Updating Details of {self.name} {self.Type}", space=False, border=False) logger.info("") if self.smart_url and self.smart_url != self.library.smart_filter(self.obj): self.library.update_smart_collection(self.obj, self.smart_url) @@ -2436,7 +2436,7 @@ class CollectionBuilder: def sort_collection(self): logger.info("") - util.separator(f"Sorting {self.name} {self.Type}", space=False, border=False) + logger.separator(f"Sorting {self.name} {self.Type}", space=False, border=False) logger.info("") if self.custom_sort is True: items = self.added_items @@ -2473,7 +2473,7 @@ class CollectionBuilder: def sync_playlist(self): if self.obj and self.valid_users: logger.info("") - util.separator(f"Syncing Playlist to Users", space=False, border=False) + logger.separator(f"Syncing Playlist to Users", space=False, border=False) logger.info("") 
for user in self.valid_users: try: @@ -2502,7 +2502,7 @@ class CollectionBuilder: playlist=playlist ) except Failed as e: - util.print_stacktrace() + logger.stacktrace() logger.error(f"Webhooks Error: {e}") def run_collections_again(self): diff --git a/modules/cache.py b/modules/cache.py index bbac5143..f16d9387 100644 --- a/modules/cache.py +++ b/modules/cache.py @@ -1,9 +1,9 @@ -import logging, os, random, sqlite3 +import os, random, sqlite3 from contextlib import closing from datetime import datetime, timedelta from modules import util -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger class Cache: def __init__(self, config_path, expiration): diff --git a/modules/config.py b/modules/config.py index c1626a06..d70ab19b 100644 --- a/modules/config.py +++ b/modules/config.py @@ -1,4 +1,4 @@ -import base64, logging, os, requests +import base64, os, requests from datetime import datetime from lxml import html from modules import util, radarr, sonarr @@ -28,7 +28,7 @@ from modules.webhooks import Webhooks from retrying import retry from ruamel import yaml -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger sync_modes = {"append": "Only Add Items to the Collection or Playlist", "sync": "Add & Remove Items from the Collection or Playlist"} mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"} @@ -182,9 +182,10 @@ class ConfigFile: yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), block_seq_indent=2) self.data = new_config except yaml.scanner.ScannerError as e: + logger.stacktrace() raise Failed(f"YAML Error: {util.tab_new_lines(e)}") except Exception as e: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"YAML Error: {e}") def check_for_attribute(data, attribute, parent=None, test_list=None, default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", throw=False, save=True): @@ -239,7 +240,7 @@ class ConfigFile: 
warning_message += "\n" warning_message += f"Config Warning: Path does not exist: {os.path.abspath(p)}" if do_print and warning_message: - util.print_multiline(warning_message) + logger.warning(warning_message) if len(temp_list) > 0: return temp_list else: message = "No Paths exist" elif var_type == "lower_list": return util.get_list(data[attribute], lower=True) @@ -269,9 +270,9 @@ class ConfigFile: message = message + "\n" + options raise Failed(f"Config Error: {message}") if do_print: - util.print_multiline(f"Config Warning: {message}") + logger.warning(f"Config Warning: {message}") if data and attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list: - util.print_multiline(options) + logger.warning(options) return default self.general = { @@ -325,12 +326,12 @@ class ConfigFile: "changes": check_for_attribute(self.data, "changes", parent="webhooks", var_type="list", default_is_none=True) } if self.general["cache"]: - util.separator() + logger.separator() self.Cache = Cache(self.config_path, self.general["cache_expiration"]) else: self.Cache = None - util.separator() + logger.separator() self.NotifiarrFactory = None if "notifiarr" in self.data: @@ -342,7 +343,7 @@ class ConfigFile: "test": check_for_attribute(self.data, "test", parent="notifiarr", var_type="bool", default=False, do_print=False, save=False) }) except Failed as e: - util.print_stacktrace() + logger.stacktrace() logger.error(e) logger.info(f"Notifiarr Connection {'Failed' if self.NotifiarrFactory is None else 'Successful'}") else: @@ -352,12 +353,12 @@ class ConfigFile: try: self.Webhooks.start_time_hooks(self.start_time) except Failed as e: - util.print_stacktrace() + logger.stacktrace() logger.error(f"Webhooks Error: {e}") self.errors = [] - util.separator() + logger.separator() try: self.TMDb = None @@ -371,7 +372,7 @@ class ConfigFile: else: raise Failed("Config Error: tmdb attribute not found") - util.separator() + logger.separator() self.OMDb = None 
if "omdb" in self.data: @@ -388,7 +389,7 @@ class ConfigFile: else: logger.warning("omdb attribute not found") - util.separator() + logger.separator() self.Mdblist = Mdblist(self) if "mdblist" in self.data: @@ -406,7 +407,7 @@ class ConfigFile: else: logger.warning("mdblist attribute not found") - util.separator() + logger.separator() self.Trakt = None if "trakt" in self.data: @@ -425,7 +426,7 @@ class ConfigFile: else: logger.warning("trakt attribute not found") - util.separator() + logger.separator() self.MyAnimeList = None if "mal" in self.data: @@ -444,23 +445,21 @@ class ConfigFile: else: logger.warning("mal attribute not found") - self.AniDB = None + self.AniDB = AniDB(self) if "anidb" in self.data: - util.separator() + logger.separator() logger.info("Connecting to AniDB...") try: - self.AniDB = AniDB(self, { - "username": check_for_attribute(self.data, "username", parent="anidb", throw=True), - "password": check_for_attribute(self.data, "password", parent="anidb", throw=True) - }) + self.AniDB.login( + check_for_attribute(self.data, "username", parent="anidb", throw=True), + check_for_attribute(self.data, "password", parent="anidb", throw=True) + ) except Failed as e: self.errors.append(e) logger.error(e) logger.info(f"AniDB Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}") - if self.AniDB is None: - self.AniDB = AniDB(self, None) - util.separator() + logger.separator() self.playlist_names = [] self.playlist_files = [] @@ -518,7 +517,7 @@ class ConfigFile: self.playlist_names.extend([p for p in playlist_obj.playlists]) self.playlist_files.append(playlist_obj) except Failed as e: - util.print_multiline(e, error=True) + logger.error(e) self.TVDb = TVDb(self, self.general["tvdb_language"]) self.IMDb = IMDb(self) @@ -529,7 +528,7 @@ class ConfigFile: self.Letterboxd = Letterboxd(self) self.StevenLu = StevenLu(self) - util.separator() + logger.separator() logger.info("Connecting to Plex Libraries...") @@ -600,7 +599,7 @@ 
class ConfigFile: } display_name = f"{params['name']} ({params['mapping_name']})" if lib and "library_name" in lib and lib["library_name"] else params["mapping_name"] - util.separator(f"{display_name} Configuration") + logger.separator(f"{display_name} Configuration") logger.info("") logger.info(f"Connecting to {display_name} Library...") logger.info("") @@ -823,7 +822,7 @@ class ConfigFile: params["skip_library"] = True logger.info("") - util.separator("Plex Configuration", space=False, border=False) + logger.separator("Plex Configuration", space=False, border=False) params["plex"] = { "url": check_for_attribute(lib, "url", parent="plex", var_type="url", default=self.general["plex"]["url"], req_default=True, save=False), "token": check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False), @@ -836,26 +835,26 @@ class ConfigFile: logger.info(f"{display_name} Library Connection Successful") except Failed as e: self.errors.append(e) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) logger.info("") logger.info(f"{display_name} Library Connection Failed") continue try: logger.info("") - util.separator("Scanning Metadata Files", space=False, border=False) + logger.separator("Scanning Metadata Files", space=False, border=False) library.scan_metadata_files() except Failed as e: self.errors.append(e) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) logger.info("") logger.info(f"{display_name} Metadata Failed to Load") continue if self.general["radarr"]["url"] or (lib and "radarr" in lib): logger.info("") - util.separator("Radarr Configuration", space=False, border=False) + logger.separator("Radarr Configuration", space=False, border=False) logger.info("") logger.info(f"Connecting to {display_name} library's Radarr...") logger.info("") @@ -876,14 +875,14 @@ class ConfigFile: }) except Failed as e: 
self.errors.append(e) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) logger.info("") logger.info(f"{display_name} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}") if self.general["sonarr"]["url"] or (lib and "sonarr" in lib): logger.info("") - util.separator("Sonarr Configuration", space=False, border=False) + logger.separator("Sonarr Configuration", space=False, border=False) logger.info("") logger.info(f"Connecting to {display_name} library's Sonarr...") logger.info("") @@ -907,14 +906,14 @@ class ConfigFile: }) except Failed as e: self.errors.append(e) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) logger.info("") logger.info(f"{display_name} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}") if self.general["tautulli"]["url"] or (lib and "tautulli" in lib): logger.info("") - util.separator("Tautulli Configuration", space=False, border=False) + logger.separator("Tautulli Configuration", space=False, border=False) logger.info("") logger.info(f"Connecting to {display_name} library's Tautulli...") logger.info("") @@ -925,8 +924,8 @@ class ConfigFile: }) except Failed as e: self.errors.append(e) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) logger.info("") logger.info(f"{display_name} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}") @@ -935,7 +934,7 @@ class ConfigFile: logger.info("") self.libraries.append(library) - util.separator() + logger.separator() self.library_map = {_l.original_mapping_name: _l for _l in self.libraries} @@ -944,11 +943,12 @@ class ConfigFile: else: raise Failed("Plex Error: No Plex libraries were connected to") - util.separator() + logger.separator() if self.errors: self.notify(self.errors) except Exception as e: + logger.stacktrace() self.notify(e) raise @@ 
-957,7 +957,7 @@ class ConfigFile: try: self.Webhooks.error_hooks(error, server=server, library=library, collection=collection, playlist=playlist, critical=critical) except Failed as e: - util.print_stacktrace() + logger.stacktrace() logger.error(f"Webhooks Error: {e}") def get_html(self, url, headers=None, params=None): diff --git a/modules/convert.py b/modules/convert.py index 109ec85d..7dc1ecb5 100644 --- a/modules/convert.py +++ b/modules/convert.py @@ -1,9 +1,9 @@ -import logging, re, requests +import re, requests from modules import util from modules.util import Failed from plexapi.exceptions import BadRequest -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger anime_lists_url = "https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-full.json" @@ -246,7 +246,7 @@ class Convert: elif url_parsed.scheme == "tmdb": tmdb_id.append(int(url_parsed.netloc)) except requests.exceptions.ConnectionError: library.query(item.refresh) - util.print_stacktrace() + logger.stacktrace() raise Failed("No External GUIDs found") if not tvdb_id and not imdb_id and not tmdb_id: library.query(item.refresh) @@ -278,7 +278,7 @@ class Convert: if int(check_id) in self.mal_to_anidb: anidb_id = self.mal_to_anidb[int(check_id)] else: - raise Failed(f"Convert Error: AniDB ID not found for MyAnimeList ID: {check_id}") + raise Failed(f"AniDB ID not found for MyAnimeList ID: {check_id}") elif item_type == "local": raise Failed("No match in Plex") else: raise Failed(f"Agent {item_type} not supported") @@ -329,7 +329,7 @@ class Convert: cache_ids = ",".join([str(c) for c in cache_ids]) imdb_in = ",".join([str(i) for i in imdb_in]) if imdb_in else None ids = f"{item.guid:<46} | {id_type} ID: {cache_ids:<7} | IMDb ID: {str(imdb_in):<10}" - logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}")) + logger.info(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}") self.config.Cache.update_guid_map(item.guid, cache_ids, 
imdb_in, expired, guid_type) if (tmdb_id or imdb_id) and library.is_movie: @@ -345,8 +345,8 @@ class Convert: logger.debug(f"TMDb: {tmdb_id}, IMDb: {imdb_id}, TVDb: {tvdb_id}") raise Failed(f"No ID to convert") except Failed as e: - logger.info(util.adjust_space(f'Mapping Error | {item.guid:<46} | {e} for "{item.title}"')) + logger.info(f'Mapping Error | {item.guid:<46} | {e} for "{item.title}"') except BadRequest: - util.print_stacktrace() - logger.info(util.adjust_space(f'Mapping Error | {item.guid:<46} | Bad Request for "{item.title}"')) + logger.stacktrace() + logger.info(f'Mapping Error | {item.guid:<46} | Bad Request for "{item.title}"') return None, None, None diff --git a/modules/flixpatrol.py b/modules/flixpatrol.py index 5733be62..05b85879 100644 --- a/modules/flixpatrol.py +++ b/modules/flixpatrol.py @@ -1,8 +1,7 @@ -import logging from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["flixpatrol_url", "flixpatrol_demographics", "flixpatrol_popular", "flixpatrol_top"] generations = ["all", "boomers", "x", "y", "z"] @@ -138,7 +137,7 @@ class FlixPatrol: if total_items > 0: ids = [] for i, item in enumerate(items, 1): - util.print_return(f"Finding TMDb ID {i}/{total_items}") + logger.ghost(f"Finding TMDb ID {i}/{total_items}") tmdb_id = None expired = None if self.config.Cache: @@ -152,7 +151,7 @@ class FlixPatrol: if self.config.Cache: self.config.Cache.update_flixpatrol_map(expired, item, tmdb_id, media_type) ids.append((tmdb_id, "tmdb" if is_movie else "tmdb_show")) - logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs")) + logger.info(f"Processed {total_items} TMDb IDs") return ids else: raise Failed(f"FlixPatrol Error: No List Items found in {data}") diff --git a/modules/icheckmovies.py b/modules/icheckmovies.py index 8bfaaef2..866be78d 100644 --- a/modules/icheckmovies.py +++ b/modules/icheckmovies.py @@ -1,8 +1,7 @@ -import logging from modules import 
util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["icheckmovies_list", "icheckmovies_list_details"] base_url = "https://www.icheckmovies.com/lists/" diff --git a/modules/imdb.py b/modules/imdb.py index c158582e..39e8dbd3 100644 --- a/modules/imdb.py +++ b/modules/imdb.py @@ -1,9 +1,9 @@ -import logging, math, re, time +import math, re, time from modules import util from modules.util import Failed from urllib.parse import urlparse, parse_qs -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["imdb_list", "imdb_id", "imdb_chart"] movie_charts = ["box_office", "popular_movies", "top_movies", "top_english", "top_indian", "lowest_rated"] @@ -110,7 +110,7 @@ class IMDb: num_of_pages = math.ceil(int(limit) / item_count) for i in range(1, num_of_pages + 1): start_num = (i - 1) * item_count + 1 - util.print_return(f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}") + logger.ghost(f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}") if search_url: params["count"] = remainder if i == num_of_pages else item_count # noqa params["start"] = start_num # noqa @@ -122,7 +122,7 @@ class IMDb: ids_found = ids_found[:remainder] imdb_ids.extend(ids_found) time.sleep(2) - util.print_end() + logger.exorcise() if len(imdb_ids) > 0: logger.debug(f"{len(imdb_ids)} IMDb IDs Found: {imdb_ids}") return imdb_ids diff --git a/modules/letterboxd.py b/modules/letterboxd.py index d577646c..ba8fefa9 100644 --- a/modules/letterboxd.py +++ b/modules/letterboxd.py @@ -1,8 +1,8 @@ -import logging, time +import time from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["letterboxd_list", "letterboxd_list_details"] base_url = "https://letterboxd.com" @@ -65,7 +65,7 @@ class Letterboxd: ids = [] for i, item in enumerate(items, 1): 
letterboxd_id, slug = item - util.print_return(f"Finding TMDb ID {i}/{total_items}") + logger.ghost(f"Finding TMDb ID {i}/{total_items}") tmdb_id = None expired = None if self.config.Cache: @@ -79,7 +79,7 @@ class Letterboxd: if self.config.Cache: self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id) ids.append((tmdb_id, "tmdb")) - logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs")) + logger.info(f"Processed {total_items} TMDb IDs") return ids else: raise Failed(f"Letterboxd Error: No List Items found in {data}") diff --git a/modules/library.py b/modules/library.py index 035c68be..c3b3ba07 100644 --- a/modules/library.py +++ b/modules/library.py @@ -1,4 +1,4 @@ -import logging, os, shutil, time +import os, shutil, time from abc import ABC, abstractmethod from modules import util from modules.meta import MetadataFile @@ -7,7 +7,7 @@ from PIL import Image from plexapi.exceptions import BadRequest from ruamel import yaml -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger class Library(ABC): def __init__(self, config, params): @@ -131,7 +131,7 @@ class Library(ABC): self.metadatas.extend([c for c in meta_obj.metadata]) self.metadata_files.append(meta_obj) except Failed as e: - util.print_multiline(e, error=True) + logger.error(e) if len(self.metadata_files) == 0 and not self.library_operation and not self.config.playlist_files: logger.info("") @@ -155,7 +155,7 @@ class Library(ABC): elif self.show_asset_not_needed: logger.info(f"Detail: {poster.prefix}poster update not needed") except Failed: - util.print_stacktrace() + logger.stacktrace() logger.error(f"Detail: {poster.attribute} failed to update {poster.message}") if overlay is not None: @@ -187,7 +187,7 @@ class Library(ABC): poster_uploaded = True logger.info(f"Detail: Overlay: {overlay_name} applied to {item.title}") except (OSError, BadRequest) as e: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"Overlay Error: {e}") background_uploaded = 
False @@ -205,7 +205,7 @@ class Library(ABC): elif self.show_asset_not_needed: logger.info(f"Detail: {background.prefix}background update not needed") except Failed: - util.print_stacktrace() + logger.stacktrace() logger.error(f"Detail: {background.attribute} failed to update {background.message}") if self.config.Cache: @@ -250,14 +250,14 @@ class Library(ABC): try: yaml.round_trip_dump(self.missing, open(self.missing_path, "w", encoding="utf-8")) except yaml.scanner.ScannerError as e: - util.print_multiline(f"YAML Error: {util.tab_new_lines(e)}", error=True) + logger.error(f"YAML Error: {util.tab_new_lines(e)}") def map_guids(self): items = self.get_all() logger.info(f"Mapping {self.type} Library: {self.name}") logger.info("") for i, item in enumerate(items, 1): - util.print_return(f"Processing: {i}/{len(items)} {item.title}") + logger.ghost(f"Processing: {i}/{len(items)} {item.title}") if item.ratingKey not in self.movie_rating_key_map and item.ratingKey not in self.show_rating_key_map: id_type, main_id, imdb_id = self.config.Convert.get_id(item, self) if main_id: @@ -270,5 +270,5 @@ class Library(ABC): if imdb_id: util.add_dict_list(imdb_id, item.ratingKey, self.imdb_map) logger.info("") - logger.info(util.adjust_space(f"Processed {len(items)} {self.type}s")) + logger.info(f"Processed {len(items)} {self.type}s") return items diff --git a/modules/logs.py b/modules/logs.py new file mode 100644 index 00000000..f6c8b765 --- /dev/null +++ b/modules/logs.py @@ -0,0 +1,264 @@ +import io, logging, os, sys, traceback +from logging.handlers import RotatingFileHandler + +LOG_DIR = "logs" +COLLECTION_DIR = "collections" +PLAYLIST_DIR = "playlists" +MAIN_LOG = "meta.log" +LIBRARY_LOG = "library.log" +COLLECTION_LOG = "collection.log" +PLAYLIST_LOG = "playlist.log" +PLAYLISTS_LOG = "playlists.log" + +CRITICAL = 50 +FATAL = CRITICAL +ERROR = 40 +WARNING = 30 +WARN = WARNING +INFO = 20 +DEBUG = 10 + + +def fmt_filter(record): + record.levelname = f"[{record.levelname}]" + 
def fmt_filter(record):
    """Filter attached to file handlers: wraps the level name and file
    location in brackets so the formatter's fixed-width columns line up."""
    record.levelname = f"[{record.levelname}]"
    record.filename = f"[{record.filename}:{record.lineno}]"
    return True

# Path of this module, used by MyLogger.findCaller to skip frames that
# originate here (same technique as logging._srcfile in the stdlib).
_srcfile = os.path.normcase(fmt_filter.__code__.co_filename)


class MyLogger:
    """Wrapper around a stdlib Logger adding PMM-specific behavior:
    per-library/collection/playlist rotating file handlers, bordered
    separator banners, carriage-return "ghost" progress lines, and
    redaction of registered secrets from every emitted message."""

    def __init__(self, logger_name, default_dir, screen_width, separating_character, ignore_ghost, is_debug):
        self.logger_name = logger_name                    # name of the underlying logging.Logger
        self.default_dir = default_dir                    # config dir; logs live under <default_dir>/<LOG_DIR>
        self.screen_width = screen_width                  # width of the bordered console output
        self.separating_character = separating_character  # char used to draw separator banners
        self.is_debug = is_debug                          # console handler shows DEBUG records when True
        self.ignore_ghost = ignore_ghost                  # suppress transient progress lines when True
        self.log_dir = os.path.join(default_dir, LOG_DIR)
        self.playlists_dir = os.path.join(self.log_dir, PLAYLIST_DIR)
        self.main_log = os.path.join(self.log_dir, MAIN_LOG)
        self.main_handler = None
        self.library_handlers = {}       # library_key -> RotatingFileHandler
        self.collection_handlers = {}    # library_key -> {collection_key -> RotatingFileHandler}
        self.playlist_handlers = {}      # playlist_key -> RotatingFileHandler
        self.playlists_handler = None
        self.secrets = []                # strings replaced with "(redacted)" in output
        self.spacing = 0                 # width of the last ghost line (used to blank it out)
        self.playlists_log = os.path.join(self.playlists_dir, PLAYLISTS_LOG)
        os.makedirs(self.log_dir, exist_ok=True)
        self._logger = logging.getLogger(self.logger_name)
        self._logger.setLevel(logging.DEBUG)

        cmd_handler = logging.StreamHandler()
        # BUG FIX: original read `self.debug`, which resolves to the bound
        # debug() method defined below and is therefore always truthy -- the
        # console handler was stuck at DEBUG regardless of the flag.  The
        # constructor stores the flag as `self.is_debug`.
        cmd_handler.setLevel(logging.DEBUG if self.is_debug else logging.INFO)

        self._logger.addHandler(cmd_handler)

    def _get_handler(self, log_file, count=3):
        """Build a rotating file handler for log_file, keeping `count` backups.
        Rolls over immediately if a previous log exists so each run starts fresh."""
        _handler = RotatingFileHandler(log_file, delay=True, mode="w", backupCount=count, encoding="utf-8")
        self._formatter(_handler)
        _handler.addFilter(fmt_filter)
        if os.path.isfile(log_file):
            _handler.doRollover()
        return _handler

    def _formatter(self, handler, border=True):
        """(Re)apply the standard formatter to handler; file handlers get a
        timestamp/location prefix, console handlers just the bordered message."""
        text = f"| %(message)-{self.screen_width - 2}s |" if border else f"%(message)-{self.screen_width - 2}s"
        if isinstance(handler, RotatingFileHandler):
            text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
        handler.setFormatter(logging.Formatter(text))

    def add_main_handler(self):
        """Attach the main meta.log handler (10 backups kept)."""
        self.main_handler = self._get_handler(self.main_log, count=10)
        self._logger.addHandler(self.main_handler)

    def remove_main_handler(self):
        """Detach the main meta.log handler."""
        self._logger.removeHandler(self.main_handler)

    def add_library_handler(self, library_key):
        """Attach (creating on first use) the per-library log handler."""
        if not self.library_handlers:
            os.makedirs(os.path.join(self.log_dir, library_key, COLLECTION_DIR), exist_ok=True)
        self.library_handlers[library_key] = self._get_handler(os.path.join(self.log_dir, library_key, LIBRARY_LOG))
        self._logger.addHandler(self.library_handlers[library_key])

    def remove_library_handler(self, library_key):
        """Detach the per-library log handler if one exists."""
        if library_key in self.library_handlers:
            self._logger.removeHandler(self.library_handlers[library_key])

    def re_add_library_handler(self, library_key):
        """Re-attach a previously created per-library handler."""
        if library_key in self.library_handlers:
            self._logger.addHandler(self.library_handlers[library_key])

    def add_playlists_handler(self):
        """Attach the shared playlists log handler (10 backups kept)."""
        os.makedirs(self.playlists_dir, exist_ok=True)
        self.playlists_handler = self._get_handler(self.playlists_log, count=10)
        self._logger.addHandler(self.playlists_handler)

    def remove_playlists_handler(self):
        """Detach the shared playlists log handler."""
        self._logger.removeHandler(self.playlists_handler)

    def add_collection_handler(self, library_key, collection_key):
        """Attach (creating on first use) the per-collection log handler."""
        collection_dir = os.path.join(self.log_dir, library_key, COLLECTION_DIR, collection_key)
        if library_key not in self.collection_handlers:
            os.makedirs(collection_dir, exist_ok=True)
            self.collection_handlers[library_key] = {}
        self.collection_handlers[library_key][collection_key] = self._get_handler(os.path.join(collection_dir, COLLECTION_LOG))
        self._logger.addHandler(self.collection_handlers[library_key][collection_key])

    def remove_collection_handler(self, library_key, collection_key):
        """Detach the per-collection log handler if one exists."""
        if library_key in self.collection_handlers and collection_key in self.collection_handlers[library_key]:
            self._logger.removeHandler(self.collection_handlers[library_key][collection_key])

    def add_playlist_handler(self, playlist_key):
        """Attach (creating on first use) the per-playlist log handler."""
        playlist_dir = os.path.join(self.playlists_dir, playlist_key)
        os.makedirs(playlist_dir, exist_ok=True)
        self.playlist_handlers[playlist_key] = self._get_handler(os.path.join(playlist_dir, PLAYLIST_LOG))
        self._logger.addHandler(self.playlist_handlers[playlist_key])

    def remove_playlist_handler(self, playlist_key):
        """Detach the per-playlist log handler if one exists."""
        if playlist_key in self.playlist_handlers:
            self._logger.removeHandler(self.playlist_handlers[playlist_key])

    def _centered(self, text, sep=" ", side_space=True, left=False):
        """Pad text to screen_width - 2 columns.  Centered by default; with
        left=True all padding goes after the text.  Over-long text is returned
        unchanged."""
        if len(text) > self.screen_width - 2:
            return text
        space = self.screen_width - len(text) - 2
        text = f"{' ' if side_space else sep}{text}{' ' if side_space else sep}"
        if space % 2 == 1:
            # odd leftover column absorbed into the text itself
            text += sep
            space -= 1
        side = int(space / 2) - 1
        final_text = f"{text}{sep * side}{sep * side}" if left else f"{sep * side}{text}{sep * side}"
        return final_text

    def separator(self, text=None, space=True, border=True, debug=False, side_space=True, left=False):
        """Emit a banner: optional border rows of separating_character around
        centered text.  Temporarily switches handlers to border-less formatting."""
        sep = " " if space else self.separating_character
        for handler in self._logger.handlers:
            self._formatter(handler, border=False)
        border_text = f"|{self.separating_character * self.screen_width}|"
        if border and debug:
            self.debug(border_text)
        elif border:
            self.info(border_text)
        if text:
            text_list = text.split("\n")
            for t in text_list:
                if debug:
                    self.debug(f"|{sep}{self._centered(t, sep=sep, side_space=side_space, left=left)}{sep}|")
                else:
                    self.info(f"|{sep}{self._centered(t, sep=sep, side_space=side_space, left=left)}{sep}|")
            if border and debug:
                self.debug(border_text)
            elif border:
                self.info(border_text)
        for handler in self._logger.handlers:
            self._formatter(handler)

    def debug(self, msg, *args, **kwargs):
        """Log msg at DEBUG if the level is enabled."""
        if self._logger.isEnabledFor(DEBUG):
            self._log(DEBUG, str(msg), args, **kwargs)

    def info_center(self, msg, *args, **kwargs):
        """Log msg at INFO, centered to the screen width."""
        self.info(self._centered(str(msg)), *args, **kwargs)

    def info(self, msg, *args, **kwargs):
        """Log msg at INFO if the level is enabled."""
        if self._logger.isEnabledFor(INFO):
            self._log(INFO, str(msg), args, **kwargs)

    def warning(self, msg, *args, **kwargs):
        """Log msg at WARNING if the level is enabled."""
        if self._logger.isEnabledFor(WARNING):
            self._log(WARNING, str(msg), args, **kwargs)

    def error(self, msg, *args, **kwargs):
        """Log msg at ERROR if the level is enabled."""
        if self._logger.isEnabledFor(ERROR):
            self._log(ERROR, str(msg), args, **kwargs)

    def critical(self, msg, *args, **kwargs):
        """Log msg at CRITICAL if the level is enabled."""
        if self._logger.isEnabledFor(CRITICAL):
            self._log(CRITICAL, str(msg), args, **kwargs)

    def stacktrace(self):
        """Log the current exception's traceback at DEBUG."""
        self.debug(traceback.format_exc())

    def _space(self, display_title):
        """Right-pad display_title with spaces to cover the last ghost line."""
        display_title = str(display_title)
        space_length = self.spacing - len(display_title)
        if space_length > 0:
            display_title += " " * space_length
        return display_title

    def ghost(self, text):
        """Print a transient progress line (carriage return, no newline)."""
        if not self.ignore_ghost:
            print(self._space(f"| {text}"), end="\r")
            self.spacing = len(text) + 2

    def exorcise(self):
        """Blank out the last ghost line so normal output isn't overlapped."""
        if not self.ignore_ghost:
            print(self._space(" "), end="\r")
            self.spacing = 0

    def secret(self, text):
        """Register text to be replaced with "(redacted)" in all log output."""
        if str(text) not in self.secrets:
            self.secrets.append(str(text))

    def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False, stacklevel=1):
        """Core emit: blanks any ghost line, recurses over multi-line messages
        (continuation lines get an aligned file formatter), redacts secrets,
        then builds and handles a LogRecord with the real caller's location."""
        if self.spacing > 0:
            self.exorcise()
        if "\n" in msg:
            for i, line in enumerate(msg.split("\n")):
                self._log(level, line, args, exc_info=exc_info, extra=extra, stack_info=stack_info, stacklevel=stacklevel)
                if i == 0:
                    # after the first line, indent file output so continuation
                    # lines align under the message column
                    for handler in self._logger.handlers:
                        if isinstance(handler, RotatingFileHandler):
                            handler.setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
            for handler in self._logger.handlers:
                if isinstance(handler, RotatingFileHandler):
                    handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
        else:
            for secret in self.secrets:
                if secret in msg:
                    msg = msg.replace(secret, "(redacted)")
            try:
                if not _srcfile:
                    raise ValueError
                fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
            except ValueError:
                fn, lno, func, sinfo = "(unknown file)", 0, "(unknown function)", None
            if exc_info:
                if isinstance(exc_info, BaseException):
                    exc_info = (type(exc_info), exc_info, exc_info.__traceback__)
                elif not isinstance(exc_info, tuple):
                    exc_info = sys.exc_info()
            record = self._logger.makeRecord(self._logger.name, level, fn, lno, msg, args, exc_info, func, extra, sinfo)
            self._logger.handle(record)

    def findCaller(self, stack_info=False, stacklevel=1):
        """Walk the stack past frames belonging to this file and return
        (filename, lineno, funcName, stack_info) of the real caller -- a
        reimplementation of Logger.findCaller using our own _srcfile."""
        f = logging.currentframe()
        if f is not None:
            f = f.f_back
        orig_f = f
        while f and stacklevel > 1:
            f = f.f_back
            stacklevel -= 1
        if not f:
            f = orig_f
        rv = "(unknown file)", 0, "(unknown function)", None
        while hasattr(f, "f_code"):
            co = f.f_code
            filename = os.path.normcase(co.co_filename)
            if filename == _srcfile:
                # still inside this logging wrapper; keep walking outward
                f = f.f_back
                continue
            sinfo = None
            if stack_info:
                sio = io.StringIO()
                sio.write('Stack (most recent call last):\n')
                traceback.print_stack(f, file=sio)
                sinfo = sio.getvalue()
                if sinfo[-1] == '\n':
                    sinfo = sinfo[:-1]
                sio.close()
            rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
            break
        return rv
authorization["access_token"] if authorization else self.authorization["access_token"] + logger.secret(token) if self.config.trace_mode: logger.debug(f"URL: {url}") - response = self.config.get_json(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"}) + response = self.config.get_json(url, headers={"Authorization": f"Bearer {token}"}) if self.config.trace_mode: logger.debug(f"Response: {response}") if "error" in response: raise Failed(f"MyAnimeList Error: {response['error']}") @@ -181,7 +183,7 @@ class MyAnimeList: logger.debug(data) raise Failed("AniList Error: Connection Failed") start_num = (current_page - 1) * 100 + 1 - util.print_return(f"Parsing Page {current_page}/{num_of_pages} {start_num}-{limit if current_page == num_of_pages else current_page * 100}") + logger.ghost(f"Parsing Page {current_page}/{num_of_pages} {start_num}-{limit if current_page == num_of_pages else current_page * 100}") if current_page > 1: data = self._jiken_request(f"/genre/anime/{genre_id}/{current_page}") if "anime" in data: @@ -192,7 +194,7 @@ class MyAnimeList: current_page += 1 else: chances += 1 - util.print_end() + logger.exorcise() return mal_ids def _studio(self, studio_id, limit): diff --git a/modules/mdblist.py b/modules/mdblist.py index a28cb65c..7710aab4 100644 --- a/modules/mdblist.py +++ b/modules/mdblist.py @@ -1,9 +1,8 @@ -import logging from modules import util from modules.util import Failed from urllib.parse import urlparse -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["mdblist_list"] list_sorts = ["score", "released", "updated", "imdbrating", "rogerebert", "imdbvotes", "budget", "revenue"] @@ -60,6 +59,7 @@ class Mdblist: def add_key(self, apikey, expiration): self.apikey = apikey + logger.secret(self.apikey) self.expiration = expiration try: self._request(imdb_id="tt0080684", ignore_cache=True) diff --git a/modules/meta.py b/modules/meta.py index a76e5454..55d2e4fd 100644 --- a/modules/meta.py +++ 
b/modules/meta.py @@ -1,11 +1,11 @@ -import logging, operator, os, re +import operator, os, re from datetime import datetime from modules import plex, util from modules.util import Failed, ImageData from plexapi.exceptions import NotFound from ruamel import yaml -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger github_base = "https://raw.githubusercontent.com/meisnate12/Plex-Meta-Manager-Configs/master/" @@ -86,7 +86,7 @@ class DataFile: except yaml.scanner.ScannerError as ye: raise Failed(f"YAML Error: {util.tab_new_lines(ye)}") except Exception as e: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"YAML Error: {e}") def apply_template(self, name, data, template_call): @@ -279,12 +279,12 @@ class MetadataFile(DataFile): if not all_items: all_items = library.get_all() for i, item in enumerate(all_items, 1): - util.print_return(f"Processing: {i}/{len(all_items)} {item.title}") + logger.ghost(f"Processing: {i}/{len(all_items)} {item.title}") tmdb_id, tvdb_id, imdb_id = library.get_ids(item) tmdb_item = config.TMDb.get_item(item, tmdb_id, tvdb_id, imdb_id, is_movie=True) if tmdb_item and tmdb_item.collection and tmdb_item.collection.id not in exclude and tmdb_item.collection.name not in exclude: auto_list[tmdb_item.collection.id] = tmdb_item.collection.name - util.print_end() + logger.exorcise() elif auto_type == "actor": people = {} if "data" in methods: @@ -456,7 +456,7 @@ class MetadataFile(DataFile): if not self.metadata: return None logger.info("") - util.separator("Running Metadata") + logger.separator("Running Metadata") logger.info("") for mapping_name, meta in self.metadata.items(): methods = {mm.lower(): mm for mm in meta} @@ -503,7 +503,7 @@ class MetadataFile(DataFile): logger.error(f"Metadata Error: {name} attribute is blank") logger.info("") - util.separator() + logger.separator() logger.info("") year = None if "year" in methods and not self.library.is_music: diff --git a/modules/notifiarr.py b/modules/notifiarr.py 
index 01eef137..15704d3c 100644 --- a/modules/notifiarr.py +++ b/modules/notifiarr.py @@ -1,9 +1,8 @@ -import logging from json import JSONDecodeError - +from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger base_url = "https://notifiarr.com/api/v1/" dev_url = "https://dev.notifiarr.com/api/v1/" @@ -15,6 +14,7 @@ class Notifiarr: self.apikey = params["apikey"] self.develop = params["develop"] self.test = params["test"] + logger.secret(self.apikey) logger.debug(f"Environment: {'Test' if self.test else 'Develop' if self.develop else 'Production'}") url, _ = self.get_url("user/validate/") response = self.config.get(url) @@ -32,6 +32,6 @@ class Notifiarr: def get_url(self, path): url = f"{dev_url if self.develop else base_url}{'notification/test' if self.test else f'{path}{self.apikey}'}" if self.config.trace_mode: - logger.debug(url.replace(self.apikey, "APIKEY")) + logger.debug(url) params = {"event": "pmm"} if self.test else None return url, params diff --git a/modules/omdb.py b/modules/omdb.py index 6b2b042f..1cf954cc 100644 --- a/modules/omdb.py +++ b/modules/omdb.py @@ -1,8 +1,7 @@ -import logging from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger base_url = "http://www.omdbapi.com/" @@ -54,6 +53,7 @@ class OMDb: self.apikey = params["apikey"] self.expiration = params["expiration"] self.limit = False + logger.secret(self.apikey) self.get_omdb("tt0080684", ignore_cache=True) def get_omdb(self, imdb_id, ignore_cache=False): diff --git a/modules/plex.py b/modules/plex.py index 97fbe293..89244a2d 100644 --- a/modules/plex.py +++ b/modules/plex.py @@ -1,4 +1,4 @@ -import logging, os, plexapi, requests +import os, plexapi, requests from datetime import datetime from modules import builder, util from modules.library import Library @@ -15,7 +15,7 @@ from retrying import retry from urllib import parse from 
xml.etree.ElementTree import ParseError -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["plex_all", "plex_pilots", "plex_collectionless", "plex_search"] search_translation = { @@ -383,6 +383,8 @@ class Plex(Library): self.url = params["plex"]["url"] self.token = params["plex"]["token"] self.timeout = params["plex"]["timeout"] + logger.secret(self.url) + logger.secret(self.token) try: self.PlexServer = PlexServer(baseurl=self.url, token=self.token, session=self.config.session, timeout=self.timeout) except Unauthorized: @@ -390,7 +392,7 @@ class Plex(Library): except ValueError as e: raise Failed(f"Plex Error: {e}") except (requests.exceptions.ConnectionError, ParseError): - util.print_stacktrace() + logger.stacktrace() raise Failed("Plex Error: Plex url is invalid") self.Plex = None library_names = [] @@ -468,9 +470,9 @@ class Plex(Library): results = [] while self.Plex._totalViewSize is None or container_start <= self.Plex._totalViewSize: results.extend(self.fetchItems(key, container_start, container_size)) - util.print_return(f"Loaded: {container_start}/{self.Plex._totalViewSize}") + logger.ghost(f"Loaded: {container_start}/{self.Plex._totalViewSize}") container_start += container_size - logger.info(util.adjust_space(f"Loaded {self.Plex._totalViewSize} {collection_level.capitalize()}s")) + logger.info(f"Loaded {self.Plex._totalViewSize} {collection_level.capitalize()}s") self._all_items = results return results @@ -520,7 +522,7 @@ class Plex(Library): includeOnDeck=False, includePopularLeaves=False, includeRelated=False, includeRelatedCount=0, includeReviews=False, includeStations=False) except (BadRequest, NotFound) as e: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"Item Failed to Load: {e}") @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) @@ -726,7 +728,7 @@ class Plex(Library): except NotFound: logger.warning(f"Plex Warning: {item.title} has no Season 1 Episode 1 
") elif method == "plex_search": - util.print_multiline(data[1], info=True) + logger.info(data[1]) items = self.get_filter_items(data[2]) elif method == "plex_collectionless": good_collections = [] @@ -755,7 +757,7 @@ class Plex(Library): collection_indexes = [c.index for c in good_collections] all_items = self.get_all() for i, item in enumerate(all_items, 1): - util.print_return(f"Processing: {i}/{len(all_items)} {item.title}") + logger.ghost(f"Processing: {i}/{len(all_items)} {item.title}") add_item = True self.reload(item) for collection in item.collections: @@ -764,7 +766,7 @@ class Plex(Library): break if add_item: items.append(item) - logger.info(util.adjust_space(f"Processed {len(all_items)} {self.type}s")) + logger.info(f"Processed {len(all_items)} {self.type}s") else: raise Failed(f"Plex Error: Method {method} not supported") if len(items) > 0: @@ -819,7 +821,7 @@ class Plex(Library): logger.info(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Successful") return True except BadRequest: - util.print_stacktrace() + logger.stacktrace() logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed") return False @@ -964,7 +966,7 @@ class Plex(Library): output += missing_seasons if found_episode: output += missing_episodes - util.print_multiline(output, info=True) + logger.info(output) if isinstance(item, Artist): missing_assets = "" found_album = False @@ -989,7 +991,7 @@ class Plex(Library): if album_poster or album_background: self.upload_images(album, poster=album_poster, background=album_background) if self.show_missing_season_assets and found_album and missing_assets: - util.print_multiline(f"Missing Album Posters for {item.title}{missing_assets}", info=True) + logger.info(f"Missing Album Posters for {item.title}{missing_assets}") if isinstance(item, (Movie, Show)) and not poster and overlay: self.upload_images(item, overlay=overlay) diff --git a/modules/radarr.py b/modules/radarr.py index 
940fc37e..62a1e6da 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -1,10 +1,9 @@ -import logging from modules import util from modules.util import Failed from arrapi import RadarrAPI from arrapi.exceptions import ArrException -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger availability_translation = {"announced": "announced", "cinemas": "inCinemas", "released": "released", "db": "preDB"} apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"} @@ -16,6 +15,8 @@ class Radarr: self.library = library self.url = params["url"] self.token = params["token"] + logger.secret(self.url) + logger.secret(self.token) try: self.api = RadarrAPI(self.url, self.token, session=self.config.session) self.api.respect_list_exclusions_when_adding() @@ -42,7 +43,7 @@ class Radarr: else: _ids.append(tmdb_id) logger.info("") - util.separator(f"Adding {'Missing' if _ids else 'Existing'} to Radarr", space=False, border=False) + logger.separator(f"Adding {'Missing' if _ids else 'Existing'} to Radarr", space=False, border=False) logger.debug("") logger.debug(f"Radarr Adds: {_ids if _ids else ''}") for tmdb_id in _paths: @@ -82,13 +83,13 @@ class Radarr: exists.extend(_e) invalid.extend(_i) except ArrException as e: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"Radarr Error: {e}") for i, item in enumerate(tmdb_ids, 1): path = item[1] if isinstance(item, tuple) else None tmdb_id = item[0] if isinstance(item, tuple) else item - util.print_return(f"Loading TMDb ID {i}/{len(tmdb_ids)} ({tmdb_id})") + logger.ghost(f"Loading TMDb ID {i}/{len(tmdb_ids)} ({tmdb_id})") if self.config.Cache: _id = self.config.Cache.query_radarr_adds(tmdb_id, self.library.original_mapping_name) if _id: diff --git a/modules/sonarr.py b/modules/sonarr.py index a959cdb2..e1bc3c17 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -1,10 +1,9 @@ -import logging from modules import util from modules.util import Failed from arrapi import SonarrAPI from 
arrapi.exceptions import ArrException -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger series_types = ["standard", "daily", "anime"] monitor_translation = { @@ -34,6 +33,8 @@ class Sonarr: self.library = library self.url = params["url"] self.token = params["token"] + logger.secret(self.url) + logger.secret(self.token) try: self.api = SonarrAPI(self.url, self.token, session=self.config.session) self.api.respect_list_exclusions_when_adding() @@ -64,7 +65,7 @@ class Sonarr: else: _ids.append(tvdb_id) logger.info("") - util.separator(f"Adding {'Missing' if _ids else 'Existing'} to Sonarr", space=False, border=False) + logger.separator(f"Adding {'Missing' if _ids else 'Existing'} to Sonarr", space=False, border=False) logger.debug("") logger.debug(f"Sonarr Adds: {_ids if _ids else ''}") for tvdb_id in _paths: @@ -108,13 +109,13 @@ class Sonarr: exists.extend(_e) invalid.extend(_i) except ArrException as e: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"Radarr Error: {e}") for i, item in enumerate(tvdb_ids, 1): path = item[1] if isinstance(item, tuple) else None tvdb_id = item[0] if isinstance(item, tuple) else item - util.print_return(f"Loading TVDb ID {i}/{len(tvdb_ids)} ({tvdb_id})") + logger.ghost(f"Loading TVDb ID {i}/{len(tvdb_ids)} ({tvdb_id})") if self.config.Cache: _id = self.config.Cache.query_sonarr_adds(tvdb_id, self.library.original_mapping_name) if _id: diff --git a/modules/stevenlu.py b/modules/stevenlu.py index 4356c7ae..8ac03a79 100644 --- a/modules/stevenlu.py +++ b/modules/stevenlu.py @@ -1,7 +1,7 @@ -import logging +from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["stevenlu_popular"] base_url = "https://s3.amazonaws.com/popular-movies/movies.json" diff --git a/modules/tautulli.py b/modules/tautulli.py index b266164c..71fe9781 100644 --- a/modules/tautulli.py +++ b/modules/tautulli.py @@ -1,12 +1,9 @@ -import logging - -from 
plexapi.video import Movie, Show - from modules import util from modules.util import Failed from plexapi.exceptions import BadRequest, NotFound +from plexapi.video import Movie, Show -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = ["tautulli_popular", "tautulli_watched"] @@ -16,10 +13,12 @@ class Tautulli: self.library = library self.url = params["url"] self.apikey = params["apikey"] + logger.secret(self.url) + logger.secret(self.apikey) try: response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names") except Exception: - util.print_stacktrace() + logger.stacktrace() raise Failed("Tautulli Error: Invalid url") if response["response"]["result"] != "success": raise Failed(f"Tautulli Error: {response['response']['message']}") @@ -71,5 +70,6 @@ class Tautulli: else: raise Failed(f"Tautulli Error: No Library named {library_name} in the response") def _request(self, url): - logger.debug(f"Tautulli URL: {url.replace(self.apikey, 'APIKEY').replace(self.url, 'URL')}") + if self.config.trace_mode: + logger.debug(f"Tautulli URL: {url}") return self.config.get_json(url) diff --git a/modules/tmdb.py b/modules/tmdb.py index 5756fa00..a5d76dc0 100644 --- a/modules/tmdb.py +++ b/modules/tmdb.py @@ -1,9 +1,8 @@ -import logging from modules import util from modules.util import Failed from tmdbapis import TMDbAPIs, TMDbException, NotFound -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger builders = [ "tmdb_actor", "tmdb_actor_details", "tmdb_collection", "tmdb_collection_details", "tmdb_company", @@ -63,6 +62,7 @@ class TMDb: self.config = config self.apikey = params["apikey"] self.language = params["language"] + logger.secret(self.apikey) try: self.TMDb = TMDbAPIs(self.apikey, language=self.language, session=self.config.session) except TMDbException as e: @@ -257,7 +257,7 @@ class TMDb: try: tmdb_item = self.get_movie(tmdb_id) if is_movie else self.get_show(tmdb_id) except Failed as e: - 
logger.error(util.adjust_space(str(e))) + logger.error(str(e)) else: - logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}")) + logger.info(f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}") return tmdb_item diff --git a/modules/trakt.py b/modules/trakt.py index 40c8254c..16b7f359 100644 --- a/modules/trakt.py +++ b/modules/trakt.py @@ -1,9 +1,9 @@ -import logging, requests, webbrowser +import requests, webbrowser from modules import util from modules.util import Failed, TimeoutExpired from ruamel import yaml -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger redirect_uri = "urn:ietf:wg:oauth:2.0:oob" redirect_uri_encoded = redirect_uri.replace(":", "%3A") @@ -35,6 +35,7 @@ class Trakt: self.client_secret = params["client_secret"] self.config_path = params["config_path"] self.authorization = params["authorization"] + logger.secret(self.client_secret) if not self._save(self.authorization): if not self._refresh(): self._authorization() @@ -61,12 +62,14 @@ class Trakt: raise Failed("Trakt Error: New Authorization Failed") def _check(self, authorization=None): + token = self.authorization['access_token'] if authorization is None else authorization['access_token'] headers = { "Content-Type": "application/json", - "Authorization": f"Bearer {self.authorization['access_token'] if authorization is None else authorization['access_token']}", + "Authorization": f"Bearer {token}", "trakt-api-version": "2", "trakt-api-key": self.client_id } + logger.secret(token) response = self.config.get(f"{base_url}/users/settings", headers=headers) return response.status_code == 200 diff --git a/modules/tvdb.py b/modules/tvdb.py index 4a04916d..897d538f 100644 --- a/modules/tvdb.py +++ b/modules/tvdb.py @@ -1,9 +1,9 @@ -import logging, requests, time +import requests, time from lxml.etree import ParserError from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = 
util.logger builders = ["tvdb_list", "tvdb_list_details", "tvdb_movie", "tvdb_movie_details", "tvdb_show", "tvdb_show_details"] base_url = "https://www.thetvdb.com" @@ -175,7 +175,7 @@ class TVDb: return ids raise Failed(f"TVDb Error: No TVDb IDs found at {tvdb_url}") except requests.exceptions.MissingSchema: - util.print_stacktrace() + logger.stacktrace() raise Failed(f"TVDb Error: URL Lookup Failed for {tvdb_url}") else: raise Failed(f"TVDb Error: {tvdb_url} must begin with {urls['list']}") diff --git a/modules/util.py b/modules/util.py index f347cd3b..f1dd66c6 100644 --- a/modules/util.py +++ b/modules/util.py @@ -1,8 +1,7 @@ -import glob, logging, os, re, signal, sys, time, traceback +import glob, logging, os, re, signal, sys, time from datetime import datetime, timedelta -from logging.handlers import RotatingFileHandler from pathvalidate import is_valid_filename, sanitize_filename -from plexapi.audio import Artist, Album, Track +from plexapi.audio import Album, Track from plexapi.exceptions import BadRequest, NotFound, Unauthorized from plexapi.video import Season, Episode, Movie @@ -46,10 +45,6 @@ def retry_if_not_failed(exception): def retry_if_not_plex(exception): return not isinstance(exception, (BadRequest, NotFound, Unauthorized)) -separating_character = "=" -screen_width = 100 -spacing = 0 - days_alias = { "monday": 0, "mon": 0, "m": 0, "tuesday": 1, "tues": 1, "tue": 1, "tu": 1, "t": 1, @@ -165,24 +160,6 @@ def windows_input(prompt, timeout=5): print("") raise TimeoutExpired -def print_multiline(lines, info=False, warning=False, error=False, critical=False): - for i, line in enumerate(str(lines).split("\n")): - if critical: logger.critical(line) - elif error: logger.error(line) - elif warning: logger.warning(line) - elif info: logger.info(line) - else: logger.debug(line) - if i == 0: - logger.handlers[1].setFormatter(logging.Formatter(" " * 65 + "| %(message)s")) - logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s 
%(levelname)-10s | %(message)s")) - -def print_stacktrace(): - print_multiline(traceback.format_exc()) - -def my_except_hook(exctype, value, tb): - for line in traceback.format_exception(etype=exctype, value=value, tb=tb): - print_multiline(line, critical=True) - def get_id_from_imdb_url(imdb_url): match = re.search("(tt\\d+)", str(imdb_url)) if match: return match.group(1) @@ -198,64 +175,6 @@ def regex_first_int(data, id_type, default=None): else: raise Failed(f"Regex Error: Failed to parse {id_type} from {data}") -def centered(text, sep=" ", side_space=True, left=False): - if len(text) > screen_width - 2: - return text - space = screen_width - len(text) - 2 - text = f"{' ' if side_space else sep}{text}{' ' if side_space else sep}" - if space % 2 == 1: - text += sep - space -= 1 - side = int(space / 2) - 1 - final_text = f"{text}{sep * side}{sep * side}" if left else f"{sep * side}{text}{sep * side}" - return final_text - -def separator(text=None, space=True, border=True, debug=False, side_space=True, left=False): - sep = " " if space else separating_character - for handler in logger.handlers: - apply_formatter(handler, border=False) - border_text = f"|{separating_character * screen_width}|" - if border and debug: - logger.debug(border_text) - elif border: - logger.info(border_text) - if text: - text_list = text.split("\n") - for t in text_list: - if debug: - logger.debug(f"|{sep}{centered(t, sep=sep, side_space=side_space, left=left)}{sep}|") - else: - logger.info(f"|{sep}{centered(t, sep=sep, side_space=side_space, left=left)}{sep}|") - if border and debug: - logger.debug(border_text) - elif border: - logger.info(border_text) - for handler in logger.handlers: - apply_formatter(handler) - -def apply_formatter(handler, border=True): - text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s" - if isinstance(handler, RotatingFileHandler): - text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}" - 
handler.setFormatter(logging.Formatter(text)) - -def adjust_space(display_title): - display_title = str(display_title) - space_length = spacing - len(display_title) - if space_length > 0: - display_title += " " * space_length - return display_title - -def print_return(text): - print(adjust_space(f"| {text}"), end="\r") - global spacing - spacing = len(text) + 2 - -def print_end(): - print(adjust_space(" "), end="\r") - global spacing - spacing = 0 - def validate_filename(filename): if is_valid_filename(filename): return filename, None diff --git a/modules/webhooks.py b/modules/webhooks.py index a9e7b9df..a0530ef3 100644 --- a/modules/webhooks.py +++ b/modules/webhooks.py @@ -1,9 +1,8 @@ -import logging from json import JSONDecodeError from modules import util from modules.util import Failed -logger = logging.getLogger("Plex Meta Manager") +logger = util.logger class Webhooks: def __init__(self, config, system_webhooks, library=None, notifiarr=None): @@ -16,7 +15,7 @@ class Webhooks: def _request(self, webhooks, json): if self.config.trace_mode: - util.separator("Webhooks", space=False, border=False) + logger.separator("Webhooks", space=False, border=False) logger.debug("") logger.debug(f"JSON: {json}") for webhook in list(set(webhooks)): diff --git a/plex_meta_manager.py b/plex_meta_manager.py index 83141f9a..82765951 100644 --- a/plex_meta_manager.py +++ b/plex_meta_manager.py @@ -1,15 +1,9 @@ -import argparse, logging, os, sys, time +import argparse, os, sys, time, traceback from datetime import datetime -from logging.handlers import RotatingFileHandler - try: import plexapi, schedule - from modules import util - from modules.builder import CollectionBuilder - from modules.config import ConfigFile - from modules.meta import MetadataFile - from modules.util import Failed, NotScheduled + from modules.logs import MyLogger from plexapi.exceptions import NotFound from plexapi.video import Show, Season from ruamel import yaml @@ -29,6 +23,7 @@ parser.add_argument("-t", 
"--time", "--times", dest="times", help="Times to upda parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str) parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False) parser.add_argument("-is", "--ignore-schedules", dest="ignore_schedules", help="Run ignoring collection schedules", action="store_true", default=False) +parser.add_argument("-ig", "--ignore-ghost", dest="ignore_ghost", help="Run ignoring ghost logging", action="store_true", default=False) parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False) parser.add_argument("-co", "--collection-only", "--collections-only", dest="collection_only", help="Run only collection operations", action="store_true", default=False) parser.add_argument("-lo", "--library-only", "--libraries-only", dest="library_only", help="Run only library operations", action="store_true", default=False) @@ -66,6 +61,7 @@ times = get_arg("PMM_TIME", args.times) run = get_arg("PMM_RUN", args.run, arg_bool=True) test = get_arg("PMM_TEST", args.test, arg_bool=True) ignore_schedules = get_arg("PMM_IGNORE_SCHEDULES", args.ignore_schedules, arg_bool=True) +ignore_ghost = get_arg("PMM_IGNORE_GHOST", args.ignore_ghost, arg_bool=True) collection_only = get_arg("PMM_COLLECTIONS_ONLY", args.collection_only, arg_bool=True) library_only = get_arg("PMM_LIBRARIES_ONLY", args.library_only, arg_bool=True) library_first = get_arg("PMM_LIBRARIES_FIRST", args.library_first, arg_bool=True) @@ -82,12 +78,10 @@ screen_width = get_arg("PMM_WIDTH", args.width, arg_int=True) debug = get_arg("PMM_DEBUG", args.debug, arg_bool=True) trace = get_arg("PMM_TRACE", args.trace, arg_bool=True) -util.separating_character = divider[0] if screen_width < 90 or screen_width > 300: print(f"Argument Error: width argument 
invalid: {screen_width} must be an integer between 90 and 300 using the default 100") screen_width = 100 -util.screen_width = screen_width default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config") if config_file and os.path.exists(config_file): @@ -99,22 +93,20 @@ elif not os.path.exists(os.path.join(default_dir, "config.yml")): print(f"Config Error: config not found at {os.path.abspath(default_dir)}") sys.exit(0) -os.makedirs(os.path.join(default_dir, "logs"), exist_ok=True) - -logger = logging.getLogger("Plex Meta Manager") -logger.setLevel(logging.DEBUG) - -def fmt_filter(record): - record.levelname = f"[{record.levelname}]" - record.filename = f"[{record.filename}:{record.lineno}]" - return True +logger = MyLogger("Plex Meta Manager", default_dir, screen_width, divider[0], ignore_ghost, test or debug or trace) -cmd_handler = logging.StreamHandler() -cmd_handler.setLevel(logging.DEBUG if test or debug or trace else logging.INFO) +from modules import util +util.logger = logger +from modules.builder import CollectionBuilder +from modules.config import ConfigFile +from modules.meta import MetadataFile +from modules.util import Failed, NotScheduled -logger.addHandler(cmd_handler) +def my_except_hook(exctype, value, tb): + for _line in traceback.format_exception(etype=exctype, value=value, tb=tb): + logger.critical(_line) -sys.excepthook = util.my_except_hook +sys.excepthook = my_except_hook version = "Unknown" with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) as handle: @@ -127,22 +119,15 @@ with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) a plexapi.BASE_HEADERS['X-Plex-Client-Identifier'] = "Plex-Meta-Manager" def start(attrs): - file_logger = os.path.join(default_dir, "logs", "meta.log") - should_roll_over = os.path.isfile(file_logger) - file_handler = RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8") - util.apply_formatter(file_handler) - 
file_handler.addFilter(fmt_filter) - if should_roll_over: - file_handler.doRollover() - logger.addHandler(file_handler) - util.separator() + logger.add_main_handler() + logger.separator() logger.info("") - logger.info(util.centered(" ____ _ __ __ _ __ __ ")) - logger.info(util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ ")) - logger.info(util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|")) - logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ")) - logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ")) - logger.info(util.centered(" |___/ ")) + logger.info_center(" ____ _ __ __ _ __ __ ") + logger.info_center("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ ") + logger.info_center("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|") + logger.info_center("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ") + logger.info_center("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ") + logger.info_center(" |___/ ") logger.info(f" Version: {version}") if "time" in attrs and attrs["time"]: start_type = f"{attrs['time']} " elif "test" in attrs and attrs["test"]: start_type = "Test " @@ -153,7 +138,7 @@ def start(attrs): if "time" not in attrs: attrs["time"] = start_time.strftime("%H:%M") attrs["time_obj"] = start_time - util.separator(debug=True) + logger.separator(debug=True) logger.debug(f"--config (PMM_CONFIG): {config_file}") logger.debug(f"--time (PMM_TIME): {times}") logger.debug(f"--run (PMM_RUN): {run}") @@ -165,6 +150,7 @@ def start(attrs): logger.debug(f"--run-libraries (PMM_LIBRARIES): {libraries}") logger.debug(f"--run-metadata-files (PMM_METADATA_FILES): {metadata_files}") logger.debug(f"--ignore-schedules 
(PMM_IGNORE_SCHEDULES): {ignore_schedules}") + logger.debug(f"--ignore-ghost (PMM_IGNORE_GHOST): {ignore_ghost}") logger.debug(f"--delete-collections (PMM_DELETE_COLLECTIONS): {delete}") logger.debug(f"--resume (PMM_RESUME): {resume}") logger.debug(f"--no-countdown (PMM_NO_COUNTDOWN): {no_countdown}") @@ -175,21 +161,21 @@ def start(attrs): logger.debug(f"--debug (PMM_DEBUG): {debug}") logger.debug(f"--trace (PMM_TRACE): {trace}") logger.debug("") - util.separator(f"Starting {start_type}Run") + logger.separator(f"Starting {start_type}Run") config = None stats = {"created": 0, "modified": 0, "deleted": 0, "added": 0, "unchanged": 0, "removed": 0, "radarr": 0, "sonarr": 0} try: config = ConfigFile(default_dir, attrs, read_only_config) except Exception as e: - util.print_stacktrace() - util.print_multiline(e, critical=True) + logger.stacktrace() + logger.critical(e) else: try: stats = update_libraries(config) except Exception as e: config.notify(e) - util.print_stacktrace() - util.print_multiline(e, critical=True) + logger.stacktrace() + logger.critical(e) logger.info("") end_time = datetime.now() run_time = str(end_time - start_time).split('.')[0] @@ -197,30 +183,22 @@ def start(attrs): try: config.Webhooks.end_time_hooks(start_time, end_time, run_time, stats) except Failed as e: - util.print_stacktrace() + logger.stacktrace() logger.error(f"Webhooks Error: {e}") - util.separator(f"Finished {start_type}Run\nFinished: {end_time.strftime('%H:%M:%S %Y-%m-%d')} Run Time: {run_time}") - logger.removeHandler(file_handler) + logger.separator(f"Finished {start_type}Run\nFinished: {end_time.strftime('%H:%M:%S %Y-%m-%d')} Run Time: {run_time}") + logger.remove_main_handler() def update_libraries(config): for library in config.libraries: if library.skip_library: logger.info("") - util.separator(f"Skipping {library.name} Library") + logger.separator(f"Skipping {library.name} Library") continue try: - os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, 
"collections"), exist_ok=True) - col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log") - should_roll_over = os.path.isfile(col_file_logger) - library_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8") - util.apply_formatter(library_handler) - if should_roll_over: - library_handler.doRollover() - logger.addHandler(library_handler) - + logger.add_library_handler(library.mapping_name) plexapi.server.TIMEOUT = library.timeout logger.info("") - util.separator(f"{library.name} Library") + logger.separator(f"{library.name} Library") if config.library_first and library.library_operation and not config.test_mode and not collection_only: library_operations(config, library) @@ -253,14 +231,14 @@ def update_libraries(config): if config.delete_collections: logger.info("") - util.separator(f"Deleting all Collections from the {library.name} Library", space=False, border=False) + logger.separator(f"Deleting all Collections from the {library.name} Library", space=False, border=False) logger.info("") for collection in library.get_all_collections(): logger.info(f"Collection {collection.title} Deleted") library.query(collection.delete) if not library.is_other and not library.is_music and (library.metadata_files or library.original_mapping_name in config.library_map) and not library_only: logger.info("") - util.separator(f"Mapping {library.name} Library", space=False, border=False) + logger.separator(f"Mapping {library.name} Library", space=False, border=False) logger.info("") library.map_guids() for metadata in library.metadata_files: @@ -268,7 +246,7 @@ def update_libraries(config): if config.requested_metadata_files and metadata_name not in config.requested_metadata_files: continue logger.info("") - util.separator(f"Running {metadata_name} Metadata File\n{metadata.path}") + logger.separator(f"Running {metadata_name} Metadata File\n{metadata.path}") if not config.test_mode and not config.resume_from 
and not collection_only: try: metadata.update_metadata() @@ -282,42 +260,35 @@ def update_libraries(config): continue if collections_to_run and not library_only: logger.info("") - util.separator(f"{'Test ' if config.test_mode else ''}Collections") - logger.removeHandler(library_handler) + logger.separator(f"{'Test ' if config.test_mode else ''}Collections") + logger.remove_library_handler(library.mapping_name) run_collection(config, library, metadata, collections_to_run) - logger.addHandler(library_handler) + logger.re_add_library_handler(library.mapping_name) if library.run_sort: logger.info("") - util.separator(f"Sorting {library.name} Library's Collections", space=False, border=False) + logger.separator(f"Sorting {library.name} Library's Collections", space=False, border=False) logger.info("") for builder in library.run_sort: logger.info("") - util.separator(f"Sorting {builder.name} Collection", space=False, border=False) + logger.separator(f"Sorting {builder.name} Collection", space=False, border=False) logger.info("") builder.sort_collection() if not config.library_first and library.library_operation and not config.test_mode and not collection_only: library_operations(config, library) - logger.removeHandler(library_handler) + logger.remove_library_handler(library.mapping_name) except Exception as e: library.notify(e) - util.print_stacktrace() - util.print_multiline(e, critical=True) + logger.stacktrace() + logger.critical(e) playlist_status = {} playlist_stats = {} if config.playlist_files: - os.makedirs(os.path.join(default_dir, "logs", "playlists"), exist_ok=True) - pf_file_logger = os.path.join(default_dir, "logs", "playlists", "playlists.log") - should_roll_over = os.path.isfile(pf_file_logger) - playlists_handler = RotatingFileHandler(pf_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8") - util.apply_formatter(playlists_handler) - if should_roll_over: - playlists_handler.doRollover() - logger.addHandler(playlists_handler) + 
logger.add_playlists_handler() playlist_status, playlist_stats = run_playlists(config) - logger.removeHandler(playlists_handler) + logger.remove_playlists_handler() has_run_again = False for library in config.libraries: @@ -328,41 +299,37 @@ def update_libraries(config): amount_added = 0 if has_run_again and not library_only: logger.info("") - util.separator("Run Again") + logger.separator("Run Again") logger.info("") for x in range(1, config.general["run_again_delay"] + 1): - util.print_return(f"Waiting to run again in {config.general['run_again_delay'] - x + 1} minutes") + logger.ghost(f"Waiting to run again in {config.general['run_again_delay'] - x + 1} minutes") for y in range(60): time.sleep(1) - util.print_end() + logger.exorcise() for library in config.libraries: if library.run_again: try: - col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log") - library_handler = RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8") - util.apply_formatter(library_handler) - logger.addHandler(library_handler) - library_handler.addFilter(fmt_filter) + logger.re_add_library_handler(library.mapping_name) os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) logger.info("") - util.separator(f"{library.name} Library Run Again") + logger.separator(f"{library.name} Library Run Again") logger.info("") library.map_guids() for builder in library.run_again: logger.info("") - util.separator(f"{builder.name} Collection in {library.name}") + logger.separator(f"{builder.name} Collection in {library.name}") logger.info("") try: amount_added += builder.run_collections_again() except Failed as e: library.notify(e, collection=builder.name, critical=False) - util.print_stacktrace() - util.print_multiline(e, error=True) - logger.removeHandler(library_handler) + logger.stacktrace() + logger.error(e) + logger.remove_library_handler(library.mapping_name) except Exception as e: library.notify(e) - util.print_stacktrace() - 
util.print_multiline(e, critical=True) + logger.stacktrace() + logger.critical(e) used_url = [] for library in config.libraries: @@ -387,19 +354,19 @@ def update_libraries(config): def print_status(section, status): logger.info("") - util.separator(f"{section} Summary", space=False, border=False) + logger.separator(f"{section} Summary", space=False, border=False) logger.info("") logger.info(f"{'Title':^{longest}} | + | = | - | {'Status':^13}") - breaker = f"{util.separating_character * longest}|{util.separating_character * 5}|{util.separating_character * 5}|{util.separating_character * 5}|" - util.separator(breaker, space=False, border=False, side_space=False, left=True) + breaker = f"{logger.separating_character * longest}|{logger.separating_character * 5}|{logger.separating_character * 5}|{logger.separating_character * 5}|" + logger.separator(breaker, space=False, border=False, side_space=False, left=True) for name, data in status.items(): logger.info(f"{name:<{longest}} | {data['added']:^3} | {data['unchanged']:^3} | {data['removed']:^3} | {data['status']}") if data["errors"]: for error in data["errors"]: - util.print_multiline(error, info=True) + logger.info(error) logger.info("") - util.separator("Summary") + logger.separator("Summary") for library in config.libraries: print_status(library.name, library.status) if playlist_status: @@ -430,7 +397,7 @@ def update_libraries(config): def library_operations(config, library): logger.info("") - util.separator(f"{library.name} Library Operations") + logger.separator(f"{library.name} Library Operations") logger.info("") logger.debug(f"Assets For All: {library.assets_for_all}") logger.debug(f"Delete Collections With Less: {library.delete_collections_with_less}") @@ -458,16 +425,16 @@ def library_operations(config, library): items = library.search(**{"duplicate": True}) for item in items: item.split() - logger.info(util.adjust_space(f"{item.title[:25]:<25} | Splitting")) + logger.info(f"{item.title[:25]:<25} | 
Splitting") if library.update_blank_track_titles: tracks = library.get_all(collection_level="track") for i, item in enumerate(tracks, 1): - util.print_return(f"Processing Track: {i}/{len(tracks)} {item.title}") + logger.ghost(f"Processing Track: {i}/{len(tracks)} {item.title}") if not item.title and item.sortTitle: library.edit_query(item, {"title.locked": 1, "title.value": item.sortTitle}) logger.info(f"Track: {item.sortTitle} was updated with sort title") - util.print_end() + logger.exorcise() tmdb_collections = {} if library.items_library_operation: @@ -482,7 +449,7 @@ def library_operations(config, library): except Failed as e: logger.error(e) continue - util.print_return(f"Processing: {i}/{len(items)} {item.title}") + logger.ghost(f"Processing: {i}/{len(items)} {item.title}") if library.assets_for_all: library.find_assets(item) tmdb_id, tvdb_id, imdb_id = library.get_ids(item) @@ -497,7 +464,7 @@ def library_operations(config, library): raise Failed if str(item.userRating) != str(new_rating): library.edit_query(item, {"userRating.value": new_rating, "userRating.locked": 1}) - logger.info(util.adjust_space(f"{item.title[:25]:<25} | User Rating | {new_rating}")) + logger.info(f"{item.title[:25]:<25} | User Rating | {new_rating}") except Failed: pass @@ -527,12 +494,12 @@ def library_operations(config, library): try: omdb_item = config.OMDb.get_omdb(imdb_id) except Failed as e: - logger.error(util.adjust_space(str(e))) + logger.error(str(e)) except Exception: logger.error(f"IMDb ID: {imdb_id}") raise else: - logger.info(util.adjust_space(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")) + logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}") tvdb_item = None if library.mass_genre_update == "tvdb": @@ -540,9 +507,9 @@ def library_operations(config, library): try: tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie) except Failed as e: - logger.error(util.adjust_space(str(e))) + logger.error(str(e)) else: - 
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}")) + logger.info(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}") mdb_item = None if library.mass_audience_rating_update in util.mdb_types or library.mass_critic_rating_update in util.mdb_types \ @@ -556,12 +523,12 @@ def library_operations(config, library): try: mdb_item = config.Mdblist.get_imdb(imdb_id) except Failed as e: - logger.error(util.adjust_space(str(e))) + logger.error(str(e)) except Exception: logger.error(f"IMDb ID: {imdb_id}") raise else: - logger.info(util.adjust_space(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")) + logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}") if library.tmdb_collections and tmdb_item and tmdb_item.collection: tmdb_collections[tmdb_item.collection.id] = tmdb_item.collection.name @@ -609,20 +576,20 @@ def library_operations(config, library): try: new_rating = get_rating(library.mass_audience_rating_update) if new_rating is None: - logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found")) + logger.info(f"{item.title[:25]:<25} | No Rating Found") elif str(item.audienceRating) != str(new_rating): library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1}) - logger.info(util.adjust_space(f"{item.title[:25]:<25} | Audience Rating | {new_rating}")) + logger.info(f"{item.title[:25]:<25} | Audience Rating | {new_rating}") except Failed: pass if library.mass_critic_rating_update: try: new_rating = get_rating(library.mass_critic_rating_update) if new_rating is None: - logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found")) + logger.info(f"{item.title[:25]:<25} | No Rating Found") elif str(item.rating) != str(new_rating): library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1}) - logger.info(util.adjust_space(f"{item.title[:25]:<25} | Critic Rating | {new_rating}")) + logger.info(f"{item.title[:25]:<25} | Critic 
Rating | {new_rating}") except Failed: pass if library.mass_content_rating_update: @@ -636,10 +603,10 @@ def library_operations(config, library): else: raise Failed if new_rating is None: - logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Content Rating Found")) + logger.info(f"{item.title[:25]:<25} | No Content Rating Found") elif str(item.rating) != str(new_rating): library.edit_query(item, {"contentRating.value": new_rating, "contentRating.locked": 1}) - logger.info(util.adjust_space(f"{item.title[:25]:<25} | Content Rating | {new_rating}")) + logger.info(f"{item.title[:25]:<25} | Content Rating | {new_rating}") except Failed: pass @@ -671,7 +638,7 @@ def library_operations(config, library): if tmdb_collections or library.genre_collections: logger.info("") - util.separator(f"Starting Automated Collections") + logger.separator(f"Starting Automated Collections") logger.info("") new_collections = {} templates = {} @@ -724,7 +691,7 @@ def library_operations(config, library): unmanaged = "Unmanaged Collections " elif library.delete_collections_with_less > 0: unmanaged = "Unmanaged Collections and " - util.separator(f"Deleting All {unmanaged}Collections{print_suffix}", space=False, border=False) + logger.separator(f"Deleting All {unmanaged}Collections{print_suffix}", space=False, border=False) logger.info("") unmanaged_collections = [] for col in library.get_all_collections(): @@ -740,7 +707,7 @@ def library_operations(config, library): if library.show_unmanaged and len(unmanaged_collections) > 0: logger.info("") - util.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False) + logger.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False) logger.info("") for col in unmanaged_collections: logger.info(col.title) @@ -748,19 +715,19 @@ def library_operations(config, library): logger.info(f"{len(unmanaged_collections)} Unmanaged Collection{'s' if len(unmanaged_collections) > 1 else ''}") elif 
library.show_unmanaged: logger.info("") - util.separator(f"No Unmanaged Collections in {library.name} Library", space=False, border=False) + logger.separator(f"No Unmanaged Collections in {library.name} Library", space=False, border=False) logger.info("") if library.assets_for_all and len(unmanaged_collections) > 0: logger.info("") - util.separator(f"Unmanaged Collection Assets Check for {library.name} Library", space=False, border=False) + logger.separator(f"Unmanaged Collection Assets Check for {library.name} Library", space=False, border=False) logger.info("") for col in unmanaged_collections: library.find_assets(col) if library.metadata_backup: logger.info("") - util.separator(f"Metadata Backup for {library.name} Library", space=False, border=False) + logger.separator(f"Metadata Backup for {library.name} Library", space=False, border=False) logger.info("") logger.info(f"Metadata Backup Path: {library.metadata_backup['path']}") logger.info("") @@ -768,7 +735,7 @@ def library_operations(config, library): meta, _, _ = yaml.util.load_yaml_guess_indent(open(library.metadata_backup["path"])) except yaml.scanner.ScannerError as e: meta = {} - util.print_multiline(f"YAML Error: {util.tab_new_lines(e)}", error=True) + logger.error(f"YAML Error: {util.tab_new_lines(e)}") filename, file_extension = os.path.splitext(library.metadata_backup["path"]) i = 1 while os.path.exists(f"{filename}{i}{file_extension}"): @@ -780,7 +747,7 @@ def library_operations(config, library): items = library.get_all(load=True) titles = [i.title for i in items] for i, item in enumerate(items, 1): - util.print_return(f"Processing: {i}/{len(items)} {item.title}") + logger.ghost(f"Processing: {i}/{len(items)} {item.title}") map_key, attrs = library.get_locked_attributes(item, titles) if attrs or library.metadata_backup["add_blank_entries"]: def run_dict(save_dict, the_dict): @@ -790,12 +757,12 @@ def library_operations(config, library): for kk, vv in the_dict.items(): if isinstance(vv, dict) and kk in save_dict: run_dict(save_dict[kk], vv) else: save_dict[kk] = vv run_dict(meta["metadata"][map_key],
attrs) - util.print_end() + logger.exorcise() try: yaml.round_trip_dump(meta, open(library.metadata_backup["path"], "w", encoding="utf-8"), block_seq_indent=2) logger.info(f"{len(meta['metadata'])} {library.type.capitalize()}{'s' if len(meta['metadata']) > 1 else ''} Backed Up") except yaml.scanner.ScannerError as e: - util.print_multiline(f"YAML Error: {util.tab_new_lines(e)}", error=True) + logger.error(f"YAML Error: {util.tab_new_lines(e)}") def run_collection(config, library, metadata, requested_collections): logger.info("") @@ -821,43 +788,35 @@ def run_collection(config, library, metadata, requested_collections): elif config.resume_from == mapping_name: config.resume_from = None logger.info("") - util.separator(f"Resuming Collections") + logger.separator(f"Resuming Collections") if "name_mapping" in collection_attrs and collection_attrs["name_mapping"]: collection_log_name, output_str = util.validate_filename(collection_attrs["name_mapping"]) else: collection_log_name, output_str = util.validate_filename(mapping_name) - collection_log_folder = os.path.join(default_dir, "logs", library.mapping_name, "collections", collection_log_name) - os.makedirs(collection_log_folder, exist_ok=True) - col_file_logger = os.path.join(collection_log_folder, "collection.log") - should_roll_over = os.path.isfile(col_file_logger) - collection_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8") - util.apply_formatter(collection_handler) - if should_roll_over: - collection_handler.doRollover() - logger.addHandler(collection_handler) + logger.add_collection_handler(library.mapping_name, collection_log_name) library.status[mapping_name] = {"status": "", "errors": [], "created": False, "modified": False, "deleted": False, "added": 0, "unchanged": 0, "removed": 0, "radarr": 0, "sonarr": 0} try: - util.separator(f"{mapping_name} Collection in {library.name}") + logger.separator(f"{mapping_name} Collection in {library.name}") 
logger.info("") if output_str: logger.info(output_str) logger.info("") - util.separator(f"Validating {mapping_name} Attributes", space=False, border=False) + logger.separator(f"Validating {mapping_name} Attributes", space=False, border=False) builder = CollectionBuilder(config, metadata, mapping_name, no_missing, collection_attrs, library=library) logger.info("") - util.separator(f"Running {mapping_name} Collection", space=False, border=False) + logger.separator(f"Running {mapping_name} Collection", space=False, border=False) if len(builder.schedule) > 0: - util.print_multiline(builder.schedule, info=True) + logger.info(builder.schedule) if len(builder.smart_filter_details) > 0: logger.info("") - util.print_multiline(builder.smart_filter_details, info=True) + logger.info(builder.smart_filter_details) items_added = 0 items_removed = 0 @@ -896,7 +855,7 @@ def run_collection(config, library, metadata, requested_collections): valid = False if builder.details["delete_below_minimum"] and builder.obj: logger.info("") - util.print_multiline(builder.delete(), info=True) + logger.info(builder.delete()) builder.deleted = True if builder.do_missing and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0): @@ -917,10 +876,10 @@ def run_collection(config, library, metadata, requested_collections): library.stats["modified"] += 1 library.status[mapping_name]["modified"] = True except Failed: - util.print_stacktrace() + logger.stacktrace() run_item_details = False logger.info("") - util.separator("No Collection to Update", space=False, border=False) + logger.separator("No Collection to Update", space=False, border=False) else: builder.update_details() @@ -938,7 +897,7 @@ def run_collection(config, library, metadata, requested_collections): builder.load_collection_items() except Failed: logger.info("") - util.separator("No Items Found", space=False, border=False) + logger.separator("No Items Found", space=False, border=False) else: if builder.item_details: 
builder.update_item_details() @@ -960,29 +919,29 @@ def run_collection(config, library, metadata, requested_collections): else: library.status[mapping_name]["status"] = "Unchanged" except NotScheduled as e: - util.print_multiline(e, info=True) + logger.info(e) library.status[mapping_name]["status"] = "Not Scheduled" except Failed as e: library.notify(e, collection=mapping_name) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) library.status[mapping_name]["status"] = "PMM Failure" library.status[mapping_name]["errors"].append(e) except Exception as e: library.notify(f"Unknown Error: {e}", collection=mapping_name) - util.print_stacktrace() + logger.stacktrace() logger.error(f"Unknown Error: {e}") library.status[mapping_name]["status"] = "Unknown Error" library.status[mapping_name]["errors"].append(e) logger.info("") - util.separator(f"Finished {mapping_name} Collection\nCollection Run Time: {str(datetime.now() - collection_start).split('.')[0]}") - logger.removeHandler(collection_handler) + logger.separator(f"Finished {mapping_name} Collection\nCollection Run Time: {str(datetime.now() - collection_start).split('.')[0]}") + logger.remove_collection_handler(library.mapping_name, collection_log_name) def run_playlists(config): stats = {"created": 0, "modified": 0, "deleted": 0, "added": 0, "unchanged": 0, "removed": 0, "radarr": 0, "sonarr": 0} status = {} logger.info("") - util.separator("Playlists") + logger.separator("Playlists") logger.info("") for playlist_file in config.playlist_files: for mapping_name, playlist_attrs in playlist_file.playlists.items(): @@ -1006,35 +965,26 @@ def run_playlists(config): playlist_log_name, output_str = util.validate_filename(playlist_attrs["name_mapping"]) else: playlist_log_name, output_str = util.validate_filename(mapping_name) - playlist_log_folder = os.path.join(default_dir, "logs", "playlists", playlist_log_name) - os.makedirs(playlist_log_folder, exist_ok=True) - 
ply_file_logger = os.path.join(playlist_log_folder, "playlist.log") - should_roll_over = os.path.isfile(ply_file_logger) - playlist_handler = RotatingFileHandler(ply_file_logger, delay=True, mode="w", backupCount=3, - encoding="utf-8") - util.apply_formatter(playlist_handler) - if should_roll_over: - playlist_handler.doRollover() - logger.addHandler(playlist_handler) + logger.add_playlist_handler(playlist_log_name) status[mapping_name] = {"status": "", "errors": [], "created": False, "modified": False, "deleted": False, "added": 0, "unchanged": 0, "removed": 0, "radarr": 0, "sonarr": 0} server_name = None library_names = None try: - util.separator(f"{mapping_name} Playlist") + logger.separator(f"{mapping_name} Playlist") logger.info("") if output_str: logger.info(output_str) logger.info("") - util.separator(f"Validating {mapping_name} Attributes", space=False, border=False) + logger.separator(f"Validating {mapping_name} Attributes", space=False, border=False) builder = CollectionBuilder(config, playlist_file, mapping_name, no_missing, playlist_attrs) logger.info("") - util.separator(f"Running {mapping_name} Playlist", space=False, border=False) + logger.separator(f"Running {mapping_name} Playlist", space=False, border=False) if len(builder.schedule) > 0: - util.print_multiline(builder.schedule, info=True) + logger.info(builder.schedule) items_added = 0 items_removed = 0 @@ -1083,7 +1033,7 @@ def run_playlists(config): valid = False if builder.details["delete_below_minimum"] and builder.obj: logger.info("") - util.print_multiline(builder.delete(), info=True) + logger.info(builder.delete()) builder.deleted = True if builder.do_missing and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0): @@ -1103,10 +1053,10 @@ def run_playlists(config): stats["modified"] += 1 status[mapping_name]["modified"] = True except Failed: - util.print_stacktrace() + logger.stacktrace() run_item_details = False logger.info("") - util.separator("No Playlist to Update", 
space=False, border=False) + logger.separator("No Playlist to Update", space=False, border=False) else: builder.update_details() @@ -1119,7 +1069,7 @@ def run_playlists(config): builder.load_collection_items() except Failed: logger.info("") - util.separator("No Items Found", space=False, border=False) + logger.separator("No Items Found", space=False, border=False) else: if builder.item_details: builder.update_item_details() @@ -1132,23 +1082,23 @@ def run_playlists(config): builder.send_notifications(playlist=True) except NotScheduled as e: - util.print_multiline(e, info=True) + logger.info(e) status[mapping_name]["status"] = "Not Scheduled" except Failed as e: config.notify(e, server=server_name, library=library_names, playlist=mapping_name) - util.print_stacktrace() - util.print_multiline(e, error=True) + logger.stacktrace() + logger.error(e) status[mapping_name]["status"] = "PMM Failure" status[mapping_name]["errors"].append(e) except Exception as e: config.notify(f"Unknown Error: {e}", server=server_name, library=library_names, playlist=mapping_name) - util.print_stacktrace() + logger.stacktrace() logger.error(f"Unknown Error: {e}") status[mapping_name]["status"] = "Unknown Error" status[mapping_name]["errors"].append(e) logger.info("") - util.separator(f"Finished {mapping_name} Playlist\nPlaylist Run Time: {str(datetime.now() - playlist_start).split('.')[0]}") - logger.removeHandler(playlist_handler) + logger.separator(f"Finished {mapping_name} Playlist\nPlaylist Run Time: {str(datetime.now() - playlist_start).split('.')[0]}") + logger.remove_playlist_handler(playlist_log_name) return status, stats try: @@ -1196,9 +1146,9 @@ try: minutes = int((seconds % 3600) // 60) time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else "" time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}" - util.print_return(f"Current Time: {current_time} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}") + 
logger.ghost(f"Current Time: {current_time} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}") else: logger.error(f"Time Error: {valid_times}") time.sleep(60) except KeyboardInterrupt: - util.separator("Exiting Plex Meta Manager") + logger.separator("Exiting Plex Meta Manager")