diff --git a/README.md b/README.md index ea6f78a3..38fff992 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Plex Meta Manager -#### Version 1.9.1 +#### Version 1.9.2 The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to be able to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be continuously run using YAML configuration files to update on a schedule the metadata of the movies, shows, and collections in your libraries as well as automatically build collections based on various methods all detailed in the wiki. Some collection examples that the script can automatically build and update daily include Plex Based Searches like actor, genre, or studio collections or Collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists and various other services. @@ -19,7 +19,7 @@ The script is designed to work with most Metadata agents including the new Plex ## Support -* Before posting on Github about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/NfH6mGFuAB). +* Before posting on Github about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/TsdpsFYqqm). * If you're getting an Error or have an Enhancement post in the [Issues](https://github.com/meisnate12/Plex-Meta-Manager/issues). * If you have a configuration question post in the [Discussions](https://github.com/meisnate12/Plex-Meta-Manager/discussions). * To see user submitted Metadata configuration files, and you to even add your own, go to the [Plex Meta Manager Configs](https://github.com/meisnate12/Plex-Meta-Manager-Configs). 
diff --git a/modules/builder.py b/modules/builder.py index 786213c1..59f81217 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1,4 +1,4 @@ -import glob, logging, os, re +import logging, os, re from datetime import datetime, timedelta from modules import anidb, anilist, imdb, letterboxd, mal, plex, radarr, sonarr, tautulli, tmdb, trakttv, tvdb, util from modules.util import Failed @@ -102,8 +102,6 @@ numbered_builders = [ ] smart_collection_invalid = ["collection_mode", "collection_order"] smart_url_collection_invalid = [ - "item_label", "item_label.sync", "item_episode_sorting", "item_keep_episodes", "item_delete_episodes", - "item_season_display", "item_episode_ordering", "item_metadata_language", "item_use_original_title", "run_again", "sync_mode", "show_filtered", "show_missing", "save_missing", "smart_label", "radarr_add", "radarr_folder", "radarr_monitor", "radarr_availability", "radarr_quality", "radarr_tag", "radarr_search", @@ -494,17 +492,19 @@ class CollectionBuilder: for smart_key, smart_data in filter_dict.items(): smart, smart_mod, smart_final = _split(smart_key) - def build_url_arg(arg, mod=None, arg_s=None, mod_s=None): + def build_url_arg(arg, mod=None, arg_s=None, mod_s=None, param_s=None): arg_key = plex.search_translation[smart] if smart in plex.search_translation else smart if mod is None: - mod = plex.modifier_translation[smart_mod] if smart_mod in plex.search_translation else smart_mod + mod = plex.modifier_translation[smart_mod] if smart_mod in plex.modifier_translation else smart_mod if arg_s is None: arg_s = arg if smart in string_filters and smart_mod in ["", ".not"]: mod_s = "does not contain" if smart_mod == ".not" else "contains" elif mod_s is None: mod_s = plex.mod_displays[smart_mod] - display_line = f"{indent}{smart.title().replace('_', ' ')} {mod_s} {arg_s}" + if param_s is None: + param_s = smart.title().replace('_', ' ') + display_line = f"{indent}{param_s} {mod_s} {arg_s}" return f"{arg_key}{mod}={arg}&", display_line 
if smart_final in plex.movie_only_smart_searches and self.library.is_show: @@ -540,8 +540,15 @@ class CollectionBuilder: results, display_add = build_url_arg(util.check_number(smart_data, smart_final, minimum=1)) elif smart in ["user_rating", "episode_user_rating", "critic_rating", "audience_rating"] and smart_mod in [".gt", ".gte", ".lt", ".lte"]: results, display_add = build_url_arg(util.check_number(smart_data, smart_final, number_type="float", minimum=0, maximum=10)) + elif smart == "hdr": + if isinstance(smart_data, bool): + hdr_mod = "" if smart_data else "!" + hdr_arg = "true" if smart_data else "false" + results, display_add = build_url_arg(1, mod=hdr_mod, arg_s=hdr_arg, mod_s="is", param_s="HDR") + else: + raise Failed("Collection Error: HDR must be true or false") else: - if smart in ["title", "episode_title"] and smart_mod in ["", ".not", ".begins", ".ends"]: + if smart in ["title", "episode_title", "studio"] and smart_mod in ["", ".not", ".begins", ".ends"]: results_list = [(t, t) for t in util.get_list(smart_data, split=False)] elif smart in plex.tags and smart_mod in ["", ".not", ".begins", ".ends"]: if smart_final in plex.tmdb_searches: @@ -552,8 +559,6 @@ class CollectionBuilder: smart_values.append(tmdb_name) else: smart_values.append(tmdb_value) - elif smart == "studio": - smart_values = util.get_list(smart_data, split=False) else: smart_values = util.get_list(smart_data) results_list = [] @@ -640,10 +645,8 @@ class CollectionBuilder: raise Failed(f"Collection Error: {method_name} attribute only works with normal collections") elif method_name not in collectionless_details and self.collectionless: raise Failed(f"Collection Error: {method_name} attribute does not work for Collectionless collection") - elif self.smart_url and method_name in all_builders: - raise Failed(f"Collection Error: {method_name} builder not allowed when using smart_url") - elif self.smart_url and method_name in smart_url_collection_invalid: - raise Failed(f"Collection Error: 
{method_name} detail not allowed when using smart_url") + elif self.smart_url and (method_name in all_builders or method_name in smart_url_collection_invalid): + raise Failed(f"Collection Error: {method_name} builder not allowed when using smart_filter") elif method_name == "summary": self.summaries[method_name] = method_data elif method_name == "tmdb_summary": @@ -703,16 +706,20 @@ class CollectionBuilder: elif method_name == "sync_mode": if str(method_data).lower() in ["append", "sync"]: self.details[method_name] = method_data.lower() else: raise Failed("Collection Error: sync_mode attribute must be either 'append' or 'sync'") - elif method_name in ["label", "label.sync"]: + elif method_name in ["label", "label.remove", "label.sync"]: if "label" in self.data and "label.sync" in self.data: raise Failed(f"Collection Error: Cannot use label and label.sync together") + if "label.remove" in self.data and "label.sync" in self.data: + raise Failed(f"Collection Error: Cannot use label.remove and label.sync together") if method_name == "label" and "label_sync_mode" in self.data and self.data["label_sync_mode"] == "sync": self.details["label.sync"] = util.get_list(method_data) else: self.details[method_name] = util.get_list(method_data) - elif method_name in ["item_label", "item_label.sync"]: + elif method_name in ["item_label", "item_label.remove", "item_label.sync"]: if "item_label" in self.data and "item_label.sync" in self.data: raise Failed(f"Collection Error: Cannot use item_label and item_label.sync together") + if "item_label.remove" in self.data and "item_label.sync" in self.data: + raise Failed(f"Collection Error: Cannot use item_label.remove and item_label.sync together") self.item_details[method_name] = util.get_list(method_data) elif method_name in plex.item_advance_keys: key, options = plex.item_advance_keys[method_name] @@ -768,7 +775,7 @@ class CollectionBuilder: self.sonarr_options[method_name[7:]] = util.get_bool(method_name, method_data) elif method_name 
== "sonarr_tag": self.sonarr_options["tag"] = util.get_list(method_data) - elif method_name in ["title", "title.and", "title.not", "title.begins", "title.ends"]: + elif method_name in ["title", "title.and", "title.not", "title.begins", "studio.ends", "studio", "studio.and", "studio.not", "studio.begins", "studio.ends"]: self.methods.append(("plex_search", [{method_name: util.get_list(method_data, split=False)}])) elif method_name in ["year.gt", "year.gte", "year.lt", "year.lte"]: self.methods.append(("plex_search", [{method_name: util.check_year(method_data, current_year, method_name)}])) @@ -928,8 +935,6 @@ class CollectionBuilder: raise Failed(f"Collection Error: {search_final} plex search attribute only works for movie libraries") elif search_final in plex.show_only_searches and self.library.is_movie: raise Failed(f"Collection Error: {search_final} plex search attribute only works for show libraries") - elif search_final not in plex.searches: - raise Failed(f"Collection Error: {search_final} is not a valid plex search attribute") elif search_data is None: raise Failed(f"Collection Error: {search_final} plex search attribute is blank") elif search == "sort_by": @@ -944,7 +949,9 @@ class CollectionBuilder: raise Failed(f"Collection Warning: plex search limit attribute: {search_data} must be an integer greater then 0") else: searches[search] = search_data - elif search == "title" and modifier in ["", ".and", ".not", ".begins", ".ends"]: + elif search_final not in plex.searches: + raise Failed(f"Collection Error: {search_final} is not a valid plex search attribute") + elif search in ["title", "studio"] and modifier in ["", ".and", ".not", ".begins", ".ends"]: searches[search_final] = util.get_list(search_data, split=False) elif search in plex.tags and modifier in ["", ".and", ".not", ".begins", ".ends"]: if search_final in plex.tmdb_searches: @@ -1244,21 +1251,32 @@ class CollectionBuilder: self.details["collection_mode"] = "hide" self.sync = True - try: - self.obj 
= library.get_collection(self.name) - collection_smart = library.smart(self.obj) - if (self.smart and not collection_smart) or (not self.smart and collection_smart): - logger.info("") - logger.error(f"Collection Error: Converting {self.obj.title} to a {'smart' if self.smart else 'normal'} collection") - library.query(self.obj.delete) + self.build_collection = True + if "build_collection" in methods: + if not self.data[methods["build_collection"]]: + logger.warning(f"Collection Warning: build_collection attribute is blank defaulting to true") + else: + self.build_collection = util.get_bool("build_collection", self.data[methods["build_collection"]]) + + if self.build_collection: + try: + self.obj = library.get_collection(self.name) + collection_smart = library.smart(self.obj) + if (self.smart and not collection_smart) or (not self.smart and collection_smart): + logger.info("") + logger.error(f"Collection Error: Converting {self.obj.title} to a {'smart' if self.smart else 'normal'} collection") + library.query(self.obj.delete) + self.obj = None + except Failed: self.obj = None - except Failed: - self.obj = None - self.plex_map = {} - if self.sync and self.obj: - for item in library.get_collection_items(self.obj, self.smart_label_collection): - self.plex_map[item.ratingKey] = item + self.plex_map = {} + if self.sync and self.obj: + for item in library.get_collection_items(self.obj, self.smart_label_collection): + self.plex_map[item.ratingKey] = item + else: + self.sync = False + self.run_again = False def collect_rating_keys(self, movie_map, show_map): def add_rating_keys(keys): @@ -1583,21 +1601,27 @@ class CollectionBuilder: self.library.collection_order_query(self.obj, self.details["collection_order"]) logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}") - if "label" in self.details or "label.sync" in self.details: + if "label" in self.details or "label.remove" in self.details or "label.sync" in self.details: 
item_labels = [label.tag for label in self.obj.labels] - labels = util.get_list(self.details["label" if "label" in self.details else "label.sync"]) + labels = self.details["label" if "label" in self.details else "label.sync"] if "label.sync" in self.details: for label in (la for la in item_labels if la not in labels): self.library.query_data(self.obj.removeLabel, label) logger.info(f"Detail: Label {label} removed") - for label in (la for la in labels if la not in item_labels): - self.library.query_data(self.obj.addLabel, label) - logger.info(f"Detail: Label {label} added") + if "label" in self.details or "label.sync" in self.details: + for label in (la for la in labels if la not in item_labels): + self.library.query_data(self.obj.addLabel, label) + logger.info(f"Detail: Label {label} added") + if "label.remove" in self.details: + for label in self.details["label.remove"]: + if label in item_labels: + self.library.query_data(self.obj.removeLabel, label) + logger.info(f"Detail: Label {label} removed") if len(self.item_details) > 0: labels = None - if "item_label" in self.item_details or "item_label.sync" in self.item_details: - labels = util.get_list(self.item_details["item_label" if "item_label" in self.item_details else "item_label.sync"]) + if "item_label" in self.item_details or "item_label.remove" in self.item_details or "item_label.sync" in self.item_details: + labels = self.item_details["item_label" if "item_label" in self.item_details else "item_label.sync"] for item in self.library.get_collection_items(self.obj, self.smart_label_collection): if labels is not None: item_labels = [label.tag for label in item.labels] @@ -1605,9 +1629,15 @@ class CollectionBuilder: for label in (la for la in item_labels if la not in labels): self.library.query_data(item.removeLabel, label) logger.info(f"Detail: Label {label} removed from {item.title}") - for label in (la for la in labels if la not in item_labels): - self.library.query_data(item.addLabel, label) - 
logger.info(f"Detail: Label {label} added to {item.title}") + if "item_label" in self.item_details or "item_label.sync" in self.item_details: + for label in (la for la in labels if la not in item_labels): + self.library.query_data(item.addLabel, label) + logger.info(f"Detail: Label {label} added to {item.title}") + if "item_label.remove" in self.item_details: + for label in self.item_details["item_label.remove"]: + if label in item_labels: + self.library.query_data(self.obj.removeLabel, label) + logger.info(f"Detail: Label {label} removed from {item.title}") advance_edits = {} for method_name, method_data in self.item_details.items(): if method_name in plex.item_advance_keys: @@ -1626,24 +1656,11 @@ class CollectionBuilder: if "name_mapping" in self.details: if self.details["name_mapping"]: name_mapping = self.details["name_mapping"] else: logger.error("Collection Error: name_mapping attribute is blank") - for ad in self.library.asset_directory: - path = os.path.join(ad, f"{name_mapping}") - if self.library.asset_folders: - if not os.path.isdir(path): - continue - poster_filter = os.path.join(ad, name_mapping, "poster.*") - background_filter = os.path.join(ad, name_mapping, "background.*") - else: - poster_filter = os.path.join(ad, f"{name_mapping}.*") - background_filter = os.path.join(ad, f"{name_mapping}_background.*") - matches = glob.glob(poster_filter) - if len(matches) > 0: - self.posters["asset_directory"] = os.path.abspath(matches[0]) - matches = glob.glob(background_filter) - if len(matches) > 0: - self.backgrounds["asset_directory"] = os.path.abspath(matches[0]) - for item in self.library.query(self.obj.items): - self.library.update_item_from_assets(item, dirs=[path]) + poster_image, background_image = self.library.update_item_from_assets(self.obj, collection_mode=True, upload=False, name=name_mapping) + if poster_image: + self.posters["asset_directory"] = poster_image + if background_image: + self.backgrounds["asset_directory"] = background_image def 
set_image(image_method, images, is_background=False): message = f"{'background' if is_background else 'poster'} to [{'File' if image_method in image_file_details else 'URL'}] {images[image_method]}" diff --git a/modules/config.py b/modules/config.py index d59afcbd..716c1e70 100644 --- a/modules/config.py +++ b/modules/config.py @@ -325,6 +325,7 @@ class Config: else: params["name"] = str(library_name) logger.info(f"Connecting to {params['name']} Library...") + params["mapping_name"] = str(library_name) params["asset_directory"] = check_for_attribute(lib, "asset_directory", parent="settings", var_type="list_path", default=self.general["asset_directory"], default_is_none=True, save=False) if params["asset_directory"] is None: @@ -389,6 +390,16 @@ class Config: else: params["mass_critic_rating_update"] = None + if lib and "radarr_add_all" in lib and lib["radarr_add_all"]: + params["radarr_add_all"] = check_for_attribute(lib, "radarr_add_all", var_type="bool", default=False, save=False) + else: + params["radarr_add_all"] = None + + if lib and "sonarr_add_all" in lib and lib["sonarr_add_all"]: + params["sonarr_add_all"] = check_for_attribute(lib, "sonarr_add_all", var_type="bool", default=False, save=False) + else: + params["sonarr_add_all"] = None + try: if lib and "metadata_path" in lib: params["metadata_path"] = [] diff --git a/modules/convert.py b/modules/convert.py index 7eaa939b..ea6ad4ca 100644 --- a/modules/convert.py +++ b/modules/convert.py @@ -2,6 +2,7 @@ import logging, re, requests from lxml import html from modules import util from modules.util import Failed +from plexapi.exceptions import BadRequest from retrying import retry logger = logging.getLogger("Plex Meta Manager") @@ -276,6 +277,8 @@ class Convert: except requests.exceptions.ConnectionError: util.print_stacktrace() raise Failed("No External GUIDs found") + if not tvdb_id and not imdb_id and not tmdb_id: + raise Failed("Refresh Metadata") elif item_type == "imdb": imdb_id = check_id elif item_type 
== "thetvdb": tvdb_id = int(check_id) elif item_type == "themoviedb": tmdb_id = int(check_id) @@ -354,7 +357,9 @@ class Convert: return "movie", tmdb_id else: raise Failed(f"No ID to convert") - except Failed as e: util.print_end(length, f"Mapping Error | {item.guid:<46} | {e} for {item.title}") - return None, None + except BadRequest: + util.print_stacktrace() + util.print_end(length, f"Mapping Error: | {item.guid} for {item.title} not found") + return None, None diff --git a/modules/meta.py b/modules/meta.py index 65a4970c..9efc9a09 100644 --- a/modules/meta.py +++ b/modules/meta.py @@ -121,30 +121,44 @@ class Metadata: def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False): if key is None: key = f"{attr}s" - if attr in alias and f"{attr}.sync" in alias: + if movie_library and not self.library.is_movie: + logger.error(f"Metadata Error: {attr} attribute only works for movie libraries") + elif attr in alias and f"{attr}.sync" in alias: logger.error(f"Metadata Error: Cannot use {attr} and {attr}.sync together") - elif attr in alias or f"{attr}.sync" in alias: + elif f"{attr}.remove" in alias and f"{attr}.sync" in alias: + logger.error(f"Metadata Error: Cannot use {attr}.remove and {attr}.sync together") + elif attr in alias and group[alias[attr]] is None: + logger.error(f"Metadata Error: {attr} attribute is blank") + elif f"{attr}.remove" in alias and group[alias[f"{attr}.remove"]] is None: + logger.error(f"Metadata Error: {attr}.remove attribute is blank") + elif f"{attr}.sync" in alias and group[alias[f"{attr}.sync"]] is None: + logger.error(f"Metadata Error: {attr}.sync attribute is blank") + elif attr in alias or f"{attr}.remove" in alias or f"{attr}.sync" in alias: attr_key = attr if attr in alias else f"{attr}.sync" - if movie_library and not self.library.is_movie: - logger.error(f"Metadata Error: {attr_key} attribute only works for movie libraries") - elif group[alias[attr_key]] or extra: - item_tags = [item_tag.tag for item_tag in 
getattr(obj, key)] - input_tags = [] - if group[alias[attr_key]]: - input_tags.extend(util.get_list(group[alias[attr_key]])) - if extra: - input_tags.extend(extra) - if f"{attr}.sync" in alias: - remove_method = getattr(obj, f"remove{attr.capitalize()}") - for tag in (t for t in item_tags if t not in input_tags): - updated = True - remove_method(tag) - logger.info(f"Detail: {attr.capitalize()} {tag} removed") + item_tags = [item_tag.tag for item_tag in getattr(obj, key)] + input_tags = [] + if group[alias[attr_key]]: + input_tags.extend(util.get_list(group[alias[attr_key]])) + if extra: + input_tags.extend(extra) + if f"{attr}.sync" in alias: + remove_method = getattr(obj, f"remove{attr.capitalize()}") + for tag in (t for t in item_tags if t not in input_tags): + updated = True + self.library.query_data(remove_method, tag) + logger.info(f"Detail: {attr.capitalize()} {tag} removed") + if attr in alias or f"{attr}.sync" in alias: add_method = getattr(obj, f"add{attr.capitalize()}") for tag in (t for t in input_tags if t not in item_tags): updated = True - add_method(tag) + self.library.query_data(add_method, tag) logger.info(f"Detail: {attr.capitalize()} {tag} added") + if f"{attr}.remove" in alias: + remove_method = getattr(obj, f"remove{attr.capitalize()}") + for tag in util.get_list(group[alias[f"{attr}.remove"]]): + if tag in item_tags: + self.library.query_data(remove_method, tag) + logger.info(f"Detail: {attr.capitalize()} {tag} removed") else: logger.error(f"Metadata Error: {attr} attribute is blank") diff --git a/modules/plex.py b/modules/plex.py index 52371207..978f4c28 100644 --- a/modules/plex.py +++ b/modules/plex.py @@ -2,6 +2,7 @@ import glob, logging, os, requests from modules import util from modules.meta import Metadata from modules.util import Failed +import plexapi from plexapi import utils from plexapi.exceptions import BadRequest, NotFound, Unauthorized from plexapi.collection import Collections @@ -182,7 +183,7 @@ smart_searches = [ "producer", 
"producer.not", "subtitle_language", "subtitle_language.not", "writer", "writer.not", - "decade", "resolution", + "decade", "resolution", "hdr", "added", "added.not", "added.before", "added.after", "originally_available", "originally_available.not", "originally_available.before", "originally_available.after", @@ -323,6 +324,7 @@ class PlexAPI: self.Sonarr = None self.Tautulli = None self.name = params["name"] + self.mapping_name = util.validate_filename(params["mapping_name"]) self.missing_path = os.path.join(params["default_dir"], f"{self.name}_missing.yml") self.metadata_path = params["metadata_path"] self.asset_directory = params["asset_directory"] @@ -336,7 +338,9 @@ class PlexAPI: self.mass_genre_update = params["mass_genre_update"] self.mass_audience_rating_update = params["mass_audience_rating_update"] self.mass_critic_rating_update = params["mass_critic_rating_update"] - self.mass_update = self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update + self.radarr_add_all = params["radarr_add_all"] + self.sonarr_add_all = params["sonarr_add_all"] + self.mass_update = self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update or self.radarr_add_all or self.sonarr_add_all self.plex = params["plex"] self.url = params["plex"]["url"] self.token = params["plex"]["token"] @@ -396,6 +400,11 @@ class PlexAPI: @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) def get_guids(self, item): + item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False, + includeChildren=False, includeConcerts=False, includeExternalMedia=False, inclueExtras=False, + includeFields='', includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False, + includeOnDeck=False, includePopularLeaves=False, includePreferences=False, includeRelated=False, + includeRelatedCount=0, includeReviews=False, includeStations=False) return 
item.guids @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) @@ -560,16 +569,16 @@ class PlexAPI: or_des = conjunction if o > 0 else f"{search_method}(" ors += f"{or_des}{param}" if has_processed: - logger.info(f"\t\t AND {ors})") + logger.info(f" AND {ors})") else: logger.info(f"Processing {pretty}: {ors})") has_processed = True if search_sort: - logger.info(f"\t\t SORT BY {search_sort})") + logger.info(f" SORT BY {search_sort}") if search_limit: - logger.info(f"\t\t LIMIT {search_limit})") + logger.info(f" LIMIT {search_limit}") logger.debug(f"Search: {search_terms}") - return self.search(sort=sorts[search_sort], maxresults=search_limit, **search_terms) + items = self.search(sort=sorts[search_sort], maxresults=search_limit, **search_terms) elif method == "plex_collectionless": good_collections = [] logger.info("Collections Excluded") @@ -610,7 +619,7 @@ class PlexAPI: else: raise Failed(f"Plex Error: Method {method} not supported") if len(items) > 0: - return items + return [item.ratingKey for item in items] else: raise Failed("Plex Error: No Items found in Plex") @@ -633,7 +642,11 @@ class PlexAPI: if smart_label_collection: return self.get_labeled_items(collection.title if isinstance(collection, Collections) else str(collection)) elif isinstance(collection, Collections): - return self.query(collection.items) + if self.smart(collection): + key = f"/library/sections/{self.Plex.key}/all{self.smart_filter(collection)}" + return self.Plex._search(key, None, 0, plexapi.X_PLEX_CONTAINER_SIZE) + else: + return self.query(collection.items) else: return [] @@ -659,11 +672,16 @@ class PlexAPI: util.print_stacktrace() logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed") - def update_item_from_assets(self, item, dirs=None): + def update_item_from_assets(self, item, collection_mode=False, upload=True, dirs=None, name=None): if dirs is None: dirs = self.asset_directory - name = 
os.path.basename(os.path.dirname(item.locations[0]) if self.is_movie else item.locations[0]) + if not name and collection_mode: + name = item.title + elif not name: + name = os.path.basename(os.path.dirname(item.locations[0]) if self.is_movie else item.locations[0]) for ad in dirs: + poster_image = None + background_image = None if self.asset_folders: if not os.path.isdir(os.path.join(ad, name)): continue @@ -674,13 +692,22 @@ class PlexAPI: background_filter = os.path.join(ad, f"{name}_background.*") matches = glob.glob(poster_filter) if len(matches) > 0: - self.upload_image(item, os.path.abspath(matches[0]), url=False) - logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {os.path.abspath(matches[0])}") + poster_image = os.path.abspath(matches[0]) + if upload: + self.upload_image(item, poster_image, url=False) + logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {poster_image}") matches = glob.glob(background_filter) if len(matches) > 0: - self.upload_image(item, os.path.abspath(matches[0]), poster=False, url=False) - logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {os.path.abspath(matches[0])}") - if self.is_show: + background_image = os.path.abspath(matches[0]) + if upload: + self.upload_image(item, background_image, poster=False, url=False) + logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {background_image}") + if collection_mode: + for ite in self.query(item.items): + self.update_item_from_assets(ite, dirs=[os.path.join(ad, name)]) + if not upload: + return poster_image, background_image + if self.is_show and not collection_mode: for season in self.query(item.seasons): if self.asset_folders: season_filter = os.path.join(ad, name, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*") @@ -700,4 +727,5 @@ class PlexAPI: if len(matches) > 0: episode_path = os.path.abspath(matches[0]) self.upload_image(episode, 
episode_path, url=False) - logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}") \ No newline at end of file + logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}") + return None, None diff --git a/modules/util.py b/modules/util.py index ad65760d..993b444b 100644 --- a/modules/util.py +++ b/modules/util.py @@ -1,5 +1,6 @@ import logging, re, signal, sys, time, traceback from datetime import datetime +from pathvalidate import is_valid_filename, sanitize_filename from plexapi.exceptions import BadRequest, NotFound, Unauthorized try: @@ -220,7 +221,6 @@ def compile_list(data): else: return data - def get_list(data, lower=False, split=True, int_list=False): if isinstance(data, list): return data elif isinstance(data, dict): return [data] @@ -366,16 +366,22 @@ def centered(text, do_print=True): return final_text def separator(text=None): - logger.handlers[0].setFormatter(logging.Formatter(f"%(message)-{screen_width - 2}s")) - logger.handlers[1].setFormatter(logging.Formatter(f"[%(asctime)s] %(filename)-27s %(levelname)-10s %(message)-{screen_width - 2}s")) + for handler in logger.handlers: + apply_formatter(handler, border=False) logger.info(f"|{separating_character * screen_width}|") if text: text_list = text.split("\n") for t in text_list: logger.info(f"| {centered(t, do_print=False)} |") logger.info(f"|{separating_character * screen_width}|") - logger.handlers[0].setFormatter(logging.Formatter(f"| %(message)-{screen_width - 2}s |")) - logger.handlers[1].setFormatter(logging.Formatter(f"[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-{screen_width - 2}s |")) + for handler in logger.handlers: + apply_formatter(handler) + +def apply_formatter(handler, border=True): + text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s" + if isinstance(handler, 
logging.handlers.RotatingFileHandler): + text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}" + handler.setFormatter(logging.Formatter(text)) def print_return(length, text): print(adjust_space(length, f"| {text}"), end="\r") @@ -384,3 +390,11 @@ def print_return(length, text): def print_end(length, text=None): if text: logger.info(adjust_space(length, text)) else: print(adjust_space(length, " "), end="\r") + +def validate_filename(filename): + if is_valid_filename(filename): + return filename + else: + mapping_name = sanitize_filename(filename) + logger.info(f"Folder Name: {filename} is invalid using {mapping_name}") + return mapping_name diff --git a/plex_meta_manager.py b/plex_meta_manager.py index 5289ae08..6cf0be6d 100644 @@ -6,7 +6,6 @@ try: from modules.builder import CollectionBuilder from modules.config import Config from modules.util import Failed - from plexapi.exceptions import BadRequest except ModuleNotFoundError: print("Error: Requirements are not installed") sys.exit(0) @@ -18,8 +17,10 @@ parser.add_argument("-t", "--time", dest="time", help="Time to update each day u parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str) parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False) parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False) -parser.add_argument("-cl", "--collection", "--collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str) -parser.add_argument("-l", "--library", "--libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str) +parser.add_argument("-co", "--collection-only", "--collections-only", dest="collection_only", 
help="Run only collection operations", action="store_true", default=False) +parser.add_argument("-lo", "--library-only", "--libraries-only", dest="library_only", help="Run only library operations", action="store_true", default=False) +parser.add_argument("-rc", "-cl", "--collection", "--collections", "--run-collection", "--run-collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str) +parser.add_argument("-rl", "-l", "--library", "--libraries", "--run-library", "--run-libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str) parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str) parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int) args = parser.parse_args() @@ -39,6 +40,8 @@ def check_bool(env_str, default): test = check_bool("PMM_TEST", args.test) debug = check_bool("PMM_DEBUG", args.debug) run = check_bool("PMM_RUN", args.run) +library_only = check_bool("PMM_LIBRARIES_ONLY", args.library_only) +collection_only = check_bool("PMM_COLLECTIONS_ONLY", args.collection_only) collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections libraries = os.environ.get("PMM_LIBRARIES") if os.environ.get("PMM_LIBRARIES") else args.libraries resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume @@ -71,31 +74,32 @@ def fmt_filter(record): record.filename = f"[{record.filename}:{record.lineno}]" return True -file_handler = logging.handlers.TimedRotatingFileHandler(os.path.join(default_dir, "logs", "meta.log"), when="midnight", backupCount=10, encoding="utf-8") -file_handler.addFilter(fmt_filter) -file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-100s |")) - cmd_handler = logging.StreamHandler() 
-cmd_handler.setFormatter(logging.Formatter("| %(message)-100s |")) cmd_handler.setLevel(logging.DEBUG if test or debug else logging.INFO) logger.addHandler(cmd_handler) -logger.addHandler(file_handler) sys.excepthook = util.my_except_hook -util.separator() -util.centered(" ") -util.centered(" ____ _ __ __ _ __ __ ") -util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ ") -util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|") -util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ") -util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ") -util.centered(" |___/ ") -util.centered(" Version: 1.9.1 ") -util.separator() - def start(config_path, is_test, daily, requested_collections, requested_libraries, resume_from): + file_logger = os.path.join(default_dir, "logs", "meta.log") + should_roll_over = os.path.isfile(file_logger) + file_handler = logging.handlers.RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8") + util.apply_formatter(file_handler) + file_handler.addFilter(fmt_filter) + if should_roll_over: + file_handler.doRollover() + logger.addHandler(file_handler) + util.separator() + util.centered(" ") + util.centered(" ____ _ __ __ _ __ __ ") + util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ ") + util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|") + util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ") + util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ") + util.centered(" |___/ ") + util.centered(" Version: 1.9.2 ") + util.separator() if daily: start_type = "Daily " elif is_test: start_type = "Test " elif requested_collections: start_type = "Collections " 
@@ -111,9 +115,19 @@ def start(config_path, is_test, daily, requested_collections, requested_librarie logger.critical(e) logger.info("") util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}") + logger.addHandler(file_handler) def update_libraries(config, is_test, requested_collections, resume_from): for library in config.libraries: + os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True) + col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log") + should_roll_over = os.path.isfile(col_file_logger) + library_handler = logging.handlers.RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8") + util.apply_formatter(library_handler) + if should_roll_over: + library_handler.doRollover() + logger.addHandler(library_handler) + os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) logger.info("") util.separator(f"{library.name} Library") @@ -121,12 +135,12 @@ def update_libraries(config, is_test, requested_collections, resume_from): util.separator(f"Mapping {library.name} Library") logger.info("") movie_map, show_map = map_guids(config, library) - if not is_test and not resume_from and library.mass_update: + if not is_test and not resume_from and not collection_only and library.mass_update: mass_metadata(config, library, movie_map, show_map) for metadata in library.metadata_files: logger.info("") util.separator(f"Running Metadata File\n{metadata.path}") - if not is_test and not resume_from: + if not is_test and not resume_from and not collection_only: try: metadata.update_metadata(config.TMDb, is_test) except Failed as e: @@ -137,34 +151,43 @@ def update_libraries(config, is_test, requested_collections, resume_from): if resume_from and resume_from not in collections_to_run: logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}") continue - if collections_to_run: + if 
collections_to_run and not library_only: + logger.removeHandler(library_handler) resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map) + logger.addHandler(library_handler) - if library.show_unmanaged is True and not is_test and not requested_collections: - logger.info("") - util.separator(f"Unmanaged Collections in {library.name} Library") - logger.info("") - unmanaged_count = 0 - collections_in_plex = [str(plex_col) for plex_col in library.collections] + if not is_test and not requested_collections: + unmanaged_collections = [] for col in library.get_all_collections(): - if col.title not in collections_in_plex: + if col.title not in library.collections: + unmanaged_collections.append(col) + + if library.show_unmanaged and not library_only: + logger.info("") + util.separator(f"Unmanaged Collections in {library.name} Library") + logger.info("") + for col in unmanaged_collections: logger.info(col.title) - unmanaged_count += 1 - logger.info("{} Unmanaged Collections".format(unmanaged_count)) + logger.info(f"{len(unmanaged_collections)} Unmanaged Collections") + + if library.assets_for_all and not collection_only: + logger.info("") + util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library") + logger.info("") + for col in unmanaged_collections: + library.update_item_from_assets(col, collection_mode=True) + for item in library.get_all(): + library.update_item_from_assets(item) + + logger.removeHandler(library_handler) - if library.assets_for_all is True and not is_test and not requested_collections: - logger.info("") - util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library") - logger.info("") - for item in library.get_all(): - library.update_item_from_assets(item) has_run_again = False for library in config.libraries: if library.run_again: has_run_again = True break - if has_run_again: + if has_run_again and 
not library_only: logger.info("") util.separator("Run Again") logger.info("") @@ -176,6 +199,11 @@ def update_libraries(config, is_test, requested_collections, resume_from): util.print_end(length) for library in config.libraries: if library.run_again: + col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log") + library_handler = logging.handlers.RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8") + util.apply_formatter(library_handler) + logger.addHandler(library_handler) + library_handler.addFilter(fmt_filter) os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) logger.info("") util.separator(f"{library.name} Library Run Again") @@ -190,6 +218,7 @@ def update_libraries(config, is_test, requested_collections, resume_from): except Failed as e: util.print_stacktrace() util.print_multiline(e, error=True) + logger.removeHandler(library_handler) used_url = [] for library in config.libraries: @@ -210,22 +239,18 @@ def map_guids(config, library): items = library.Plex.all() for i, item in enumerate(items, 1): length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}") - try: - id_type, main_id = config.Convert.get_id(item, library, length) - except BadRequest: - util.print_stacktrace() - util.print_end(length, f"Mapping Error: | {item.guid} for {item.title} not found") - continue - if not isinstance(main_id, list): - main_id = [main_id] - if id_type == "movie": - for m in main_id: - if m in movie_map: movie_map[m].append(item.ratingKey) - else: movie_map[m] = [item.ratingKey] - elif id_type == "show": - for m in main_id: - if m in show_map: show_map[m].append(item.ratingKey) - else: show_map[m] = [item.ratingKey] + id_type, main_id = config.Convert.get_id(item, library, length) + if main_id: + if not isinstance(main_id, list): + main_id = [main_id] + if id_type == "movie": + for m in main_id: + if m in movie_map: movie_map[m].append(item.ratingKey) + else: movie_map[m] = 
[item.ratingKey] + elif id_type == "show": + for m in main_id: + if m in show_map: show_map[m].append(item.ratingKey) + else: show_map[m] = [item.ratingKey] util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}") return movie_map, show_map @@ -234,6 +259,8 @@ def mass_metadata(config, library, movie_map, show_map): logger.info("") util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}") logger.info("") + radarr_adds = [] + sonarr_adds = [] items = library.Plex.all() for i, item in enumerate(items, 1): length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}") @@ -257,14 +284,16 @@ def mass_metadata(config, library, movie_map, show_map): if item.ratingKey in rating_keys: tvdb_id = tvdb break - if tmdb_id: - imdb_id = config.Convert.tmdb_to_imdb(tmdb_id) - elif tvdb_id: - tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id) - imdb_id = config.Convert.tvdb_to_imdb(tvdb_id) + + if library.Radarr and library.radarr_add_all and tmdb_id: + radarr_adds.append(tmdb_id) + if library.Sonarr and library.sonarr_add_all and tvdb_id: + sonarr_adds.append(tvdb_id) tmdb_item = None if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb": + if tvdb_id and not tmdb_id: + tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id) if tmdb_id: try: tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id) @@ -276,6 +305,10 @@ def mass_metadata(config, library, movie_map, show_map): omdb_item = None if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]: if config.OMDb.limit is False: + if tmdb_id and not imdb_id: + imdb_id = config.Convert.tmdb_to_imdb(tmdb_id) + elif tvdb_id and not imdb_id: + imdb_id = config.Convert.tvdb_to_imdb(tvdb_id) if imdb_id: try: omdb_item = 
config.OMDb.get_omdb(imdb_id) @@ -327,7 +360,21 @@ def mass_metadata(config, library, movie_map, show_map): except Failed: pass + if library.Radarr and library.radarr_add_all: + try: + library.Radarr.add_tmdb(radarr_adds) + except Failed as e: + logger.error(e) + + if library.Sonarr and library.sonarr_add_all: + try: + library.Sonarr.add_tvdb(sonarr_adds) + except Failed as e: + logger.error(e) + + def run_collection(config, library, metadata, requested_collections, is_test, resume_from, movie_map, show_map): + logger.info("") for mapping_name, collection_attrs in requested_collections.items(): if is_test and ("test" not in collection_attrs or collection_attrs["test"] is not True): no_template_test = True @@ -343,15 +390,29 @@ def run_collection(config, library, metadata, requested_collections, is_test, re no_template_test = False if no_template_test: continue - try: - if resume_from and resume_from != mapping_name: - continue - elif resume_from == mapping_name: - resume_from = None - logger.info("") - util.separator(f"Resuming Collections") + if resume_from and resume_from != mapping_name: + continue + elif resume_from == mapping_name: + resume_from = None logger.info("") + util.separator(f"Resuming Collections") + + if "name_mapping" in collection_attrs and collection_attrs["name_mapping"]: + collection_log_name = util.validate_filename(collection_attrs["name_mapping"]) + else: + collection_log_name = util.validate_filename(mapping_name) + collection_log_folder = os.path.join(default_dir, "logs", library.mapping_name, "collections", collection_log_name) + os.makedirs(collection_log_folder, exist_ok=True) + col_file_logger = os.path.join(collection_log_folder, f"collection.log") + should_roll_over = os.path.isfile(col_file_logger) + collection_handler = logging.handlers.RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8") + util.apply_formatter(collection_handler) + if should_roll_over: + collection_handler.doRollover() + 
logger.addHandler(collection_handler) + + try: util.separator(f"{mapping_name} Collection") logger.info("") @@ -360,26 +421,27 @@ def run_collection(config, library, metadata, requested_collections, is_test, re if len(builder.schedule) > 0: util.print_multiline(builder.schedule, info=True) - logger.info("") - logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}") - - if len(builder.filters) > 0: + if not builder.smart_url: logger.info("") - for filter_key, filter_value in builder.filters: - logger.info(f"Collection Filter {filter_key}: {filter_value}") + logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}") + + if len(builder.filters) > 0: + logger.info("") + for filter_key, filter_value in builder.filters: + logger.info(f"Collection Filter {filter_key}: {filter_value}") - if not builder.smart_url: builder.collect_rating_keys(movie_map, show_map) logger.info("") - if len(builder.rating_keys) > 0: + if len(builder.rating_keys) > 0 and builder.build_collection: builder.add_to_collection(movie_map) if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0: builder.run_missing() - if builder.sync and len(builder.rating_keys) > 0: + if builder.sync and len(builder.rating_keys) > 0 and builder.build_collection: builder.sync_collection() logger.info("") - builder.update_details() + if builder.build_collection: + builder.update_details() if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0): library.run_again.append(builder) @@ -390,6 +452,8 @@ def run_collection(config, library, metadata, requested_collections, is_test, re except Exception as e: util.print_stacktrace() logger.error(f"Unknown Error: {e}") + logger.info("") + logger.removeHandler(collection_handler) return resume_from try: diff --git a/requirements.txt b/requirements.txt index b53e2dfa..0fcb4c69 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,3 +9,4 @@ requests>=2.4.2 ruamel.yaml schedule retrying +pathvalidate