From 070c75fb374ca59deb81a00eb8283560ab592e1d Mon Sep 17 00:00:00 2001
From: meisnate12
Date: Fri, 15 Dec 2023 16:14:26 -0500
Subject: [PATCH] [58] small fixes

---
 VERSION            |  2 +-
 modules/config.py  |  8 ++++++--
 modules/imdb.py    | 15 +++++++++++----
 modules/library.py |  3 +++
 modules/meta.py    | 19 ++++++++++++++++++-
 5 files changed, 39 insertions(+), 8 deletions(-)

diff --git a/VERSION b/VERSION
index ada3a80f..9625b6da 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.19.1-develop57
+1.19.1-develop58
diff --git a/modules/config.py b/modules/config.py
index 93f8ca06..f652c1cf 100644
--- a/modules/config.py
+++ b/modules/config.py
@@ -200,10 +200,14 @@ class ConfigFile:
                 if not self.data["libraries"][library]:
                     continue
                 if "metadata_path" in self.data["libraries"][library]:
+                    logger.warning("Config Warning: metadata_path has been deprecated and split into collection_files and metadata_files, Please visit the wiki to learn more about this transition.")
                     path_dict = self.data["libraries"][library].pop("metadata_path")
-                    self.data["libraries"][library]["collection_files"] = path_dict
-                    self.data["libraries"][library]["metadata_files"] = path_dict
+                    if "collection_files" not in self.data["libraries"][library]:
+                        self.data["libraries"][library]["collection_files"] = path_dict
+                    if "metadata_files" not in self.data["libraries"][library]:
+                        self.data["libraries"][library]["metadata_files"] = path_dict
                 if "overlay_path" in self.data["libraries"][library]:
+                    logger.warning("Config Warning: overlay_path has been deprecated in favor of overlay_files, Please visit the wiki to learn more about this transition.")
                     self.data["libraries"][library]["overlay_files"] = self.data["libraries"][library].pop("overlay_path")
                 if "radarr_add_all" in self.data["libraries"][library]:
                     self.data["libraries"][library]["radarr_add_all_existing"] = self.data["libraries"][library].pop("radarr_add_all")
diff --git a/modules/imdb.py b/modules/imdb.py
index c4dae2ad..17bb8d7b 100644
--- a/modules/imdb.py
+++ b/modules/imdb.py
@@ -146,12 +146,17 @@ class IMDb:
         return valid_users
 
     def get_event_years(self, event_id):
-        return self._request(f"{base_url}/event/{event_id}", xpath="//div[@class='event-history-widget']//a/text()")
+        final = []
+        for event_link in self._request(f"{base_url}/event/{event_id}", xpath="//div[@class='event-history-widget']//a/@href"):
+            parts = event_link.split("/")
+            final.append(f"{parts[3]}{f'-{parts[4]}' if parts[4] != '1' else ''}")
+        return final
 
     def get_award_names(self, event_id, event_year):
         award_names = []
         category_names = []
-        for text in self._request(f"{base_url}/event/{event_id}/{event_year}", xpath="//div[@class='article']/script/text()")[0].split("\n"):
+        event_slug = f"{event_year}/1" if "-" not in event_year else event_year.replace("-", "/")
+        for text in self._request(f"{base_url}/event/{event_id}/{event_slug}/?ref_=ev_eh", xpath="//div[@class='article']/script/text()")[0].split("\n"):
             if text.strip().startswith("IMDbReactWidgets.NomineesWidget.push"):
                 jsonline = text.strip()
                 obj = json.loads(jsonline[jsonline.find("{"):-3])
@@ -420,7 +425,8 @@ class IMDb:
 
     def _award(self, data):
         final_list = []
-        for text in self._request(f"{base_url}/event/{data['event_id']}/{data['event_year']}", xpath="//div[@class='article']/script/text()")[0].split("\n"):
+        event_slug = f"{data['event_year']}/1" if "-" not in data["event_year"] else data["event_year"].replace("-", "/")
+        for text in self._request(f"{base_url}/event/{data['event_id']}/{event_slug}/?ref_=ev_eh", xpath="//div[@class='article']/script/text()")[0].split("\n"):
             if text.strip().startswith("IMDbReactWidgets.NomineesWidget.push"):
                 jsonline = text.strip()
                 obj = json.loads(jsonline[jsonline.find('{'):-3])
@@ -516,7 +522,8 @@ class IMDb:
             logger.info(f"Processing IMDb Watchlist: {data}")
             return [(_i, "imdb") for _i in self._watchlist(data, language)]
         elif method == "imdb_award":
-            logger.info(f"Processing IMDb Award: {base_url}/{data['event_id']}/{data['event_year']}")
+            event_slug = f"{data['event_year']}/1" if "-" not in data["event_year"] else data["event_year"].replace("-", "/")
+            logger.info(f"Processing IMDb Award: {base_url}/{data['event_id']}/{event_slug}/?ref_=ev_eh")
             for k in ["award_filter", "category_filter", "winning"]:
                 logger.info(f" {k}: {data[k]}")
             return [(_i, "imdb") for _i in self._award(data)]
diff --git a/modules/library.py b/modules/library.py
index f3341054..316112bc 100644
--- a/modules/library.py
+++ b/modules/library.py
@@ -178,6 +178,9 @@ class Library(ABC):
             except Failed as e:
                 logger.error(e)
                 logger.info("Overlay File Failed To Load")
+            except NotScheduled as e:
+                logger.info("")
+                logger.separator(f"Skipping {e} Overlay File")
         if not operations_only and not overlays_only and not collection_only:
             for file_type, images_file, temp_vars, asset_directory in self.scanned_image_files:
                 try:
diff --git a/modules/meta.py b/modules/meta.py
index 84661b85..c51bd77f 100644
--- a/modules/meta.py
+++ b/modules/meta.py
@@ -9,7 +9,7 @@ logger = util.logger
 all_auto = ["genre", "number", "custom"]
 ms_auto = [
     "actor", "year", "content_rating", "original_language", "tmdb_popular_people", "trakt_user_lists", "studio",
-    "trakt_liked_lists", "trakt_people_list", "subtitle_language", "audio_language", "resolution", "decade"
+    "trakt_liked_lists", "trakt_people_list", "subtitle_language", "audio_language", "resolution", "decade", "imdb_award"
 ]
 auto = {
     "Movie": ["tmdb_collection", "edition", "country", "director", "producer", "writer"] + all_auto + ms_auto,
@@ -978,6 +978,23 @@ class MetadataFile(DataFile):
                             all_keys[role["name"]] = role["name"]
                             person_count += 1
                 default_template = {"plex_search": {"any": {auto_type: "<<value>>"}}}
+            elif auto_type == "imdb_award":
+                if "data" not in methods:
+                    raise Failed(f"Config Error: {map_name} data attribute not found")
+                elif "data" in self.temp_vars:
+                    dynamic_data = util.parse("Config", "data", self.temp_vars["data"], datatype="dict")
+                else:
+                    dynamic_data = util.parse("Config", "data", dynamic, parent=map_name, methods=methods, datatype="dict")
+                lower_methods = {am.lower(): am for am in dynamic_data}
+                person_depth = util.parse("Config", "event_id", dynamic_data, parent=f"{map_name} data",
+                                          methods=lower_methods, datatype="int", default=3, minimum=1)
+                person_minimum = util.parse("Config", "minimum", dynamic_data, parent=f"{map_name} data",
+                                            methods=lower_methods, datatype="int", default=3,
+                                            minimum=1) if "minimum" in lower_methods else None
+                person_limit = util.parse("Config", "limit", dynamic_data, parent=f"{map_name} data",
+                                          methods=lower_methods, datatype="int", default=25,
+                                          minimum=1) if "limit" in lower_methods else None
+
             elif auto_type == "number":
                 if "data" not in methods:
                     raise Failed(f"Config Error: {map_name} data attribute not found")
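Note for reviewers (illustration only, not part of the commit): the config.py hunk changes the metadata_path migration so that an explicit collection_files or metadata_files entry is never overwritten by the deprecated key. Below is a minimal standalone sketch of that behavior, assuming the usual list-of-file-mappings form of these keys and using hypothetical paths; dict.setdefault stands in for the patch's explicit "not in" checks.

    def migrate_library_config(library):
        # library is one entry from data["libraries"] after the YAML load.
        if "metadata_path" in library:
            path_dict = library.pop("metadata_path")
            # Seed both new keys, but keep any value the user already set.
            library.setdefault("collection_files", path_dict)
            library.setdefault("metadata_files", path_dict)
        if "overlay_path" in library:
            library["overlay_files"] = library.pop("overlay_path")
        return library

    old_style = {"metadata_path": [{"file": "config/Movies.yml"}], "collection_files": [{"file": "config/Collections.yml"}]}
    migrated = migrate_library_config(old_style)
    assert migrated["collection_files"] == [{"file": "config/Collections.yml"}]  # explicit key wins
    assert migrated["metadata_files"] == [{"file": "config/Movies.yml"}]         # deprecated key fills the gap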
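A second note, again illustration only: the imdb.py hunks switch get_event_years() from scraping link text to parsing hrefs, so an event year is now returned as "YYYY" or "YYYY-N" when an event ran more than once that year, and the award lookups rebuild the "YYYY/N" URL slug from that value. A standalone sketch of the two transforms, assuming hrefs of the form /event/<event_id>/<year>/<occurrence>/ as the split("/") indices in the patch imply; the event id in the examples is just a placeholder.

    def year_from_href(href):
        # "/event/ev0000003/2020/2/" -> parts[3] = "2020", parts[4] = "2"
        parts = href.split("/")
        return f"{parts[3]}{f'-{parts[4]}' if parts[4] != '1' else ''}"

    def event_slug(event_year):
        # "2020" -> "2020/1", "2020-2" -> "2020/2"
        return f"{event_year}/1" if "-" not in event_year else event_year.replace("-", "/")

    assert year_from_href("/event/ev0000003/2020/1/") == "2020"
    assert year_from_href("/event/ev0000003/2020/2/") == "2020-2"
    assert event_slug("2020") == "2020/1"
    assert event_slug("2020-2") == "2020/2"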