From bcaf88ae770fc7e1f22e7d5cdde2020d89b02cb4 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Fri, 19 Nov 2021 14:28:11 -0500 Subject: [PATCH 01/27] webhook response fix --- modules/webhooks.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/modules/webhooks.py b/modules/webhooks.py index a615a5c4..d0ef40b6 100644 --- a/modules/webhooks.py +++ b/modules/webhooks.py @@ -1,4 +1,5 @@ import logging +from json import JSONDecodeError from modules.util import Failed @@ -25,11 +26,15 @@ class Webhooks: response = self.config.get(url, json=json, params=params) else: response = self.config.post(webhook, json=json) - response_json = response.json() - if self.config.trace_mode: - logger.debug(f"Response: {response_json}") - if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"): - raise Failed(f"({response.status_code} [{response.reason}]) {response_json}") + try: + response_json = response.json() + if self.config.trace_mode: + logger.debug(f"Response: {response_json}") + if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"): + raise Failed(f"({response.status_code} [{response.reason}]) {response_json}") + except JSONDecodeError: + if response.status_code >= 400: + raise Failed(f"({response.status_code} [{response.reason}])") def start_time_hooks(self, start_time): if self.run_start_webhooks: From 19658eccab9ec1c614759149a6fde7f307b8f251 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Sat, 20 Nov 2021 17:55:06 -0500 Subject: [PATCH 02/27] #441 added list_minimum for tautulli builders --- modules/builder.py | 3 ++- modules/tautulli.py | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 03d64e95..7a347f1e 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -951,7 +951,8 @@ class CollectionBuilder: "list_type": "popular" if method_name == "tautulli_popular" else "watched", "list_days": util.parse("list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name), "list_size": util.parse("list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name), - "list_buffer": util.parse("list_buffer", dict_data, datatype="int", methods=dict_methods, default=20, parent=method_name) + "list_buffer": util.parse("list_buffer", dict_data, datatype="int", methods=dict_methods, default=20, parent=method_name), + "list_minimum": util.parse("list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) })) def _tmdb(self, method_name, method_data): diff --git a/modules/tautulli.py b/modules/tautulli.py index 82669065..3238f161 100644 --- a/modules/tautulli.py +++ b/modules/tautulli.py @@ -28,6 +28,7 @@ class Tautulli: logger.info(f"Processing Tautulli Most {params['list_type'].capitalize()}: {params['list_size']} {'Movies' if library.is_movie else 'Shows'}") response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={params['list_days']}&stats_count={query_size}") stat_id = f"{'popular' if params['list_type'] == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}" + stat_type = "total_plays" if params['list_type'] == 'popular' else "users_watched" items = None for entry in response["response"]["data"]: @@ -39,9 +40,10 @@ class Tautulli: section_id = self._section_id(library.name) rating_keys = [] - count = 0 for item in items: - if item["section_id"] == section_id and count < 
int(params['list_size']): + if item["section_id"] == section_id and len(rating_keys) < int(params['list_size']): + if item[stat_type] < params['list_minimum']: + continue try: plex_item = library.fetchItem(int(item["rating_key"])) if not isinstance(plex_item, (Movie, Show)): @@ -53,8 +55,6 @@ class Tautulli: rating_keys.append(new_item[0].ratingKey) else: logger.error(f"Plex Error: Item {item} not found") - continue - count += 1 logger.debug("") logger.debug(f"{len(rating_keys)} Keys Found: {rating_keys}") return rating_keys From b415c3a830ab035229c44026967d814e6dd615d5 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Sat, 20 Nov 2021 18:09:45 -0500 Subject: [PATCH 03/27] omdb apikey check fix --- modules/omdb.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/omdb.py b/modules/omdb.py index fa388d96..e8d95a75 100644 --- a/modules/omdb.py +++ b/modules/omdb.py @@ -40,11 +40,11 @@ class OMDb: self.config = config self.apikey = params["apikey"] self.limit = False - self.get_omdb("tt0080684") + self.get_omdb("tt0080684", ignore_cache=True) - def get_omdb(self, imdb_id): + def get_omdb(self, imdb_id, ignore_cache=False): expired = None - if self.config.Cache: + if self.config.Cache and not ignore_cache: omdb_dict, expired = self.config.Cache.query_omdb(imdb_id) if omdb_dict and expired is False: return OMDbObj(imdb_id, omdb_dict) @@ -53,7 +53,7 @@ class OMDb: response = self.config.get(base_url, params={"i": imdb_id, "apikey": self.apikey}) if response.status_code < 400: omdb = OMDbObj(imdb_id, response.json()) - if self.config.Cache: + if self.config.Cache and not ignore_cache: self.config.Cache.update_omdb(expired, omdb) return omdb else: From efa89f95bfad71d4c412bd8f672f7bcacab61074 Mon Sep 17 00:00:00 2001 From: James Hu Date: Sat, 20 Nov 2021 15:50:17 -0800 Subject: [PATCH 04/27] Add item_lock_art, item_lock_poster item details --- modules/builder.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 03d64e95..40219d09 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -90,7 +90,8 @@ notification_details = ["collection_creation_webhooks", "collection_addition_web details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \ poster_details + background_details + summary_details + string_details -item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_refresh"] + list(plex.item_advance_keys.keys()) +item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_lock_art", "item_lock_poster", "item_refresh"] + \ + list(plex.item_advance_keys.keys()) radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"] sonarr_details = [ "sonarr_add", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series", @@ -741,7 +742,7 @@ class CollectionBuilder: raise Failed("Each Overlay can only be used once per Library") self.library.overlays.append(method_data) self.item_details[method_name] = method_data - elif method_name in ["item_assets", "revert_overlay", "item_refresh"]: + elif method_name in ["item_assets", "revert_overlay", 
"item_lock_art", "item_lock_poster", "item_refresh"]: if util.parse(method_name, method_data, datatype="bool", default=False): self.item_details[method_name] = True elif method_name in plex.item_advance_keys: @@ -1834,6 +1835,13 @@ class CollectionBuilder: if getattr(item, key) != options[method_data]: advance_edits[key] = options[method_data] self.library.edit_item(item, item.title, self.collection_level.capitalize(), advance_edits, advanced=True) + # Locking art and poster should come before refreshing since refreshing can change art and poster (i.e. if + # specified to both lock art/poster and refresh, assume that the current art/poster should be kept) + if "item_lock_art" in self.item_details: + item.lockArt() + if "item_lock_poster" in self.item_details: + item.lockPoster() + if "item_refresh" in self.item_details: item.refresh() From ceb96b3553c566ebafa15f8aa66ef872091456e8 Mon Sep 17 00:00:00 2001 From: James Hu Date: Sat, 20 Nov 2021 16:15:45 -0800 Subject: [PATCH 05/27] Rename to background --- modules/builder.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 40219d09..08880c68 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -90,7 +90,7 @@ notification_details = ["collection_creation_webhooks", "collection_addition_web details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \ poster_details + background_details + summary_details + string_details -item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_lock_art", "item_lock_poster", "item_refresh"] + \ +item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_refresh"] + \ list(plex.item_advance_keys.keys()) radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"] sonarr_details = [ @@ -742,7 +742,7 @@ class CollectionBuilder: raise Failed("Each Overlay can only be used once per Library") self.library.overlays.append(method_data) self.item_details[method_name] = method_data - elif method_name in ["item_assets", "revert_overlay", "item_lock_art", "item_lock_poster", "item_refresh"]: + elif method_name in ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_refresh"]: if util.parse(method_name, method_data, datatype="bool", default=False): self.item_details[method_name] = True elif method_name in plex.item_advance_keys: @@ -1835,9 +1835,10 @@ class CollectionBuilder: if getattr(item, key) != options[method_data]: advance_edits[key] = options[method_data] self.library.edit_item(item, item.title, self.collection_level.capitalize(), advance_edits, advanced=True) - # Locking art and poster should come before refreshing since refreshing can change art and poster (i.e. if - # specified to both lock art/poster and refresh, assume that the current art/poster should be kept) - if "item_lock_art" in self.item_details: + # Locking background and poster should come before refreshing since refreshing can change background/poster + # (i.e. 
if specified to both lock background/poster and refresh, assume that the current background/poster + # should be kept) + if "item_lock_background" in self.item_details: item.lockArt() if "item_lock_poster" in self.item_details: item.lockPoster() From c5a9a1c4e0af8bd82f996ccedd7ca65dc3abf622 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Nov 2021 04:25:48 +0000 Subject: [PATCH 06/27] Bump plexapi from 4.7.2 to 4.8.0 Bumps [plexapi](https://github.com/pkkid/python-plexapi) from 4.7.2 to 4.8.0. - [Release notes](https://github.com/pkkid/python-plexapi/releases) - [Commits](https://github.com/pkkid/python-plexapi/compare/4.7.2...4.8.0) --- updated-dependencies: - dependency-name: plexapi dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 60537946..1006ae7c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -PlexAPI==4.7.2 +PlexAPI==4.8.0 tmdbv3api==1.7.6 arrapi==1.2.3 lxml==4.6.4 From 102ee808f68585cd877e200e4a2be5575a3c2db2 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Mon, 22 Nov 2021 15:47:26 -0500 Subject: [PATCH 07/27] add_existing fix --- modules/builder.py | 3 +++ modules/config.py | 1 + modules/library.py | 1 + modules/plex.py | 11 +++++++++-- 4 files changed, 14 insertions(+), 2 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 7a347f1e..302747e9 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -609,6 +609,9 @@ class CollectionBuilder: self.sonarr_details["add"] = False self.sonarr_details["add_existing"] = False + if self.radarr_details["add_existing"] or self.sonarr_details["add_existing"]: + self.item_details["add_existing"] = True + if self.collectionless: self.details["collection_mode"] = "hide" self.sync = True diff --git a/modules/config.py b/modules/config.py index 04bb6375..dbb45a67 100644 --- a/modules/config.py +++ b/modules/config.py @@ -495,6 +495,7 @@ class Config: self.errors.append(e) util.print_stacktrace() util.print_multiline(e, error=True) + logger.info("") logger.info(f"{display_name} Library Connection Failed") continue diff --git a/modules/library.py b/modules/library.py index 0d1cceb9..9e063323 100644 --- a/modules/library.py +++ b/modules/library.py @@ -102,6 +102,7 @@ class Library(ABC): logger.info(f"Using Asset Directory: {ad}") if output: + logger.info("") logger.info(output) def upload_images(self, item, poster=None, background=None, overlay=None): diff --git a/modules/plex.py b/modules/plex.py index 0fbc69bd..b067bd1a 100644 --- a/modules/plex.py +++ b/modules/plex.py @@ -230,6 +230,7 @@ class Plex(Library): self.url = params["plex"]["url"] self.token = params["plex"]["token"] self.timeout = params["plex"]["timeout"] + logger.info("") try: self.PlexServer = PlexServer(baseurl=self.url, token=self.token, session=self.config.session, timeout=self.timeout) except Unauthorized: @@ -239,9 +240,15 @@ class Plex(Library): except (requests.exceptions.ConnectionError, ParseError): util.print_stacktrace() raise Failed("Plex Error: Plex url is invalid") - self.Plex = next((s for s in self.PlexServer.library.sections() if s.title == params["name"]), None) + self.Plex = None + library_names = [] + for s in self.PlexServer.library.sections(): + library_names.append(s.title) + if s.title == params["name"]: + self.Plex = s + break if not self.Plex: - raise 
Failed(f"Plex Error: Plex Library {params['name']} not found") + raise Failed(f"Plex Error: Plex Library {params['name']} not found. Options: {library_names}") if self.Plex.type in ["movie", "show"]: self.type = self.Plex.type.capitalize() else: From 3dd80833ecbb8b6442fbedd460dc20e4e816f74b Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Mon, 22 Nov 2021 16:17:13 -0500 Subject: [PATCH 08/27] webhook better error response --- VERSION | 2 +- modules/notifiarr.py | 1 - modules/webhooks.py | 2 ++ 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index 16ee7b3a..b80328c2 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.12.2-develop1115 \ No newline at end of file +1.13.0-develop1122 \ No newline at end of file diff --git a/modules/notifiarr.py b/modules/notifiarr.py index 1c0e8e40..f3f72d2f 100644 --- a/modules/notifiarr.py +++ b/modules/notifiarr.py @@ -28,4 +28,3 @@ class Notifiarr: logger.debug(url.replace(self.apikey, "APIKEY")) params = {"event": "pmm" if self.test else "collections"} return url, params - diff --git a/modules/webhooks.py b/modules/webhooks.py index d0ef40b6..cbe3f47e 100644 --- a/modules/webhooks.py +++ b/modules/webhooks.py @@ -30,6 +30,8 @@ class Webhooks: response_json = response.json() if self.config.trace_mode: logger.debug(f"Response: {response_json}") + if "result" in response_json and response_json["result"] == "error" and "details" in response_json and "response" in response_json["details"]: + raise Failed(f"Notifiarr Error: {response_json['details']['response']}") if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"): raise Failed(f"({response.status_code} [{response.reason}]) {response_json}") except JSONDecodeError: From e6e9dfbaa01fab318b420362fccd38301c87f1fd Mon Sep 17 00:00:00 2001 From: James Hu Date: Tue, 23 Nov 2021 11:45:41 -0800 Subject: [PATCH 09/27] Add item_lock_title --- modules/builder.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 87b88bdd..3755f7e4 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -90,8 +90,11 @@ notification_details = ["collection_creation_webhooks", "collection_addition_web details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \ poster_details + background_details + summary_details + string_details -item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_refresh"] + \ - list(plex.item_advance_keys.keys()) +item_details = [ + "item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", + "item_lock_background", "item_lock_poster", "item_lock_title", + "item_refresh", +] + list(plex.item_advance_keys.keys()) radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"] sonarr_details = [ "sonarr_add", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series", @@ -745,7 +748,7 @@ class CollectionBuilder: raise Failed("Each Overlay can only be used once per Library") self.library.overlays.append(method_data) self.item_details[method_name] = method_data - elif method_name 
in ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_refresh"]: + elif method_name in ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh"]: if util.parse(method_name, method_data, datatype="bool", default=False): self.item_details[method_name] = True elif method_name in plex.item_advance_keys: @@ -1839,13 +1842,15 @@ class CollectionBuilder: if getattr(item, key) != options[method_data]: advance_edits[key] = options[method_data] self.library.edit_item(item, item.title, self.collection_level.capitalize(), advance_edits, advanced=True) - # Locking background and poster should come before refreshing since refreshing can change background/poster - # (i.e. if specified to both lock background/poster and refresh, assume that the current background/poster - # should be kept) + + # Locking should come before refreshing since refreshing can change metadata (i.e. if specified to both lock + # background/poster and also refreshing, assume that the current background/poster should be kept) if "item_lock_background" in self.item_details: item.lockArt() if "item_lock_poster" in self.item_details: item.lockPoster() + if "item_lock_title" in self.item_details: + item.edit(**{"title.locked": 1}) if "item_refresh" in self.item_details: item.refresh() From 9547ad759a272d03bfc6dd253daec5f6d3a2dd2f Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Wed, 24 Nov 2021 08:22:17 -0500 Subject: [PATCH 10/27] use plex path when adding existing to radarr/sonarr --- modules/builder.py | 16 +++++++++------- modules/config.py | 18 +++++++++++++----- modules/radarr.py | 5 ++++- modules/sonarr.py | 5 ++++- plex_meta_manager.py | 10 ++++++---- requirements.txt | 2 +- 6 files changed, 37 insertions(+), 19 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 3755f7e4..b6a911fa 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -90,11 +90,8 @@ notification_details = ["collection_creation_webhooks", "collection_addition_web details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \ poster_details + background_details + summary_details + string_details -item_details = [ - "item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", - "item_lock_background", "item_lock_poster", "item_lock_title", - "item_refresh", -] + list(plex.item_advance_keys.keys()) +item_bool_details = ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh"] +item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay"] + item_bool_details + list(plex.item_advance_keys.keys()) radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"] sonarr_details = [ "sonarr_add", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series", @@ -748,7 +745,7 @@ class CollectionBuilder: raise Failed("Each Overlay can only be used once per Library") self.library.overlays.append(method_data) self.item_details[method_name] = method_data - elif method_name in ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh"]: + elif method_name in 
item_bool_details: if util.parse(method_name, method_data, datatype="bool", default=False): self.item_details[method_name] = True elif method_name in plex.item_advance_keys: @@ -1210,7 +1207,7 @@ class CollectionBuilder: rating_keys = [rating_keys] total = len(rating_keys) max_length = len(str(total)) - if self.filters and self.details["show_filtered"] is True: + if (self.filters or self.tmdb_filters) and self.details["show_filtered"] is True: logger.info("") logger.info("Filtering Builder:") for i, key in enumerate(rating_keys, 1): @@ -1821,7 +1818,9 @@ class CollectionBuilder: sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None tmdb_ids = [] + tmdb_paths = [] tvdb_ids = [] + tvdb_paths = [] for item in self.items: if int(item.ratingKey) in rating_keys and not revert: rating_keys.remove(int(item.ratingKey)) @@ -1831,10 +1830,13 @@ class CollectionBuilder: except Failed as e: logger.error(e) self.library.edit_tags("label", item, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags) + path = os.path.dirname(str(item.locations[0])) if self.library.is_movie else str(item.locations[0]) if item.ratingKey in self.library.movie_rating_key_map: tmdb_ids.append(self.library.movie_rating_key_map[item.ratingKey]) + tmdb_paths.append((self.library.movie_rating_key_map[item.ratingKey], f"{path.replace(self.library.Radarr.plex_path, self.library.Radarr.radarr_path)}/")) if item.ratingKey in self.library.show_rating_key_map: tvdb_ids.append(self.library.show_rating_key_map[item.ratingKey]) + tvdb_paths.append((self.library.show_rating_key_map[item.ratingKey], f"{path.replace(self.library.Sonarr.plex_path, self.library.Sonarr.sonarr_path)}/")) advance_edits = {} for method_name, method_data in self.item_details.items(): if method_name in plex.item_advance_keys: diff --git a/modules/config.py b/modules/config.py index dbb45a67..5ab48ecf 100644 --- a/modules/config.py +++ b/modules/config.py @@ -346,7 +346,9 @@ class Config: "availability": check_for_attribute(self.data, "availability", parent="radarr", test_list=radarr.availability_descriptions, default="announced"), "quality_profile": check_for_attribute(self.data, "quality_profile", parent="radarr", default_is_none=True), "tag": check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True), - "search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False) + "search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False), + "radarr_path": check_for_attribute(self.data, "radarr_path", parent="radarr", default_is_none=True), + "plex_path": check_for_attribute(self.data, "plex_path", parent="radarr", default_is_none=True) } self.general["sonarr"] = { "url": check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True), @@ -361,7 +363,9 @@ class Config: "season_folder": check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True), "tag": check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True), "search": check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False), - "cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False) + "cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False), + "sonarr_path": check_for_attribute(self.data, "sonarr_path", 
parent="sonarr", default_is_none=True), + "plex_path": check_for_attribute(self.data, "plex_path", parent="sonarr", default_is_none=True) } self.general["tautulli"] = { "url": check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True), @@ -514,9 +518,11 @@ class Config: "root_folder_path": check_for_attribute(lib, "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False), "monitor": check_for_attribute(lib, "monitor", parent="radarr", var_type="bool", default=self.general["radarr"]["monitor"], save=False), "availability": check_for_attribute(lib, "availability", parent="radarr", test_list=radarr.availability_descriptions, default=self.general["radarr"]["availability"], save=False), - "quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr",default=self.general["radarr"]["quality_profile"], req_default=True, save=False), + "quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr", default=self.general["radarr"]["quality_profile"], req_default=True, save=False), "tag": check_for_attribute(lib, "tag", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False), - "search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False) + "search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False), + "radarr_path": check_for_attribute(lib, "radarr_path", parent="radarr", default=self.general["radarr"]["radarr_path"], default_is_none=True, save=False), + "plex_path": check_for_attribute(lib, "plex_path", parent="radarr", default=self.general["radarr"]["plex_path"], default_is_none=True, save=False) }) except Failed as e: self.errors.append(e) @@ -545,7 +551,9 @@ class Config: "season_folder": check_for_attribute(lib, "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False), "tag": check_for_attribute(lib, "tag", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False), "search": check_for_attribute(lib, "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False), - "cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False) + "cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False), + "sonarr_path": check_for_attribute(lib, "sonarr_path", parent="sonarr", default=self.general["sonarr"]["sonarr_path"], default_is_none=True, save=False), + "plex_path": check_for_attribute(lib, "plex_path", parent="sonarr", default=self.general["sonarr"]["plex_path"], default_is_none=True, save=False) }) except Failed as e: self.errors.append(e) diff --git a/modules/radarr.py b/modules/radarr.py index c69139e5..6921662e 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -28,12 +28,15 @@ class Radarr: self.quality_profile = params["quality_profile"] self.tag = params["tag"] self.search = params["search"] + self.radarr_path = "" if params["radarr_path"] and params["plex_path"] else params["radarr_path"] + self.plex_path = "" if params["radarr_path"] and params["plex_path"] else params["plex_path"] def add_tmdb(self, tmdb_ids, **options): logger.info("") 
util.separator("Adding to Radarr", space=False, border=False) logger.debug("") - logger.debug(f"TMDb IDs: {tmdb_ids}") + for tmdb_id in tmdb_ids: + logger.debug(tmdb_id) folder = options["folder"] if "folder" in options else self.root_folder_path monitor = options["monitor"] if "monitor" in options else self.monitor availability = availability_translation[options["availability"] if "availability" in options else self.availability] diff --git a/modules/sonarr.py b/modules/sonarr.py index 533ac127..4b77f11f 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -50,12 +50,15 @@ class Sonarr: self.tag = params["tag"] self.search = params["search"] self.cutoff_search = params["cutoff_search"] + self.sonarr_path = "" if params["sonarr_path"] and params["plex_path"] else params["sonarr_path"] + self.plex_path = "" if params["sonarr_path"] and params["plex_path"] else params["plex_path"] def add_tvdb(self, tvdb_ids, **options): logger.info("") util.separator("Adding to Sonarr", space=False, border=False) logger.debug("") - logger.debug(f"TVDb IDs: {tvdb_ids}") + for tvdb_id in tvdb_ids: + logger.debug(tvdb_id) folder = options["folder"] if "folder" in options else self.root_folder_path monitor = monitor_translation[options["monitor"] if "monitor" in options else self.monitor] quality_profile = options["quality"] if "quality" in options else self.quality_profile diff --git a/plex_meta_manager.py b/plex_meta_manager.py index 1934408c..2c94e13e 100644 --- a/plex_meta_manager.py +++ b/plex_meta_manager.py @@ -333,10 +333,11 @@ def library_operations(config, library, items=None): except Failed: pass + path = os.path.dirname(str(item.locations[0])) if library.is_movie else str(item.locations[0]) if library.Radarr and library.radarr_add_all and tmdb_id: - radarr_adds.append(tmdb_id) + radarr_adds.append((tmdb_id, f"{path.replace(library.Radarr.plex_path, library.Radarr.radarr_path)}/")) if library.Sonarr and library.sonarr_add_all and tvdb_id: - sonarr_adds.append(tvdb_id) + sonarr_adds.append((tvdb_id, f"{path.replace(library.Sonarr.plex_path, library.Sonarr.sonarr_path)}/")) tmdb_item = None if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb": @@ -427,7 +428,6 @@ def library_operations(config, library, items=None): except Failed: pass - if library.Radarr and library.radarr_add_all: try: library.Radarr.add_tmdb(radarr_adds) @@ -552,10 +552,12 @@ def run_collection(config, library, metadata, requested_collections): logger.info("") logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}") - if len(builder.filters) > 0: + if builder.filters or builder.tmdb_filters: logger.info("") for filter_key, filter_value in builder.filters: logger.info(f"Collection Filter {filter_key}: {filter_value}") + for filter_key, filter_value in builder.tmdb_filters: + logger.info(f"Collection Filter {filter_key}: {filter_value}") builder.find_rating_keys() diff --git a/requirements.txt b/requirements.txt index 1006ae7c..099a534e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ PlexAPI==4.8.0 tmdbv3api==1.7.6 -arrapi==1.2.3 +arrapi==1.2.6 lxml==4.6.4 requests==2.26.0 ruamel.yaml==0.17.17 From cc6d0ff58a097f32917f54bed1962a60a1dec98c Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Wed, 24 Nov 2021 10:25:10 -0500 Subject: [PATCH 11/27] updated arrapi --- VERSION | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index b80328c2..4cb62320 100644 --- a/VERSION 
+++ b/VERSION @@ -1 +1 @@ -1.13.0-develop1122 \ No newline at end of file +1.13.0-develop1124 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 099a534e..61a6d685 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ PlexAPI==4.8.0 tmdbv3api==1.7.6 -arrapi==1.2.6 +arrapi==1.2.7 lxml==4.6.4 requests==2.26.0 ruamel.yaml==0.17.17 From 574d55f6373764a7b8472dfde4d9504a82898c13 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Wed, 24 Nov 2021 11:47:21 -0500 Subject: [PATCH 12/27] radarr and sonarr now show what theyre doing --- modules/radarr.py | 38 +++++++++++++++++++++++++++++++------- modules/sonarr.py | 34 +++++++++++++++++++++++++++------- 2 files changed, 58 insertions(+), 14 deletions(-) diff --git a/modules/radarr.py b/modules/radarr.py index 6921662e..5f74a8c0 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -2,7 +2,7 @@ import logging from modules import util from modules.util import Failed from arrapi import RadarrAPI -from arrapi.exceptions import ArrException, Invalid +from arrapi.exceptions import ArrException, Invalid, NotFound logger = logging.getLogger("Plex Meta Manager") @@ -28,8 +28,12 @@ class Radarr: self.quality_profile = params["quality_profile"] self.tag = params["tag"] self.search = params["search"] - self.radarr_path = "" if params["radarr_path"] and params["plex_path"] else params["radarr_path"] - self.plex_path = "" if params["radarr_path"] and params["plex_path"] else params["plex_path"] + self.radarr_path = params["radarr_path"] if params["radarr_path"] and params["plex_path"] else "" + self.plex_path = params["plex_path"] if params["radarr_path"] and params["plex_path"] else "" + logger.debug(params["radarr_path"]) + logger.debug(self.radarr_path) + logger.debug(params["plex_path"]) + logger.debug(self.plex_path) def add_tmdb(self, tmdb_ids, **options): logger.info("") @@ -43,10 +47,30 @@ class Radarr: quality_profile = options["quality"] if "quality" in options else self.quality_profile tags = options["tag"] if "tag" in options else self.tag search = options["search"] if "search" in options else self.search - try: - added, exists, invalid = self.api.add_multiple_movies(tmdb_ids, folder, quality_profile, monitor, search, availability, tags, per_request=100) - except Invalid as e: - raise Failed(f"Radarr Error: {e}") + + added = [] + exists = [] + invalid = [] + movies = [] + for i, item in enumerate(tmdb_ids, 1): + path = item[1] if isinstance(item, tuple) else None + tmdb_id = item[0] if isinstance(item, tuple) else item + util.print_return(f"Loading TMDb ID: {tmdb_id} {i}/{len(tmdb_ids)}") + try: + movie = self.api.get_movie(tmdb_id=tmdb_id) + movies.append((movie, path) if path else movie) + except NotFound: + invalid.append(item) + if len(movies) == 100 or len(tmdb_ids) == i: + try: + _a, _e, _i = self.api.add_multiple_movies(movies, folder, quality_profile, monitor, search, + availability, tags, per_request=100) + added.extend(_a) + exists.extend(_e) + invalid.extend(_i) + movies = [] + except Invalid as e: + raise Failed(f"Radarr Error: {e}") if len(added) > 0: logger.info("") diff --git a/modules/sonarr.py b/modules/sonarr.py index 4b77f11f..2ba7fe04 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -2,7 +2,7 @@ import logging from modules import util from modules.util import Failed from arrapi import SonarrAPI -from arrapi.exceptions import ArrException, Invalid +from arrapi.exceptions import ArrException, Invalid, NotFound logger = logging.getLogger("Plex Meta Manager") @@ -50,8 +50,8 @@ 
class Sonarr: self.tag = params["tag"] self.search = params["search"] self.cutoff_search = params["cutoff_search"] - self.sonarr_path = "" if params["sonarr_path"] and params["plex_path"] else params["sonarr_path"] - self.plex_path = "" if params["sonarr_path"] and params["plex_path"] else params["plex_path"] + self.sonarr_path = params["sonarr_path"] if params["sonarr_path"] and params["plex_path"] else "" + self.plex_path = params["plex_path"] if params["sonarr_path"] and params["plex_path"] else "" def add_tvdb(self, tvdb_ids, **options): logger.info("") @@ -69,10 +69,30 @@ class Sonarr: tags = options["tag"] if "tag" in options else self.tag search = options["search"] if "search" in options else self.search cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search - try: - added, exists, invalid = self.api.add_multiple_series(tvdb_ids, folder, quality_profile, language_profile, monitor, season, search, cutoff_search, series, tags, per_request=100) - except Invalid as e: - raise Failed(f"Sonarr Error: {e}") + + added = [] + exists = [] + invalid = [] + shows = [] + for i, item in enumerate(tvdb_ids, 1): + path = item[1] if isinstance(item, tuple) else None + tvdb_id = item[0] if isinstance(item, tuple) else item + util.print_return(f"Loading TVDb ID: {tvdb_id} {i}/{len(tvdb_ids)}") + try: + show = self.api.get_series(tvdb_id=tvdb_id) + shows.append((show, path) if path else show) + except NotFound: + invalid.append(item) + if len(shows) == 100 or len(tvdb_ids) == i: + try: + _a, _e, _i = self.api.add_multiple_series(shows, folder, quality_profile, language_profile, monitor, + season, search, cutoff_search, series, tags, per_request=100) + added.extend(_a) + exists.extend(_e) + invalid.extend(_i) + shows = [] + except Invalid as e: + raise Failed(f"Sonarr Error: {e}") if len(added) > 0: logger.info("") From 4e320c4903d3656403144ccc278ed7d41709481e Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Wed, 24 Nov 2021 11:48:32 -0500 Subject: [PATCH 13/27] removed debugs --- modules/radarr.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/modules/radarr.py b/modules/radarr.py index 5f74a8c0..01793318 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -30,10 +30,6 @@ class Radarr: self.search = params["search"] self.radarr_path = params["radarr_path"] if params["radarr_path"] and params["plex_path"] else "" self.plex_path = params["plex_path"] if params["radarr_path"] and params["plex_path"] else "" - logger.debug(params["radarr_path"]) - logger.debug(self.radarr_path) - logger.debug(params["plex_path"]) - logger.debug(self.plex_path) def add_tmdb(self, tmdb_ids, **options): logger.info("") From d443ce712503a822d24b4591ef5e6f1013d7edd3 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Thu, 25 Nov 2021 17:50:42 -0500 Subject: [PATCH 14/27] fix for asset_directory --- modules/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/config.py b/modules/config.py index 5ab48ecf..f58adca7 100644 --- a/modules/config.py +++ b/modules/config.py @@ -180,7 +180,7 @@ class Config: self.general = { "cache": check_for_attribute(self.data, "cache", parent="settings", var_type="bool", default=True), "cache_expiration": check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60), - "asset_directory": check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")]), + "asset_directory": check_for_attribute(self.data, 
"asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")], default_is_none=True), "asset_folders": check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True), "assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False, save=False, do_print=False), "sync_mode": check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes), From 62532535c9d6d6b2d686716614c36e05c78efc0e Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Fri, 26 Nov 2021 03:24:36 -0500 Subject: [PATCH 15/27] #430 add new tmdb_discover options --- modules/builder.py | 18 ++++++++++++++++-- modules/convert.py | 6 +++--- modules/radarr.py | 2 +- modules/sonarr.py | 2 +- modules/tmdb.py | 10 ++++++---- 5 files changed, 27 insertions(+), 11 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index b6a911fa..c49b60a8 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -978,10 +978,10 @@ class CollectionBuilder: new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, parent=method_name, regex=regex) elif discover_attr == "sort_by" and self.library.is_movie: options = tmdb.discover_movie_sort if self.library.is_movie else tmdb.discover_tv_sort - new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, parent=method_name, options=options) + new_dictionary[discover_final] = util.parse(discover_attr, discover_data, parent=method_name, options=options) elif discover_attr == "certification_country": if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data: - new_dictionary[discover_attr] = discover_data + new_dictionary[discover_final] = discover_data else: raise Failed(f"Collection Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte") elif discover_attr == "certification": @@ -989,10 +989,24 @@ class CollectionBuilder: new_dictionary[discover_final] = discover_data else: raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with certification_country") + elif discover_attr == "watch_region": + if "with_watch_providers" in dict_data: + new_dictionary[discover_final] = discover_data + else: + raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with with_watch_providers") + elif discover_attr == "with_watch_monetization_types": + if "watch_region" in dict_data: + new_dictionary[discover_final] = util.parse(discover_attr, discover_data, parent=method_name, options=tmdb.discover_monetization_types) + else: + raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with watch_region") elif discover_attr in ["include_adult", "include_null_first_air_dates", "screened_theatrically"]: new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="bool", parent=method_name) elif discover_final in tmdb.discover_dates: new_dictionary[discover_final] = util.validate_date(discover_data, f"{method_name} {discover_final} attribute", return_as="%m/%d/%Y") + elif discover_attr == "with_status": + new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) + elif discover_attr == "with_type": + new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, 
maximum=6) elif discover_attr in ["primary_release_year", "year", "first_air_date_year"]: new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1) elif discover_attr in ["vote_count", "vote_average", "with_runtime"]: diff --git a/modules/convert.py b/modules/convert.py index 58fbdcb2..5a417a5b 100644 --- a/modules/convert.py +++ b/modules/convert.py @@ -291,13 +291,13 @@ class Convert: logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}")) self.config.Cache.update_guid_map(item.guid, cache_ids, imdb_in, expired, guid_type) - if tmdb_id and library.is_movie: + if (tmdb_id or imdb_id) and library.is_movie: update_cache(tmdb_id, "TMDb", imdb_id, "movie") return "movie", tmdb_id, imdb_id - elif tvdb_id and library.is_show: + elif (tvdb_id or imdb_id) and library.is_show: update_cache(tvdb_id, "TVDb", imdb_id, "show") return "show", tvdb_id, imdb_id - elif anidb_id and tmdb_id and library.is_show: + elif anidb_id and (tmdb_id or imdb_id) and library.is_show: update_cache(tmdb_id, "TMDb", imdb_id, "show_movie") return "movie", tmdb_id, imdb_id else: diff --git a/modules/radarr.py b/modules/radarr.py index 01793318..08f087c7 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -51,7 +51,7 @@ class Radarr: for i, item in enumerate(tmdb_ids, 1): path = item[1] if isinstance(item, tuple) else None tmdb_id = item[0] if isinstance(item, tuple) else item - util.print_return(f"Loading TMDb ID: {tmdb_id} {i}/{len(tmdb_ids)}") + util.print_return(f"Loading TMDb ID {i}/{len(tmdb_ids)} ({tmdb_id})") try: movie = self.api.get_movie(tmdb_id=tmdb_id) movies.append((movie, path) if path else movie) diff --git a/modules/sonarr.py b/modules/sonarr.py index 2ba7fe04..487a17ba 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -77,7 +77,7 @@ class Sonarr: for i, item in enumerate(tvdb_ids, 1): path = item[1] if isinstance(item, tuple) else None tvdb_id = item[0] if isinstance(item, tuple) else item - util.print_return(f"Loading TVDb ID: {tvdb_id} {i}/{len(tvdb_ids)}") + util.print_return(f"Loading TVDb ID {i}/{len(tvdb_ids)} ({tvdb_id})") try: show = self.api.get_series(tvdb_id=tvdb_id) shows.append((show, path) if path else show) diff --git a/modules/tmdb.py b/modules/tmdb.py index b5c3bf4c..96e32493 100644 --- a/modules/tmdb.py +++ b/modules/tmdb.py @@ -27,8 +27,9 @@ discover_all = [ "year", "primary_release_year", "primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte", "vote_count.gte", "vote_count.lte", "vote_average.gte", "vote_average.lte", "with_runtime.gte", "with_runtime.lte", - "with_companies", "with_genres", "without_genres", "with_keywords", "without_keywords", "include_adult", - "timezone", "screened_theatrically", "include_null_first_air_dates", "limit", + "with_companies", "without_companies ", "with_genres", "without_genres", "with_keywords", "without_keywords", + "with_watch_providers", "without_watch_providers", "watch_region", "with_watch_monetization_types", "with_status", + "include_adult", "timezone", "screened_theatrically", "include_null_first_air_dates", "limit", "with_type", "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte", "first_air_date_year", "with_networks" ] discover_movie_only = [ @@ -36,8 +37,8 @@ discover_movie_only = [ "year", "primary_release_year", "primary_release_date", "release_date", "include_adult" ] discover_tv_only = [ - "timezone", "screened_theatrically", 
"include_null_first_air_dates", - "air_date", "first_air_date", "first_air_date_year", "with_networks", + "timezone", "screened_theatrically", "include_null_first_air_dates", "air_date", + "first_air_date", "first_air_date_year", "with_networks", "with_status", "with_type", ] discover_dates = [ "primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte", @@ -49,6 +50,7 @@ discover_movie_sort = [ "vote_average.asc", "vote_average.desc", "vote_count.asc", "vote_count.desc" ] discover_tv_sort = ["vote_average.desc", "vote_average.asc", "first_air_date.desc", "first_air_date.asc", "popularity.desc", "popularity.asc"] +discover_monetization_types = ["flatrate", "free", "ads", "rent", "buy"] class TMDb: def __init__(self, config, params): From d223da35cffc3f68a5868aa6480381c75310d9fb Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Fri, 26 Nov 2021 19:30:41 -0500 Subject: [PATCH 16/27] added flixpatrol builders --- modules/builder.py | 42 ++++++++++- modules/cache.py | 14 ++++ modules/config.py | 4 +- modules/flixpatrol.py | 162 ++++++++++++++++++++++++++++++++++++++++++ modules/util.py | 21 ++++++ 5 files changed, 239 insertions(+), 4 deletions(-) create mode 100644 modules/flixpatrol.py diff --git a/modules/builder.py b/modules/builder.py index c49b60a8..7b887c82 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1,6 +1,6 @@ import logging, os, re from datetime import datetime, timedelta -from modules import anidb, anilist, icheckmovies, imdb, letterboxd, mal, plex, radarr, sonarr, stevenlu, tautulli, tmdb, trakt, tvdb, util +from modules import anidb, anilist, flixpatrol, icheckmovies, imdb, letterboxd, mal, plex, radarr, sonarr, stevenlu, tautulli, tmdb, trakt, tvdb, util from modules.util import Failed, ImageData, NotScheduled from PIL import Image from plexapi.exceptions import BadRequest, NotFound @@ -63,8 +63,9 @@ filter_translation = { "writer": "writers" } modifier_alias = {".greater": ".gt", ".less": ".lt"} -all_builders = anidb.builders + anilist.builders + icheckmovies.builders + imdb.builders + letterboxd.builders + \ - mal.builders + plex.builders + stevenlu.builders + tautulli.builders + tmdb.builders + trakt.builders + tvdb.builders +all_builders = anidb.builders + anilist.builders + flixpatrol.builders + icheckmovies.builders + imdb.builders + \ + letterboxd.builders + mal.builders + plex.builders + stevenlu.builders + tautulli.builders + \ + tmdb.builders + trakt.builders + tvdb.builders show_only_builders = ["tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_show", "tvdb_show_details", "collection_level"] movie_only_builders = [ "letterboxd_list", "letterboxd_list_details", "icheckmovies_list", "icheckmovies_list_details", "stevenlu_popular", @@ -570,6 +571,7 @@ class CollectionBuilder: elif method_name in sonarr_details: self._sonarr(method_name, method_data) elif method_name in anidb.builders: self._anidb(method_name, method_data) elif method_name in anilist.builders: self._anilist(method_name, method_data) + elif method_name in flixpatrol.builders: self._flixpatrol(method_name, method_data) elif method_name in icheckmovies.builders: self._icheckmovies(method_name, method_data) elif method_name in letterboxd.builders: self._letterboxd(method_name, method_data) elif method_name in imdb.builders: self._imdb(method_name, method_data) @@ -861,6 +863,38 @@ class CollectionBuilder: new_dictionary["limit"] = util.parse("limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name) 
self.builders.append((method_name, new_dictionary)) + def _flixpatrol(self, method_name, method_data): + if method_name.startswith("flixpatrol_url"): + flixpatrol_lists = self.config.FlixPatrol.validate_flixpatrol_lists(method_data, self.language, self.library.is_movie) + for flixpatrol_list in flixpatrol_lists: + self.builders.append(("flixpatrol_url", flixpatrol_list)) + elif method_name in flixpatrol.builders: + for dict_data, dict_methods in util.parse(method_name, method_data, datatype="dictlist"): + if method_name == "flixpatrol_demographics": + data = { + "generation": util.parse("generation", dict_data, methods=dict_methods, parent=method_name, default="all", options=flixpatrol.generations), + "gender": util.parse("gender", dict_data, methods=dict_methods, parent=method_name, default="all", options=flixpatrol.gender), + "location": util.parse("location", dict_data, methods=dict_methods, parent=method_name, default="world", options=flixpatrol.demo_locations), + "limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10) + } + elif method_name == "flixpatrol_popular": + data = { + "source": util.parse("source", dict_data, methods=dict_methods, parent=method_name, options=flixpatrol.popular), + "time_window": util.parse("time_window", dict_data, methods=dict_methods, parent=method_name, default="today"), + "limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10) + } + elif method_name == "flixpatrol_top": + data = { + "platform": util.parse("platform", dict_data, methods=dict_methods, parent=method_name, options=flixpatrol.platforms), + "location": util.parse("location", dict_data, methods=dict_methods, parent=method_name, default="world", options=flixpatrol.locations), + "time_window": util.parse("time_window", dict_data, methods=dict_methods, parent=method_name, default="today"), + "limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10) + } + else: + continue + if self.config.FlixPatrol.validate_flixpatrol_dict(method_name, data, self.language, self.library.is_movie): + self.builders.append((method_name, data)) + def _icheckmovies(self, method_name, method_data): if method_name.startswith("icheckmovies_list"): icheckmovies_lists = self.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.language) @@ -1133,6 +1167,8 @@ class CollectionBuilder: ids = self.config.TVDb.get_tvdb_ids(method, value) elif "imdb" in method: ids = self.config.IMDb.get_imdb_ids(method, value, self.language) + elif "flixpatrol" in method: + ids = self.config.FlixPatrol.get_flixpatrol_ids(method, value, self.language, self.library.is_movie) elif "icheckmovies" in method: ids = self.config.ICheckMovies.get_icheckmovies_ids(method, value, self.language) elif "letterboxd" in method: diff --git a/modules/cache.py b/modules/cache.py index 25e3f193..260f9d7b 100644 --- a/modules/cache.py +++ b/modules/cache.py @@ -60,6 +60,14 @@ class Cache: tmdb_id TEXT, expiration_date TEXT)""" ) + cursor.execute( + """CREATE TABLE IF NOT EXISTS flixpatrol_map ( + key INTEGER PRIMARY KEY, + flixpatrol_id TEXT UNIQUE, + tmdb_id TEXT, + media_type TEXT, + expiration_date TEXT)""" + ) cursor.execute( """CREATE TABLE IF NOT EXISTS omdb_data ( key INTEGER PRIMARY KEY, @@ -161,6 +169,12 @@ class Cache: def update_letterboxd_map(self, expired, letterboxd_id, tmdb_id): self._update_map("letterboxd_map", "letterboxd_id", letterboxd_id, "tmdb_id", tmdb_id, expired) + def 
query_flixpatrol_map(self, flixpatrol_id, media_type): + return self._query_map("flixpatrol_map", flixpatrol_id, "flixpatrol_id", "tmdb_id", media_type=media_type) + + def update_flixpatrol_map(self, expired, flixpatrol_id, tmdb_id, media_type): + self._update_map("flixpatrol_map", "flixpatrol_id", flixpatrol_id, "tmdb_id", tmdb_id, expired, media_type=media_type) + def _query_map(self, map_name, _id, from_id, to_id, media_type=None, return_type=False): id_to_return = None expired = None diff --git a/modules/config.py b/modules/config.py index f58adca7..627e04b2 100644 --- a/modules/config.py +++ b/modules/config.py @@ -6,6 +6,7 @@ from modules.anidb import AniDB from modules.anilist import AniList from modules.cache import Cache from modules.convert import Convert +from modules.flixpatrol import FlixPatrol from modules.icheckmovies import ICheckMovies from modules.imdb import IMDb from modules.letterboxd import Letterboxd @@ -320,8 +321,9 @@ class Config: self.IMDb = IMDb(self) self.Convert = Convert(self) self.AniList = AniList(self) - self.Letterboxd = Letterboxd(self) + self.FlixPatrol = FlixPatrol(self) self.ICheckMovies = ICheckMovies(self) + self.Letterboxd = Letterboxd(self) self.StevenLu = StevenLu(self) util.separator() diff --git a/modules/flixpatrol.py b/modules/flixpatrol.py new file mode 100644 index 00000000..c44ec492 --- /dev/null +++ b/modules/flixpatrol.py @@ -0,0 +1,162 @@ +import logging +from datetime import datetime, timedelta +from modules import util +from modules.util import Failed + +logger = logging.getLogger("Plex Meta Manager") + +builders = ["flixpatrol_url", "flixpatrol_demographics", "flixpatrol_popular", "flixpatrol_top"] +generations = ["all", "boomers", "x", "y", "z"] +generations_translation = {"all": "all-generations", "boomers": "baby-boomers", "x": "generation-x", "y": "generation-y", "z": "generation-z"} +generations_pretty = {"all": "All generations", "boomers": "Baby Boomers", "x": "Generation X", "y": "Generation Y (Millenials)", "z": "Generation Z"} +gender = ["all", "men", "women"] +demo_locations = ["world", "brazil", "canada", "france", "germany", "india", "mexico", "united_kingdom", "united_states"] +locations = [ + "albania", "argentina", "armenia", "australia", "austria", "azerbaijan", "bahamas", "bahrain", "bangladesh", + "belarus", "belgium", "belize", "benin", "bolivia", "bosnia_and_herzegovina", "botswana", "brazil", "bulgaria", + "burkina_faso", "cambodia", "canada", "chile", "colombia", "costa_rica", "croatia", "cyprus", "czech_republic", + "denmark", "dominican_republic", "ecuador", "egypt", "estonia", "finland", "france", "gabon", "germany", "ghana", + "greece", "guatemala", "guinea_bissau", "haiti", "honduras", "hong_kong", "hungary", "iceland", "india", + "indonesia", "ireland", "israel", "italy", "ivory_coast", "jamaica", "japan", "jordan", "kazakhstan", "kenya", + "kuwait", "kyrgyzstan", "laos", "latvia", "lebanon", "lithuania", "luxembourg", "malaysia", "maldives", "mali", + "malta", "mexico", "moldova", "mongolia", "montenegro", "morocco", "mozambique", "namibia", "netherlands", + "new_zealand", "nicaragua", "niger", "nigeria", "north_macedonia", "norway", "oman", "pakistan", "panama", + "papua_new_guinea", "paraguay", "peru", "philippines", "poland", "portugal", "qatar", "romania", "russia", + "rwanda", "salvador", "saudi_arabia", "senegal", "serbia", "singapore", "slovakia", "slovenia", "south_africa", + "south_korea", "spain", "sri_lanka", "sweden", "switzerland", "taiwan", "tajikistan", "tanzania", "thailand", + "togo", 
"trinidad_and_tobago", "turkey", "turkmenistan", "uganda", "ukraine", "united_arab_emirates", + "united_kingdom", "united_states", "uruguay", "uzbekistan", "venezuela", "vietnam", "zambia", "zimbabwe" +] +popular = ["movie_db", "facebook", "google", "twitter", "twitter_trends", "instagram", "instagram_trends", "youtube", "imdb", "letterboxd", "rotten_tomatoes", "tmdb", "trakt"] +platforms = ["netflix", "hbo", "disney", "amazon", "itunes", "google", "paramount_plus", "hulu", "vudu", "imdb", "amazon_prime", "star_plus"] +base_url = "https://flixpatrol.com" +urls = { + "top10": f"{base_url}/top10/", + "popular_movies": f"{base_url}/popular/movies/", + "popular_shows": f"{base_url}/popular/tv-shows/", + "demographics": f"{base_url}/demographics/" +} + +class FlixPatrol: + def __init__(self, config): + self.config = config + + def _request(self, url, language, xpath): + if self.config.trace_mode: + logger.debug(f"URL: {url}") + return self.config.get_html(url, headers=util.header(language)).xpath(xpath) + + def _tmdb(self, flixpatrol_url, language): + ids = self._request(flixpatrol_url, language, "//script[@type='application/ld+json']/text()") + if len(ids) > 0 and ids[0]: + if "https://www.themoviedb.org" in ids[0]: + return util.regex_first_int(ids[0].split("https://www.themoviedb.org")[1], "TMDB Movie ID") + raise Failed(f"FlixPatrol Error: TMDb Movie ID not found in {ids[0]}") + raise Failed(f"FlixPatrol Error: TMDb Movie ID not found at {flixpatrol_url}") + + def _parse_list(self, list_url, language, is_movie): + flixpatrol_urls = [] + if list_url.startswith(urls["top10"]): + platform = list_url[len(urls["top10"]):].split("/")[0] + flixpatrol_urls = self._request( + list_url, language, + f"//div[@id='{platform}-{'1' if is_movie else '2'}']//a[@class='hover:underline']/@href" + ) + logger.info(flixpatrol_urls) + if not flixpatrol_urls: + flixpatrol_urls = self._request( + list_url, language, + f"//h3[text() = '{'TOP 10 Movies' if is_movie else 'TOP 10 TV Shows'}']/following-sibling::div//a[@class='hover:underline']/@href" + ) + logger.info(flixpatrol_urls) + elif list_url.startswith(tuple([v for k, v in urls.items()])): + flixpatrol_urls = self._request( + list_url, language, + f"//a[@class='flex group' and .//span[.='{'Movie' if is_movie else 'TV Show'}']]/@href" + ) + return flixpatrol_urls + + def validate_flixpatrol_lists(self, flixpatrol_lists, language, is_movie): + valid_lists = [] + print(flixpatrol_lists) + for flixpatrol_list in util.get_list(flixpatrol_lists, split=False): + list_url = flixpatrol_list.strip() + if not list_url.startswith(tuple([v for k, v in urls.items()])): + fails = "\n".join([f"{v} (For {k.replace('_', ' ').title()})" for k, v in urls.items()]) + raise Failed(f"FlixPatrol Error: {list_url} must begin with either:{fails}") + elif len(self._parse_list(list_url, language, is_movie)) > 0: + valid_lists.append(list_url) + else: + raise Failed(f"FlixPatrol Error: {list_url} failed to parse") + print(valid_lists) + return valid_lists + + def validate_flixpatrol_dict(self, method, data, language, is_movie): + return len(self.validate_flixpatrol_lists(self.get_url(method, data, is_movie), language, is_movie)) > 0 + + def get_url(self, method, data, is_movie): + if method == "flixpatrol_demographics": + return f"{urls['demographics']}" \ + f"{generations_translation[data['generation']]}/" \ + f"{'all-genders' if data['gender'] == 'all' else data['gender']}/" \ + f"{data['location'].replace('_', '-')}/" + elif method == "flixpatrol_popular": + return 
f"{urls['popular_movies'] if is_movie else urls['popular_shows']}" \ + f"{data['source'].replace('_', '-')}/" \ + f"{util.time_window(data['time_window'])}/" + elif method == "flixpatrol_top": + return f"{urls['top10']}" \ + f"{data['platform'].replace('_', '-')}/" \ + f"{data['location'].replace('_', '-')}/" \ + f"{util.time_window(data['time_window'])}/full/" + elif method == "flixpatrol_url": + return data + else: + raise Failed(f"FlixPatrol Error: Method {method} not supported") + + def get_flixpatrol_ids(self, method, data, language, is_movie): + if method == "flixpatrol_demographics": + logger.info("Processing FlixPatrol Demographics:") + logger.info(f"\tGeneration: {generations_pretty[data['generation']]}") + logger.info(f"\tGender: {'All genders' if data['gender'] == 'all' else data['gender'].capitalize()}") + logger.info(f"\tLocation: {data['location'].replace('_', ' ').title()}") + logger.info(f"\tLimit: {data['limit']}") + elif method == "flixpatrol_popular": + logger.info("Processing FlixPatrol Popular:") + logger.info(f"\tSource: {data['source'].replace('_', ' ').title()}") + logger.info(f"\tTime Window: {data['time_window'].replace('_', ' ').title()}") + logger.info(f"\tLimit: {data['limit']}") + elif method == "flixpatrol_top": + logger.info("Processing FlixPatrol Top:") + logger.info(f"\tPlatform: {data['platform'].replace('_', ' ').title()}") + logger.info(f"\tLocation: {data['location'].replace('_', ' ').title()}") + logger.info(f"\tTime Window: {data['time_window'].replace('_', ' ').title()}") + logger.info(f"\tLimit: {data['limit']}") + elif method == "flixpatrol_url": + logger.info(f"Processing FlixPatrol URL: {data}") + url = self.get_url(method, data, is_movie) + + items = self._parse_list(url, language, is_movie) + media_type = "movie" if is_movie else "show" + total_items = len(items) + if total_items > 0: + ids = [] + for i, item in enumerate(items, 1): + util.print_return(f"Finding TMDb ID {i}/{total_items}") + tmdb_id = None + expired = None + if self.config.Cache: + tmdb_id, expired = self.config.Cache.query_flixpatrol_map(item, media_type) + if not tmdb_id or expired is not False: + try: + tmdb_id = self._tmdb(f"{base_url}{item}", language) + except Failed as e: + logger.error(e) + continue + if self.config.Cache: + self.config.Cache.update_flixpatrol_map(expired, item, tmdb_id, media_type) + ids.append((tmdb_id, "tmdb" if is_movie else "tmdb_show")) + logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs")) + return ids + else: + raise Failed(f"FlixPatrol Error: No List Items found in {data}") diff --git a/modules/util.py b/modules/util.py index 0cf0f73a..56b2021b 100644 --- a/modules/util.py +++ b/modules/util.py @@ -251,6 +251,27 @@ def is_locked(filepath): file_object.close() return locked +def time_window(time_window): + today = datetime.now() + if time_window == "today": + return f"{today:%Y-%m-%d}" + elif time_window == "yesterday": + return f"{today - timedelta(days=1):%Y-%m-%d}" + elif time_window == "this_week": + return f"{today:%Y-0%V}" + elif time_window == "last_week": + return f"{today - timedelta(weeks=1):%Y-0%V}" + elif time_window == "this_month": + return f"{today:%Y-%m}" + elif time_window == "last_month": + return f"{today.year}-{today.month - 1 or 12}" + elif time_window == "this_year": + return f"{today.year}" + elif time_window == "last_year": + return f"{today.year - 1}" + else: + return time_window + def glob_filter(filter_in): filter_in = filter_in.translate({ord("["): "[[]", ord("]"): "[]]"}) if "[" in filter_in else 
filter_in return glob.glob(filter_in) From 7fac92652c350ae5a49c1ea455d86a243e8571a1 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Sun, 28 Nov 2021 02:14:35 -0500 Subject: [PATCH 17/27] catch webhook errors --- modules/builder.py | 20 +++++++++++++------- modules/config.py | 15 ++++++++++++--- modules/library.py | 3 --- plex_meta_manager.py | 6 +++++- 4 files changed, 30 insertions(+), 14 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 7b887c82..2d9c55d8 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -2123,13 +2123,19 @@ class CollectionBuilder: (self.details["collection_removal_webhooks"] and len(self.notification_removals) > 0) ): self.obj.reload() - self.library.Webhooks.collection_hooks( - self.details["collection_creation_webhooks"] + self.details["collection_addition_webhooks"] + self.details["collection_removal_webhooks"], - self.obj, - created=self.created, - additions=self.notification_additions, - removals=self.notification_removals - ) + try: + self.library.Webhooks.collection_hooks( + self.details["collection_creation_webhooks"] + + self.details["collection_addition_webhooks"] + + self.details["collection_removal_webhooks"], + self.obj, + created=self.created, + additions=self.notification_additions, + removals=self.notification_removals + ) + except Failed as e: + util.print_stacktrace() + logger.error(f"Webhooks Error: {e}") def run_collections_again(self): self.obj = self.library.get_collection(self.name) diff --git a/modules/config.py b/modules/config.py index 627e04b2..b04fffc5 100644 --- a/modules/config.py +++ b/modules/config.py @@ -125,7 +125,8 @@ class Config: else: endline = "" yaml.round_trip_dump(loaded_config, open(self.config_path, "w"), indent=None, block_seq_indent=2) elif data[attribute] is None: - if default_is_none is True: return None + if default_is_none and var_type == "list": return [] + elif default_is_none: return None else: message = f"{text} is blank" elif var_type == "url": if data[attribute].endswith(("\\", "/")): return data[attribute][:-1] @@ -229,7 +230,11 @@ class Config: logger.warning("notifiarr attribute not found") self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory) - self.Webhooks.start_time_hooks(self.run_start_time) + try: + self.Webhooks.start_time_hooks(self.run_start_time) + except Failed as e: + util.print_stacktrace() + logger.error(f"Webhooks Error: {e}") self.errors = [] @@ -604,7 +609,11 @@ class Config: def notify(self, text, library=None, collection=None, critical=True): for error in util.get_list(text, split=False): - self.Webhooks.error_hooks(error, library=library, collection=collection, critical=critical) + try: + self.Webhooks.error_hooks(error, library=library, collection=collection, critical=critical) + except Failed as e: + util.print_stacktrace() + logger.error(f"Webhooks Error: {e}") def get_html(self, url, headers=None, params=None): return html.fromstring(self.get(url, headers=headers, params=params).content) diff --git a/modules/library.py b/modules/library.py index 9e063323..4f3854a5 100644 --- a/modules/library.py +++ b/modules/library.py @@ -183,9 +183,6 @@ class Library(ABC): self.config.Cache.update_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds", item.art, background.compare) def notify(self, text, collection=None, critical=True): - for error in util.get_list(text, split=False): - self.Webhooks.error_hooks(error, library=self, collection=collection, critical=critical) - self.config.notify(text, library=self, 
collection=collection, critical=critical) @abstractmethod diff --git a/plex_meta_manager.py b/plex_meta_manager.py index 2c94e13e..f6e73550 100644 --- a/plex_meta_manager.py +++ b/plex_meta_manager.py @@ -154,7 +154,11 @@ def start(attrs): logger.info("") run_time = str(datetime.now() - start_time).split('.')[0] if config: - config.Webhooks.end_time_hooks(start_time, run_time, stats) + try: + config.Webhooks.end_time_hooks(start_time, run_time, stats) + except Failed as e: + util.print_stacktrace() + logger.error(f"Webhooks Error: {e}") util.separator(f"Finished {start_type}Run\nRun Time: {run_time}") logger.removeHandler(file_handler) From a4d4a09a7394138409f98a2cd5a64e847be61658 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Sun, 28 Nov 2021 03:18:12 -0500 Subject: [PATCH 18/27] #461 Fix for IMDb IDs --- modules/convert.py | 2 +- modules/util.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/convert.py b/modules/convert.py index 5a417a5b..de8b53f6 100644 --- a/modules/convert.py +++ b/modules/convert.py @@ -198,7 +198,7 @@ class Convert: check_id = guid.netloc if self.config.Cache: cache_id, imdb_check, media_type, expired = self.config.Cache.query_guid_map(item.guid) - if cache_id and not expired: + if (cache_id or imdb_check) and not expired: media_id_type = "movie" if "movie" in media_type else "show" if item_type == "hama" and check_id.startswith("anidb"): anidb_id = int(re.search("-(.*)", check_id).group(1)) diff --git a/modules/util.py b/modules/util.py index 56b2021b..fe2ee0c8 100644 --- a/modules/util.py +++ b/modules/util.py @@ -88,7 +88,9 @@ def get_list(data, lower=False, split=True, int_list=False): elif isinstance(data, dict): return [data] elif split is False: return [str(data)] elif lower is True: return [d.strip().lower() for d in str(data).split(",")] - elif int_list is True: return [int(d.strip()) for d in str(data).split(",")] + elif int_list is True: + try: return [int(d.strip()) for d in str(data).split(",")] + except ValueError: return [] else: return [d.strip() for d in str(data).split(",")] def get_int_list(data, id_type): From 98891693f5a5684c0999bfa52c4f33ec139cca62 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Sun, 28 Nov 2021 19:27:15 -0500 Subject: [PATCH 19/27] #430 fix watch_region/with_watch_providers --- modules/builder.py | 16 +++++++++------- modules/radarr.py | 4 ++-- modules/tmdb.py | 6 ++++++ 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index 2d9c55d8..bbd4961c 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1024,10 +1024,10 @@ class CollectionBuilder: else: raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with certification_country") elif discover_attr == "watch_region": - if "with_watch_providers" in dict_data: + if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data: new_dictionary[discover_final] = discover_data else: - raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with with_watch_providers") + raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types") elif discover_attr == "with_watch_monetization_types": if "watch_region" in dict_data: new_dictionary[discover_final] = util.parse(discover_attr, discover_data, parent=method_name, 
options=tmdb.discover_monetization_types) @@ -1035,17 +1035,19 @@ class CollectionBuilder: raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with watch_region") elif discover_attr in ["include_adult", "include_null_first_air_dates", "screened_theatrically"]: new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="bool", parent=method_name) - elif discover_final in tmdb.discover_dates: - new_dictionary[discover_final] = util.validate_date(discover_data, f"{method_name} {discover_final} attribute", return_as="%m/%d/%Y") + elif discover_attr == "vote_average": + new_dictionary[discover_final] = util.parse(discover_final, discover_data, datatype="float", parent=method_name) elif discover_attr == "with_status": new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5) elif discover_attr == "with_type": new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6) - elif discover_attr in ["primary_release_year", "year", "first_air_date_year"]: + elif discover_final in tmdb.discover_dates: + new_dictionary[discover_final] = util.validate_date(discover_data, f"{method_name} {discover_final} attribute", return_as="%m/%d/%Y") + elif discover_attr in tmdb.discover_years: new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1) - elif discover_attr in ["vote_count", "vote_average", "with_runtime"]: + elif discover_attr in tmdb.discover_ints: new_dictionary[discover_final] = util.parse(discover_final, discover_data, datatype="int", parent=method_name) - elif discover_final in ["with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", "with_keywords", "without_keywords", "with_original_language", "timezone"]: + elif discover_final in tmdb.discover_strings: new_dictionary[discover_final] = discover_data elif discover_attr != "limit": raise Failed(f"Collection Error: {method_name} {discover_final} attribute not supported") diff --git a/modules/radarr.py b/modules/radarr.py index 08f087c7..79b986eb 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -2,7 +2,7 @@ import logging from modules import util from modules.util import Failed from arrapi import RadarrAPI -from arrapi.exceptions import ArrException, Invalid, NotFound +from arrapi.exceptions import ArrException, Invalid logger = logging.getLogger("Plex Meta Manager") @@ -55,7 +55,7 @@ class Radarr: try: movie = self.api.get_movie(tmdb_id=tmdb_id) movies.append((movie, path) if path else movie) - except NotFound: + except ArrException: invalid.append(item) if len(movies) == 100 or len(tmdb_ids) == i: try: diff --git a/modules/tmdb.py b/modules/tmdb.py index 96e32493..19bb3c75 100644 --- a/modules/tmdb.py +++ b/modules/tmdb.py @@ -40,6 +40,12 @@ discover_tv_only = [ "timezone", "screened_theatrically", "include_null_first_air_dates", "air_date", "first_air_date", "first_air_date_year", "with_networks", "with_status", "with_type", ] +discover_strings = [ + "with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", + "with_keywords", "without_keywords", "with_original_language", "timezone", "with_watch_providers", "without_watch_providers" +] +discover_ints = ["vote_count", "with_runtime"] +discover_years = ["primary_release_year", "year", 
"first_air_date_year"] discover_dates = [ "primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte", "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte" From 28cca36aaca860c19e5b4a8b126a61f9f862d026 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Mon, 29 Nov 2021 09:11:23 -0500 Subject: [PATCH 20/27] NoneType fix --- modules/builder.py | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index bbd4961c..b8214a5e 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1869,9 +1869,7 @@ class CollectionBuilder: remove_tags = self.item_details["item_label.remove"] if "item_label.remove" in self.item_details else None sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None - tmdb_ids = [] tmdb_paths = [] - tvdb_ids = [] tvdb_paths = [] for item in self.items: if int(item.ratingKey) in rating_keys and not revert: @@ -1883,11 +1881,9 @@ class CollectionBuilder: logger.error(e) self.library.edit_tags("label", item, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags) path = os.path.dirname(str(item.locations[0])) if self.library.is_movie else str(item.locations[0]) - if item.ratingKey in self.library.movie_rating_key_map: - tmdb_ids.append(self.library.movie_rating_key_map[item.ratingKey]) + if self.library.Radarr and item.ratingKey in self.library.movie_rating_key_map: tmdb_paths.append((self.library.movie_rating_key_map[item.ratingKey], f"{path.replace(self.library.Radarr.plex_path, self.library.Radarr.radarr_path)}/")) - if item.ratingKey in self.library.show_rating_key_map: - tvdb_ids.append(self.library.show_rating_key_map[item.ratingKey]) + if self.library.Sonarr and item.ratingKey in self.library.show_rating_key_map: tvdb_paths.append((self.library.show_rating_key_map[item.ratingKey], f"{path.replace(self.library.Sonarr.plex_path, self.library.Sonarr.sonarr_path)}/")) advance_edits = {} for method_name, method_data in self.item_details.items(): @@ -1905,21 +1901,20 @@ class CollectionBuilder: item.lockPoster() if "item_lock_title" in self.item_details: item.edit(**{"title.locked": 1}) - if "item_refresh" in self.item_details: item.refresh() - if len(tmdb_ids) > 0: + if self.library.Radarr and tmdb_paths: if "item_radarr_tag" in self.item_details: - self.library.Radarr.edit_tags(tmdb_ids, self.item_details["item_radarr_tag"], self.item_details["apply_tags"]) + self.library.Radarr.edit_tags([t[0] if isinstance(t, tuple) else t for t in tmdb_paths], self.item_details["item_radarr_tag"], self.item_details["apply_tags"]) if self.radarr_details["add_existing"]: - self.library.Radarr.add_tmdb(tmdb_ids, **self.radarr_details) + self.library.Radarr.add_tmdb(tmdb_paths, **self.radarr_details) - if len(tvdb_ids) > 0: + if self.library.Sonarr and tvdb_paths: if "item_sonarr_tag" in self.item_details: - self.library.Sonarr.edit_tags(tvdb_ids, self.item_details["item_sonarr_tag"], self.item_details["apply_tags"]) + self.library.Sonarr.edit_tags([t[0] if isinstance(t, tuple) else t for t in tvdb_paths], self.item_details["item_sonarr_tag"], self.item_details["apply_tags"]) if self.sonarr_details["add_existing"]: - self.library.Sonarr.add_tvdb(tvdb_ids, **self.sonarr_details) + self.library.Sonarr.add_tvdb(tvdb_paths, **self.sonarr_details) for rating_key in rating_keys: try: From ea516aa1b0aaf4a15fae3a0f905a8b9a150bef72 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Mon, 29 Nov 2021 10:24:36 -0500 Subject: 
[PATCH 21/27] cache radarr/sonarr adds --- modules/cache.py | 40 ++++++++++++++++++++++++++++++++++++++++ modules/config.py | 6 +++--- modules/radarr.py | 12 +++++++++++- modules/sonarr.py | 14 ++++++++++++-- modules/tautulli.py | 3 ++- 5 files changed, 68 insertions(+), 7 deletions(-) diff --git a/modules/cache.py b/modules/cache.py index 260f9d7b..366308a9 100644 --- a/modules/cache.py +++ b/modules/cache.py @@ -96,6 +96,18 @@ class Cache: key INTEGER PRIMARY KEY, library TEXT UNIQUE)""" ) + cursor.execute( + """CREATE TABLE IF NOT EXISTS radarr_adds ( + key INTEGER PRIMARY KEY, + tmdb_id TEXT, + library TEXT)""" + ) + cursor.execute( + """CREATE TABLE IF NOT EXISTS sonarr_adds ( + key INTEGER PRIMARY KEY, + tvdb_id TEXT, + library TEXT)""" + ) cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='image_map'") if cursor.fetchone()[0] > 0: cursor.execute(f"SELECT DISTINCT library FROM image_map") @@ -338,3 +350,31 @@ class Cache: with closing(connection.cursor()) as cursor: cursor.execute(f"INSERT OR IGNORE INTO {table_name}(rating_key) VALUES(?)", (rating_key,)) cursor.execute(f"UPDATE {table_name} SET location = ?, compare = ?, overlay = ? WHERE rating_key = ?", (location, compare, overlay, rating_key)) + + def query_radarr_adds(self, tmdb_id, library): + return self.query_arr_adds(tmdb_id, library, "radarr", "tmdb_id") + + def query_sonarr_adds(self, tvdb_id, library): + return self.query_arr_adds(tvdb_id, library, "sonarr", "tvdb_id") + + def query_arr_adds(self, t_id, library, arr, id_type): + with sqlite3.connect(self.cache_path) as connection: + connection.row_factory = sqlite3.Row + with closing(connection.cursor()) as cursor: + cursor.execute(f"SELECT * FROM {arr}_adds WHERE {id_type} = ? AND library = ?", (t_id, library)) + row = cursor.fetchone() + if row and row[id_type]: + return int(row[id_type]) + return None + + def update_radarr_adds(self, tmdb_id, library): + return self.update_arr_adds(tmdb_id, library, "radarr", "tmdb_id") + + def update_sonarr_adds(self, tvdb_id, library): + return self.update_arr_adds(tvdb_id, library, "sonarr", "tvdb_id") + + def update_arr_adds(self, t_id, library, arr, id_type): + with sqlite3.connect(self.cache_path) as connection: + connection.row_factory = sqlite3.Row + with closing(connection.cursor()) as cursor: + cursor.execute(f"INSERT OR IGNORE INTO {arr}_adds({id_type}, library) VALUES(?)", (t_id, library)) diff --git a/modules/config.py b/modules/config.py index b04fffc5..987c2533 100644 --- a/modules/config.py +++ b/modules/config.py @@ -517,7 +517,7 @@ class Config: logger.info(f"Connecting to {display_name} library's Radarr...") logger.info("") try: - library.Radarr = Radarr(self, { + library.Radarr = Radarr(self, library, { "url": check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False), "token": check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False), "add": check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False), @@ -545,7 +545,7 @@ class Config: logger.info(f"Connecting to {display_name} library's Sonarr...") logger.info("") try: - library.Sonarr = Sonarr(self, { + library.Sonarr = Sonarr(self, library, { "url": check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False), "token": check_for_attribute(lib, "token", 
parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False), "add": check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False), @@ -576,7 +576,7 @@ class Config: logger.info(f"Connecting to {display_name} library's Tautulli...") logger.info("") try: - library.Tautulli = Tautulli(self, { + library.Tautulli = Tautulli(self, library, { "url": check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False), "apikey": check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False) }) diff --git a/modules/radarr.py b/modules/radarr.py index 79b986eb..84c71ced 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -11,8 +11,9 @@ apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"} availability_descriptions = {"announced": "For Announced", "cinemas": "For In Cinemas", "released": "For Released", "db": "For PreDB"} class Radarr: - def __init__(self, config, params): + def __init__(self, config, library, params): self.config = config + self.library = library self.url = params["url"] self.token = params["token"] try: @@ -52,6 +53,11 @@ class Radarr: path = item[1] if isinstance(item, tuple) else None tmdb_id = item[0] if isinstance(item, tuple) else item util.print_return(f"Loading TMDb ID {i}/{len(tmdb_ids)} ({tmdb_id})") + if self.config.Cache: + _id = self.config.Cache.query_radarr_adds(tmdb_id, self.library.original_mapping_name) + if _id: + exists.append(item) + continue try: movie = self.api.get_movie(tmdb_id=tmdb_id) movies.append((movie, path) if path else movie) @@ -72,12 +78,16 @@ class Radarr: logger.info("") for movie in added: logger.info(f"Added to Radarr | {movie.tmdbId:<6} | {movie.title}") + if self.config.Cache: + self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name) logger.info(f"{len(added)} Movie{'s' if len(added) > 1 else ''} added to Radarr") if len(exists) > 0: logger.info("") for movie in exists: logger.info(f"Already in Radarr | {movie.tmdbId:<6} | {movie.title}") + if self.config.Cache: + self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name) logger.info(f"{len(exists)} Movie{'s' if len(exists) > 1 else ''} already existing in Radarr") if len(invalid) > 0: diff --git a/modules/sonarr.py b/modules/sonarr.py index 487a17ba..f68ad76d 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -29,8 +29,9 @@ monitor_descriptions = { apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"} class Sonarr: - def __init__(self, config, params): + def __init__(self, config, library, params): self.config = config + self.library = library self.url = params["url"] self.token = params["token"] try: @@ -78,10 +79,15 @@ class Sonarr: path = item[1] if isinstance(item, tuple) else None tvdb_id = item[0] if isinstance(item, tuple) else item util.print_return(f"Loading TVDb ID {i}/{len(tvdb_ids)} ({tvdb_id})") + if self.config.Cache: + _id = self.config.Cache.query_sonarr_adds(tvdb_id, self.library.original_mapping_name) + if _id: + exists.append(item) + continue try: show = self.api.get_series(tvdb_id=tvdb_id) shows.append((show, path) if path else show) - except NotFound: + except ArrException: invalid.append(item) if len(shows) == 100 or len(tvdb_ids) == i: try: @@ -98,12 +104,16 @@ class Sonarr: logger.info("") for series in added: logger.info(f"Added to Sonarr | 
{series.tvdbId:<6} | {series.title}") + if self.config.Cache: + self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name) logger.info(f"{len(added)} Series added to Sonarr") if len(exists) > 0: logger.info("") for series in exists: logger.info(f"Already in Sonarr | {series.tvdbId:<6} | {series.title}") + if self.config.Cache: + self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name) logger.info(f"{len(exists)} Series already existing in Sonarr") if len(invalid) > 0: diff --git a/modules/tautulli.py b/modules/tautulli.py index 3238f161..a2c7cdf2 100644 --- a/modules/tautulli.py +++ b/modules/tautulli.py @@ -11,8 +11,9 @@ logger = logging.getLogger("Plex Meta Manager") builders = ["tautulli_popular", "tautulli_watched"] class Tautulli: - def __init__(self, config, params): + def __init__(self, config, library, params): self.config = config + self.library = library self.url = params["url"] self.apikey = params["apikey"] try: From 6fd699b58a8b6b2509a50f3f75fb7738ed5abffb Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Mon, 29 Nov 2021 11:02:16 -0500 Subject: [PATCH 22/27] #464 added include_video and with_release_type to tmdb_discover --- modules/builder.py | 2 +- modules/tmdb.py | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/modules/builder.py b/modules/builder.py index b8214a5e..0d24426b 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1033,7 +1033,7 @@ class CollectionBuilder: new_dictionary[discover_final] = util.parse(discover_attr, discover_data, parent=method_name, options=tmdb.discover_monetization_types) else: raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with watch_region") - elif discover_attr in ["include_adult", "include_null_first_air_dates", "screened_theatrically"]: + elif discover_attr in tmdb.discover_booleans: new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="bool", parent=method_name) elif discover_attr == "vote_average": new_dictionary[discover_final] = util.parse(discover_final, discover_data, datatype="float", parent=method_name) diff --git a/modules/tmdb.py b/modules/tmdb.py index 19bb3c75..6570b2c1 100644 --- a/modules/tmdb.py +++ b/modules/tmdb.py @@ -29,23 +29,24 @@ discover_all = [ "vote_average.gte", "vote_average.lte", "with_runtime.gte", "with_runtime.lte", "with_companies", "without_companies ", "with_genres", "without_genres", "with_keywords", "without_keywords", "with_watch_providers", "without_watch_providers", "watch_region", "with_watch_monetization_types", "with_status", - "include_adult", "timezone", "screened_theatrically", "include_null_first_air_dates", "limit", "with_type", - "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte", "first_air_date_year", "with_networks" + "include_adult", "include_video", "timezone", "screened_theatrically", "include_null_first_air_dates", "limit", "with_type", + "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte", "first_air_date_year", "with_networks", "with_release_type" ] discover_movie_only = [ - "region", "with_cast", "with_crew", "with_people", "certification_country", "certification", - "year", "primary_release_year", "primary_release_date", "release_date", "include_adult" + "region", "with_cast", "with_crew", "with_people", "certification_country", "certification", "include_video", + "year", "primary_release_year", "primary_release_date", "release_date", "include_adult", "with_release_type" 
] discover_tv_only = [ "timezone", "screened_theatrically", "include_null_first_air_dates", "air_date", "first_air_date", "first_air_date_year", "with_networks", "with_status", "with_type", ] discover_strings = [ - "with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", + "with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", "with_release_type", "with_keywords", "without_keywords", "with_original_language", "timezone", "with_watch_providers", "without_watch_providers" ] discover_ints = ["vote_count", "with_runtime"] discover_years = ["primary_release_year", "year", "first_air_date_year"] +discover_booleans = ["include_adult", "include_video", "include_null_first_air_dates", "screened_theatrically"] discover_dates = [ "primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte", "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte" From 7ac8f2fcb5100ff7e34ae4f46be6a57f7d3164b5 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Mon, 29 Nov 2021 14:04:43 -0500 Subject: [PATCH 23/27] fix tautulli list_minimum --- modules/radarr.py | 8 ++++++++ modules/sonarr.py | 10 +++++++++- modules/tautulli.py | 2 +- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/modules/radarr.py b/modules/radarr.py index 84c71ced..0d072c19 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -36,7 +36,15 @@ class Radarr: logger.info("") util.separator("Adding to Radarr", space=False, border=False) logger.debug("") + _ids = [] + _paths = [] for tmdb_id in tmdb_ids: + if isinstance(tmdb_id, tuple): + _paths.append(tmdb_id) + else: + _ids.append(tmdb_id) + logger.debug(f"Radarr Adds: {_ids if _ids else ''}") + for tmdb_id in _paths: logger.debug(tmdb_id) folder = options["folder"] if "folder" in options else self.root_folder_path monitor = options["monitor"] if "monitor" in options else self.monitor diff --git a/modules/sonarr.py b/modules/sonarr.py index f68ad76d..c0753e26 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -2,7 +2,7 @@ import logging from modules import util from modules.util import Failed from arrapi import SonarrAPI -from arrapi.exceptions import ArrException, Invalid, NotFound +from arrapi.exceptions import ArrException, Invalid logger = logging.getLogger("Plex Meta Manager") @@ -58,7 +58,15 @@ class Sonarr: logger.info("") util.separator("Adding to Sonarr", space=False, border=False) logger.debug("") + _ids = [] + _paths = [] for tvdb_id in tvdb_ids: + if isinstance(tvdb_id, tuple): + _paths.append(tvdb_id) + else: + _ids.append(tvdb_id) + logger.debug(f"Radarr Adds: {_ids if _ids else ''}") + for tvdb_id in _paths: logger.debug(tvdb_id) folder = options["folder"] if "folder" in options else self.root_folder_path monitor = monitor_translation[options["monitor"] if "monitor" in options else self.monitor] diff --git a/modules/tautulli.py b/modules/tautulli.py index a2c7cdf2..0079304c 100644 --- a/modules/tautulli.py +++ b/modules/tautulli.py @@ -43,7 +43,7 @@ class Tautulli: rating_keys = [] for item in items: if item["section_id"] == section_id and len(rating_keys) < int(params['list_size']): - if item[stat_type] < params['list_minimum']: + if int(item[stat_type]) < params['list_minimum']: continue try: plex_item = library.fetchItem(int(item["rating_key"])) From e2f27c14d4d4829f599259cf291a933f0a17e3d2 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Tue, 30 Nov 2021 10:18:30 -0500 Subject: [PATCH 24/27] fix cache error 
---
 modules/cache.py    | 2 +-
 modules/webhooks.py | 5 ++++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/modules/cache.py b/modules/cache.py
index 366308a9..d88d4509 100644
--- a/modules/cache.py
+++ b/modules/cache.py
@@ -377,4 +377,4 @@ class Cache:
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
-                cursor.execute(f"INSERT OR IGNORE INTO {arr}_adds({id_type}, library) VALUES(?)", (t_id, library))
+                cursor.execute(f"INSERT OR IGNORE INTO {arr}_adds({id_type}, library) VALUES(?, ?)", (t_id, library))
diff --git a/modules/webhooks.py b/modules/webhooks.py
index cbe3f47e..d7889bb2 100644
--- a/modules/webhooks.py
+++ b/modules/webhooks.py
@@ -23,7 +23,10 @@ class Webhooks:
                 logger.debug(f"Webhook: {webhook}")
             if webhook == "notifiarr":
                 url, params = self.notifiarr.get_url("notification/plex/")
-                response = self.config.get(url, json=json, params=params)
+                for x in range(6):
+                    response = self.config.get(url, json=json, params=params)
+                    if response.status_code < 500:
+                        break
             else:
                 response = self.config.post(webhook, json=json)
             try:

From bee1f03b0a6a7c762936b8d8430d3e72f2a3160f Mon Sep 17 00:00:00 2001
From: meisnate12
Date: Tue, 30 Nov 2021 11:10:47 -0500
Subject: [PATCH 25/27] tautulli fix

---
 modules/tautulli.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/tautulli.py b/modules/tautulli.py
index 0079304c..177d54b1 100644
--- a/modules/tautulli.py
+++ b/modules/tautulli.py
@@ -29,7 +29,7 @@ class Tautulli:
         logger.info(f"Processing Tautulli Most {params['list_type'].capitalize()}: {params['list_size']} {'Movies' if library.is_movie else 'Shows'}")
         response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={params['list_days']}&stats_count={query_size}")
         stat_id = f"{'popular' if params['list_type'] == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}"
-        stat_type = "total_plays" if params['list_type'] == 'popular' else "users_watched"
+        stat_type = "users_watched" if params['list_type'] == 'popular' else "total_plays"

         items = None
         for entry in response["response"]["data"]:

From 19faf9dceac39a7259d51cb3162688483eac0a1e Mon Sep 17 00:00:00 2001
From: meisnate12
Date: Tue, 30 Nov 2021 16:22:30 -0500
Subject: [PATCH 26/27] up version

---
 README.md                  | 6 ++++++
 VERSION                    | 2 +-
 config/config.yml.template | 8 ++++++--
 modules/builder.py         | 1 -
 modules/flixpatrol.py      | 18 ++++++++----------
 5 files changed, 21 insertions(+), 14 deletions(-)

diff --git a/README.md b/README.md
index ad5753b5..074d2c58 100644
--- a/README.md
+++ b/README.md
@@ -22,6 +22,12 @@ The script works with most Metadata agents including the new Plex Movie Agent, N
 3. After that you can start updating Metadata and building automatic Collections by creating a [Metadata File](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Metadata-File) for each Library you want to interact with.
 4. Explore the [Wiki](https://github.com/meisnate12/Plex-Meta-Manager/wiki) to see all the different Collection Builders that can be used to create collections.
 
+## IBRACORP Video Walkthrough
+
+[IBRACORP](https://ibracorp.io/) made a video walkthrough for installing Plex Meta Manager on Unraid. While you might not be using Unraid, the video goes over many key aspects of Plex Meta Manager and can be a great place to start learning how to use the script.
+ +[![Plex Meta Manager](https://img.youtube.com/vi/dF69MNoot3w/0.jpg)](https://www.youtube.com/watch?v=dF69MNoot3w "Plex Meta Manager") + ## Support * Before posting on GitHub about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/TsdpsFYqqm). diff --git a/VERSION b/VERSION index 4cb62320..da38e07b 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.13.0-develop1124 \ No newline at end of file +1.13.1 \ No newline at end of file diff --git a/config/config.yml.template b/config/config.yml.template index 543a0ea8..8e85950d 100644 --- a/config/config.yml.template +++ b/config/config.yml.template @@ -37,14 +37,14 @@ webhooks: # Can be individually specified collection_creation: collection_addition: collection_removal: -plex: # Can be individually specified per library as well +plex: # Can be individually specified per library as well; REQUIRED for the script to run url: http://192.168.1.12:32400 token: #################### timeout: 60 clean_bundles: false empty_trash: false optimize: false -tmdb: +tmdb: # REQUIRED for the script to run apikey: ################################ language: en tautulli: # Can be individually specified per library as well @@ -67,6 +67,8 @@ radarr: # Can be individually specified quality_profile: HD-1080p tag: search: false + radarr_path: + plex_path: sonarr: # Can be individually specified per library as well url: http://192.168.1.12:8989 token: ################################ @@ -80,6 +82,8 @@ sonarr: # Can be individually specified tag: search: false cutoff_search: false + sonarr_path: + plex_path: trakt: client_id: ################################################################ client_secret: ################################################################ diff --git a/modules/builder.py b/modules/builder.py index 0d24426b..6cc9a06d 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -1399,7 +1399,6 @@ class CollectionBuilder: results = "" display_add = "" for og_value, result in validation: - print(og_value, result) built_arg = build_url_arg(quote(str(result)) if attr in string_filters else result, arg_s=og_value) display_add += built_arg[1] results += f"{conjunction if len(results) > 0 else ''}{built_arg[0]}" diff --git a/modules/flixpatrol.py b/modules/flixpatrol.py index c44ec492..47ab95ac 100644 --- a/modules/flixpatrol.py +++ b/modules/flixpatrol.py @@ -12,14 +12,14 @@ generations_pretty = {"all": "All generations", "boomers": "Baby Boomers", "x": gender = ["all", "men", "women"] demo_locations = ["world", "brazil", "canada", "france", "germany", "india", "mexico", "united_kingdom", "united_states"] locations = [ - "albania", "argentina", "armenia", "australia", "austria", "azerbaijan", "bahamas", "bahrain", "bangladesh", - "belarus", "belgium", "belize", "benin", "bolivia", "bosnia_and_herzegovina", "botswana", "brazil", "bulgaria", - "burkina_faso", "cambodia", "canada", "chile", "colombia", "costa_rica", "croatia", "cyprus", "czech_republic", - "denmark", "dominican_republic", "ecuador", "egypt", "estonia", "finland", "france", "gabon", "germany", "ghana", - "greece", "guatemala", "guinea_bissau", "haiti", "honduras", "hong_kong", "hungary", "iceland", "india", - "indonesia", "ireland", "israel", "italy", "ivory_coast", "jamaica", "japan", "jordan", "kazakhstan", "kenya", - "kuwait", "kyrgyzstan", "laos", "latvia", "lebanon", "lithuania", "luxembourg", "malaysia", "maldives", "mali", - "malta", "mexico", "moldova", "mongolia", "montenegro", "morocco", "mozambique", "namibia", 
"netherlands", + "world", "albania", "argentina", "armenia", "australia", "austria", "azerbaijan", "bahamas", "bahrain", + "bangladesh", "belarus", "belgium", "belize", "benin", "bolivia", "bosnia_and_herzegovina", "botswana", "brazil", + "bulgaria", "burkina_faso", "cambodia", "canada", "chile", "colombia", "costa_rica", "croatia", "cyprus", + "czech_republic", "denmark", "dominican_republic", "ecuador", "egypt", "estonia", "finland", "france", "gabon", + "germany", "ghana", "greece", "guatemala", "guinea_bissau", "haiti", "honduras", "hong_kong", "hungary", "iceland", + "india", "indonesia", "ireland", "israel", "italy", "ivory_coast", "jamaica", "japan", "jordan", "kazakhstan", + "kenya", "kuwait", "kyrgyzstan", "laos", "latvia", "lebanon", "lithuania", "luxembourg", "malaysia", "maldives", + "mali", "malta", "mexico", "moldova", "mongolia", "montenegro", "morocco", "mozambique", "namibia", "netherlands", "new_zealand", "nicaragua", "niger", "nigeria", "north_macedonia", "norway", "oman", "pakistan", "panama", "papua_new_guinea", "paraguay", "peru", "philippines", "poland", "portugal", "qatar", "romania", "russia", "rwanda", "salvador", "saudi_arabia", "senegal", "serbia", "singapore", "slovakia", "slovenia", "south_africa", @@ -78,7 +78,6 @@ class FlixPatrol: def validate_flixpatrol_lists(self, flixpatrol_lists, language, is_movie): valid_lists = [] - print(flixpatrol_lists) for flixpatrol_list in util.get_list(flixpatrol_lists, split=False): list_url = flixpatrol_list.strip() if not list_url.startswith(tuple([v for k, v in urls.items()])): @@ -88,7 +87,6 @@ class FlixPatrol: valid_lists.append(list_url) else: raise Failed(f"FlixPatrol Error: {list_url} failed to parse") - print(valid_lists) return valid_lists def validate_flixpatrol_dict(self, method, data, language, is_movie): From cdb850e355d81e7c164ab0f2951ed260d693f4d2 Mon Sep 17 00:00:00 2001 From: meisnate12 Date: Tue, 30 Nov 2021 16:27:40 -0500 Subject: [PATCH 27/27] remove default generation --- modules/builder.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/builder.py b/modules/builder.py index 6cc9a06d..10cd8bb0 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -872,7 +872,7 @@ class CollectionBuilder: for dict_data, dict_methods in util.parse(method_name, method_data, datatype="dictlist"): if method_name == "flixpatrol_demographics": data = { - "generation": util.parse("generation", dict_data, methods=dict_methods, parent=method_name, default="all", options=flixpatrol.generations), + "generation": util.parse("generation", dict_data, methods=dict_methods, parent=method_name, options=flixpatrol.generations), "gender": util.parse("gender", dict_data, methods=dict_methods, parent=method_name, default="all", options=flixpatrol.gender), "location": util.parse("location", dict_data, methods=dict_methods, parent=method_name, default="world", options=flixpatrol.demo_locations), "limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10)