diff --git a/README.md b/README.md index 12ec1107..d42410db 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Plex Meta Manager -#### Version 1.8.0 +#### Version 1.9.0 The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to be able to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be continuously run using YAML configuration files to update on a schedule the metadata of the movies, shows, and collections in your libraries as well as automatically build collections based on various methods all detailed in the wiki. Some collection examples that the script can automatically build and update daily include Plex Based Searches like actor, genre, or studio collections or Collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists and various other services. diff --git a/config/config.yml.template b/config/config.yml.template index de42f393..8f291cc1 100644 --- a/config/config.yml.template +++ b/config/config.yml.template @@ -2,8 +2,21 @@ libraries: # Library mappings must have a colon (:) placed after them Movies: + metadata_path: + - file: config/Movies.yml # You have to create this file the other are online + - git: meisnate12/MovieCharts + - git: meisnate12/Studios + - git: meisnate12/IMDBGenres + - git: meisnate12/People TV Shows: + metadata_path: + - file: config/TV Shows.yml # You have to create this file the other are online + - git: meisnate12/ShowCharts + - git: meisnate12/Networks Anime: + metadata_path: + - file: config/Anime.yml # You have to create this file the other are online + - git: meisnate12/AnimeCharts settings: # Can be individually specified per library as well cache: true cache_expiration: 60 diff --git a/modules/anidb.py b/modules/anidb.py index 0c8a6f05..785ec679 100644 --- a/modules/anidb.py +++ b/modules/anidb.py @@ -17,56 +17,50 @@ class 
AniDBAPI: "relation": "/relation/graph" } - def get_AniDB_IDs(self): - return html.fromstring(requests.get("https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml").content) - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_request(self, url, language): + def _request(self, url, language): return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content) - def get_popular(self, language): - response = self.send_request(self.urls["popular"], language) + def _popular(self, language): + response = self._request(self.urls["popular"], language) return util.get_int_list(response.xpath("//td[@class='name anime']/a/@href"), "AniDB ID") - def validate_anidb_id(self, anidb_id, language): - response = self.send_request(f"{self.urls['anime']}/{anidb_id}", language) + def _relations(self, anidb_id, language): + response = self._request(f"{self.urls['anime']}/{anidb_id}{self.urls['relation']}", language) + return util.get_int_list(response.xpath("//area/@href"), "AniDB ID") + + def _validate(self, anidb_id, language): + response = self._request(f"{self.urls['anime']}/{anidb_id}", language) ids = response.xpath(f"//*[text()='a{anidb_id}']/text()") if len(ids) > 0: return util.regex_first_int(ids[0], "AniDB ID") raise Failed(f"AniDB Error: AniDB ID: {anidb_id} not found") - def get_anidb_relations(self, anidb_id, language): - response = self.send_request(f"{self.urls['anime']}/{anidb_id}{self.urls['relation']}", language) - return util.get_int_list(response.xpath("//area/@href"), "AniDB ID") - def validate_anidb_list(self, anidb_list, language): anidb_values = [] for anidb_id in anidb_list: try: - anidb_values.append(self.validate_anidb_id(anidb_id, language)) + anidb_values.append(self._validate(anidb_id, language)) except Failed as e: logger.error(e) if len(anidb_values) > 0: return anidb_values raise Failed(f"AniDB Error: No valid AniDB IDs in {anidb_list}") - def 
get_items(self, method, data, language, status_message=True): + def get_items(self, method, data, language): pretty = util.pretty_names[method] if method in util.pretty_names else method - if status_message: - logger.debug(f"Data: {data}") + logger.debug(f"Data: {data}") anidb_ids = [] if method == "anidb_popular": - if status_message: - logger.info(f"Processing {pretty}: {data} Anime") - anidb_ids.extend(self.get_popular(language)[:data]) + logger.info(f"Processing {pretty}: {data} Anime") + anidb_ids.extend(self._popular(language)[:data]) else: - if status_message: logger.info(f"Processing {pretty}: {data}") + logger.info(f"Processing {pretty}: {data}") if method == "anidb_id": anidb_ids.append(data) - elif method == "anidb_relation": anidb_ids.extend(self.get_anidb_relations(data, language)) + elif method == "anidb_relation": anidb_ids.extend(self._relations(data, language)) else: raise Failed(f"AniDB Error: Method {method} not supported") - movie_ids, show_ids = self.config.Arms.anidb_to_ids(anidb_ids, language) - if status_message: - logger.debug(f"AniDB IDs Found: {anidb_ids}") - logger.debug(f"TMDb IDs Found: {movie_ids}") - logger.debug(f"TVDb IDs Found: {show_ids}") + movie_ids, show_ids = self.config.Convert.anidb_to_ids(anidb_ids) + logger.debug(f"AniDB IDs Found: {anidb_ids}") + logger.debug(f"TMDb IDs Found: {movie_ids}") + logger.debug(f"TVDb IDs Found: {show_ids}") return movie_ids, show_ids diff --git a/modules/anilist.py b/modules/anilist.py index 24859229..3c366e93 100644 --- a/modules/anilist.py +++ b/modules/anilist.py @@ -19,6 +19,8 @@ pretty_names = { "score": "Average Score", "popular": "Popularity" } +tag_query = "query{MediaTagCollection {name}}" +genre_query = "query{GenreCollection}" class AniListAPI: def __init__(self, config): @@ -26,19 +28,12 @@ class AniListAPI: self.url = "https://graphql.anilist.co" self.tags = {} self.genres = {} - - for tag in self.send_request("query{MediaTagCollection {name}}", {})["data"]["MediaTagCollection"]: 
- self.tags[tag["name"].lower()] = tag["name"] - for genre in self.send_request("query{GenreCollection}", {})["data"]["GenreCollection"]: - self.genres[genre.lower()] = genre - - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def post(self, query, variables): - return requests.post(self.url, json={"query": query, "variables": variables}) + self.tags = {t["name"].lower(): t["name"] for t in self._request(tag_query, {})["data"]["MediaTagCollection"]} + self.genres = {g.lower(): g for g in self._request(genre_query, {})["data"]["GenreCollection"]} @retry(stop_max_attempt_number=2, retry_on_exception=util.retry_if_not_failed) - def send_request(self, query, variables): - response = self.post(query, variables) + def _request(self, query, variables): + response = requests.post(self.url, json={"query": query, "variables": variables}) json_obj = response.json() if "errors" in json_obj: if json_obj['errors'][0]['message'] == "Too Many Requests.": @@ -51,14 +46,14 @@ class AniListAPI: time.sleep(0.4) return json_obj - def anilist_id(self, anilist_id): + def _validate(self, anilist_id): query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}" - media = self.send_request(query, {"id": anilist_id})["data"]["Media"] + media = self._request(query, {"id": anilist_id})["data"]["Media"] if media["id"]: return media["id"], media["title"]["english" if media["title"]["english"] else "romaji"] raise Failed(f"AniList Error: No AniList ID found for {anilist_id}") - def get_pagenation(self, query, limit=0, variables=None): + def _pagenation(self, query, limit=0, variables=None): anilist_ids = [] count = 0 page_num = 0 @@ -68,7 +63,7 @@ class AniListAPI: while next_page: page_num += 1 variables["page"] = page_num - json_obj = self.send_request(query, variables) + json_obj = self._request(query, variables) next_page = json_obj["data"]["Page"]["pageInfo"]["hasNextPage"] for media in json_obj["data"]["Page"]["media"]: if media["id"]: @@ -80,7 +75,7 @@ class AniListAPI: 
break return anilist_ids - def top_rated(self, limit): + def _top_rated(self, limit): query = """ query ($page: Int) { Page(page: $page) { @@ -89,9 +84,9 @@ class AniListAPI: } } """ - return self.get_pagenation(query, limit=limit) + return self._pagenation(query, limit=limit) - def popular(self, limit): + def _popular(self, limit): query = """ query ($page: Int) { Page(page: $page) { @@ -100,9 +95,9 @@ class AniListAPI: } } """ - return self.get_pagenation(query, limit=limit) + return self._pagenation(query, limit=limit) - def season(self, season, year, sort, limit): + def _season(self, season, year, sort, limit): query = """ query ($page: Int, $season: MediaSeason, $year: Int, $sort: [MediaSort]) { Page(page: $page){ @@ -112,9 +107,9 @@ class AniListAPI: } """ variables = {"season": season.upper(), "year": year, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"} - return self.get_pagenation(query, limit=limit, variables=variables) + return self._pagenation(query, limit=limit, variables=variables) - def genre(self, genre, sort, limit): + def _genre(self, genre, sort, limit): query = """ query ($page: Int, $genre: String, $sort: [MediaSort]) { Page(page: $page){ @@ -124,9 +119,9 @@ class AniListAPI: } """ variables = {"genre": genre, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"} - return self.get_pagenation(query, limit=limit, variables=variables) + return self._pagenation(query, limit=limit, variables=variables) - def tag(self, tag, sort, limit): + def _tag(self, tag, sort, limit): query = """ query ($page: Int, $tag: String, $sort: [MediaSort]) { Page(page: $page){ @@ -136,9 +131,9 @@ class AniListAPI: } """ variables = {"tag": tag, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"} - return self.get_pagenation(query, limit=limit, variables=variables) + return self._pagenation(query, limit=limit, variables=variables) - def studio(self, studio_id): + def _studio(self, studio_id): query = """ query ($page: Int, $id: Int) 
{ Studio(id: $id) { @@ -156,7 +151,7 @@ class AniListAPI: name = None while next_page: page_num += 1 - json_obj = self.send_request(query, {"id": studio_id, "page": page_num}) + json_obj = self._request(query, {"id": studio_id, "page": page_num}) if not name: name = json_obj["data"]["Studio"]["name"] next_page = json_obj["data"]["Studio"]["media"]["pageInfo"]["hasNextPage"] @@ -165,7 +160,7 @@ class AniListAPI: anilist_ids.append(media["id"]) return anilist_ids, name - def relations(self, anilist_id, ignore_ids=None): + def _relations(self, anilist_id, ignore_ids=None): query = """ query ($id: Int) { Media(id: $id) { @@ -182,9 +177,9 @@ class AniListAPI: name = "" if not ignore_ids: ignore_ids = [anilist_id] - anilist_id, name = self.anilist_id(anilist_id) + anilist_id, name = self._validate(anilist_id) anilist_ids.append(anilist_id) - json_obj = self.send_request(query, {"id": anilist_id}) + json_obj = self._request(query, {"id": anilist_id}) edges = [media["node"]["id"] for media in json_obj["data"]["Media"]["relations"]["edges"] if media["relationType"] not in ["CHARACTER", "OTHER"] and media["node"]["type"] == "ANIME"] for media in json_obj["data"]["Media"]["relations"]["nodes"]: @@ -194,7 +189,7 @@ class AniListAPI: anilist_ids.append(media["id"]) for next_id in new_anilist_ids: - new_relation_ids, ignore_ids, _ = self.relations(next_id, ignore_ids=ignore_ids) + new_relation_ids, ignore_ids, _ = self._relations(next_id, ignore_ids=ignore_ids) anilist_ids.extend(new_relation_ids) return anilist_ids, ignore_ids, name @@ -215,48 +210,40 @@ class AniListAPI: if studio: query = "query ($id: Int) {Studio(id: $id) {name}}" else: query = "query ($id: Int) {Media(id: $id) {id}}" try: - self.send_request(query, {"id": anilist_id}) + self._request(query, {"id": anilist_id}) anilist_values.append(anilist_id) except Failed as e: logger.error(e) if len(anilist_values) > 0: return anilist_values raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}") - def 
get_items(self, method, data, language, status_message=True): - if status_message: - logger.debug(f"Data: {data}") + def get_items(self, method, data): + logger.debug(f"Data: {data}") pretty = util.pretty_names[method] if method in util.pretty_names else method if method == "anilist_id": - anilist_id, name = self.anilist_id(data) + anilist_id, name = self._validate(data) anilist_ids = [anilist_id] - if status_message: - logger.info(f"Processing {pretty}: ({data}) {name}") + logger.info(f"Processing {pretty}: ({data}) {name}") elif method in ["anilist_popular", "anilist_top_rated"]: - anilist_ids = self.popular(data) if method == "anilist_popular" else self.top_rated(data) - if status_message: - logger.info(f"Processing {pretty}: {data} Anime") + anilist_ids = self._popular(data) if method == "anilist_popular" else self._top_rated(data) + logger.info(f"Processing {pretty}: {data} Anime") elif method == "anilist_season": - anilist_ids = self.season(data["season"], data["year"], data["sort_by"], data["limit"]) - if status_message: - logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}") + anilist_ids = self._season(data["season"], data["year"], data["sort_by"], data["limit"]) + logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}") elif method == "anilist_genre": - anilist_ids = self.genre(data["genre"], data["sort_by"], data["limit"]) - if status_message: - logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Genre: {data['genre']} sorted by {pretty_names[data['sort_by']]}") + anilist_ids = self._genre(data["genre"], data["sort_by"], data["limit"]) + logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Genre: 
{data['genre']} sorted by {pretty_names[data['sort_by']]}") elif method == "anilist_tag": - anilist_ids = self.tag(data["tag"], data["sort_by"], data["limit"]) - if status_message: - logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag: {data['tag']} sorted by {pretty_names[data['sort_by']]}") + anilist_ids = self._tag(data["tag"], data["sort_by"], data["limit"]) + logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag: {data['tag']} sorted by {pretty_names[data['sort_by']]}") elif method in ["anilist_studio", "anilist_relations"]: - if method == "anilist_studio": anilist_ids, name = self.studio(data) - else: anilist_ids, _, name = self.relations(data) - if status_message: - logger.info(f"Processing {pretty}: ({data}) {name} ({len(anilist_ids)} Anime)") + if method == "anilist_studio": anilist_ids, name = self._studio(data) + else: anilist_ids, _, name = self._relations(data) + logger.info(f"Processing {pretty}: ({data}) {name} ({len(anilist_ids)} Anime)") else: raise Failed(f"AniList Error: Method {method} not supported") - movie_ids, show_ids = self.config.Arms.anilist_to_ids(anilist_ids, language) - if status_message: - logger.debug(f"AniList IDs Found: {anilist_ids}") - logger.debug(f"Shows Found: {show_ids}") - logger.debug(f"Movies Found: {movie_ids}") + movie_ids, show_ids = self.config.Convert.anilist_to_ids(anilist_ids) + logger.debug(f"AniList IDs Found: {anilist_ids}") + logger.debug(f"Shows Found: {show_ids}") + logger.debug(f"Movies Found: {movie_ids}") return movie_ids, show_ids diff --git a/modules/arms.py b/modules/arms.py deleted file mode 100644 index 9ad6f840..00000000 --- a/modules/arms.py +++ /dev/null @@ -1,161 +0,0 @@ -import logging, requests -from lxml import html -from modules import util -from modules.util import Failed -from retrying import retry - -logger = logging.getLogger("Plex Meta Manager") - -class ArmsAPI: - def __init__(self, config): 
- self.config = config - self.arms_url = "https://relations.yuna.moe/api/ids" - self.anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml" - self.AniDBIDs = self._get_anidb() - - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def _get_anidb(self): - return html.fromstring(requests.get(self.anidb_url).content) - - def anidb_to_tvdb(self, anidb_id): return self._anidb(anidb_id, "tvdbid") - def anidb_to_imdb(self, anidb_id): return self._anidb(anidb_id, "imdbid") - def _anidb(self, input_id, to_id): - ids = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{input_id}')]/@{to_id}") - if len(ids) > 0: - try: - if len(ids[0]) > 0: - return ids[0].split(",") if to_id == "imdbid" else int(ids[0]) - raise ValueError - except ValueError: - raise Failed(f"Arms Error: No {util.pretty_ids[to_id]} ID found for AniDB ID: {input_id}") - else: - raise Failed(f"Arms Error: AniDB ID: {input_id} not found") - - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def _request(self, ids): - return requests.post(self.arms_url, json=ids).json() - - def mal_to_anidb(self, mal_id): - anime_ids = self._arms_ids(mal_ids=mal_id) - if anime_ids[0] is None: - raise Failed(f"Arms Error: MyAnimeList ID: {mal_id} does not exist") - if anime_ids[0]["anidb"] is None: - raise Failed(f"Arms Error: No AniDB ID for MyAnimeList ID: {mal_id}") - return anime_ids[0]["anidb"] - - def anidb_to_ids(self, anidb_list, language): - show_ids = [] - movie_ids = [] - for anidb_id in anidb_list: - try: - for imdb_id in self.anidb_to_imdb(anidb_id): - tmdb_id, _ = self.imdb_to_ids(imdb_id, language) - if tmdb_id: - movie_ids.append(tmdb_id) - break - else: - raise Failed - except Failed: - try: - tvdb_id = self.anidb_to_tvdb(anidb_id) - if tvdb_id: - show_ids.append(tvdb_id) - except Failed: - logger.error(f"Arms Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}") - return movie_ids, show_ids - - def anilist_to_ids(self, anilist_ids, language): - 
anidb_ids = [] - for id_set in self._arms_ids(anilist_ids=anilist_ids): - if id_set["anidb"] is not None: - anidb_ids.append(id_set["anidb"]) - else: - logger.error(f"Arms Error: AniDB ID not found for AniList ID: {id_set['anilist']}") - return self.anidb_to_ids(anidb_ids, language) - - def myanimelist_to_ids(self, mal_ids, language): - anidb_ids = [] - for id_set in self._arms_ids(mal_ids=mal_ids): - if id_set["anidb"] is not None: - anidb_ids.append(id_set["anidb"]) - else: - logger.error(f"Arms Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}") - return self.anidb_to_ids(anidb_ids, language) - - def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None): - all_ids = [] - def collect_ids(ids, id_name): - if ids: - if isinstance(ids, list): - all_ids.extend([{id_name: a_id} for a_id in ids]) - else: - all_ids.append({id_name: ids}) - collect_ids(anilist_ids, "anilist") - collect_ids(anidb_ids, "anidb") - collect_ids(mal_ids, "myanimelist") - converted_ids = [] - if self.config.Cache: - unconverted_ids = [] - for anime_dict in all_ids: - for id_type, anime_id in anime_dict.items(): - query_ids, update = self.config.Cache.query_anime_map(anime_id, id_type) - if not update and query_ids: - converted_ids.append(query_ids) - else: - unconverted_ids.append({id_type: anime_id}) - else: - unconverted_ids = all_ids - - for anime_ids in self._request(unconverted_ids): - if anime_ids: - if self.config.Cache: - self.config.Cache.update_anime(False, anime_ids) - converted_ids.append(anime_ids) - return converted_ids - - def imdb_to_ids(self, imdb_id, language): - update_tmdb = False - update_tvdb = False - if self.config.Cache: - tmdb_id, tvdb_id = self.config.Cache.get_ids_from_imdb(imdb_id) - update_tmdb = False - if not tmdb_id: - tmdb_id, update_tmdb = self.config.Cache.get_tmdb_from_imdb(imdb_id) - if update_tmdb: - tmdb_id = None - update_tvdb = False - if not tvdb_id: - tvdb_id, update_tvdb = self.config.Cache.get_tvdb_from_imdb(imdb_id) - 
if update_tvdb: - tvdb_id = None - else: - tmdb_id = None - tvdb_id = None - from_cache = tmdb_id is not None or tvdb_id is not None - - if not tmdb_id and not tvdb_id and self.config.TMDb: - try: tmdb_id = self.config.TMDb.convert_imdb_to_tmdb(imdb_id) - except Failed: pass - if not tmdb_id and not tvdb_id and self.config.TMDb: - try: tvdb_id = self.config.TMDb.convert_imdb_to_tvdb(imdb_id) - except Failed: pass - if not tmdb_id and not tvdb_id and self.config.Trakt: - try: tmdb_id = self.config.Trakt.convert_imdb_to_tmdb(imdb_id) - except Failed: pass - if not tmdb_id and not tvdb_id and self.config.Trakt: - try: tvdb_id = self.config.Trakt.convert_imdb_to_tvdb(imdb_id) - except Failed: pass - if tmdb_id and not from_cache: - try: self.config.TMDb.get_movie(tmdb_id) - except Failed: tmdb_id = None - if tvdb_id and not from_cache: - try: self.config.TVDb.get_series(language, tvdb_id) - except Failed: tvdb_id = None - if not tmdb_id and not tvdb_id: - raise Failed(f"Arms Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}") - if self.config.Cache: - if tmdb_id and update_tmdb is not False: - self.config.Cache.update_imdb("movie", update_tmdb, imdb_id, tmdb_id) - if tvdb_id and update_tvdb is not False: - self.config.Cache.update_imdb("show", update_tvdb, imdb_id, tvdb_id) - return tmdb_id, tvdb_id diff --git a/modules/builder.py b/modules/builder.py index c7fabf6e..4e998474 100644 --- a/modules/builder.py +++ b/modules/builder.py @@ -2,16 +2,19 @@ import glob, logging, os, re from datetime import datetime, timedelta from modules import anidb, anilist, imdb, letterboxd, mal, plex, radarr, sonarr, tautulli, tmdb, trakttv, tvdb, util from modules.util import Failed -from plexapi.collection import Collections from plexapi.exceptions import BadRequest, NotFound +from plexapi.video import Movie, Show +from urllib.parse import quote logger = logging.getLogger("Plex Meta Manager") +string_filters = ["title", "episode_title", "studio"] image_file_details = 
["file_poster", "file_background", "asset_directory"] advance_new_agent = ["item_metadata_language", "item_use_original_title"] advance_show = ["item_episode_sorting", "item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"] method_alias = { "actors": "actor", "role": "actor", "roles": "actor", + "collections": "collecion", "plex_collection": "collection", "content_ratings": "content_rating", "contentRating": "content_rating", "contentRatings": "content_rating", "countries": "country", "decades": "decade", @@ -25,6 +28,21 @@ method_alias = { "writers": "writer", "years": "year" } +filter_alias = { + "actor": "actors", + "audience_rating": "audienceRating", + "collection": "collections", + "content_rating": "contentRating", + "country": "countries", + "critic_rating": "rating", + "director": "directors", + "genre": "genres", + "originally_available": "originallyAvailableAt", + "tmdb_vote_count": "vote_count", + "user_rating": "userRating", + "writer": "writers" +} +modifier_alias = {".greater": ".gt", ".less": ".lt"} all_builders = anidb.builders + anilist.builders + imdb.builders + letterboxd.builders + mal.builders + plex.builders + tautulli.builders + tmdb.builders + trakttv.builders + tvdb.builders dictionary_builders = [ "filters", @@ -82,6 +100,17 @@ numbered_builders = [ "trakt_watched", "trakt_collected" ] +smart_collection_invalid = ["collection_mode", "collection_order"] +smart_url_collection_invalid = [ + "item_label", "item_label.sync", "item_episode_sorting", "item_keep_episodes", "item_delete_episodes", + "item_season_display", "item_episode_ordering", "item_metadata_language", "item_use_original_title", + "run_again", "sync_mode", "show_filtered", "show_missing", "save_missing", "smart_label", + "radarr_add", "radarr_folder", "radarr_monitor", "radarr_availability", + "radarr_quality", "radarr_tag", "radarr_search", + "sonarr_add", "sonarr_folder", "sonarr_monitor", "sonarr_quality", "sonarr_language", + 
"sonarr_series", "sonarr_season", "sonarr_tag", "sonarr_search", "sonarr_cutoff_search", + "filters" +] all_details = [ "sort_title", "content_rating", "collection_mode", "collection_order", "summary", "tmdb_summary", "tmdb_description", "tmdb_biography", "tvdb_summary", @@ -99,6 +128,9 @@ collectionless_details = [ "name_mapping", "label", "label_sync_mode", "test" ] ignored_details = [ + "smart_filter", + "smart_label", + "smart_url", "run_again", "schedule", "sync_mode", @@ -119,6 +151,7 @@ all_filters = [ "content_rating", "content_rating.not", "country", "country.not", "director", "director.not", + "filepath", "filepath.not", "genre", "genre.not", "max_age", "originally_available.gte", "originally_available.lte", @@ -146,10 +179,20 @@ movie_only_filters = [ "writer", "writer.not" ] +def _split(text): + attribute, modifier = os.path.splitext(str(text).lower()) + attribute = method_alias[attribute] if attribute in method_alias else attribute + modifier = modifier_alias[modifier] if modifier in modifier_alias else modifier + final = f"{attribute}{modifier}" + if text != final: + logger.warning(f"Collection Warning: {text} plex search attribute will run as {final}") + return attribute, modifier, final + class CollectionBuilder: - def __init__(self, config, library, name, data): + def __init__(self, config, library, metadata, name, data): self.config = config self.library = library + self.metadata = metadata self.name = name self.data = data self.details = { @@ -164,6 +207,9 @@ class CollectionBuilder: self.missing_shows = [] self.methods = [] self.filters = [] + self.rating_keys = [] + self.run_again_movies = [] + self.run_again_shows = [] self.posters = {} self.backgrounds = {} self.summaries = {} @@ -177,29 +223,31 @@ class CollectionBuilder: methods = {m.lower(): m for m in self.data} if "template" in methods: - if not self.library.templates: + if not self.metadata.templates: raise Failed("Collection Error: No templates found") elif not 
self.data[methods["template"]]: raise Failed("Collection Error: template attribute is blank") else: - for data_template in util.get_list(self.data[methods["template"]], split=False): - if not isinstance(data_template, dict): + for variables in util.get_list(self.data[methods["template"]], split=False): + if not isinstance(variables, dict): raise Failed("Collection Error: template attribute is not a dictionary") - elif "name" not in data_template: + elif "name" not in variables: raise Failed("Collection Error: template sub-attribute name is required") - elif not data_template["name"]: + elif not variables["name"]: raise Failed("Collection Error: template sub-attribute name is blank") - elif data_template["name"] not in self.library.templates: - raise Failed(f"Collection Error: template {data_template['name']} not found") - elif not isinstance(self.library.templates[data_template["name"]], dict): - raise Failed(f"Collection Error: template {data_template['name']} is not a dictionary") + elif variables["name"] not in self.metadata.templates: + raise Failed(f"Collection Error: template {variables['name']} not found") + elif not isinstance(self.metadata.templates[variables["name"]], dict): + raise Failed(f"Collection Error: template {variables['name']} is not a dictionary") else: - for tm in data_template: - if not data_template[tm]: + for tm in variables: + if not variables[tm]: raise Failed(f"Collection Error: template sub-attribute {tm} is blank") + if "collection_name" not in variables: + variables["collection_name"] = str(self.name) - template_name = data_template["name"] - template = self.library.templates[template_name] + template_name = variables["name"] + template = self.metadata.templates[template_name] default = {} if "default" in template: @@ -229,60 +277,60 @@ class CollectionBuilder: else: raise Failed("Collection Error: template sub-attribute optional is blank") - for method_name, attr_data in template.items(): - if method_name not in self.data and 
method_name not in ["default", "optional"]: - if attr_data: - def replace_txt(txt): - txt = str(txt) - for option in optional: - if option not in data_template and f"<<{option}>>" in txt: - raise Failed("remove attribute") - for template_method in data_template: - if template_method != "name" and txt == f"<<{template_method}>>": - return data_template[template_method] - elif template_method != "name" and f"<<{template_method}>>" in txt: - txt = txt.replace(f"<<{template_method}>>", str(data_template[template_method])) - if "<>" in txt: - txt = txt.replace("<>", str(self.name)) - for dm in default: - if txt == f"<<{dm}>>": - txt = default[dm] - elif f"<<{dm}>>" in txt: - txt = txt.replace(f"<<{dm}>>", str(default[dm])) - if txt in ["true", "True"]: return True - elif txt in ["false", "False"]: return False - else: - try: return int(txt) - except (ValueError, TypeError): return txt + def check_data(_data): + if isinstance(_data, dict): + final_data = {} + for sm, sd in _data.items(): try: - if isinstance(attr_data, dict): - final_data = {} - for sm in attr_data: - if isinstance(attr_data[sm], list): - temp_list = [] - for li in attr_data[sm]: - temp_list.append(replace_txt(li)) - final_data[sm] = temp_list - else: - final_data[sm] = replace_txt(attr_data[sm]) - elif isinstance(attr_data, list): - final_data = [] - for li in attr_data: - if isinstance(li, dict): - temp_dict = {} - for sm in li: - temp_dict[sm] = replace_txt(li[sm]) - final_data.append(temp_dict) - else: - final_data.append(replace_txt(li)) - else: - final_data = replace_txt(attr_data) + final_data[sm] = check_data(sd) except Failed: continue - self.data[method_name] = final_data - methods[method_name.lower()] = method_name + elif isinstance(_data, list): + final_data = [] + for li in _data: + try: + final_data.append(check_data(li)) + except Failed: + continue + else: + txt = str(_data) + def scan_text(og_txt, var, var_value): + if og_txt == f"<<{var}>>": + return str(var_value) + elif f"<<{var}>>" in 
str(og_txt): + return str(og_txt).replace(f"<<{var}>>", str(var_value)) + else: + return og_txt + for option in optional: + if option not in variables and f"<<{option}>>" in txt: + raise Failed + for variable, variable_data in variables.items(): + if variable != "name": + txt = scan_text(txt, variable, variable_data) + for dm, dd in default.items(): + txt = scan_text(txt, dm, dd) + if txt in ["true", "True"]: + final_data = True + elif txt in ["false", "False"]: + final_data = False else: - raise Failed(f"Collection Error: template attribute {method_name} is blank") + try: + num_data = float(txt) + final_data = int(num_data) if num_data.is_integer() else num_data + except (ValueError, TypeError): + final_data = txt + return final_data + + for method_name, attr_data in template.items(): + if method_name not in self.data and method_name not in ["default", "optional"]: + if attr_data is None: + logger.error(f"Template Error: template attribute {method_name} is blank") + continue + try: + self.data[method_name] = check_data(attr_data) + methods[method_name.lower()] = method_name + except Failed: + continue skip_collection = True if "schedule" not in methods: @@ -341,8 +389,8 @@ class CollectionBuilder: logger.info(f"Scanning {self.name} Collection") - self.collectionless = "plex_collectionless" in methods self.run_again = "run_again" in methods + self.collectionless = "plex_collectionless" in methods if "tmdb_person" in methods: if self.data[methods["tmdb_person"]]: @@ -354,11 +402,211 @@ class CollectionBuilder: self.summaries["tmdb_person"] = person.biography if hasattr(person, "profile_path") and person.profile_path: self.posters["tmdb_person"] = f"{config.TMDb.image_url}{person.profile_path}" - if len(valid_names) > 0: self.details["tmdb_person"] = valid_names - else: raise Failed(f"Collection Error: No valid TMDb Person IDs in {self.data[methods['tmdb_person']]}") + if len(valid_names) > 0: + self.details["tmdb_person"] = valid_names + else: + raise 
Failed(f"Collection Error: No valid TMDb Person IDs in {self.data[methods['tmdb_person']]}") else: raise Failed("Collection Error: tmdb_person attribute is blank") + self.smart_sort = "random" + self.smart_label_collection = False + if "smart_label" in methods: + self.smart_label_collection = True + if self.data[methods["smart_label"]]: + if (self.library.is_movie and str(self.data[methods["smart_label"]]).lower() in plex.movie_smart_sorts) \ + or (self.library.is_show and str(self.data[methods["smart_label"]]).lower() in plex.show_smart_sorts): + self.smart_sort = str(self.data[methods["smart_label"]]).lower() + else: + logger.info("") + logger.warning(f"Collection Error: smart_label attribute: {self.data[methods['smart_label']]} is invalid defaulting to random") + else: + logger.info("") + logger.warning("Collection Error: smart_label attribute is blank defaulting to random") + + self.smart_url = None + self.smart_type_key = None + if "smart_url" in methods: + if self.data[methods["smart_url"]]: + try: + self.smart_url, self.smart_type_key = library.get_smart_filter_from_uri(self.data[methods["smart_url"]]) + except ValueError: + raise Failed("Collection Error: smart_url is incorrectly formatted") + else: + raise Failed("Collection Error: smart_url attribute is blank") + + if "smart_filter" in methods: + logger.info("") + smart_filter = self.data[methods["smart_filter"]] + if smart_filter is None: + raise Failed(f"Collection Error: smart_filter attribute is blank") + if not isinstance(smart_filter, dict): + raise Failed(f"Collection Error: smart_filter must be a dictionary: {smart_filter}") + smart_methods = {m.lower(): m for m in smart_filter} + if "any" in smart_methods and "all" in smart_methods: + raise Failed(f"Collection Error: Cannot have more then one base") + if "any" not in smart_methods and "all" not in smart_methods: + raise Failed(f"Collection Error: Must have either any or all as a base for the filter") + + if "type" in smart_methods and 
self.library.is_show: + if smart_filter[smart_methods["type"]] not in ["shows", "seasons", "episodes"]: + raise Failed(f"Collection Error: type: {smart_filter[smart_methods['type']]} is invalid, must be either shows, season, or episodes") + smart_type = smart_filter[smart_methods["type"]] + elif self.library.is_show: + smart_type = "shows" + else: + smart_type = "movies" + logger.info(f"Smart {smart_type.capitalize()[:-1]} Filter") + self.smart_type_key, smart_sorts = plex.smart_types[smart_type] + + smart_sort = "random" + if "sort_by" in smart_methods: + if smart_filter[smart_methods["sort_by"]] is None: + raise Failed(f"Collection Error: sort_by attribute is blank") + if smart_filter[smart_methods["sort_by"]] not in smart_sorts: + raise Failed(f"Collection Error: sort_by: {smart_filter[smart_methods['sort_by']]} is invalid") + smart_sort = smart_filter[smart_methods["sort_by"]] + logger.info(f"Sort By: {smart_sort}") + + limit = None + if "limit" in smart_methods: + if smart_filter[smart_methods["limit"]] is None: + raise Failed("Collection Error: limit attribute is blank") + if not isinstance(smart_filter[smart_methods["limit"]], int) or smart_filter[smart_methods["limit"]] < 1: + raise Failed("Collection Error: limit attribute must be an integer greater then 0") + limit = smart_filter[smart_methods["limit"]] + logger.info(f"Limit: {limit}") + + validate = True + if "validate" in smart_methods: + if smart_filter[smart_methods["validate"]] is None: + raise Failed("Collection Error: validate attribute is blank") + if not isinstance(smart_filter[smart_methods["validate"]], bool): + raise Failed("Collection Error: validate attribute must be either true or false") + validate = smart_filter[smart_methods["validate"]] + logger.info(f"Validate: {validate}") + + def _filter(filter_dict, fail, is_all=True, level=1): + output = "" + display = f"\n{' ' * level}Match {'all' if is_all else 'any'} of the following:" + level += 1 + indent = f"\n{' ' * level}" + conjunction = 
f"{'and' if is_all else 'or'}=1&" + for smart_key, smart_data in filter_dict.items(): + smart, smart_mod, smart_final = _split(smart_key) + + def build_url_arg(arg, mod=None, arg_s=None, mod_s=None): + arg_key = plex.search_translation[smart] if smart in plex.search_translation else smart + if mod is None: + mod = plex.modifier_translation[smart_mod] if smart_mod in plex.search_translation else smart_mod + if arg_s is None: + arg_s = arg + if smart in string_filters and smart_mod in ["", ".not"]: + mod_s = "does not contain" if smart_mod == ".not" else "contains" + elif mod_s is None: + mod_s = plex.mod_displays[smart_mod] + display_line = f"{indent}{smart.title().replace('_', ' ')} {mod_s} {arg_s}" + return f"{arg_key}{mod}={arg}&", display_line + + if smart_final in plex.movie_only_smart_searches and self.library.is_show: + raise Failed(f"Collection Error: {smart_final} smart filter attribute only works for movie libraries") + elif smart_final in plex.show_only_smart_searches and self.library.is_movie: + raise Failed(f"Collection Error: {smart_final} smart filter attribute only works for show libraries") + elif smart_final not in plex.smart_searches: + raise Failed(f"Collection Error: {smart_final} is not a valid smart filter attribute") + elif smart_data is None: + raise Failed(f"Collection Error: {smart_final} smart filter attribute is blank") + elif smart in ["all", "any"]: + dicts = util.get_list(smart_data) + results = "" + display_add = "" + for dict_data in dicts: + if not isinstance(dict_data, dict): + raise Failed(f"Collection Error: {smart} must be either a dictionary or list of dictionaries") + inside_filter, inside_display = _filter(dict_data, fail, is_all=smart == "all", level=level) + display_add += inside_display + results += f"{conjunction if len(results) > 0 else ''}push=1&{inside_filter}pop=1&" + elif smart in ["year", "episode_year"] and smart_mod in [".gt", ".gte", ".lt", ".lte"]: + results, display_add = 
build_url_arg(util.check_year(smart_data, current_year, smart_final)) + elif smart in ["added", "episode_added", "originally_available", "episode_originally_available"] and smart_mod in [".before", ".after"]: + results, display_add = build_url_arg(util.check_date(smart_data, smart_final, return_string=True, plex_date=True)) + elif smart in ["added", "episode_added", "originally_available", "episode_originally_available"] and smart_mod in ["", ".not"]: + in_the_last = util.check_number(smart_data, smart_final, minimum=1) + last_text = "is not in the last" if smart_mod == ".not" else "is in the last" + last_mod = "%3E%3E" if smart_mod == "" else "%3C%3C" + results, display_add = build_url_arg(f"-{in_the_last}d", mod=last_mod, arg_s=f"{in_the_last} Days", mod_s=last_text) + elif smart in ["duration"] and smart_mod in [".gt", ".gte", ".lt", ".lte"]: + results, display_add = build_url_arg(util.check_number(smart_data, smart_final, minimum=1) * 60000) + elif smart in ["plays", "episode_plays"] and smart_mod in [".gt", ".gte", ".lt", ".lte"]: + results, display_add = build_url_arg(util.check_number(smart_data, smart_final, minimum=1)) + elif smart in ["user_rating", "episode_user_rating", "critic_rating", "audience_rating"] and smart_mod in [".gt", ".gte", ".lt", ".lte"]: + results, display_add = build_url_arg(util.check_number(smart_data, smart_final, number_type="float", minimum=0, maximum=10)) + else: + if smart in ["title", "episode_title"] and smart_mod in ["", ".not", ".begins", ".ends"]: + results_list = [(t, t) for t in util.get_list(smart_data, split=False)] + elif smart in plex.tags and smart_mod in ["", ".not", ".begins", ".ends"]: + if smart_final in plex.tmdb_searches: + smart_values = [] + for tmdb_value in util.get_list(smart_data): + if tmdb_value.lower() == "tmdb" and "tmdb_person" in self.details: + for tmdb_name in self.details["tmdb_person"]: + smart_values.append(tmdb_name) + else: + smart_values.append(tmdb_value) + elif smart == "studio": + 
smart_values = util.get_list(smart_data, split=False) + else: + smart_values = util.get_list(smart_data) + results_list = [] + try: + results_list = self.library.validate_search_list(smart_values, smart, title=False, pairs=True) + except Failed as e: + if fail: + raise + else: + logger.error(e) + elif smart in ["decade", "year", "episode_year"] and smart_mod in ["", ".not"]: + results_list = [(y, y) for y in util.get_year_list(smart_data, current_year, smart_final)] + else: + raise Failed(f"Collection Error: modifier: {smart_mod} not supported with the {smart} plex search attribute") + results = "" + display_add = "" + for og_value, result in results_list: + built_arg = build_url_arg(quote(result) if smart in string_filters else result, arg_s=og_value) + display_add += built_arg[1] + results += f"{conjunction if len(results) > 0 else ''}{built_arg[0]}" + display += display_add + output += f"{conjunction if len(output) > 0 else ''}{results}" + return output, display + + base = "all" if "all" in smart_methods else "any" + base_all = base == "all" + if smart_filter[smart_methods[base]] is None: + raise Failed(f"Collection Error: {base} attribute is blank") + if not isinstance(smart_filter[smart_methods[base]], dict): + raise Failed(f"Collection Error: {base} must be a dictionary: {smart_filter[smart_methods[base]]}") + built_filter, filter_text = _filter(smart_filter[smart_methods[base]], validate, is_all=base_all) + util.print_multiline(f"Filter:{filter_text}") + final_filter = built_filter[:-1] if base_all else f"push=1&{built_filter}pop=1" + self.smart_url = f"?type={self.smart_type_key}&{f'limit={limit}&' if limit else ''}sort={smart_sorts[smart_sort]}&{final_filter}" + + def cant_interact(attr1, attr2, fail=False): + if getattr(self, attr1) and getattr(self, attr2): + message = f"Collection Error: {attr1} & {attr2} attributes cannot go together" + if fail: + raise Failed(message) + else: + setattr(self, attr2, False) + logger.info("") + logger.warning(f"{message} 
removing {attr2}") + + cant_interact("smart_label_collection", "collectionless") + cant_interact("smart_url", "collectionless") + cant_interact("smart_url", "run_again") + cant_interact("smart_label_collection", "smart_url", fail=True) + + self.smart = self.smart_url or self.smart_label_collection + for method_key, method_data in self.data.items(): if "trakt" in method_key.lower() and not config.Trakt: raise Failed(f"Collection Error: {method_key} requires Trakt todo be configured") elif "imdb" in method_key.lower() and not config.IMDb: raise Failed(f"Collection Error: {method_key} requires TMDb or Trakt to be configured") @@ -389,8 +637,14 @@ class CollectionBuilder: raise Failed(f"Collection Error: {method_name} plex search only works for movie libraries") elif method_name in plex.show_only_searches and self.library.is_movie: raise Failed(f"Collection Error: {method_name} plex search only works for show libraries") + elif method_name in smart_collection_invalid and self.smart: + raise Failed(f"Collection Error: {method_name} attribute only works with normal collections") elif method_name not in collectionless_details and self.collectionless: raise Failed(f"Collection Error: {method_name} attribute does not work for Collectionless collection") + elif self.smart_url and method_name in all_builders: + raise Failed(f"Collection Error: {method_name} builder not allowed when using smart_url") + elif self.smart_url and method_name in smart_url_collection_invalid: + raise Failed(f"Collection Error: {method_name} detail not allowed when using smart_url") elif method_name == "summary": self.summaries[method_name] = method_data elif method_name == "tmdb_summary": @@ -517,13 +771,13 @@ class CollectionBuilder: self.sonarr_options["tag"] = util.get_list(method_data) elif method_name in ["title", "title.and", "title.not", "title.begins", "title.ends"]: self.methods.append(("plex_search", [{method_name: util.get_list(method_data, split=False)}])) - elif method_name in 
["year.greater", "year.less"]: + elif method_name in ["year.gt", "year.gte", "year.lt", "year.lte"]: self.methods.append(("plex_search", [{method_name: util.check_year(method_data, current_year, method_name)}])) elif method_name in ["added.before", "added.after", "originally_available.before", "originally_available.after"]: self.methods.append(("plex_search", [{method_name: util.check_date(method_data, method_name, return_string=True, plex_date=True)}])) - elif method_name in ["added", "added.not", "originally_available", "originally_available.not", "duration.greater", "duration.less"]: + elif method_name in ["added", "added.not", "originally_available", "originally_available.not", "duration.gt", "duration.gte", "duration.lt", "duration.lte"]: self.methods.append(("plex_search", [{method_name: util.check_number(method_data, method_name, minimum=1)}])) - elif method_name in ["user_rating.greater", "user_rating.less", "critic_rating.greater", "critic_rating.less", "audience_rating.greater", "audience_rating.less"]: + elif method_name in ["user_rating.gt", "user_rating.gte", "user_rating.lt", "user_rating.lte", "critic_rating.gt", "critic_rating.gte", "critic_rating.lt", "critic_rating.lte", "audience_rating.gt", "audience_rating.gte", "audience_rating.lt", "audience_rating.lte"]: self.methods.append(("plex_search", [{method_name: util.check_number(method_data, method_name, number_type="float", minimum=0, maximum=10)}])) elif method_name in ["decade", "year", "year.not"]: self.methods.append(("plex_search", [{method_name: util.get_year_list(method_data, current_year, method_name)}])) @@ -538,16 +792,10 @@ class CollectionBuilder: final_values.append(value) else: final_values = method_data - search = os.path.splitext(method_name)[0] - valid_values = self.library.validate_search_list(final_values, search) - if len(valid_values) > 0: - self.methods.append(("plex_search", [{method_name: valid_values}])) - else: - logger.warning(f"Collection Warning: No valid {search} 
values found in {final_values}") + search = self.library.validate_search_list(final_values, os.path.splitext(method_name)[0]) + self.methods.append(("plex_search", [{method_name: search}])) elif method_name == "plex_all": self.methods.append((method_name, [""])) - elif method_name == "plex_collection": - self.methods.append((method_name, self.library.validate_collections(method_data if isinstance(method_data, list) else [method_data]))) elif method_name == "anidb_popular": list_count = util.regex_first_int(method_data, "List Size", default=40) if 1 <= list_count <= 30: @@ -597,18 +845,18 @@ class CollectionBuilder: self.methods.append((method_name[:-8], values)) elif method_name in dictionary_builders: if isinstance(method_data, dict): - def get_int(parent, method, data_in, methods_in, default_in, minimum=1, maximum=None): - if method not in methods_in: - logger.warning(f"Collection Warning: {parent} {method} attribute not found using {default_in} as default") - elif not data_in[methods_in[method]]: - logger.warning(f"Collection Warning: {parent} {methods_in[method]} attribute is blank using {default_in} as default") - elif isinstance(data_in[methods_in[method]], int) and data_in[methods_in[method]] >= minimum: - if maximum is None or data_in[methods_in[method]] <= maximum: - return data_in[methods_in[method]] + def get_int(parent, int_method, data_in, methods_in, default_in, minimum=1, maximum=None): + if int_method not in methods_in: + logger.warning(f"Collection Warning: {parent} {int_method} attribute not found using {default_in} as default") + elif not data_in[methods_in[int_method]]: + logger.warning(f"Collection Warning: {parent} {methods_in[int_method]} attribute is blank using {default_in} as default") + elif isinstance(data_in[methods_in[int_method]], int) and data_in[methods_in[int_method]] >= minimum: + if maximum is None or data_in[methods_in[int_method]] <= maximum: + return data_in[methods_in[int_method]] else: - logger.warning(f"Collection Warning: 
{parent} {methods_in[method]} attribute {data_in[methods_in[method]]} invalid must an integer <= {maximum} using {default_in} as default") + logger.warning(f"Collection Warning: {parent} {methods_in[int_method]} attribute {data_in[methods_in[int_method]]} invalid must an integer <= {maximum} using {default_in} as default") else: - logger.warning(f"Collection Warning: {parent} {methods_in[method]} attribute {data_in[methods_in[method]]} invalid must an integer >= {minimum} using {default_in} as default") + logger.warning(f"Collection Warning: {parent} {methods_in[int_method]} attribute {data_in[methods_in[int_method]]} invalid must an integer >= {minimum} using {default_in} as default") return default_in if method_name == "filters": for filter_name, filter_data in method_data.items(): @@ -634,9 +882,9 @@ class CollectionBuilder: valid_data = util.check_number(filter_data, f"{filter_method} filter", number_type="float", minimum=0.1, maximum=10) elif filter_method in ["originally_available.gte", "originally_available.lte"]: valid_data = util.check_date(filter_data, f"{filter_method} filter") - elif filter_method == "original_language": + elif filter_method in ["original_language", "original_language.not"]: valid_data = util.get_list(filter_data, lower=True) - elif filter_method == "collection": + elif filter_method in ["collection", "collection.not"]: valid_data = filter_data if isinstance(filter_data, list) else [filter_data] elif filter_method in all_filters: valid_data = util.get_list(filter_data) @@ -660,21 +908,29 @@ class CollectionBuilder: exact_list.append(str(method_data[dict_methods["exclude"]])) if len(prefix_list) == 0 and len(exact_list) == 0: raise Failed("Collection Error: you must have at least one exclusion") + exact_list.append(self.name) new_dictionary["exclude_prefix"] = prefix_list new_dictionary["exclude"] = exact_list self.methods.append((method_name, [new_dictionary])) elif method_name == "plex_search": searches = {} + validate = True + if 
"validate" in method_data: + if method_data["validate"] is None: + raise Failed("Collection Error: validate plex search attribute is blank") + if not isinstance(method_data["validate"], bool): + raise Failed("Collection Error: validate plex search attribute must be either true or false") + validate = method_data["validate"] for search_name, search_data in method_data.items(): - search, modifier = os.path.splitext(str(search_name).lower()) - if search in method_alias: - search = method_alias[search] - logger.warning(f"Collection Warning: {str(search_name).lower()} plex search attribute will run as {search}{modifier if modifier else ''}") - search_final = f"{search}{modifier}" + search, modifier, search_final = _split(search_name) + if search_name != search_final: + logger.warning(f"Collection Warning: {search_name} plex search attribute will run as {search_final}") if search_final in plex.movie_only_searches and self.library.is_show: raise Failed(f"Collection Error: {search_final} plex search attribute only works for movie libraries") - if search_final in plex.show_only_searches and self.library.is_movie: + elif search_final in plex.show_only_searches and self.library.is_movie: raise Failed(f"Collection Error: {search_final} plex search attribute only works for show libraries") + elif search_final not in plex.searches: + raise Failed(f"Collection Error: {search_final} is not a valid plex search attribute") elif search_data is None: raise Failed(f"Collection Error: {search_final} plex search attribute is blank") elif search == "sort_by": @@ -691,9 +947,7 @@ class CollectionBuilder: searches[search] = search_data elif search == "title" and modifier in ["", ".and", ".not", ".begins", ".ends"]: searches[search_final] = util.get_list(search_data, split=False) - elif (search == "studio" and modifier in ["", ".and", ".not", ".begins", ".ends"]) \ - or (search in ["actor", "audio_language", "collection", "content_rating", "country", "director", "genre", "label", "network", 
"producer", "subtitle_language", "writer"] and modifier in ["", ".and", ".not"]) \ - or (search == "resolution" and modifier in [""]): + elif search in plex.tags and modifier in ["", ".and", ".not", ".begins", ".ends"]: if search_final in plex.tmdb_searches: final_values = [] for value in util.get_list(search_data): @@ -703,31 +957,26 @@ class CollectionBuilder: else: final_values.append(value) else: - final_values = search_data - valid_values = self.library.validate_search_list(final_values, search) - if valid_values: - searches[search_final] = valid_values - else: - logger.warning(f"Collection Warning: No valid {search} values found in {final_values}") - elif search == "year" and modifier in [".greater", ".less"]: + final_values = util.get_list(search_data) + try: + searches[search_final] = self.library.validate_search_list(final_values, search) + except Failed as e: + if validate: + raise + else: + logger.error(e) + elif search == "year" and modifier in [".gt", ".gte", ".lt", ".lte"]: searches[search_final] = util.check_year(search_data, current_year, search_final) elif search in ["added", "originally_available"] and modifier in [".before", ".after"]: searches[search_final] = util.check_date(search_data, search_final, return_string=True, plex_date=True) - elif (search in ["added", "originally_available"] and modifier in ["", ".not"]) or (search in ["duration"] and modifier in [".greater", ".less"]): + elif search in ["added", "originally_available", "duration"] and modifier in ["", ".not", ".gt", ".gte", ".lt", ".lte"]: searches[search_final] = util.check_number(search_data, search_final, minimum=1) - elif search in ["user_rating", "critic_rating", "audience_rating"] and modifier in [".greater", ".less"]: + elif search in ["user_rating", "critic_rating", "audience_rating"] and modifier in [".gt", ".gte", ".lt", ".lte"]: searches[search_final] = util.check_number(search_data, search_final, number_type="float", minimum=0, maximum=10) - elif (search == "decade" and 
modifier in [""]) or (search == "year" and modifier in ["", ".not"]): + elif search in ["decade", "year"] and modifier in ["", ".not"]: searches[search_final] = util.get_year_list(search_data, current_year, search_final) - elif (search in ["title", "studio"] and modifier not in ["", ".and", ".not", ".begins", ".ends"]) \ - or (search in ["actor", "audio_language", "collection", "content_rating", "country", "director", "genre", "label", "network", "producer", "subtitle_language", "writer"] and modifier not in ["", ".and", ".not"]) \ - or (search in ["resolution", "decade"] and modifier not in [""]) \ - or (search in ["added", "originally_available"] and modifier not in ["", ".not", ".before", ".after"]) \ - or (search in ["duration", "user_rating", "critic_rating", "audience_rating"] and modifier not in [".greater", ".less"]) \ - or (search in ["year"] and modifier not in ["", ".not", ".greater", ".less"]): - raise Failed(f"Collection Error: modifier: {modifier} not supported with the {search} plex search attribute") else: - raise Failed(f"Collection Error: {search_final} plex search attribute not supported") + raise Failed(f"Collection Error: modifier: {modifier} not supported with the {search} plex search attribute") if len(searches) > 0: self.methods.append((method_name, [searches])) else: @@ -985,6 +1234,10 @@ class CollectionBuilder: self.add_to_radarr = self.library.Radarr.add if self.library.Radarr else False if self.add_to_sonarr is None: self.add_to_sonarr = self.library.Sonarr.add if self.library.Sonarr else False + + if self.smart_url: + self.add_to_radarr = False + self.add_to_sonarr = False if self.collectionless: self.add_to_radarr = False @@ -992,16 +1245,32 @@ class CollectionBuilder: self.details["collection_mode"] = "hide" self.sync = True - def run_methods(self, collection_obj, collection_name, rating_key_map, movie_map, show_map): - items_found = 0 + try: + self.obj = library.get_collection(self.name) + collection_smart = library.smart(self.obj) 
+ if (self.smart and not collection_smart) or (not self.smart and collection_smart): + logger.info("") + logger.error(f"Collection Error: Converting {self.obj.title} to a {'smart' if self.smart else 'normal'} collection") + library.query(self.obj.delete) + self.obj = None + except Failed: + self.obj = None + + self.plex_map = {} + if self.sync and self.obj: + for item in library.get_collection_items(self.obj, self.smart_label_collection): + self.plex_map[item.ratingKey] = item + + def collect_rating_keys(self, movie_map, show_map): + def add_rating_keys(keys): + if not isinstance(keys, list): + keys = [keys] + self.rating_keys.extend([key for key in keys if key not in self.rating_keys]) for method, values in self.methods: logger.debug("") logger.debug(f"Method: {method}") logger.debug(f"Values: {values}") for value in values: - items = [] - missing_movies = [] - missing_shows = [] def check_map(input_ids): movie_ids, show_ids = input_ids items_found_inside = 0 @@ -1009,121 +1278,257 @@ class CollectionBuilder: items_found_inside += len(movie_ids) for movie_id in movie_ids: if movie_id in movie_map: - items.extend(movie_map[movie_id]) - else: - missing_movies.append(movie_id) + add_rating_keys(movie_map[movie_id]) + elif movie_id not in self.missing_movies: + self.missing_movies.append(movie_id) if len(show_ids) > 0: items_found_inside += len(show_ids) for show_id in show_ids: if show_id in show_map: - items.extend(show_map[show_id]) - else: - missing_shows.append(show_id) + add_rating_keys(show_map[show_id]) + elif show_id not in self.missing_shows: + self.missing_shows.append(show_id) return items_found_inside - logger.info("") + logger.debug("") logger.debug(f"Value: {value}") - if "plex" in method: - items = self.library.get_items(method, value) - items_found += len(items) - elif "tautulli" in method: - items = self.library.Tautulli.get_items(self.library, time_range=value["list_days"], stats_count=value["list_size"], list_type=value["list_type"], 
stats_count_buffer=value["list_buffer"]) - items_found += len(items) - elif "anidb" in method: items_found += check_map(self.config.AniDB.get_items(method, value, self.library.Plex.language)) - elif "anilist" in method: items_found += check_map(self.config.AniList.get_items(method, value, self.library.Plex.language)) - elif "mal" in method: items_found += check_map(self.config.MyAnimeList.get_items(method, value, self.library.Plex.language)) - elif "tvdb" in method: items_found += check_map(self.config.TVDb.get_items(method, value, self.library.Plex.language)) - elif "imdb" in method: items_found += check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language)) - elif "letterboxd" in method: items_found += check_map(self.config.Letterboxd.get_items(method, value, self.library.Plex.language)) - elif "tmdb" in method: items_found += check_map(self.config.TMDb.get_items(method, value, self.library.is_movie)) - elif "trakt" in method: items_found += check_map(self.config.Trakt.get_items(method, value, self.library.is_movie)) + logger.info("") + if "plex" in method: add_rating_keys(self.library.get_items(method, value)) + elif "tautulli" in method: add_rating_keys(self.library.Tautulli.get_items(self.library, value)) + elif "anidb" in method: check_map(self.config.AniDB.get_items(method, value, self.library.Plex.language)) + elif "anilist" in method: check_map(self.config.AniList.get_items(method, value)) + elif "mal" in method: check_map(self.config.MyAnimeList.get_items(method, value)) + elif "tvdb" in method: check_map(self.config.TVDb.get_items(method, value, self.library.Plex.language)) + elif "imdb" in method: check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language)) + elif "letterboxd" in method: check_map(self.config.Letterboxd.get_items(method, value, self.library.Plex.language)) + elif "tmdb" in method: check_map(self.config.TMDb.get_items(method, value, self.library.is_movie)) + elif "trakt" in method: 
check_map(self.config.Trakt.get_items(method, value, self.library.is_movie)) else: logger.error(f"Collection Error: {method} method not supported") - if len(items) > 0: rating_key_map = self.library.add_to_collection(collection_obj if collection_obj else collection_name, items, self.filters, self.details["show_filtered"], rating_key_map, movie_map, show_map) - else: logger.error("No items found to add to this collection ") - - if len(missing_movies) > 0 or len(missing_shows) > 0: - logger.info("") - arr_filters = [] - for filter_method, filter_data in self.filters: - if (filter_method.startswith("original_language") and self.library.is_movie) or filter_method.startswith("tmdb_vote_count"): - arr_filters.append((filter_method, filter_data)) - if len(missing_movies) > 0: - missing_movies_with_names = [] - for missing_id in missing_movies: - try: - movie = self.config.TMDb.get_movie(missing_id) - except Failed as e: - logger.error(e) - continue - match = True - for filter_method, filter_data in arr_filters: - if (filter_method == "original_language" and movie.original_language not in filter_data) \ - or (filter_method == "original_language.not" and movie.original_language in filter_data) \ - or (filter_method == "tmdb_vote_count.gte" and movie.vote_count < filter_data) \ - or (filter_method == "tmdb_vote_count.lte" and movie.vote_count > filter_data): - match = False + def add_to_collection(self, movie_map): + name, collection_items = self.library.get_collection_name_and_items(self.obj if self.obj else self.name, self.smart_label_collection) + total = len(self.rating_keys) + max_length = len(str(total)) + length = 0 + for i, item in enumerate(self.rating_keys, 1): + try: + current = self.library.fetchItem(item.ratingKey if isinstance(item, (Movie, Show)) else int(item)) + if not isinstance(current, (Movie, Show)): + raise NotFound + except (BadRequest, NotFound): + logger.error(f"Plex Error: Item {item} not found") + continue + match = True + if self.filters: + length 
= util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}") + for filter_method, filter_data in self.filters: + modifier = filter_method[-4:] + method = filter_method[:-4] if modifier in [".not", ".lte", ".gte"] else filter_method + method_name = filter_alias[method] if method in filter_alias else method + if method_name == "max_age": + threshold_date = datetime.now() - timedelta(days=filter_data) + if current.originallyAvailableAt is None or current.originallyAvailableAt < threshold_date: + match = False + break + elif method_name == "original_language": + movie = None + for key, value in movie_map.items(): + if current.ratingKey in value: + try: + movie = self.config.TMDb.get_movie(key) break - if match: - missing_movies_with_names.append((movie.title, missing_id)) - if self.details["show_missing"] is True: - logger.info(f"{collection_name} Collection | ? | {movie.title} (TMDb: {missing_id})") - elif self.details["show_filtered"] is True: - logger.info(f"{collection_name} Collection | X | {movie.title} (TMDb: {missing_id})") - logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing") - if self.details["save_missing"] is True: - self.library.add_missing(collection_name, missing_movies_with_names, True) - if self.add_to_radarr and self.library.Radarr: - try: - self.library.Radarr.add_tmdb([missing_id for title, missing_id in missing_movies_with_names], **self.radarr_options) - except Failed as e: - logger.error(e) - if self.run_again: - self.missing_movies.extend([missing_id for title, missing_id in missing_movies_with_names]) - if len(missing_shows) > 0 and self.library.is_show: - missing_shows_with_names = [] - for missing_id in missing_shows: - try: - title = str(self.config.TVDb.get_series(self.library.Plex.language, missing_id).title.encode("ascii", "replace").decode()) - except Failed as e: - logger.error(e) - continue - match = True - if arr_filters: - 
show = self.config.TMDb.get_show(self.config.TMDb.convert_tvdb_to_tmdb(missing_id)) - for filter_method, filter_data in arr_filters: - if (filter_method == "tmdb_vote_count.gte" and show.vote_count < filter_data) \ - or (filter_method == "tmdb_vote_count.lte" and show.vote_count > filter_data): - match = False + except Failed: + pass + if movie is None: + logger.warning(f"Filter Error: No TMDb ID found for {current.title}") + continue + if (modifier == ".not" and movie.original_language in filter_data) or ( + modifier != ".not" and movie.original_language not in filter_data): + match = False + break + elif method_name == "audio_track_title": + jailbreak = False + for media in current.media: + for part in media.parts: + for audio in part.audioStreams(): + for check_title in filter_data: + title = audio.title if audio.title else "" + if check_title.lower() in title.lower(): + jailbreak = True + break + if jailbreak: break + if jailbreak: break + if jailbreak: break + if (jailbreak and modifier == ".not") or (not jailbreak and modifier != ".not"): + match = False + break + elif method_name == "filepath": + jailbreak = False + for location in current.locations: + for check_text in filter_data: + if check_text.lower() in location.lower(): + jailbreak = True + break + if jailbreak: break + if (jailbreak and modifier == ".not") or (not jailbreak and modifier != ".not"): + match = False + break + elif modifier in [".gte", ".lte"]: + if method_name == "vote_count": + tmdb_item = None + for key, value in movie_map.items(): + if current.ratingKey in value: + try: + tmdb_item = self.config.TMDb.get_movie(key) if self.library.is_movie else self.config.TMDb.get_show(key) break - if match: - missing_shows_with_names.append((title, missing_id)) - if self.details["show_missing"] is True: - logger.info(f"{collection_name} Collection | ? 
| {title} (TVDB: {missing_id})") - elif self.details["show_filtered"] is True: - logger.info(f"{collection_name} Collection | X | {title} (TVDb: {missing_id})") - logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing") - if self.details["save_missing"] is True: - self.library.add_missing(collection_name, missing_shows_with_names, False) - if self.add_to_sonarr and self.library.Sonarr: - try: - self.library.Sonarr.add_tvdb([missing_id for title, missing_id in missing_shows_with_names], **self.sonarr_options) - except Failed as e: - logger.error(e) - if self.run_again: - self.missing_shows.extend([missing_id for title, missing_id in missing_shows_with_names]) + except Failed: + pass + if tmdb_item is None: + logger.warning(f"Filter Error: No TMDb ID found for {current.title}") + continue + attr = tmdb_item.vote_count + else: + attr = getattr(current, method_name) / 60000 if method_name == "duration" else getattr(current, method_name) + if attr is None or (modifier == ".lte" and attr > filter_data) or (modifier == ".gte" and attr < filter_data): + match = False + break + else: + attrs = [] + if method_name in ["video_resolution", "audio_language", "subtitle_language"]: + for media in current.media: + if method_name == "video_resolution": + attrs.extend([media.videoResolution]) + for part in media.parts: + if method_name == "audio_language": + attrs.extend([a.language for a in part.audioStreams()]) + if method_name == "subtitle_language": + attrs.extend([s.language for s in part.subtitleStreams()]) + elif method_name in ["contentRating", "studio", "year", "rating", "originallyAvailableAt"]: + attrs = [str(getattr(current, method_name))] + elif method_name in ["actors", "countries", "directors", "genres", "writers", "collections"]: + attrs = [getattr(x, "tag") for x in getattr(current, method_name)] + else: + raise Failed(f"Filter Error: filter: {method_name} not supported") - if self.sync and items_found > 0: - 
logger.info("") - count_removed = 0 - for ratingKey, item in rating_key_map.items(): - if item is not None: - logger.info(f"{collection_name} Collection | - | {item.title}") - item.removeCollection(collection_name) - count_removed += 1 - logger.info(f"{count_removed} {'Movie' if self.library.is_movie else 'Show'}{'s' if count_removed == 1 else ''} Removed") + if (not list(set(filter_data) & set(attrs)) and modifier != ".not")\ + or (list(set(filter_data) & set(attrs)) and modifier == ".not"): + match = False + break + length = util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}") + if match: + util.print_end(length, f"{name} Collection | {'=' if current in collection_items else '+'} | {current.title}") + if current in collection_items: + self.plex_map[current.ratingKey] = None + elif self.smart_label_collection: + self.library.query_data(current.addLabel, name) + else: + self.library.query_data(current.addCollection, name) + elif self.details["show_filtered"] is True: + logger.info(f"{name} Collection | X | {current.title}") + media_type = f"{'Movie' if self.library.is_movie else 'Show'}{'s' if total > 1 else ''}" + util.print_end(length, f"{total} {media_type} Processed") + + def run_missing(self): logger.info("") + arr_filters = [] + for filter_method, filter_data in self.filters: + if (filter_method.startswith("original_language") and self.library.is_movie) or filter_method.startswith("tmdb_vote_count"): + arr_filters.append((filter_method, filter_data)) + if len(self.missing_movies) > 0: + missing_movies_with_names = [] + for missing_id in self.missing_movies: + try: + movie = self.config.TMDb.get_movie(missing_id) + except Failed as e: + logger.error(e) + continue + match = True + for filter_method, filter_data in arr_filters: + if (filter_method == "original_language" and movie.original_language not in filter_data) \ + or (filter_method == "original_language.not" and movie.original_language in 
filter_data) \ + or (filter_method == "tmdb_vote_count.gte" and movie.vote_count < filter_data) \ + or (filter_method == "tmdb_vote_count.lte" and movie.vote_count > filter_data): + match = False + break + if match: + missing_movies_with_names.append((movie.title, missing_id)) + if self.details["show_missing"] is True: + logger.info(f"{self.name} Collection | ? | {movie.title} (TMDb: {missing_id})") + elif self.details["show_filtered"] is True: + logger.info(f"{self.name} Collection | X | {movie.title} (TMDb: {missing_id})") + logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing") + if self.details["save_missing"] is True: + self.library.add_missing(self.name, missing_movies_with_names, True) + if (self.add_to_radarr and self.library.Radarr) or self.run_again: + missing_tmdb_ids = [missing_id for title, missing_id in missing_movies_with_names] + if self.add_to_radarr and self.library.Radarr: + try: + self.library.Radarr.add_tmdb(missing_tmdb_ids, **self.radarr_options) + except Failed as e: + logger.error(e) + if self.run_again: + self.run_again_movies.extend(missing_tmdb_ids) + if len(self.missing_shows) > 0 and self.library.is_show: + missing_shows_with_names = [] + for missing_id in self.missing_shows: + try: + title = str(self.config.TVDb.get_series(self.library.Plex.language, missing_id).title.encode("ascii", "replace").decode()) + except Failed as e: + logger.error(e) + continue + match = True + if arr_filters: + show = self.config.TMDb.get_show(self.config.Convert.tvdb_to_tmdb(missing_id)) + for filter_method, filter_data in arr_filters: + if (filter_method == "tmdb_vote_count.gte" and show.vote_count < filter_data) \ + or (filter_method == "tmdb_vote_count.lte" and show.vote_count > filter_data): + match = False + break + if match: + missing_shows_with_names.append((title, missing_id)) + if self.details["show_missing"] is True: + logger.info(f"{self.name} Collection | ? 
| {title} (TVDB: {missing_id})") + elif self.details["show_filtered"] is True: + logger.info(f"{self.name} Collection | X | {title} (TVDb: {missing_id})") + logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing") + if self.details["save_missing"] is True: + self.library.add_missing(self.name, missing_shows_with_names, False) + if (self.add_to_sonarr and self.library.Sonarr) or self.run_again: + missing_tvdb_ids = [missing_id for title, missing_id in missing_shows_with_names] + if self.add_to_sonarr and self.library.Sonarr: + try: + self.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.sonarr_options) + except Failed as e: + logger.error(e) + if self.run_again: + self.run_again_shows.extend(missing_tvdb_ids) + + def sync_collection(self): + logger.info("") + count_removed = 0 + for ratingKey, item in self.rating_key_map.items(): + if item is not None: + logger.info(f"{self.name} Collection | - | {item.title}") + if self.smart_label_collection: + self.library.query_data(item.removeLabel, self.name) + else: + self.library.query_data(item.removeCollection, self.name) + count_removed += 1 + logger.info(f"{count_removed} {'Movie' if self.library.is_movie else 'Show'}{'s' if count_removed == 1 else ''} Removed") + + def update_details(self): + if not self.obj and self.smart_url: + self.library.create_smart_collection(self.name, self.smart_type_key, self.smart_url) + elif not self.obj and self.smart_label_collection: + try: + self.library.create_smart_labels(self.name, sort=self.smart_sort) + except Failed: + raise Failed(f"Collection Error: Label: {self.name} was not added to any items in the Library") + self.obj = self.library.get_collection(self.name) + + if self.smart_url and self.smart_url != self.library.smart_filter(self.obj): + self.library.update_smart_collection(self.obj, self.smart_url) + logger.info(f"Detail: Smart Filter updated to {self.smart_url}") - def update_details(self, collection): edits = {} def 
get_summary(summary_method, summaries): logger.info(f"Detail: {summary_method} updated Collection Summary") @@ -1149,51 +1554,58 @@ class CollectionBuilder: elif "tmdb_show_details" in self.summaries: summary = get_summary("tmdb_show_details", self.summaries) else: summary = None if summary: - edits["summary.value"] = summary - edits["summary.locked"] = 1 + if str(summary) != str(self.obj.summary): + edits["summary.value"] = summary + edits["summary.locked"] = 1 if "sort_title" in self.details: - edits["titleSort.value"] = self.details["sort_title"] - edits["titleSort.locked"] = 1 - logger.info(f"Detail: sort_title updated Collection Sort Title to {self.details['sort_title']}") + if str(self.details["sort_title"]) != str(self.obj.titleSort): + edits["titleSort.value"] = self.details["sort_title"] + edits["titleSort.locked"] = 1 + logger.info(f"Detail: sort_title updated Collection Sort Title to {self.details['sort_title']}") if "content_rating" in self.details: - edits["contentRating.value"] = self.details["content_rating"] - edits["contentRating.locked"] = 1 - logger.info(f"Detail: content_rating updated Collection Content Rating to {self.details['content_rating']}") + if str(self.details["content_rating"]) != str(self.obj.contentRating): + edits["contentRating.value"] = self.details["content_rating"] + edits["contentRating.locked"] = 1 + logger.info(f"Detail: content_rating updated Collection Content Rating to {self.details['content_rating']}") if "collection_mode" in self.details: - collection.modeUpdate(mode=self.details["collection_mode"]) - logger.info(f"Detail: collection_mode updated Collection Mode to {self.details['collection_mode']}") + if int(self.obj.collectionMode) not in plex.collection_mode_keys\ + or plex.collection_mode_keys[int(self.obj.collectionMode)] != self.details["collection_mode"]: + self.library.collection_mode_query(self.obj, self.details["collection_mode"]) + logger.info(f"Detail: collection_mode updated Collection Mode to 
{self.details['collection_mode']}") if "collection_order" in self.details: - collection.sortUpdate(sort=self.details["collection_order"]) - logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}") + if int(self.obj.collectionSort) not in plex.collection_order_keys\ + or plex.collection_order_keys[int(self.obj.collectionSort)] != self.details["collection_order"]: + self.library.collection_order_query(self.obj, self.details["collection_order"]) + logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}") if "label" in self.details or "label.sync" in self.details: - item_labels = [label.tag for label in collection.labels] + item_labels = [label.tag for label in self.obj.labels] labels = util.get_list(self.details["label" if "label" in self.details else "label.sync"]) if "label.sync" in self.details: for label in (la for la in item_labels if la not in labels): - collection.removeLabel(label) + self.library.query_data(self.obj.removeLabel, label) logger.info(f"Detail: Label {label} removed") for label in (la for la in labels if la not in item_labels): - collection.addLabel(label) + self.library.query_data(self.obj.addLabel, label) logger.info(f"Detail: Label {label} added") if len(self.item_details) > 0: labels = None if "item_label" in self.item_details or "item_label.sync" in self.item_details: labels = util.get_list(self.item_details["item_label" if "item_label" in self.item_details else "item_label.sync"]) - for item in collection.items(): + for item in self.library.get_collection_items(self.obj, self.smart_label_collection): if labels is not None: item_labels = [label.tag for label in item.labels] if "item_label.sync" in self.item_details: for label in (la for la in item_labels if la not in labels): - item.removeLabel(label) + self.library.query_data(item.removeLabel, label) logger.info(f"Detail: Label {label} removed from {item.title}") for label in (la for la in labels if 
la not in item_labels): - item.addLabel(label) + self.library.query_data(item.addLabel, label) logger.info(f"Detail: Label {label} added to {item.title}") advance_edits = {} for method_name, method_data in self.item_details.items(): @@ -1205,8 +1617,7 @@ class CollectionBuilder: if len(edits) > 0: logger.debug(edits) - collection.edit(**edits) - collection.reload() + self.library.edit_query(self.obj, edits) logger.info("Details: have been updated") if self.library.asset_directory: @@ -1216,63 +1627,27 @@ class CollectionBuilder: else: logger.error("Collection Error: name_mapping attribute is blank") for ad in self.library.asset_directory: path = os.path.join(ad, f"{name_mapping}") - if not os.path.isdir(path): - continue if self.library.asset_folders: - poster_path = os.path.join(ad, f"{name_mapping}", "poster.*") + if not os.path.isdir(path): + continue + poster_filter = os.path.join(ad, name_mapping, "poster.*") + background_filter = os.path.join(ad, name_mapping, "background.*") else: - poster_path = os.path.join(ad, f"{name_mapping}.*") - matches = glob.glob(poster_path) + poster_filter = os.path.join(ad, f"{name_mapping}.*") + background_filter = os.path.join(ad, f"{name_mapping}_background.*") + matches = glob.glob(poster_filter) if len(matches) > 0: self.posters["asset_directory"] = os.path.abspath(matches[0]) - if self.library.asset_folders: - matches = glob.glob(os.path.join(ad, f"{name_mapping}", "background.*")) - if len(matches) > 0: - self.backgrounds["asset_directory"] = os.path.abspath(matches[0]) - dirs = [folder for folder in os.listdir(path) if os.path.isdir(os.path.join(path, folder))] - if len(dirs) > 0: - for item in collection.items(): - folder = os.path.basename(os.path.dirname(item.locations[0]) if self.library.is_movie else item.locations[0]) - if folder in dirs: - matches = glob.glob(os.path.join(path, folder, "poster.*")) - poster_path = os.path.abspath(matches[0]) if len(matches) > 0 else None - matches = glob.glob(os.path.join(path, 
folder, "background.*")) - background_path = os.path.abspath(matches[0]) if len(matches) > 0 else None - if poster_path: - item.uploadPoster(filepath=poster_path) - logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {poster_path}") - if background_path: - item.uploadArt(filepath=background_path) - logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {background_path}") - if poster_path is None and background_path is None: - logger.warning(f"No Files Found: {os.path.join(path, folder)}") - if self.library.is_show: - for season in item.seasons(): - matches = glob.glob(os.path.join(path, folder, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*")) - if len(matches) > 0: - season_path = os.path.abspath(matches[0]) - season.uploadPoster(filepath=season_path) - logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}") - for episode in season.episodes(): - matches = glob.glob(os.path.join(path, folder, f"{episode.seasonEpisode.upper()}.*")) - if len(matches) > 0: - episode_path = os.path.abspath(matches[0]) - episode.uploadPoster(filepath=episode_path) - logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}") - else: - logger.warning(f"No Folder: {os.path.join(path, folder)}") + matches = glob.glob(background_filter) + if len(matches) > 0: + self.backgrounds["asset_directory"] = os.path.abspath(matches[0]) + for item in self.library.query(self.obj.items): + self.library.update_item_from_assets(item, dirs=[path]) def set_image(image_method, images, is_background=False): message = f"{'background' if is_background else 'poster'} to [{'File' if image_method in image_file_details else 'URL'}] {images[image_method]}" try: - if image_method in image_file_details and is_background: - collection.uploadArt(filepath=images[image_method]) - elif image_method in 
image_file_details: - collection.uploadPoster(filepath=images[image_method]) - elif is_background: - collection.uploadArt(url=images[image_method]) - else: - collection.uploadPoster(url=images[image_method]) + self.library.upload_image(self.obj, images[image_method], poster=not is_background, url=image_method not in image_file_details) logger.info(f"Detail: {image_method} updated collection {message}") except BadRequest: logger.error(f"Detail: {image_method} failed to update {message}") @@ -1324,15 +1699,15 @@ class CollectionBuilder: elif "tmdb_show_details" in self.backgrounds: set_image("tmdb_show_details", self.backgrounds, is_background=True) else: logger.info("No background to update") - def run_collections_again(self, collection_obj, movie_map, show_map): - collection_items = collection_obj.items() if isinstance(collection_obj, Collections) else [] - name = collection_obj.title if isinstance(collection_obj, Collections) else collection_obj + def run_collections_again(self, movie_map, show_map): + self.obj = self.library.get_collection(self.name) + name, collection_items = self.library.get_collection_name_and_items(self.obj, self.smart_label_collection) rating_keys = [] - for mm in self.missing_movies: + for mm in self.run_again_movies: if mm in movie_map: rating_keys.extend(movie_map[mm]) if self.library.is_show: - for sm in self.missing_shows: + for sm in self.run_again_shows: if sm in show_map: rating_keys.extend(show_map[sm]) if len(rating_keys) > 0: @@ -1344,14 +1719,16 @@ class CollectionBuilder: continue if current in collection_items: logger.info(f"{name} Collection | = | {current.title}") + elif self.smart_label_collection: + self.library.query_data(current.addLabel, name) else: - current.addCollection(name) + self.library.query_data(current.addCollection, name) logger.info(f"{name} Collection | + | {current.title}") logger.info(f"{len(rating_keys)} {'Movie' if self.library.is_movie else 'Show'}{'s' if len(rating_keys) > 1 else ''} Processed") - if 
len(self.missing_movies) > 0: + if len(self.run_again_movies) > 0: logger.info("") - for missing_id in self.missing_movies: + for missing_id in self.run_again_movies: if missing_id not in movie_map: try: movie = self.config.TMDb.get_movie(missing_id) @@ -1361,11 +1738,11 @@ class CollectionBuilder: if self.details["show_missing"] is True: logger.info(f"{name} Collection | ? | {movie.title} (TMDb: {missing_id})") logger.info("") - logger.info(f"{len(self.missing_movies)} Movie{'s' if len(self.missing_movies) > 1 else ''} Missing") + logger.info(f"{len(self.run_again_movies)} Movie{'s' if len(self.run_again_movies) > 1 else ''} Missing") - if len(self.missing_shows) > 0 and self.library.is_show: + if len(self.run_again_shows) > 0 and self.library.is_show: logger.info("") - for missing_id in self.missing_shows: + for missing_id in self.run_again_shows: if missing_id not in show_map: try: title = str(self.config.TVDb.get_series(self.library.Plex.language, missing_id).title.encode("ascii", "replace").decode()) @@ -1374,4 +1751,4 @@ class CollectionBuilder: continue if self.details["show_missing"] is True: logger.info(f"{name} Collection | ? 
| {title} (TVDb: {missing_id})") - logger.info(f"{len(self.missing_shows)} Show{'s' if len(self.missing_shows) > 1 else ''} Missing") + logger.info(f"{len(self.run_again_shows)} Show{'s' if len(self.run_again_shows) > 1 else ''} Missing") diff --git a/modules/cache.py b/modules/cache.py index f8cdbedc..df3b5b92 100644 --- a/modules/cache.py +++ b/modules/cache.py @@ -1,7 +1,6 @@ import logging, os, random, sqlite3 from contextlib import closing from datetime import datetime, timedelta -from modules.util import Failed logger = logging.getLogger("Plex Meta Manager") @@ -11,29 +10,42 @@ class Cache: with sqlite3.connect(cache) as connection: connection.row_factory = sqlite3.Row with closing(connection.cursor()) as cursor: - cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guids'") + cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guid_map'") if cursor.fetchone()[0] == 0: logger.info(f"Initializing cache database at {cache}") else: logger.info(f"Using cache database at {cache}") + cursor.execute("DROP TABLE IF EXISTS guids") + cursor.execute("DROP TABLE IF EXISTS imdb_map") cursor.execute( - """CREATE TABLE IF NOT EXISTS guids ( + """CREATE TABLE IF NOT EXISTS guid_map ( INTEGER PRIMARY KEY, plex_guid TEXT UNIQUE, + t_id TEXT, + media_type TEXT, + expiration_date TEXT)""" + ) + cursor.execute( + """CREATE TABLE IF NOT EXISTS imdb_to_tmdb_map ( + INTEGER PRIMARY KEY, + imdb_id TEXT UNIQUE, tmdb_id TEXT, - imdb_id TEXT, - tvdb_id TEXT, - anidb_id TEXT, - expiration_date TEXT, - media_type TEXT)""" + media_type TEXT, + expiration_date TEXT)""" ) cursor.execute( - """CREATE TABLE IF NOT EXISTS imdb_map ( + """CREATE TABLE IF NOT EXISTS imdb_to_tvdb_map ( INTEGER PRIMARY KEY, imdb_id TEXT UNIQUE, - t_id TEXT, - expiration_date TEXT, - media_type TEXT)""" + tvdb_id TEXT UNIQUE, + expiration_date TEXT)""" + ) + cursor.execute( + """CREATE TABLE IF NOT EXISTS tmdb_to_tvdb_map ( + INTEGER PRIMARY KEY, + 
tmdb_id TEXT UNIQUE, + tvdb_id TEXT UNIQUE, + expiration_date TEXT)""" ) cursor.execute( """CREATE TABLE IF NOT EXISTS letterboxd_map ( @@ -68,145 +80,85 @@ class Cache: self.expiration = expiration self.cache_path = cache - def get_ids_from_imdb(self, imdb_id): - tmdb_id, tmdb_expired = self.get_tmdb_id("movie", imdb_id=imdb_id) - tvdb_id, tvdb_expired = self.get_tvdb_id("show", imdb_id=imdb_id) - return tmdb_id, tvdb_id - - def get_tmdb_id(self, media_type, plex_guid=None, imdb_id=None, tvdb_id=None, anidb_id=None): - return self.get_id_from(media_type, "tmdb_id", plex_guid=plex_guid, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id) - - def get_imdb_id(self, media_type, plex_guid=None, tmdb_id=None, tvdb_id=None, anidb_id=None): - return self.get_id_from(media_type, "imdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id) - - def get_tvdb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, anidb_id=None): - return self.get_id_from(media_type, "tvdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, anidb_id=anidb_id) - - def get_anidb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None): - return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id) - - def get_id_from(self, media_type, id_from, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None): - if plex_guid: return self.get_id(media_type, "plex_guid", id_from, plex_guid) - elif tmdb_id: return self.get_id(media_type, "tmdb_id", id_from, tmdb_id) - elif imdb_id: return self.get_id(media_type, "imdb_id", id_from, imdb_id) - elif tvdb_id: return self.get_id(media_type, "tvdb_id", id_from, tvdb_id) - elif anidb_id: return self.get_id(media_type, "anidb_id", id_from, anidb_id) - else: return None, None - - def get_id(self, media_type, from_id, to_id, key): + def query_guid_map(self, plex_guid): id_to_return = None + media_type = None expired = None with 
sqlite3.connect(self.cache_path) as connection: connection.row_factory = sqlite3.Row with closing(connection.cursor()) as cursor: - cursor.execute(f"SELECT * FROM guids WHERE {from_id} = ? AND media_type = ?", (key, media_type)) - row = cursor.fetchone() - if row and row[to_id]: - datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d") - time_between_insertion = datetime.now() - datetime_object - id_to_return = int(row[to_id]) - expired = time_between_insertion.days > self.expiration - return id_to_return, expired - - def get_ids(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None): - ids_to_return = {} - expired = None - if plex_guid: - key = plex_guid - key_type = "plex_guid" - elif tmdb_id: - key = tmdb_id - key_type = "tmdb_id" - elif imdb_id: - key = imdb_id - key_type = "imdb_id" - elif tvdb_id: - key = tvdb_id - key_type = "tvdb_id" - else: - raise Failed("ID Required") - with sqlite3.connect(self.cache_path) as connection: - connection.row_factory = sqlite3.Row - with closing(connection.cursor()) as cursor: - cursor.execute(f"SELECT * FROM guids WHERE {key_type} = ? 
AND media_type = ?", (key, media_type)) + cursor.execute(f"SELECT * FROM guid_map WHERE plex_guid = ?", (plex_guid,)) row = cursor.fetchone() if row: - if row["plex_guid"]: ids_to_return["plex"] = row["plex_guid"] - if row["tmdb_id"]: ids_to_return["tmdb"] = int(row["tmdb_id"]) - if row["imdb_id"]: ids_to_return["imdb"] = row["imdb_id"] - if row["tvdb_id"]: ids_to_return["tvdb"] = int(row["tvdb_id"]) - if row["anidb_id"]: ids_to_return["anidb"] = int(row["anidb_id"]) - datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d") - time_between_insertion = datetime.now() - datetime_object + time_between_insertion = datetime.now() - datetime.strptime(row["expiration_date"], "%Y-%m-%d") + id_to_return = row["t_id"] + media_type = row["media_type"] expired = time_between_insertion.days > self.expiration - return ids_to_return, expired + return id_to_return, media_type, expired - def update_guid(self, media_type, plex_guid, tmdb_id, imdb_id, tvdb_id, anidb_id, expired): - expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration))) - with sqlite3.connect(self.cache_path) as connection: - connection.row_factory = sqlite3.Row - with closing(connection.cursor()) as cursor: - cursor.execute("INSERT OR IGNORE INTO guids(plex_guid) VALUES(?)", (plex_guid,)) - cursor.execute( - """UPDATE guids SET - tmdb_id = ?, - imdb_id = ?, - tvdb_id = ?, - anidb_id = ?, - expiration_date = ?, - media_type = ? - WHERE plex_guid = ?""", (tmdb_id, imdb_id, tvdb_id, anidb_id, expiration_date.strftime("%Y-%m-%d"), media_type, plex_guid)) - if imdb_id and (tmdb_id or tvdb_id): - cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,)) - cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? 
WHERE imdb_id = ?", (tmdb_id if media_type == "movie" else tvdb_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id)) + def update_guid_map(self, media_type, plex_guid, t_id, expired): + self._update_map("guid_map", "plex_guid", plex_guid, "t_id", t_id, expired, media_type=media_type) - def get_tmdb_from_imdb(self, imdb_id): return self.query_imdb_map("movie", imdb_id) - def get_tvdb_from_imdb(self, imdb_id): return self.query_imdb_map("show", imdb_id) - def query_imdb_map(self, media_type, imdb_id): - id_to_return = None - expired = None - with sqlite3.connect(self.cache_path) as connection: - connection.row_factory = sqlite3.Row - with closing(connection.cursor()) as cursor: - cursor.execute("SELECT * FROM imdb_map WHERE imdb_id = ? AND media_type = ?", (imdb_id, media_type)) - row = cursor.fetchone() - if row and row["t_id"]: - datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d") - time_between_insertion = datetime.now() - datetime_object - id_to_return = int(row["t_id"]) - expired = time_between_insertion.days > self.expiration - return id_to_return, expired + def query_imdb_to_tmdb_map(self, media_type, _id, imdb=True): + from_id = "imdb_id" if imdb else "tmdb_id" + to_id = "tmdb_id" if imdb else "imdb_id" + return self._query_map("imdb_to_tmdb_map", _id, from_id, to_id, media_type=media_type) - def update_imdb(self, media_type, expired, imdb_id, t_id): - expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration))) - with sqlite3.connect(self.cache_path) as connection: - connection.row_factory = sqlite3.Row - with closing(connection.cursor()) as cursor: - cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,)) - cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? 
WHERE imdb_id = ?", (t_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id)) + def update_imdb_to_tmdb_map(self, media_type, expired, imdb_id, tmdb_id): + self._update_map("imdb_to_tmdb_map", "imdb_id", imdb_id, "tmdb_id", tmdb_id, expired, media_type=media_type) + + def query_imdb_to_tvdb_map(self, _id, imdb=True): + from_id = "imdb_id" if imdb else "tvdb_id" + to_id = "tvdb_id" if imdb else "imdb_id" + return self._query_map("imdb_to_tvdb_map", _id, from_id, to_id) + + def update_imdb_to_tvdb_map(self, expired, imdb_id, tvdb_id): + self._update_map("imdb_to_tvdb_map", "imdb_id", imdb_id, "tvdb_id", tvdb_id, expired) + + def query_tmdb_to_tvdb_map(self, _id, tmdb=True): + from_id = "tmdb_id" if tmdb else "tvdb_id" + to_id = "tvdb_id" if tmdb else "tmdb_id" + return self._query_map("tmdb_to_tvdb_map", _id, from_id, to_id) + + def update_tmdb_to_tvdb_map(self, expired, tmdb_id, tvdb_id): + self._update_map("tmdb_to_tvdb_map", "tmdb_id", tmdb_id, "tvdb_id", tvdb_id, expired) def query_letterboxd_map(self, letterboxd_id): - tmdb_id = None + return self._query_map("letterboxd_map", letterboxd_id, "letterboxd_id", "tmdb_id") + + def update_letterboxd_map(self, expired, letterboxd_id, tmdb_id): + self._update_map("letterboxd_map", "letterboxd_id", letterboxd_id, "tmdb_id", tmdb_id, expired) + + def _query_map(self, map_name, _id, from_id, to_id, media_type=None): + id_to_return = None expired = None with sqlite3.connect(self.cache_path) as connection: connection.row_factory = sqlite3.Row with closing(connection.cursor()) as cursor: - cursor.execute("SELECT * FROM letterboxd_map WHERE letterboxd_id = ?", (letterboxd_id, )) + if media_type is None: + cursor.execute(f"SELECT * FROM {map_name} WHERE {from_id} = ?", (_id,)) + else: + cursor.execute(f"SELECT * FROM {map_name} WHERE {from_id} = ? 
AND media_type = ?", (_id, media_type)) row = cursor.fetchone() - if row and row["tmdb_id"]: + if row and row[to_id]: datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d") time_between_insertion = datetime.now() - datetime_object - tmdb_id = int(row["tmdb_id"]) + id_to_return = int(row[to_id]) expired = time_between_insertion.days > self.expiration - return tmdb_id, expired + return id_to_return, expired - def update_letterboxd(self, expired, letterboxd_id, tmdb_id): + def _update_map(self, map_name, val1_name, val1, val2_name, val2, expired, media_type=None): expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration))) with sqlite3.connect(self.cache_path) as connection: connection.row_factory = sqlite3.Row with closing(connection.cursor()) as cursor: - cursor.execute("INSERT OR IGNORE INTO letterboxd_map(letterboxd_id) VALUES(?)", (letterboxd_id,)) - cursor.execute("UPDATE letterboxd_map SET tmdb_id = ?, expiration_date = ? WHERE letterboxd_id = ?", (tmdb_id, expiration_date.strftime("%Y-%m-%d"), letterboxd_id)) + cursor.execute(f"INSERT OR IGNORE INTO {map_name}({val1_name}) VALUES(?)", (val1,)) + if media_type is None: + sql = f"UPDATE {map_name} SET {val2_name} = ?, expiration_date = ? WHERE {val1_name} = ?" + cursor.execute(sql, (val2, expiration_date.strftime("%Y-%m-%d"), val1)) + else: + sql = f"UPDATE {map_name} SET {val2_name} = ?, expiration_date = ?{'' if media_type is None else ', media_type = ?'} WHERE {val1_name} = ?" 
+ cursor.execute(sql, (val2, expiration_date.strftime("%Y-%m-%d"), media_type, val1)) def query_omdb(self, imdb_id): omdb_dict = {} @@ -260,7 +212,7 @@ class Cache: expired = time_between_insertion.days > self.expiration return ids, expired - def update_anime(self, expired, anime_ids): + def update_anime_map(self, expired, anime_ids): expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration))) with sqlite3.connect(self.cache_path) as connection: connection.row_factory = sqlite3.Row diff --git a/modules/config.py b/modules/config.py index 378d48fe..d59afcbd 100644 --- a/modules/config.py +++ b/modules/config.py @@ -1,10 +1,9 @@ -import glob, logging, os, re, requests, time +import logging, os from modules import util from modules.anidb import AniDBAPI from modules.anilist import AniListAPI -from modules.arms import ArmsAPI -from modules.builder import CollectionBuilder from modules.cache import Cache +from modules.convert import Convert from modules.imdb import IMDbAPI from modules.letterboxd import LetterboxdAPI from modules.mal import MyAnimeListAPI @@ -17,8 +16,6 @@ from modules.tmdb import TMDbAPI from modules.trakttv import TraktAPI from modules.tvdb import TVDbAPI from modules.util import Failed -from plexapi.exceptions import BadRequest -from retrying import retry from ruamel import yaml logger = logging.getLogger("Plex Meta Manager") @@ -47,7 +44,7 @@ sonarr_series_types = { "daily": "Episodes released daily or less frequently that use year-month-day (2017-05-25)", "anime": "Episodes released using an absolute episode number" } -mass_genre_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"} +mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"} library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"} class Config: @@ -144,7 +141,7 @@ class Config: else: message = f"Path 
{os.path.abspath(data[attribute])} does not exist" elif var_type == "list": return util.get_list(data[attribute]) elif var_type == "list_path": - temp_list = [path for path in util.get_list(data[attribute], split=True) if os.path.exists(os.path.abspath(path))] + temp_list = [p for p in util.get_list(data[attribute], split=True) if os.path.exists(os.path.abspath(p))] if len(temp_list) > 0: return temp_list else: message = "No Paths exist" elif var_type == "lower_list": return util.get_list(data[attribute], lower=True) @@ -153,7 +150,7 @@ class Config: if var_type == "path" and default and os.path.exists(os.path.abspath(default)): return default elif var_type == "path" and default: - if attribute in data and data[attribute]: + if data and attribute in data and data[attribute]: message = f"neither {data[attribute]} or the default path {default} could be found" else: message = f"no {text} found and the default path {default} could not be found" @@ -206,7 +203,7 @@ class Config: try: self.tmdb["apikey"] = check_for_attribute(self.data, "apikey", parent="tmdb", throw=True) except Failed as e: raise Failed(e) self.tmdb["language"] = check_for_attribute(self.data, "language", parent="tmdb", default="en") - self.TMDb = TMDbAPI(self.tmdb) + self.TMDb = TMDbAPI(self, self.tmdb) logger.info(f"TMDb Connection {'Failed' if self.TMDb is None else 'Successful'}") else: raise Failed("Config Error: tmdb attribute not found") @@ -265,8 +262,7 @@ class Config: self.TVDb = TVDbAPI(self) self.IMDb = IMDbAPI(self) self.AniDB = AniDBAPI(self) - self.Arms = ArmsAPI(self) - self.AniDBIDs = self.AniDB.get_AniDB_IDs() + self.Convert = Convert(self) self.AniList = AniListAPI(self) self.Letterboxd = LetterboxdAPI(self) @@ -278,6 +274,9 @@ class Config: self.general["plex"]["url"] = check_for_attribute(self.data, "url", parent="plex", default_is_none=True) self.general["plex"]["token"] = check_for_attribute(self.data, "token", parent="plex", default_is_none=True) self.general["plex"]["timeout"] 
= check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60) + self.general["plex"]["clean_bundles"] = check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False) + self.general["plex"]["empty_trash"] = check_for_attribute(self.data, "empty_trash", parent="plex", var_type="bool", default=False) + self.general["plex"]["optimize"] = check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False) self.general["radarr"] = {} self.general["radarr"]["url"] = check_for_attribute(self.data, "url", parent="radarr", default_is_none=True) @@ -367,20 +366,64 @@ class Config: params["save_missing"] = check_for_attribute(lib, "save_missing", var_type="bool", default=self.general["save_missing"], do_print=False, save=False) if lib and "mass_genre_update" in lib and lib["mass_genre_update"]: - params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_genre_update_options, default_is_none=True, save=False) + params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False) + if self.OMDb is None and params["mass_genre_update"] == "omdb": + params["mass_genre_update"] = None + logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection") else: params["mass_genre_update"] = None - if params["mass_genre_update"] == "omdb" and self.OMDb is None: - params["mass_genre_update"] = None - logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection") + if lib and "mass_audience_rating_update" in lib and lib["mass_audience_rating_update"]: + params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False) + if self.OMDb is None and params["mass_audience_rating_update"] == "omdb": + params["mass_audience_rating_update"] = None + 
logger.error("Config Error: mass_audience_rating_update cannot be omdb without a successful OMDb Connection") + else: + params["mass_audience_rating_update"] = None + + if lib and "mass_critic_rating_update" in lib and lib["mass_critic_rating_update"]: + params["mass_critic_rating_update"] = check_for_attribute(lib, "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False) + if self.OMDb is None and params["mass_critic_rating_update"] == "omdb": + params["mass_critic_rating_update"] = None + logger.error("Config Error: mass_critic_rating_update cannot be omdb without a successful OMDb Connection") + else: + params["mass_critic_rating_update"] = None try: - params["metadata_path"] = check_for_attribute(lib, "metadata_path", var_type="path", default=os.path.join(default_dir, f"{library_name}.yml"), throw=True) + if lib and "metadata_path" in lib: + params["metadata_path"] = [] + if lib["metadata_path"] is None: + raise Failed("Config Error: metadata_path attribute is blank") + paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]] + for path in paths_to_check: + if isinstance(path, dict): + if "url" in path: + if path["url"] is None: + logger.error("Config Error: metadata_path url is blank") + else: + params["metadata_path"].append(("URL", path["url"])) + if "git" in path: + if path["git"] is None: + logger.error("Config Error: metadata_path git is blank") + else: + params["metadata_path"].append(("Git", path['git'])) + if "file" in path: + if path["file"] is None: + logger.error("Config Error: metadata_path file is blank") + else: + params["metadata_path"].append(("File", path['file'])) + else: + params["metadata_path"].append(("File", path)) + else: + params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"))] + params["default_dir"] = default_dir params["plex"] = {} params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", 
default=self.general["plex"]["url"], req_default=True, save=False) params["plex"]["token"] = check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False) params["plex"]["timeout"] = check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False) + params["plex"]["clean_bundles"] = check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False) + params["plex"]["empty_trash"] = check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False) + params["plex"]["optimize"] = check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False) library = PlexAPI(params, self.TMDb, self.TVDb) logger.info(f"{params['name']} Library Connection Successful") except Failed as e: @@ -388,7 +431,7 @@ class Config: logger.info(f"{params['name']} Library Connection Failed") continue - if self.general["radarr"]["url"] or "radarr" in lib: + if self.general["radarr"]["url"] or (lib and "radarr" in lib): logger.info("") logger.info(f"Connecting to {params['name']} library's Radarr...") radarr_params = {} @@ -408,7 +451,7 @@ class Config: util.print_multiline(e, error=True) logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}") - if self.general["sonarr"]["url"] or "sonarr" in lib: + if self.general["sonarr"]["url"] or (lib and "sonarr" in lib): logger.info("") logger.info(f"Connecting to {params['name']} library's Sonarr...") sonarr_params = {} @@ -434,7 +477,7 @@ class Config: util.print_multiline(e, error=True) logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}") - if self.general["tautulli"]["url"] or "tautulli" in lib: + if self.general["tautulli"]["url"] or (lib 
and "tautulli" in lib): logger.info("") logger.info(f"Connecting to {params['name']} library's Tautulli...") tautulli_params = {} @@ -458,441 +501,3 @@ class Config: util.separator() - def update_libraries(self, test, requested_collections, resume_from): - for library in self.libraries: - os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) - logger.info("") - util.separator(f"{library.name} Library") - logger.info("") - util.separator(f"Mapping {library.name} Library") - logger.info("") - movie_map, show_map = self.map_guids(library) - if not test and not resume_from: - if library.mass_genre_update: - self.mass_metadata(library, movie_map, show_map) - try: library.update_metadata(self.TMDb, test) - except Failed as e: logger.error(e) - logger.info("") - util.separator(f"{library.name} Library {'Test ' if test else ''}Collections") - collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections - if resume_from and resume_from not in collections: - logger.warning(f"Collection: {resume_from} not in {library.name}") - continue - if collections: - for mapping_name, collection_attrs in collections.items(): - if test and ("test" not in collection_attrs or collection_attrs["test"] is not True): - no_template_test = True - if "template" in collection_attrs and collection_attrs["template"]: - for data_template in util.get_list(collection_attrs["template"], split=False): - if "name" in data_template \ - and data_template["name"] \ - and library.templates \ - and data_template["name"] in library.templates \ - and library.templates[data_template["name"]] \ - and "test" in library.templates[data_template["name"]] \ - and library.templates[data_template["name"]]["test"] is True: - no_template_test = False - if no_template_test: - continue - try: - if resume_from and resume_from != mapping_name: - continue - elif resume_from == mapping_name: - resume_from = None - 
logger.info("") - util.separator(f"Resuming Collections") - - logger.info("") - util.separator(f"{mapping_name} Collection") - logger.info("") - - rating_key_map = {} - try: - builder = CollectionBuilder(self, library, mapping_name, collection_attrs) - except Failed as ef: - util.print_multiline(ef, error=True) - continue - except Exception as ee: - util.print_stacktrace() - logger.error(ee) - continue - - try: - collection_obj = library.get_collection(mapping_name) - collection_name = collection_obj.title - except Failed: - collection_obj = None - collection_name = mapping_name - - if len(builder.schedule) > 0: - util.print_multiline(builder.schedule, info=True) - - logger.info("") - if builder.sync: - logger.info("Sync Mode: sync") - if collection_obj: - for item in collection_obj.items(): - rating_key_map[item.ratingKey] = item - else: - logger.info("Sync Mode: append") - - for i, f in enumerate(builder.filters): - if i == 0: - logger.info("") - logger.info(f"Collection Filter {f[0]}: {f[1]}") - - builder.run_methods(collection_obj, collection_name, rating_key_map, movie_map, show_map) - - try: - plex_collection = library.get_collection(collection_name) - except Failed as e: - logger.debug(e) - continue - - builder.update_details(plex_collection) - - if builder.run_again and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0): - library.run_again.append(builder) - - except Exception as e: - util.print_stacktrace() - logger.error(f"Unknown Error: {e}") - - if library.assets_for_all is True and not test and not requested_collections: - logger.info("") - util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library") - logger.info("") - for item in library.get_all(): - folder = os.path.basename(os.path.dirname(item.locations[0]) if library.is_movie else item.locations[0]) - for ad in library.asset_directory: - if library.asset_folders: - poster_path = os.path.join(ad, folder, "poster.*") - else: - 
poster_path = os.path.join(ad, f"{folder}.*") - matches = glob.glob(poster_path) - if len(matches) > 0: - item.uploadPoster(filepath=os.path.abspath(matches[0])) - logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {os.path.abspath(matches[0])}") - if library.asset_folders: - matches = glob.glob(os.path.join(ad, folder, "background.*")) - if len(matches) > 0: - item.uploadArt(filepath=os.path.abspath(matches[0])) - logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {os.path.abspath(matches[0])}") - if library.is_show: - for season in item.seasons(): - matches = glob.glob(os.path.join(ad, folder, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*")) - if len(matches) > 0: - season_path = os.path.abspath(matches[0]) - season.uploadPoster(filepath=season_path) - logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}") - for episode in season.episodes(): - matches = glob.glob(os.path.join(ad, folder, f"{episode.seasonEpisode.upper()}.*")) - if len(matches) > 0: - episode_path = os.path.abspath(matches[0]) - episode.uploadPoster(filepath=episode_path) - logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}") - - if library.show_unmanaged is True and not test and not requested_collections: - logger.info("") - util.separator(f"Unmanaged Collections in {library.name} Library") - logger.info("") - unmanaged_count = 0 - collections_in_plex = [str(plex_col) for plex_col in collections] - for col in library.get_all_collections(): - if col.title not in collections_in_plex: - logger.info(col.title) - unmanaged_count += 1 - logger.info("{} Unmanaged Collections".format(unmanaged_count)) - else: - logger.info("") - logger.error("No collection to update") - - has_run_again = False - for library in self.libraries: - if library.run_again: - has_run_again = True - break - 
- if has_run_again: - logger.info("") - util.separator("Run Again") - logger.info("") - length = 0 - for x in range(1, self.general["run_again_delay"] + 1): - length = util.print_return(length, f"Waiting to run again in {self.general['run_again_delay'] - x + 1} minutes") - for y in range(60): - time.sleep(1) - util.print_end(length) - for library in self.libraries: - if library.run_again: - os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) - logger.info("") - util.separator(f"{library.name} Library Run Again") - logger.info("") - collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections - if collections: - util.separator(f"Mapping {library.name} Library") - logger.info("") - movie_map, show_map = self.map_guids(library) - for builder in library.run_again: - logger.info("") - util.separator(f"{builder.name} Collection") - logger.info("") - try: - collection_obj = library.get_collection(builder.name) - except Failed as e: - util.print_multiline(e, error=True) - continue - builder.run_collections_again(collection_obj, movie_map, show_map) - - def mass_metadata(self, library, movie_map, show_map): - length = 0 - logger.info("") - util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}") - logger.info("") - items = library.Plex.all() - for i, item in enumerate(items, 1): - length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}") - ids = {} - if self.Cache: - ids, expired = self.Cache.get_ids("movie" if library.is_movie else "show", plex_guid=item.guid) - elif library.is_movie: - for tmdb, rating_keys in movie_map.items(): - if item.ratingKey in rating_keys: - ids["tmdb"] = tmdb - break - else: - for tvdb, rating_keys in show_map.items(): - if item.ratingKey in rating_keys: - ids["tvdb"] = tvdb - break - - if library.mass_genre_update: - if library.mass_genre_update == "tmdb": - if "tmdb" 
not in ids: - util.print_end(length, f"{item.title[:25]:<25} | No TMDb for Guid: {item.guid}") - continue - try: - tmdb_item = self.TMDb.get_movie(ids["tmdb"]) if library.is_movie else self.TMDb.get_show(ids["tmdb"]) - except Failed as e: - util.print_end(length, str(e)) - continue - new_genres = [genre.name for genre in tmdb_item.genres] - elif library.mass_genre_update in ["omdb", "imdb"]: - if self.OMDb.limit is True: - break - if "imdb" not in ids: - util.print_end(length, f"{item.title[:25]:<25} | No IMDb for Guid: {item.guid}") - continue - try: - omdb_item = self.OMDb.get_omdb(ids["imdb"]) - except Failed as e: - util.print_end(length, str(e)) - continue - new_genres = omdb_item.genres - else: - raise Failed - item_genres = [genre.tag for genre in item.genres] - display_str = "" - for genre in (g for g in item_genres if g not in new_genres): - item.removeGenre(genre) - display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}" - for genre in (g for g in new_genres if g not in item_genres): - item.addGenre(genre) - display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}" - if len(display_str) > 0: - util.print_end(length, f"{item.title[:25]:<25} | Genres | {display_str}") - - def map_guids(self, library): - movie_map = {} - show_map = {} - length = 0 - logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}") - items = library.Plex.all() - for i, item in enumerate(items, 1): - length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}") - try: - id_type, main_id = self.get_id(item, library, length) - except BadRequest: - util.print_stacktrace() - util.print_end(length, f"{'Cache | ! 
|' if self.Cache else 'Mapping Error:'} | {item.guid} for {item.title} not found") - continue - if isinstance(main_id, list): - if id_type == "movie": - for m in main_id: - if m in movie_map: - movie_map[m].append(item.ratingKey) - else: - movie_map[m] = [item.ratingKey] - elif id_type == "show": - for m in main_id: - if m in show_map: - show_map[m].append(item.ratingKey) - else: - show_map[m] = [item.ratingKey] - else: - if id_type == "movie": - if main_id in movie_map: - movie_map[main_id].append(item.ratingKey) - else: - movie_map[main_id] = [item.ratingKey] - elif id_type == "show": - if main_id in show_map: - show_map[main_id].append(item.ratingKey) - else: - show_map[main_id] = [item.ratingKey] - util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}") - return movie_map, show_map - - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def get_guids(self, item): - return item.guids - - def get_id(self, item, library, length): - expired = None - tmdb_id = None - imdb_id = None - tvdb_id = None - anidb_id = None - mal_id = None - error_message = None - if self.Cache: - if library.is_movie: tmdb_id, expired = self.Cache.get_tmdb_id("movie", plex_guid=item.guid) - else: tvdb_id, expired = self.Cache.get_tvdb_id("show", plex_guid=item.guid) - if not tvdb_id and library.is_show: - tmdb_id, expired = self.Cache.get_tmdb_id("show", plex_guid=item.guid) - anidb_id, expired = self.Cache.get_anidb_id("show", plex_guid=item.guid) - if expired or (not tmdb_id and library.is_movie) or (not tvdb_id and not tmdb_id and library.is_show): - guid = requests.utils.urlparse(item.guid) - item_type = guid.scheme.split(".")[-1] - check_id = guid.netloc - - if item_type == "plex" and check_id == "movie": - try: - for guid_tag in self.get_guids(item): - url_parsed = requests.utils.urlparse(guid_tag.id) - if url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc) - elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc - except 
requests.exceptions.ConnectionError: - util.print_stacktrace() - logger.error(f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | No External GUIDs found for {item.title}") - return None, None - elif item_type == "plex" and check_id == "show": - try: - for guid_tag in self.get_guids(item): - url_parsed = requests.utils.urlparse(guid_tag.id) - if url_parsed.scheme == "tvdb": tvdb_id = int(url_parsed.netloc) - elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc - elif url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc) - except requests.exceptions.ConnectionError: - util.print_stacktrace() - logger.error(f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | No External GUIDs found for {item.title}") - return None, None - elif item_type == "imdb": imdb_id = check_id - elif item_type == "thetvdb": tvdb_id = int(check_id) - elif item_type == "themoviedb": tmdb_id = int(check_id) - elif item_type == "hama": - if check_id.startswith("tvdb"): tvdb_id = int(re.search("-(.*)", check_id).group(1)) - elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1) - else: error_message = f"Hama Agent ID: {check_id} not supported" - elif item_type == "myanimelist": mal_id = check_id - elif item_type == "local": error_message = "No match in Plex" - else: error_message = f"Agent {item_type} not supported" - - if not error_message: - if mal_id and not anidb_id: - try: anidb_id = self.Arms.mal_to_anidb(mal_id) - except Failed: pass - if anidb_id and not tvdb_id: - try: tvdb_id = self.Arms.anidb_to_tvdb(anidb_id) - except Failed: pass - if anidb_id and not imdb_id: - try: imdb_id = self.Arms.anidb_to_imdb(anidb_id) - except Failed: pass - if not tmdb_id and imdb_id: - if isinstance(imdb_id, list): - tmdb_id = [] - new_imdb_id = [] - for imdb in imdb_id: - try: - tmdb_id.append(self.TMDb.convert_imdb_to_tmdb(imdb)) - new_imdb_id.append(imdb) - except Failed: - if self.Trakt: - try: - 
tmdb_id.append(self.Trakt.convert_imdb_to_tmdb(imdb)) - new_imdb_id.append(imdb) - except Failed: - continue - else: - continue - imdb_id = new_imdb_id - else: - try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id) - except Failed: pass - if not tmdb_id and self.Trakt: - try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id) - except Failed: pass - if not tmdb_id and tvdb_id and library.is_show: - try: tmdb_id = self.TMDb.convert_tvdb_to_tmdb(tvdb_id) - except Failed: pass - if not tmdb_id and self.Trakt: - try: tmdb_id = self.Trakt.convert_tvdb_to_tmdb(tvdb_id) - except Failed: pass - if not imdb_id and tmdb_id and library.is_movie: - try: imdb_id = self.TMDb.convert_tmdb_to_imdb(tmdb_id) - except Failed: pass - if not imdb_id and self.Trakt: - try: imdb_id = self.Trakt.convert_tmdb_to_imdb(tmdb_id) - except Failed: pass - if not imdb_id and tvdb_id and library.is_show: - try: imdb_id = self.TMDb.convert_tvdb_to_imdb(tvdb_id) - except Failed: pass - if not imdb_id and self.Trakt: - try: imdb_id = self.Trakt.convert_tvdb_to_imdb(tvdb_id) - except Failed: pass - if not tvdb_id and library.is_show: - if tmdb_id: - try: tvdb_id = self.TMDb.convert_tmdb_to_tvdb(tmdb_id) - except Failed: pass - if not tvdb_id and self.Trakt: - try: tvdb_id = self.Trakt.convert_tmdb_to_tvdb(tmdb_id) - except Failed: pass - if not tvdb_id and imdb_id: - try: tvdb_id = self.TMDb.convert_imdb_to_tvdb(imdb_id) - except Failed: pass - if not tvdb_id and self.Trakt: - try: tvdb_id = self.Trakt.convert_imdb_to_tvdb(tmdb_id) - except Failed: pass - - if (not tmdb_id and library.is_movie) or (not tvdb_id and not (anidb_id and tmdb_id) and library.is_show): - service_name = "TMDb ID" if library.is_movie else "TVDb ID" - - if self.Trakt: api_name = "TMDb or Trakt" - else: api_name = "TMDb" - - if tmdb_id and imdb_id: id_name = f"TMDb ID: {tmdb_id} or IMDb ID: {imdb_id}" - elif imdb_id and tvdb_id: id_name = f"IMDb ID: {imdb_id} or TVDb ID: {tvdb_id}" - elif tmdb_id: id_name = f"TMDb ID: 
{tmdb_id}" - elif imdb_id: id_name = f"IMDb ID: {imdb_id}" - elif tvdb_id: id_name = f"TVDb ID: {tvdb_id}" - else: id_name = None - - if anidb_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert AniDB ID: {anidb_id} to TMDb ID or TVDb ID" - elif id_name: error_message = f"Unable to convert {id_name} to {service_name} using {api_name}" - else: error_message = f"No ID to convert to {service_name}" - if self.Cache and ((tmdb_id and library.is_movie) or ((tvdb_id or (anidb_id and tmdb_id)) and library.is_show)): - if not isinstance(tmdb_id, list): tmdb_id = [tmdb_id] - if not isinstance(imdb_id, list): imdb_id = [imdb_id] - for i in range(len(tmdb_id)): - try: imdb_value = imdb_id[i] - except IndexError: imdb_value = None - util.print_end(length, f"Cache | {'^' if expired is True else '+'} | {item.guid:<46} | {tmdb_id[i] if tmdb_id[i] else 'None':<6} | {imdb_value if imdb_value else 'None':<10} | {tvdb_id if tvdb_id else 'None':<6} | {anidb_id if anidb_id else 'None':<5} | {item.title}") - self.Cache.update_guid("movie" if library.is_movie else "show", item.guid, tmdb_id[i], imdb_value, tvdb_id, anidb_id, expired) - - if tmdb_id and library.is_movie: return "movie", tmdb_id - elif tvdb_id and library.is_show: return "show", tvdb_id - elif anidb_id and tmdb_id: return "movie", tmdb_id - else: - util.print_end(length, f"{'Cache | ! 
|' if self.Cache else 'Mapping Error:'} {item.guid:<46} | {error_message} for {item.title}") - return None, None diff --git a/modules/convert.py b/modules/convert.py new file mode 100644 index 00000000..7eaa939b --- /dev/null +++ b/modules/convert.py @@ -0,0 +1,360 @@ +import logging, re, requests +from lxml import html +from modules import util +from modules.util import Failed +from retrying import retry + +logger = logging.getLogger("Plex Meta Manager") + +class Convert: + def __init__(self, config): + self.config = config + self.arms_url = "https://relations.yuna.moe/api/ids" + self.anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml" + self.AniDBIDs = self._get_anidb() + + @retry(stop_max_attempt_number=6, wait_fixed=10000) + def _get_anidb(self): + return html.fromstring(requests.get(self.anidb_url).content) + + def _anidb(self, input_id, to_id, fail=False): + ids = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{input_id}')]/@{to_id}") + if len(ids) > 0: + try: + if len(ids[0]) > 0: + return util.get_list(ids[0]) if to_id == "imdbid" else int(ids[0]) + raise ValueError + except ValueError: + fail_text = f"Convert Error: No {util.pretty_ids[to_id]} ID found for AniDB ID: {input_id}" + else: + fail_text = f"Convert Error: AniDB ID: {input_id} not found" + if fail: + raise Failed(fail_text) + return [] if to_id == "imdbid" else None + + @retry(stop_max_attempt_number=6, wait_fixed=10000) + def _request(self, ids): + return requests.post(self.arms_url, json=ids).json() + + def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None): + all_ids = [] + def collect_ids(ids, id_name): + if ids: + if isinstance(ids, list): + all_ids.extend([{id_name: a_id} for a_id in ids]) + else: + all_ids.append({id_name: ids}) + collect_ids(anilist_ids, "anilist") + collect_ids(anidb_ids, "anidb") + collect_ids(mal_ids, "myanimelist") + converted_ids = [] + unconverted_ids = [] + unconverted_id_sets = [] + + for anime_dict 
in all_ids: + if self.config.Cache: + for id_type, anime_id in anime_dict.items(): + query_ids, expired = self.config.Cache.query_anime_map(anime_id, id_type) + if query_ids and not expired: + converted_ids.append(query_ids) + else: + unconverted_ids.append({id_type: anime_id}) + if len(unconverted_ids) == 100: + unconverted_id_sets.append(unconverted_ids) + unconverted_ids = [] + else: + unconverted_ids.append(anime_dict) + if len(unconverted_ids) == 100: + unconverted_id_sets.append(unconverted_ids) + unconverted_ids = [] + for unconverted_id_set in unconverted_id_sets: + for anime_ids in self._request(unconverted_id_set): + if anime_ids: + if self.config.Cache: + self.config.Cache.update_anime_map(False, anime_ids) + converted_ids.append(anime_ids) + return converted_ids + + def anidb_to_ids(self, anidb_list): + show_ids = [] + movie_ids = [] + for anidb_id in anidb_list: + imdb_ids = self.anidb_to_imdb(anidb_id) + tmdb_ids = [] + if imdb_ids: + for imdb_id in imdb_ids: + tmdb_id = self.imdb_to_tmdb(imdb_id) + if tmdb_id: + tmdb_ids.append(tmdb_id) + tvdb_id = self.anidb_to_tvdb(anidb_id) + if tvdb_id: + show_ids.append(tvdb_id) + if tmdb_ids: + movie_ids.extend(tmdb_ids) + if not tvdb_id and not tmdb_ids: + logger.error(f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}") + return movie_ids, show_ids + + def anilist_to_ids(self, anilist_ids): + anidb_ids = [] + for id_set in self._arms_ids(anilist_ids=anilist_ids): + if id_set["anidb"] is not None: + anidb_ids.append(id_set["anidb"]) + else: + logger.error(f"Convert Error: AniDB ID not found for AniList ID: {id_set['anilist']}") + return self.anidb_to_ids(anidb_ids) + + def myanimelist_to_ids(self, mal_ids): + anidb_ids = [] + for id_set in self._arms_ids(mal_ids=mal_ids): + if id_set["anidb"] is not None: + anidb_ids.append(id_set["anidb"]) + else: + logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}") + return self.anidb_to_ids(anidb_ids) + + def 
anidb_to_tvdb(self, anidb_id, fail=False): + return self._anidb(anidb_id, "tvdbid", fail=fail) + + def anidb_to_imdb(self, anidb_id, fail=False): + return self._anidb(anidb_id, "imdbid", fail=fail) + + def tmdb_to_imdb(self, tmdb_id, is_movie=True, fail=False): + media_type = "movie" if is_movie else "show" + expired = False + if self.config.Cache and is_movie: + cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(media_type, tmdb_id, imdb=False) + if cache_id and not expired: + return cache_id + imdb_id = None + try: + imdb_id = self.config.TMDb.convert_from(tmdb_id, "imdb_id", is_movie) + except Failed: + if self.config.Trakt: + try: + imdb_id = self.config.Trakt.convert(tmdb_id, "tmdb", "imdb", "movie" if is_movie else "show") + except Failed: + pass + if fail and imdb_id is None: + raise Failed(f"Convert Error: No IMDb ID Found for TMDb ID: {tmdb_id}") + if self.config.Cache and imdb_id: + self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id) + return imdb_id + + def imdb_to_tmdb(self, imdb_id, is_movie=True, fail=False): + media_type = "movie" if is_movie else "show" + expired = False + if self.config.Cache and is_movie: + cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(media_type, imdb_id, imdb=True) + if cache_id and not expired: + return cache_id + tmdb_id = None + try: + tmdb_id = self.config.TMDb.convert_to(imdb_id, "imdb_id", is_movie) + except Failed: + if self.config.Trakt: + try: + tmdb_id = self.config.Trakt.convert(imdb_id, "imdb", "tmdb", media_type) + except Failed: + pass + if fail and tmdb_id is None: + raise Failed(f"Convert Error: No TMDb ID Found for IMDb ID: {imdb_id}") + if self.config.Cache and tmdb_id: + self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id) + return tmdb_id + + def tmdb_to_tvdb(self, tmdb_id, fail=False): + expired = False + if self.config.Cache: + cache_id, expired = self.config.Cache.query_tmdb_to_tvdb_map(tmdb_id, tmdb=True) + if cache_id and 
not expired: + return cache_id + tvdb_id = None + try: + tvdb_id = self.config.TMDb.convert_from(tmdb_id, "tvdb_id", False) + except Failed: + if self.config.Trakt: + try: + tvdb_id = self.config.Trakt.convert(tmdb_id, "tmdb", "tvdb", "show") + except Failed: + pass + if fail and tvdb_id is None: + raise Failed(f"Convert Error: No TVDb ID Found for TMDb ID: {tmdb_id}") + if self.config.Cache and tvdb_id: + self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id) + return tvdb_id + + def tvdb_to_tmdb(self, tvdb_id, fail=False): + expired = False + if self.config.Cache: + cache_id, expired = self.config.Cache.query_tmdb_to_tvdb_map(tvdb_id, tmdb=False) + if cache_id and not expired: + return cache_id + tmdb_id = None + try: + tmdb_id = self.config.TMDb.convert_to(tvdb_id, "tvdb_id", False) + except Failed: + if self.config.Trakt: + try: + tmdb_id = self.config.Trakt.convert(tvdb_id, "tvdb", "tmdb", "show") + except Failed: + pass + if fail and tmdb_id is None: + raise Failed(f"Convert Error: No TMDb ID Found for TVDb ID: {tvdb_id}") + if self.config.Cache and tmdb_id: + self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id) + return tmdb_id + + def tvdb_to_imdb(self, tvdb_id, fail=False): + expired = False + if self.config.Cache: + cache_id, expired = self.config.Cache.query_imdb_to_tvdb_map(tvdb_id, imdb=False) + if cache_id and not expired: + return cache_id + imdb_id = None + try: + imdb_id = self.tmdb_to_imdb(self.tvdb_to_tmdb(tvdb_id), False) + except Failed: + if self.config.Trakt: + try: + imdb_id = self.config.Trakt.convert(tvdb_id, "tvdb", "imdb", "show") + except Failed: + pass + if fail and imdb_id is None: + raise Failed(f"Convert Error: No IMDb ID Found for TVDb ID: {tvdb_id}") + if self.config.Cache and imdb_id: + self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id) + return imdb_id + + def imdb_to_tvdb(self, imdb_id, fail=False): + expired = False + if self.config.Cache: + cache_id, expired = 
self.config.Cache.query_imdb_to_tvdb_map(imdb_id, imdb=True) + if cache_id and not expired: + return cache_id + tvdb_id = None + try: + tvdb_id = self.tmdb_to_tvdb(self.imdb_to_tmdb(imdb_id, False)) + except Failed: + if self.config.Trakt: + try: + tvdb_id = self.config.Trakt.convert(imdb_id, "imdb", "tvdb", "show") + except Failed: + pass + if fail and tvdb_id is None: + raise Failed(f"Convert Error: No TVDb ID Found for IMDb ID: {imdb_id}") + if self.config.Cache and tvdb_id: + self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id) + return tvdb_id + + def get_id(self, item, library, length): + expired = None + if self.config.Cache: + cache_id, media_type, expired = self.config.Cache.query_guid_map(item.guid) + if cache_id and not expired: + media_id_type = "movie" if "movie" in media_type else "show" + return media_id_type, util.get_list(cache_id, int_list=True) + try: + tmdb_id = None + imdb_id = None + tvdb_id = None + anidb_id = None + guid = requests.utils.urlparse(item.guid) + item_type = guid.scheme.split(".")[-1] + check_id = guid.netloc + + if item_type == "plex": + tmdb_id = [] + imdb_id = [] + tvdb_id = [] + try: + for guid_tag in library.get_guids(item): + url_parsed = requests.utils.urlparse(guid_tag.id) + if url_parsed.scheme == "tvdb": tvdb_id.append(int(url_parsed.netloc)) + elif url_parsed.scheme == "imdb": imdb_id.append(url_parsed.netloc) + elif url_parsed.scheme == "tmdb": tmdb_id.append(int(url_parsed.netloc)) + except requests.exceptions.ConnectionError: + util.print_stacktrace() + raise Failed("No External GUIDs found") + elif item_type == "imdb": imdb_id = check_id + elif item_type == "thetvdb": tvdb_id = int(check_id) + elif item_type == "themoviedb": tmdb_id = int(check_id) + elif item_type == "hama": + if check_id.startswith("tvdb"): tvdb_id = int(re.search("-(.*)", check_id).group(1)) + elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1) + else: raise Failed(f"Hama Agent ID: {check_id} not 
supported") + elif item_type == "myanimelist": + anime_ids = self._arms_ids(mal_ids=check_id) + if anime_ids[0] and anime_ids[0]["anidb"]: anidb_id = anime_ids[0]["anidb"] + else: raise Failed(f"Unable to convert MyAnimeList ID: {check_id} to AniDB ID") + elif item_type == "local": raise Failed("No match in Plex") + else: raise Failed(f"Agent {item_type} not supported") + + if anidb_id: + tvdb_id = self.anidb_to_tvdb(anidb_id) + if not tvdb_id: + imdb_id = self.anidb_to_imdb(anidb_id) + if not imdb_id and not tvdb_id: + raise Failed(f"Unable to convert AniDB ID: {anidb_id} to TVDb ID or IMDb ID") + + if not tmdb_id and imdb_id: + if isinstance(imdb_id, list): + tmdb_id = [] + for imdb in imdb_id: + try: + tmdb_id.append(self.imdb_to_tmdb(imdb, fail=True)) + except Failed: + continue + else: + tmdb_id = self.imdb_to_tmdb(imdb_id) + if not tmdb_id: + raise Failed(f"Unable to convert IMDb ID: {util.compile_list(imdb_id)} to TMDb ID") + if not anidb_id and not tvdb_id and tmdb_id and library.is_show: + if isinstance(tmdb_id, list): + tvdb_id = [] + for tmdb in tmdb_id: + try: + tvdb_id.append(self.tmdb_to_tvdb(tmdb, fail=True)) + except Failed: + continue + else: + tvdb_id = self.tmdb_to_tvdb(tmdb_id) + if not tvdb_id: + raise Failed(f"Unable to convert TMDb ID: {util.compile_list(tmdb_id)} to TVDb ID") + + if tvdb_id: + if isinstance(tvdb_id, list): + new_tvdb_id = [] + for tvdb in tvdb_id: + try: + new_tvdb_id.append(int(tvdb)) + except ValueError: + continue + tvdb_id = new_tvdb_id + else: + try: + tvdb_id = int(tvdb_id) + except ValueError: + tvdb_id = None + + def update_cache(cache_ids, id_type, guid_type): + if self.config.Cache: + cache_ids = util.compile_list(cache_ids) + util.print_end(length, f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}") + self.config.Cache.update_guid_map(guid_type, item.guid, cache_ids, expired) + + if tmdb_id and library.is_movie: + update_cache(tmdb_id, "TMDb", "movie") + return 
"movie", tmdb_id + elif tvdb_id and library.is_show: + update_cache(tvdb_id, "TVDb", "show") + return "show", tvdb_id + elif anidb_id and tmdb_id and library.is_show: + update_cache(tmdb_id, "TMDb", "show_movie") + return "movie", tmdb_id + else: + raise Failed(f"No ID to convert") + + except Failed as e: + util.print_end(length, f"Mapping Error | {item.guid:<46} | {e} for {item.title}") + return None, None diff --git a/modules/imdb.py b/modules/imdb.py index 0eb250fe..c9d19449 100644 --- a/modules/imdb.py +++ b/modules/imdb.py @@ -21,12 +21,12 @@ class IMDbAPI: imdb_url = imdb_url.strip() if not imdb_url.startswith(self.urls["list"]) and not imdb_url.startswith(self.urls["search"]) and not imdb_url.startswith(self.urls["keyword"]): raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{self.urls['list']} (For Lists)\n{self.urls['search']} (For Searches)\n{self.urls['keyword']} (For Keyword Searches)") - total, _ = self.get_total(self.fix_url(imdb_url), language) + total, _ = self._total(self._fix_url(imdb_url), language) if total > 0: return imdb_url raise Failed(f"IMDb Error: {imdb_url} failed to parse") - def fix_url(self, imdb_url): + def _fix_url(self, imdb_url): if imdb_url.startswith(self.urls["list"]): try: list_id = re.search("(\\d+)", str(imdb_url)).group(1) except AttributeError: raise Failed(f"IMDb Error: Failed to parse List ID from {imdb_url}") @@ -36,10 +36,10 @@ class IMDbAPI: else: return imdb_url - def get_total(self, imdb_url, language): + def _total(self, imdb_url, language): header = {"Accept-Language": language} if imdb_url.startswith(self.urls["keyword"]): - results = self.send_request(imdb_url, header).xpath("//div[@class='desc']/text()") + results = self._request(imdb_url, header).xpath("//div[@class='desc']/text()") total = None for result in results: if "title" in result: @@ -52,15 +52,15 @@ class IMDbAPI: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}") return total, 50 else: - try: results = 
self.send_request(imdb_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "") + try: results = self._request(imdb_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "") except IndexError: raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}") try: total = int(re.findall("(\\d+) title", results)[0]) except IndexError: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}") return total, 250 - def get_imdb_ids_from_url(self, imdb_url, language, limit): - current_url = self.fix_url(imdb_url) - total, item_count = self.get_total(current_url, language) + def _ids_from_url(self, imdb_url, language, limit): + current_url = self._fix_url(imdb_url) + total, item_count = self._total(current_url, language) header = {"Accept-Language": language} length = 0 imdb_ids = [] @@ -76,9 +76,9 @@ class IMDbAPI: start_num = (i - 1) * item_count + 1 length = util.print_return(length, f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}") if imdb_url.startswith(self.urls["keyword"]): - response = self.send_request(f"{current_url}&page={i}", header) + response = self._request(f"{current_url}&page={i}", header) else: - response = self.send_request(f"{current_url}&count={remainder if i == num_of_pages else item_count}&start={start_num}", header) + response = self._request(f"{current_url}&count={remainder if i == num_of_pages else item_count}&start={start_num}", header) if imdb_url.startswith(self.urls["keyword"]) and i == num_of_pages: imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst")[:remainder]) else: @@ -88,39 +88,39 @@ class IMDbAPI: else: raise Failed(f"IMDb Error: No IMDb IDs Found at {imdb_url}") @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_request(self, url, header): + def _request(self, url, header): return html.fromstring(requests.get(url, headers=header).content) - def get_items(self, method, data, language, 
status_message=True): + def get_items(self, method, data, language): pretty = util.pretty_names[method] if method in util.pretty_names else method - if status_message: - logger.debug(f"Data: {data}") + logger.debug(f"Data: {data}") show_ids = [] movie_ids = [] if method == "imdb_id": - if status_message: - logger.info(f"Processing {pretty}: {data}") - tmdb_id, tvdb_id = self.config.Arms.imdb_to_ids(data, language) + logger.info(f"Processing {pretty}: {data}") + tmdb_id = self.config.Convert.imdb_to_tmdb(data) + tvdb_id = self.config.Convert.imdb_to_tvdb(data) + if not tmdb_id and not tvdb_id: + logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {data}") if tmdb_id: movie_ids.append(tmdb_id) if tvdb_id: show_ids.append(tvdb_id) elif method == "imdb_list": - if status_message: - status = f"{data['limit']} Items at " if data['limit'] > 0 else '' - logger.info(f"Processing {pretty}: {status}{data['url']}") - imdb_ids = self.get_imdb_ids_from_url(data["url"], language, data["limit"]) + status = f"{data['limit']} Items at " if data['limit'] > 0 else '' + logger.info(f"Processing {pretty}: {status}{data['url']}") + imdb_ids = self._ids_from_url(data["url"], language, data["limit"]) total_ids = len(imdb_ids) length = 0 for i, imdb_id in enumerate(imdb_ids, 1): length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}") - try: - tmdb_id, tvdb_id = self.config.Arms.imdb_to_ids(imdb_id, language) - if tmdb_id: movie_ids.append(tmdb_id) - if tvdb_id: show_ids.append(tvdb_id) - except Failed as e: logger.warning(e) + tmdb_id = self.config.Convert.imdb_to_tmdb(imdb_id) + tvdb_id = self.config.Convert.imdb_to_tvdb(imdb_id) + if not tmdb_id and not tvdb_id: + logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}") + if tmdb_id: movie_ids.append(tmdb_id) + if tvdb_id: show_ids.append(tvdb_id) util.print_end(length, f"Processed {total_ids} IMDb IDs") else: raise Failed(f"IMDb Error: Method {method} not supported") - if 
status_message: - logger.debug(f"TMDb IDs Found: {movie_ids}") - logger.debug(f"TVDb IDs Found: {show_ids}") + logger.debug(f"TMDb IDs Found: {movie_ids}") + logger.debug(f"TVDb IDs Found: {show_ids}") return movie_ids, show_ids diff --git a/modules/letterboxd.py b/modules/letterboxd.py index 7b214a53..bdef073f 100644 --- a/modules/letterboxd.py +++ b/modules/letterboxd.py @@ -14,15 +14,11 @@ class LetterboxdAPI: self.url = "https://letterboxd.com" @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_request(self, url, language): + def _request(self, url, language): return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content) - def get_list_description(self, list_url, language): - descriptions = self.send_request(list_url, language).xpath("//meta[@property='og:description']/@content") - return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None - - def parse_list(self, list_url, language): - response = self.send_request(list_url, language) + def _parse_list(self, list_url, language): + response = self._request(list_url, language) letterboxd_ids = response.xpath("//div[@class='poster film-poster really-lazy-load']/@data-film-id") items = [] for letterboxd_id in letterboxd_ids: @@ -30,14 +26,11 @@ class LetterboxdAPI: items.append((letterboxd_id, slugs[0])) next_url = response.xpath("//a[@class='next']/@href") if len(next_url) > 0: - items.extend(self.parse_list(f"{self.url}{next_url[0]}", language)) + items.extend(self._parse_list(f"{self.url}{next_url[0]}", language)) return items - def get_tmdb_from_slug(self, slug, language): - return self.get_tmdb(f"{self.url}{slug}", language) - - def get_tmdb(self, letterboxd_url, language): - response = self.send_request(letterboxd_url, language) + def _tmdb(self, letterboxd_url, language): + response = self._request(letterboxd_url, language) ids = response.xpath("//a[@data-track-action='TMDb']/@href") if len(ids) > 0 and 
ids[0]: if "themoviedb.org/movie" in ids[0]: @@ -45,32 +38,36 @@ class LetterboxdAPI: raise Failed(f"Letterboxd Error: TMDb Movie ID not found in {ids[0]}") raise Failed(f"Letterboxd Error: TMDb Movie ID not found at {letterboxd_url}") - def get_items(self, method, data, language, status_message=True): + def get_list_description(self, list_url, language): + descriptions = self._request(list_url, language).xpath("//meta[@property='og:description']/@content") + return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None + + def get_items(self, method, data, language): pretty = util.pretty_names[method] if method in util.pretty_names else method movie_ids = [] - if status_message: - logger.info(f"Processing {pretty}: {data}") - items = self.parse_list(data, language) + logger.info(f"Processing {pretty}: {data}") + items = self._parse_list(data, language) total_items = len(items) - if total_items == 0: - raise Failed(f"Letterboxd Error: No List Items found in {data}") - length = 0 - for i, item in enumerate(items, 1): - length = util.print_return(length, f"Finding TMDb ID {i}/{total_items}") - tmdb_id = None - expired = None - if self.config.Cache: - tmdb_id, expired = self.config.Cache.query_letterboxd_map(item[0]) - if not tmdb_id or expired is not False: - try: - tmdb_id = self.get_tmdb_from_slug(item[1], language) - except Failed as e: - logger.error(e) - continue + if total_items > 0: + length = 0 + for i, item in enumerate(items, 1): + letterboxd_id, slug = item + length = util.print_return(length, f"Finding TMDb ID {i}/{total_items}") + tmdb_id = None + expired = None if self.config.Cache: - self.config.Cache.update_letterboxd(expired, item[0], tmdb_id) - movie_ids.append(tmdb_id) - util.print_end(length, f"Processed {total_items} TMDb IDs") - if status_message: - logger.debug(f"TMDb IDs Found: {movie_ids}") + tmdb_id, expired = self.config.Cache.query_letterboxd_map(letterboxd_id) + if not tmdb_id or expired is not False: + try: + 
tmdb_id = self._tmdb(f"{self.url}{slug}", language) + except Failed as e: + logger.error(e) + continue + if self.config.Cache: + self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id) + movie_ids.append(tmdb_id) + util.print_end(length, f"Processed {total_items} TMDb IDs") + else: + logger.error(f"Letterboxd Error: No List Items found in {data}") + logger.debug(f"TMDb IDs Found: {movie_ids}") return movie_ids, [] diff --git a/modules/mal.py b/modules/mal.py index db40a5d1..5be31e4a 100644 --- a/modules/mal.py +++ b/modules/mal.py @@ -1,4 +1,4 @@ -import json, logging, re, requests, secrets, webbrowser +import logging, re, requests, secrets, webbrowser from modules import util from modules.util import Failed, TimeoutExpired from retrying import retry @@ -87,11 +87,11 @@ class MyAnimeListAPI: self.client_secret = params["client_secret"] self.config_path = params["config_path"] self.authorization = authorization - if not self.save_authorization(self.authorization): - if not self.refresh_authorization(): - self.get_authorization() + if not self._save(self.authorization): + if not self._refresh(): + self._authorization() - def get_authorization(self): + def _authorization(self): code_verifier = secrets.token_urlsafe(100)[:128] url = f"{self.urls['oauth_authorize']}?response_type=code&client_id={self.client_id}&code_challenge={code_verifier}" logger.info("") @@ -114,21 +114,21 @@ class MyAnimeListAPI: "code_verifier": code_verifier, "grant_type": "authorization_code" } - new_authorization = self.oauth_request(data) + new_authorization = self._oauth(data) if "error" in new_authorization: raise Failed("MyAnimeList Error: Invalid code") - if not self.save_authorization(new_authorization): + if not self._save(new_authorization): raise Failed("MyAnimeList Error: New Authorization Failed") - def check_authorization(self, authorization): + def _check(self, authorization): try: - self.send_request(self.urls["suggestions"], authorization=authorization) + 
self._request(self.urls["suggestions"], authorization=authorization) return True except Failed as e: logger.debug(e) return False - def refresh_authorization(self): + def _refresh(self): if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]: logger.info("Refreshing Access Token...") data = { @@ -137,12 +137,12 @@ class MyAnimeListAPI: "refresh_token": self.authorization["refresh_token"], "grant_type": "refresh_token" } - refreshed_authorization = self.oauth_request(data) - return self.save_authorization(refreshed_authorization) + refreshed_authorization = self._oauth(data) + return self._save(refreshed_authorization) return False - def save_authorization(self, authorization): - if authorization is not None and "access_token" in authorization and authorization["access_token"] and self.check_authorization(authorization): + def _save(self, authorization): + if authorization is not None and "access_token" in authorization and authorization["access_token"] and self._check(authorization): if self.authorization != authorization: yaml.YAML().allow_duplicate_keys = True config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path)) @@ -159,69 +159,62 @@ class MyAnimeListAPI: return False @retry(stop_max_attempt_number=6, wait_fixed=10000) - def oauth_request(self, data): + def _oauth(self, data): return requests.post(self.urls["oauth_token"], data).json() @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def send_request(self, url, authorization=None): + def _request(self, url, authorization=None): new_authorization = authorization if authorization else self.authorization response = requests.get(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"}).json() if "error" in response: raise Failed(f"MyAnimeList Error: {response['error']}") else: return response - def request_and_parse_mal_ids(self, url): - data = self.send_request(url) + def 
_parse_request(self, url): + data = self._request(url) return [d["node"]["id"] for d in data["data"]] if "data" in data else [] - def get_username(self): - return self.send_request(f"{self.urls['user']}/@me")["name"] + def _username(self): + return self._request(f"{self.urls['user']}/@me")["name"] - def get_ranked(self, ranking_type, limit): + def _ranked(self, ranking_type, limit): url = f"{self.urls['ranking']}?ranking_type={ranking_type}&limit={limit}" - return self.request_and_parse_mal_ids(url) + return self._parse_request(url) - def get_season(self, season, year, sort_by, limit): + def _season(self, season, year, sort_by, limit): url = f"{self.urls['season']}/{year}/{season}?sort={sort_by}&limit={limit}" - return self.request_and_parse_mal_ids(url) + return self._parse_request(url) - def get_suggestions(self, limit): + def _suggestions(self, limit): url = f"{self.urls['suggestions']}?limit={limit}" - return self.request_and_parse_mal_ids(url) + return self._parse_request(url) - def get_userlist(self, username, status, sort_by, limit): + def _userlist(self, username, status, sort_by, limit): final_status = "" if status == "all" else f"status={status}&" url = f"{self.urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}" - return self.request_and_parse_mal_ids(url) + return self._parse_request(url) - def get_items(self, method, data, language, status_message=True): - if status_message: - logger.debug(f"Data: {data}") + def get_items(self, method, data): + logger.debug(f"Data: {data}") pretty = util.pretty_names[method] if method in util.pretty_names else method if method == "mal_id": mal_ids = [data] - if status_message: - logger.info(f"Processing {pretty}: {data}") + logger.info(f"Processing {pretty}: {data}") elif method in mal_ranked_name: - mal_ids = self.get_ranked(mal_ranked_name[method], data) - if status_message: - logger.info(f"Processing {pretty}: {data} Anime") + mal_ids = self._ranked(mal_ranked_name[method], data) + 
logger.info(f"Processing {pretty}: {data} Anime") elif method == "mal_season": - mal_ids = self.get_season(data["season"], data["year"], data["sort_by"], data["limit"]) - if status_message: - logger.info(f"Processing {pretty}: {data['limit']} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}") + mal_ids = self._season(data["season"], data["year"], data["sort_by"], data["limit"]) + logger.info(f"Processing {pretty}: {data['limit']} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}") elif method == "mal_suggested": - mal_ids = self.get_suggestions(data) - if status_message: - logger.info(f"Processing {pretty}: {data} Anime") + mal_ids = self._suggestions(data) + logger.info(f"Processing {pretty}: {data} Anime") elif method == "mal_userlist": - mal_ids = self.get_userlist(data["username"], data["status"], data["sort_by"], data["limit"]) - if status_message: - logger.info(f"Processing {pretty}: {data['limit']} Anime from {self.get_username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}") + mal_ids = self._userlist(data["username"], data["status"], data["sort_by"], data["limit"]) + logger.info(f"Processing {pretty}: {data['limit']} Anime from {self._username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}") else: raise Failed(f"MyAnimeList Error: Method {method} not supported") - movie_ids, show_ids = self.config.Arms.myanimelist_to_ids(mal_ids, language) - if status_message: - logger.debug(f"MyAnimeList IDs Found: {mal_ids}") - logger.debug(f"Shows Found: {show_ids}") - logger.debug(f"Movies Found: {movie_ids}") + movie_ids, show_ids = self.config.Convert.myanimelist_to_ids(mal_ids) + logger.debug(f"MyAnimeList IDs Found: {mal_ids}") + logger.debug(f"Shows Found: {show_ids}") + 
logger.debug(f"Movies Found: {movie_ids}") return movie_ids, show_ids diff --git a/modules/meta.py b/modules/meta.py new file mode 100644 index 00000000..65a4970c --- /dev/null +++ b/modules/meta.py @@ -0,0 +1,379 @@ +import logging, os, re, requests +from datetime import datetime +from modules import plex, util +from modules.util import Failed +from plexapi.exceptions import NotFound +from ruamel import yaml + +logger = logging.getLogger("Plex Meta Manager") + +class Metadata: + def __init__(self, library, file_type, path): + self.library = library + self.type = file_type + self.path = path + self.github_base = "https://raw.githubusercontent.com/meisnate12/Plex-Meta-Manager-Configs/master/" + logger.info("") + logger.info(f"Loading Metadata {file_type}: {path}") + def get_dict(attribute, attr_data, check_list=None): + if attribute in attr_data: + if attr_data[attribute]: + if isinstance(attr_data[attribute], dict): + if check_list: + new_dict = {} + for a_name, a_data in attr_data[attribute].items(): + if a_name in check_list: + logger.error(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {a_name}") + else: + new_dict[a_name] = a_data + return new_dict + else: + return attr_data[attribute] + else: + logger.warning(f"Config Warning: {attribute} must be a dictionary") + else: + logger.warning(f"Config Warning: {attribute} attribute is blank") + return None + try: + if file_type in ["URL", "Git"]: + content_path = path if file_type == "URL" else f"{self.github_base}{path}.yml" + response = requests.get(content_path) + if response.status_code >= 400: + raise Failed(f"URL Error: No file found at {content_path}") + content = response.content + elif os.path.exists(os.path.abspath(path)): + content = open(path, encoding="utf-8") + else: + raise Failed(f"File Error: File does not exist {path}") + data, ind, bsi = yaml.util.load_yaml_guess_indent(content) + self.metadata = get_dict("metadata", data, library.metadatas) + 
self.templates = get_dict("templates", data) + self.collections = get_dict("collections", data, library.collections) + + if self.metadata is None and self.collections is None: + raise Failed("YAML Error: metadata or collections attribute is required") + logger.info(f"Metadata File Loaded Successfully") + except yaml.scanner.ScannerError as ye: + raise Failed(f"YAML Error: {util.tab_new_lines(ye)}") + except Exception as e: + util.print_stacktrace() + raise Failed(f"YAML Error: {e}") + + def get_collections(self, requested_collections): + if requested_collections: + return {c: self.collections[c] for c in util.get_list(requested_collections) if c in self.collections} + else: + return self.collections + + def update_metadata(self, TMDb, test): + logger.info("") + util.separator(f"Running Metadata") + logger.info("") + if not self.metadata: + raise Failed("No metadata to edit") + for mapping_name, meta in self.metadata.items(): + methods = {mm.lower(): mm for mm in meta} + if test and ("test" not in methods or meta[methods["test"]] is not True): + continue + + updated = False + edits = {} + advance_edits = {} + + def add_edit(name, current, group, alias, key=None, value=None, var_type="str"): + if value or name in alias: + if value or group[alias[name]]: + if key is None: key = name + if value is None: value = group[alias[name]] + try: + if var_type == "date": + final_value = util.check_date(value, name, return_string=True, plex_date=True) + elif var_type == "float": + final_value = util.check_number(value, name, number_type="float", minimum=0, maximum=10) + else: + final_value = value + if str(current) != str(final_value): + edits[f"{key}.value"] = final_value + edits[f"{key}.locked"] = 1 + logger.info(f"Detail: {name} updated to {final_value}") + except Failed as ee: + logger.error(ee) + else: + logger.error(f"Metadata Error: {name} attribute is blank") + + def add_advanced_edit(attr, obj, group, alias, show_library=False, new_agent=False): + key, options = 
plex.advance_keys[attr] + if attr in alias: + if new_agent and self.library.agent not in plex.new_plex_agents: + logger.error(f"Metadata Error: {attr} attribute only works for with the New Plex Movie Agent and New Plex TV Agent") + elif show_library and not self.library.is_show: + logger.error(f"Metadata Error: {attr} attribute only works for show libraries") + elif group[alias[attr]]: + method_data = str(group[alias[attr]]).lower() + if method_data not in options: + logger.error(f"Metadata Error: {group[alias[attr]]} {attr} attribute invalid") + elif getattr(obj, key) != options[method_data]: + advance_edits[key] = options[method_data] + logger.info(f"Detail: {attr} updated to {method_data}") + else: + logger.error(f"Metadata Error: {attr} attribute is blank") + + def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False): + if key is None: + key = f"{attr}s" + if attr in alias and f"{attr}.sync" in alias: + logger.error(f"Metadata Error: Cannot use {attr} and {attr}.sync together") + elif attr in alias or f"{attr}.sync" in alias: + attr_key = attr if attr in alias else f"{attr}.sync" + if movie_library and not self.library.is_movie: + logger.error(f"Metadata Error: {attr_key} attribute only works for movie libraries") + elif group[alias[attr_key]] or extra: + item_tags = [item_tag.tag for item_tag in getattr(obj, key)] + input_tags = [] + if group[alias[attr_key]]: + input_tags.extend(util.get_list(group[alias[attr_key]])) + if extra: + input_tags.extend(extra) + if f"{attr}.sync" in alias: + remove_method = getattr(obj, f"remove{attr.capitalize()}") + for tag in (t for t in item_tags if t not in input_tags): + updated = True + remove_method(tag) + logger.info(f"Detail: {attr.capitalize()} {tag} removed") + add_method = getattr(obj, f"add{attr.capitalize()}") + for tag in (t for t in input_tags if t not in item_tags): + updated = True + add_method(tag) + logger.info(f"Detail: {attr.capitalize()} {tag} added") + else: + 
logger.error(f"Metadata Error: {attr} attribute is blank") + + def set_image(attr, obj, group, alias, poster=True, url=True): + if group[alias[attr]]: + message = f"{'poster' if poster else 'background'} to [{'URL' if url else 'File'}] {group[alias[attr]]}" + self.library.upload_image(obj, group[alias[attr]], poster=poster, url=url) + logger.info(f"Detail: {attr} updated {message}") + else: + logger.error(f"Metadata Error: {attr} attribute is blank") + + def set_images(obj, group, alias): + if "url_poster" in alias: + set_image("url_poster", obj, group, alias) + elif "file_poster" in alias: + set_image("file_poster", obj, group, alias, url=False) + if "url_background" in alias: + set_image("url_background", obj, group, alias, poster=False) + elif "file_background" in alias: + set_image("file_background", obj, group, alias, poster=False, url=False) + + logger.info("") + util.separator() + logger.info("") + year = None + if "year" in methods: + year = util.check_number(meta[methods["year"]], "year", minimum=1800, maximum=datetime.now().year + 1) + + title = mapping_name + if "title" in methods: + if meta[methods["title"]] is None: + logger.error("Metadata Error: title attribute is blank") + else: + title = meta[methods["title"]] + + item = self.library.search_item(title, year=year) + + if item is None: + item = self.library.search_item(f"{title} (SUB)", year=year) + + if item is None and "alt_title" in methods: + if meta[methods["alt_title"]] is None: + logger.error("Metadata Error: alt_title attribute is blank") + else: + alt_title = meta["alt_title"] + item = self.library.search_item(alt_title, year=year) + + if item is None: + logger.error(f"Plex Error: Item {mapping_name} not found") + logger.error(f"Skipping {mapping_name}") + continue + + item_type = "Movie" if self.library.is_movie else "Show" + logger.info(f"Updating {item_type}: {title}...") + + tmdb_item = None + tmdb_is_movie = None + if ("tmdb_show" in methods or "tmdb_id" in methods) and "tmdb_movie" in 
methods: + logger.error("Metadata Error: Cannot use tmdb_movie and tmdb_show when editing the same metadata item") + + if "tmdb_show" in methods or "tmdb_id" in methods or "tmdb_movie" in methods: + try: + if "tmdb_show" in methods or "tmdb_id" in methods: + data = meta[methods["tmdb_show" if "tmdb_show" in methods else "tmdb_id"]] + if data is None: + logger.error("Metadata Error: tmdb_show attribute is blank") + else: + tmdb_is_movie = False + tmdb_item = TMDb.get_show(util.regex_first_int(data, "Show")) + elif "tmdb_movie" in methods: + if meta[methods["tmdb_movie"]] is None: + logger.error("Metadata Error: tmdb_movie attribute is blank") + else: + tmdb_is_movie = True + tmdb_item = TMDb.get_movie(util.regex_first_int(meta[methods["tmdb_movie"]], "Movie")) + except Failed as e: + logger.error(e) + + originally_available = None + original_title = None + rating = None + studio = None + tagline = None + summary = None + genres = [] + if tmdb_item: + originally_available = tmdb_item.release_date if tmdb_is_movie else tmdb_item.first_air_date + if tmdb_item and tmdb_is_movie is True and tmdb_item.original_title != tmdb_item.title: + original_title = tmdb_item.original_title + elif tmdb_item and tmdb_is_movie is False and tmdb_item.original_name != tmdb_item.name: + original_title = tmdb_item.original_name + rating = tmdb_item.vote_average + if tmdb_is_movie is True and tmdb_item.production_companies: + studio = tmdb_item.production_companies[0].name + elif tmdb_is_movie is False and tmdb_item.networks: + studio = tmdb_item.networks[0].name + tagline = tmdb_item.tagline if len(tmdb_item.tagline) > 0 else None + summary = tmdb_item.overview + genres = [genre.name for genre in tmdb_item.genres] + + edits = {} + add_edit("title", item.title, meta, methods, value=title) + add_edit("sort_title", item.titleSort, meta, methods, key="titleSort") + add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods, + key="originallyAvailableAt", 
value=originally_available, var_type="date") + add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float") + add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float") + add_edit("content_rating", item.contentRating, meta, methods, key="contentRating") + add_edit("original_title", item.originalTitle, meta, methods, key="originalTitle", value=original_title) + add_edit("studio", item.studio, meta, methods, value=studio) + add_edit("tagline", item.tagline, meta, methods, value=tagline) + add_edit("summary", item.summary, meta, methods, value=summary) + self.library.edit_item(item, mapping_name, item_type, edits) + + advance_edits = {} + add_advanced_edit("episode_sorting", item, meta, methods, show_library=True) + add_advanced_edit("keep_episodes", item, meta, methods, show_library=True) + add_advanced_edit("delete_episodes", item, meta, methods, show_library=True) + add_advanced_edit("season_display", item, meta, methods, show_library=True) + add_advanced_edit("episode_ordering", item, meta, methods, show_library=True) + add_advanced_edit("metadata_language", item, meta, methods, new_agent=True) + add_advanced_edit("use_original_title", item, meta, methods, new_agent=True) + self.library.edit_item(item, mapping_name, item_type, advance_edits, advanced=True) + + edit_tags("genre", item, meta, methods, extra=genres) + edit_tags("label", item, meta, methods) + edit_tags("collection", item, meta, methods) + edit_tags("country", item, meta, methods, key="countries", movie_library=True) + edit_tags("director", item, meta, methods, movie_library=True) + edit_tags("producer", item, meta, methods, movie_library=True) + edit_tags("writer", item, meta, methods, movie_library=True) + + logger.info(f"{item_type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}") + + set_images(item, meta, methods) + + if "seasons" in methods and self.library.is_show: + if 
meta[methods["seasons"]]: + for season_id in meta[methods["seasons"]]: + updated = False + logger.info("") + logger.info(f"Updating season {season_id} of {mapping_name}...") + if isinstance(season_id, int): + season = None + for s in item.seasons(): + if s.index == season_id: + season = s + break + if season is None: + logger.error(f"Metadata Error: Season: {season_id} not found") + else: + season_dict = meta[methods["seasons"]][season_id] + season_methods = {sm.lower(): sm for sm in season_dict} + + if "title" in season_methods and season_dict[season_methods["title"]]: + title = season_dict[season_methods["title"]] + else: + title = season.title + if "sub" in season_methods: + if season_dict[season_methods["sub"]] is None: + logger.error("Metadata Error: sub attribute is blank") + elif season_dict[season_methods["sub"]] is True and "(SUB)" not in title: + title = f"{title} (SUB)" + elif season_dict[season_methods["sub"]] is False and title.endswith(" (SUB)"): + title = title[:-6] + else: + logger.error("Metadata Error: sub attribute must be True or False") + + edits = {} + add_edit("title", season.title, season_dict, season_methods, value=title) + add_edit("summary", season.summary, season_dict, season_methods) + self.library.edit_item(season, season_id, "Season", edits) + set_images(season, season_dict, season_methods) + else: + logger.error(f"Metadata Error: Season: {season_id} invalid, it must be an integer") + logger.info(f"Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}") + else: + logger.error("Metadata Error: seasons attribute is blank") + elif "seasons" in methods: + logger.error("Metadata Error: seasons attribute only works for show libraries") + + if "episodes" in methods and self.library.is_show: + if meta[methods["episodes"]]: + for episode_str in meta[methods["episodes"]]: + updated = False + logger.info("") + match = re.search("[Ss]\\d+[Ee]\\d+", episode_str) + if match: + output = 
match.group(0)[1:].split("E" if "E" in match.group(0) else "e") + season_id = int(output[0]) + episode_id = int(output[1]) + logger.info(f"Updating episode S{season_id}E{episode_id} of {mapping_name}...") + try: + episode = item.episode(season=season_id, episode=episode_id) + except NotFound: + logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found") + else: + episode_dict = meta[methods["episodes"]][episode_str] + episode_methods = {em.lower(): em for em in episode_dict} + + if "title" in episode_methods and episode_dict[episode_methods["title"]]: + title = episode_dict[episode_methods["title"]] + else: + title = episode.title + if "sub" in episode_dict: + if episode_dict[episode_methods["sub"]] is None: + logger.error("Metadata Error: sub attribute is blank") + elif episode_dict[episode_methods["sub"]] is True and "(SUB)" not in title: + title = f"{title} (SUB)" + elif episode_dict[episode_methods["sub"]] is False and title.endswith(" (SUB)"): + title = title[:-6] + else: + logger.error("Metadata Error: sub attribute must be True or False") + edits = {} + add_edit("title", episode.title, episode_dict, episode_methods, value=title) + add_edit("sort_title", episode.titleSort, episode_dict, episode_methods, + key="titleSort") + add_edit("rating", episode.rating, episode_dict, episode_methods) + add_edit("originally_available", str(episode.originallyAvailableAt)[:-9], + episode_dict, episode_methods, key="originallyAvailableAt") + add_edit("summary", episode.summary, episode_dict, episode_methods) + self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits) + edit_tags("director", episode, episode_dict, episode_methods) + edit_tags("writer", episode, episode_dict, episode_methods) + set_images(episode, episode_dict, episode_methods) + logger.info(f"Episode S{season_id}E{episode_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}") + else: + logger.error(f"Metadata Error: episode
{episode_str} invalid must have S##E## format") + else: + logger.error("Metadata Error: episodes attribute is blank") + elif "episodes" in methods: + logger.error("Metadata Error: episodes attribute only works for show libraries") diff --git a/modules/plex.py b/modules/plex.py index c9a4e400..9a4cd821 100644 --- a/modules/plex.py +++ b/modules/plex.py @@ -1,17 +1,18 @@ -import logging, os, re, requests -from datetime import datetime, timedelta +import glob, logging, os, requests from modules import util +from modules.meta import Metadata from modules.util import Failed +from plexapi import utils from plexapi.exceptions import BadRequest, NotFound, Unauthorized from plexapi.collection import Collections from plexapi.server import PlexServer -from plexapi.video import Movie, Show from retrying import retry from ruamel import yaml +from urllib import parse logger = logging.getLogger("Plex Meta Manager") -builders = ["plex_all", "plex_collection", "plex_collectionless", "plex_search"] +builders = ["plex_all", "plex_collectionless", "plex_search"] search_translation = { "audio_language": "audioLanguage", "content_rating": "contentRating", @@ -20,13 +21,32 @@ search_translation = { "originally_available": "originallyAvailableAt", "audience_rating": "audienceRating", "critic_rating": "rating", - "user_rating": "userRating" + "user_rating": "userRating", + "plays": "viewCount", + "episode_title": "episode.title", + "episode_added": "episode.addedAt", + "episode_originally_available": "episode.originallyAvailableAt", + "episode_year": "episode.year", + "episode_user_rating": "episode.userRating", + "episode_plays": "episode.viewCount" +} +modifier_translation = { + "": "", + ".not": "!", + ".gt": "%3E%3E", + ".gte": "%3E", + ".lt": "%3C%3C", + ".lte": "%3C", + ".before": "%3C%3C", + ".after": "%3E%3E", + ".begins": "%3C", + ".ends": "%3E" } episode_sorting_options = {"default": "-1", "oldest": "0", "newest": "1"} keep_episodes_options = {"all": 0, "5_latest": 5, "3_latest": 
3, "latest": 1, "past_3": -3, "past_7": -7, "past_30": -30} delete_episodes_options = {"never": 0, "day": 1, "week": 7, "refresh": 100} season_display_options = {"default": -1, "show": 0, "hide": 1} -episode_ordering_options = {"default": None, "tmdb_aired": "tmdbAiring", "tvdb_aired": "airing", "tvdb_dvd": "dvd", "tvdb_absolute": "absolute"} +episode_ordering_options = {"default": None, "tmdb_aired": "tmdbAiring", "tvdb_aired": "aired", "tvdb_dvd": "dvd", "tvdb_absolute": "absolute"} plex_languages = ["default", "ar-SA", "ca-ES", "cs-CZ", "da-DK", "de-DE", "el-GR", "en-AU", "en-CA", "en-GB", "en-US", "es-ES", "es-MX", "et-EE", "fa-IR", "fi-FI", "fr-CA", "fr-FR", "he-IL", "hi-IN", "hu-HU", "id-ID", "it-IT", "ja-JP", "ko-KR", "lt-LT", "lv-LV", "nb-NO", "nl-NL", "pl-PL", "pt-BR", "pt-PT", "ro-RO", @@ -34,6 +54,8 @@ plex_languages = ["default", "ar-SA", "ca-ES", "cs-CZ", "da-DK", "de-DE", "el-GR metadata_language_options = {lang.lower(): lang for lang in plex_languages} metadata_language_options["default"] = None use_original_title_options = {"default": -1, "no": 0, "yes": 1} +collection_mode_keys = {-1: "default", 0: "hide", 1: "hideItems", 2: "showItems"} +collection_order_keys = {0: "release", 1: "alpha", 2: "custom"} advance_keys = { "episode_sorting": ("episodeSort", episode_sorting_options), "keep_episodes": ("autoDeletionItemPolicyUnwatchedLibrary", keep_episodes_options), @@ -53,20 +75,6 @@ item_advance_keys = { "item_use_original_title": ("useOriginalTitle", use_original_title_options) } new_plex_agents = ["tv.plex.agents.movie", "tv.plex.agents.series"] -filter_alias = { - "actor": "actors", - "audience_rating": "audienceRating", - "collection": "collections", - "content_rating": "contentRating", - "country": "countries", - "critic_rating": "rating", - "director": "directors", - "genre": "genres", - "originally_available": "originallyAvailableAt", - "tmdb_vote_count": "vote_count", - "user_rating": "userRating", - "writer": "writers" -} searches = [ "title", 
"title.and", "title.not", "title.begins", "title.ends", "studio", "studio.and", "studio.not", "studio.begins", "studio.ends", @@ -83,13 +91,14 @@ searches = [ "subtitle_language", "subtitle_language.and", "subtitle_language.not", "writer", "writer.and", "writer.not", "decade", "resolution", - "added.before", "added.after", + "added", "added.not", "added.before", "added.after", + "originally_available", "originally_available.not", "originally_available.before", "originally_available.after", - "duration.greater", "duration.less", - "user_rating.greater", "user_rating.less", - "audience_rating.greater", "audience_rating.less", - "critic_rating.greater", "critic_rating.less", - "year", "year.not", "year.greater", "year.less" + "duration.gt", "duration.gte", "duration.lt", "duration.lte", + "user_rating.gt", "user_rating.gte", "user_rating.lt", "user_rating.lte", + "critic_rating.gt", "critic_rating.gte", "critic_rating.lt", "critic_rating.lte", + "audience_rating.gt", "audience_rating.gte", "audience_rating.lt", "audience_rating.lte", + "year", "year.not", "year.gt", "year.gte", "year.lt", "year.lte" ] movie_only_searches = [ "audio_language", "audio_language.and", "audio_language.not", @@ -97,7 +106,7 @@ movie_only_searches = [ "subtitle_language", "subtitle_language.and", "subtitle_language.not", "decade", "resolution", "originally_available.before", "originally_available.after", - "duration.greater", "duration.less" + "duration.gt", "duration.gte", "duration.lt", "duration.lte" ] show_only_searches = [ "network", "network.and", "network.not", @@ -124,8 +133,144 @@ modifiers = { ".ends": ">", ".before": "<<", ".after": ">>", - ".greater": ">>", - ".less": "<<" + ".gt": ">>", + ".gte": "__gte", + ".lt": "<<", + ".lte": "__lte" +} +mod_displays = { + "": "is", + ".not": "is not", + ".begins": "begins with", + ".ends": "ends with", + ".before": "is before", + ".after": "is after", + ".gt": "is greater than", + ".gte": "is greater than or equal", + ".lt": "is less than", 
+ ".lte": "is less than or equal" +} +tags = [ + "actor", + "audio_language", + "collection", + "content_rating", + "country", + "director", + "genre", + "label", + "network", + "producer", + "resolution", + "studio", + "subtitle_language", + "writer" +] +smart_searches = [ + "all", "any", + "title", "title.not", "title.begins", "title.ends", + "studio", "studio.not", "studio.begins", "studio.ends", + "actor", "actor.not", + "audio_language", "audio_language.not", + "collection", "collection.not", + "content_rating", "content_rating.not", + "country", "country.not", + "director", "director.not", + "genre", "genre.not", + "label", "label.not", + "network", "network.not", + "producer", "producer.not", + "subtitle_language", "subtitle_language.not", + "writer", "writer.not", + "decade", "resolution", + "added", "added.not", "added.before", "added.after", + "originally_available", "originally_available.not", + "originally_available.before", "originally_available.after", + "plays.gt", "plays.gte", "plays.lt", "plays.lte", + "duration.gt", "duration.gte", "duration.lt", "duration.lte", + "user_rating.gt", "user_rating.gte", "user_rating.lt", "user_rating.lte", + "audience_rating.gt", "audience_rating.gte", "audience_rating.lt","audience_rating.lte", + "critic_rating.gt", "critic_rating.gte", "critic_rating.lt","critic_rating.lte", + "year", "year.not", "year.gt", "year.gte", "year.lt","year.lte", + "episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends", + "episode_added", "episode_added.not", "episode_added.before", "episode_added.after", + "episode_originally_available", "episode_originally_available.not", + "episode_originally_available.before", "episode_originally_available.after", + "episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt","episode_year.lte", + "episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt","episode_user_rating.lte", + "episode_plays.gt", 
"episode_plays.gte", "episode_plays.lt", "episode_plays.lte" +] +movie_only_smart_searches = [ + "country", "country.not", + "director", "director.not", + "producer", "producer.not", + "writer", "writer.not", + "decade", + "originally_available", "originally_available.not", + "originally_available.before", "originally_available.after", + "plays.gt", "plays.gte", "plays.lt", "plays.lte", + "duration.gt", "duration.gte", "duration.lt", "duration.lte" +] +show_only_smart_searches = [ + "episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends", + "episode_added", "episode_added.not", "episode_added.before", "episode_added.after", + "episode_originally_available", "episode_originally_available.not", + "episode_originally_available.before", "episode_originally_available.after", + "episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt","episode_year.lte", + "episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt","episode_user_rating.lte", + "episode_plays.gt", "episode_plays.gte", "episode_plays.lt", "episode_plays.lte" +] +movie_smart_sorts = { + "title.asc": "titleSort", "title.desc": "titleSort%3Adesc", + "year.asc": "year", "year.desc": "year%3Adesc", + "originally_available.asc": "originallyAvailableAt", "originally_available.desc": "originallyAvailableAt%3Adesc", + "critic_rating.asc": "rating", "critic_rating.desc": "rating%3Adesc", + "audience_rating.asc": "audienceRating", "audience_rating.desc": "audienceRating%3Adesc", + "user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc", + "content_rating.asc": "contentRating", "content_rating.desc": "contentRating%3Adesc", + "duration.asc": "duration", "duration.desc": "duration%3Adesc", + "plays.asc": "viewCount", "plays.desc": "viewCount%3Adesc", + "added.asc": "addedAt", "added.desc": "addedAt%3Adesc", + "random": "random" +} +show_smart_sorts = { + "title.asc": "titleSort", "title.desc": "titleSort%3Adesc", + 
"year.asc": "year", "year.desc": "year%3Adesc", + "originally_available.asc": "originallyAvailableAt", "originally_available.desc": "originallyAvailableAt%3Adesc", + "critic_rating.asc": "rating", "critic_rating.desc": "rating%3Adesc", + "audience_rating.asc": "audienceRating", "audience_rating.desc": "audienceRating%3Adesc", + "user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc", + "content_rating.asc": "contentRating", "content_rating.desc": "contentRating%3Adesc", + "added.asc": "addedAt", "added.desc": "addedAt%3Adesc", + "episode_added.asc": "episode.addedAt", "episode_added.desc": "episode.addedAt%3Adesc", + "random": "random" +} +season_smart_sorts = { + "season.asc": "season.index%2Cseason.titleSort", "season.desc": "season.index%3Adesc%2Cseason.titleSort", + "show.asc": "show.titleSort%2Cindex", "show.desc": "show.titleSort%3Adesc%2Cindex", + "user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc", + "added.asc": "addedAt", "added.desc": "addedAt%3Adesc", + "random": "random" +} +episode_smart_sorts = { + "title.asc": "titleSort", "title.desc": "titleSort%3Adesc", + "show.asc": "show.titleSort%2Cseason.index%3AnullsLast%2Cepisode.index%3AnullsLast%2Cepisode.originallyAvailableAt%3AnullsLast%2Cepisode.titleSort%2Cepisode.id", + "show.desc": "show.titleSort%3Adesc%2Cseason.index%3AnullsLast%2Cepisode.index%3AnullsLast%2Cepisode.originallyAvailableAt%3AnullsLast%2Cepisode.titleSort%2Cepisode.id", + "year.asc": "year", "year.desc": "year%3Adesc", + "originally_available.asc": "originallyAvailableAt", "originally_available.desc": "originallyAvailableAt%3Adesc", + "critic_rating.asc": "rating", "critic_rating.desc": "rating%3Adesc", + "audience_rating.asc": "audienceRating", "audience_rating.desc": "audienceRating%3Adesc", + "user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc", + "duration.asc": "duration", "duration.desc": "duration%3Adesc", + "plays.asc": "viewCount", "plays.desc": "viewCount%3Adesc", + 
"added.asc": "addedAt", "added.desc": "addedAt%3Adesc", + "random": "random" +} +smart_types = { + "movies": (1, movie_smart_sorts), + "shows": (2, show_smart_sorts), + "seasons": (3, season_smart_sorts), + "episodes": (4, episode_smart_sorts), } class PlexAPI: @@ -148,35 +293,27 @@ class PlexAPI: self.agent = self.Plex.agent self.is_movie = self.Plex.type == "movie" self.is_show = self.Plex.type == "show" + self.collections = [] + self.metadatas = [] - logger.info(f"Using Metadata File: {params['metadata_path']}") - try: - self.data, ind, bsi = yaml.util.load_yaml_guess_indent(open(params["metadata_path"], encoding="utf-8")) - except yaml.scanner.ScannerError as ye: - raise Failed(f"YAML Error: {util.tab_new_lines(ye)}") - except Exception as e: - util.print_stacktrace() - raise Failed(f"YAML Error: {e}") - - def get_dict(attribute): - if attribute in self.data: - if self.data[attribute]: - if isinstance(self.data[attribute], dict): - return self.data[attribute] - else: - logger.warning(f"Config Warning: {attribute} must be a dictionary") - else: - logger.warning(f"Config Warning: {attribute} attribute is blank") - return None - - self.metadata = get_dict("metadata") - self.templates = get_dict("templates") - self.collections = get_dict("collections") - - if self.metadata is None and self.collections is None: - raise Failed("YAML Error: metadata attributes or collections attribute required") + self.metadata_files = [] + for file_type, metadata_file in params["metadata_path"]: + try: + meta_obj = Metadata(self, file_type, metadata_file) + if meta_obj.collections: + self.collections.extend([c for c in meta_obj.collections]) + if meta_obj.metadata: + self.metadatas.extend([c for c in meta_obj.metadata]) + self.metadata_files.append(meta_obj) + except Failed as e: + logger.error(e) + + if len(self.metadata_files) == 0: + logger.info("") + raise Failed("Metadata File Error: No valid metadata files found") if params["asset_directory"]: + logger.info("") for ad in 
params["asset_directory"]: logger.info(f"Using Asset Directory: {ad}") @@ -186,7 +323,7 @@ class PlexAPI: self.Sonarr = None self.Tautulli = None self.name = params["name"] - self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), f"{os.path.splitext(os.path.basename(params['metadata_path']))[0]}_missing.yml") + self.missing_path = os.path.join(params["default_dir"], f"{self.name}_missing.yml") self.metadata_path = params["metadata_path"] self.asset_directory = params["asset_directory"] self.asset_folders = params["asset_folders"] @@ -197,66 +334,173 @@ class PlexAPI: self.show_missing = params["show_missing"] self.save_missing = params["save_missing"] self.mass_genre_update = params["mass_genre_update"] + self.mass_audience_rating_update = params["mass_audience_rating_update"] + self.mass_critic_rating_update = params["mass_critic_rating_update"] + self.mass_update = self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update self.plex = params["plex"] self.url = params["plex"]["url"] self.token = params["plex"]["token"] self.timeout = params["plex"]["timeout"] + self.clean_bundles = params["plex"]["clean_bundles"] + self.empty_trash = params["plex"]["empty_trash"] + self.optimize = params["plex"]["optimize"] self.missing = {} self.run_again = [] def get_all_collections(self): return self.search(libtype="collection") - @retry(stop_max_attempt_number=6, wait_fixed=10000) + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) def search(self, title=None, libtype=None, sort=None, maxresults=None, **kwargs): return self.Plex.search(title=title, sort=sort, maxresults=maxresults, libtype=libtype, **kwargs) - @retry(stop_max_attempt_number=6, wait_fixed=10000) + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def exact_search(self, title, libtype=None, year=None): + if year: + terms = {"title=": title, "year": 
year} + else: + terms = {"title=": title} + return self.Plex.search(libtype=libtype, **terms) + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def get_labeled_items(self, label): + return self.Plex.search(label=label) + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) def fetchItem(self, data): return self.PlexServer.fetchItem(data) - @retry(stop_max_attempt_number=6, wait_fixed=10000) + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) def get_all(self): return self.Plex.all() - @retry(stop_max_attempt_number=6, wait_fixed=10000) + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) def server_search(self, data): return self.PlexServer.search(data) - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def add_collection(self, item, name): - item.addCollection(name) + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def query(self, method): + return method() + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def query_data(self, method, data): + return method(data) + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def collection_mode_query(self, collection, data): + collection.modeUpdate(mode=data) + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def collection_order_query(self, collection, data): + collection.sortUpdate(sort=data) + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def get_guids(self, item): + return item.guids + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def edit_query(self, item, edits, advanced=False): + if advanced: + item.editAdvanced(**edits) + else: + 
item.edit(**edits) + item.reload() + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def upload_image(self, item, location, poster=True, url=True): + if poster and url: + item.uploadPoster(url=location) + elif poster: + item.uploadPoster(filepath=location) + elif url: + item.uploadArt(url=location) + else: + item.uploadArt(filepath=location) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def get_search_choices(self, search_name): + def get_search_choices(self, search_name, title=True): try: choices = {} for choice in self.Plex.listFilterChoices(search_name): - choices[choice.title.lower()] = choice.title - choices[choice.key.lower()] = choice.title + choices[choice.title.lower()] = choice.title if title else choice.key + choices[choice.key.lower()] = choice.title if title else choice.key return choices except NotFound: raise Failed(f"Collection Error: plex search attribute: {search_name} only supported with Plex's New TV Agent") - @retry(stop_max_attempt_number=6, wait_fixed=10000) - def refresh_item(self, rating_key): - requests.put(f"{self.url}/library/metadata/{rating_key}/refresh?X-Plex-Token={self.token}") - - def validate_search_list(self, data, search_name): + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def get_labels(self): + return {label.title: label.key for label in self.Plex.listFilterChoices(field="label")} + + @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex) + def _query(self, key, post=False, put=False): + if post: method = self.Plex._server._session.post + elif put: method = self.Plex._server._session.put + else: method = None + self.Plex._server.query(key, method=method) + + def create_smart_labels(self, title, sort): + labels = self.get_labels() + if title not in labels: + raise Failed(f"Plex Error: Label: {title} does not exist") + smart_type = 1 
if self.is_movie else 2 + sort_type = movie_smart_sorts[sort] if self.is_movie else show_smart_sorts[sort] + uri_args = f"?type={smart_type}&sort={sort_type}&label={labels[title]}" + self.create_smart_collection(title, smart_type, uri_args) + + def create_smart_collection(self, title, smart_type, uri_args): + args = { + "type": smart_type, + "title": title, + "smart": 1, + "sectionId": self.Plex.key, + "uri": self.build_smart_filter(uri_args) + } + self._query(f"/library/collections{utils.joinArgs(args)}", post=True) + + def get_smart_filter_from_uri(self, uri): + smart_filter = parse.parse_qs(parse.urlparse(uri.replace("/#!/", "/")).query)["key"][0] + args = smart_filter[smart_filter.index("?"):] + return self.build_smart_filter(args), int(args[args.index("type=") + 5:args.index("type=") + 6]) + + def build_smart_filter(self, uri_args): + return f"server://{self.PlexServer.machineIdentifier}/com.plexapp.plugins.library/library/sections/{self.Plex.key}/all{uri_args}" + + def update_smart_collection(self, collection, uri_args): + self._query(f"/library/collections/{collection.ratingKey}/items{utils.joinArgs({'uri': self.build_smart_filter(uri_args)})}", put=True) + + def smart(self, collection): + return utils.cast(bool, self.get_collection(collection)._data.attrib.get('smart', '0')) + + def smart_filter(self, collection): + smart_filter = self.get_collection(collection)._data.attrib.get('content') + return smart_filter[smart_filter.index("?"):] + + def validate_search_list(self, data, search_name, title=True, pairs=False): final_search = search_translation[search_name] if search_name in search_translation else search_name - search_choices = self.get_search_choices(final_search) + search_choices = self.get_search_choices(final_search, title=title) valid_list = [] for value in util.get_list(data): if str(value).lower() in search_choices: - valid_list.append(search_choices[str(value).lower()]) + if pairs: + valid_list.append((value, 
search_choices[str(value).lower()])) + else: + valid_list.append(search_choices[str(value).lower()]) else: - logger.error(f"Plex Error: {search_name}: {value} not found") + raise Failed(f"Plex Error: {search_name}: {value} not found") return valid_list def get_collection(self, data): - collection = util.choose_from_list(self.search(title=str(data), libtype="collection"), "collection", str(data), exact=True) - if collection: return collection - else: raise Failed(f"Plex Error: Collection {data} not found") + if isinstance(data, int): + collection = self.fetchItem(data) + elif isinstance(data, Collections): + collection = data + else: + collection = util.choose_from_list(self.search(title=str(data), libtype="collection"), "collection", str(data), exact=True) + if collection: + return collection + else: + raise Failed(f"Plex Error: Collection {data} not found") def validate_collections(self, collections): valid_collections = [] @@ -267,20 +511,14 @@ class PlexAPI: raise Failed(f"Collection Error: No valid Plex Collections in {collections}") return valid_collections - def get_items(self, method, data, status_message=True): - if status_message: - logger.debug(f"Data: {data}") + def get_items(self, method, data): + logger.debug(f"Data: {data}") pretty = util.pretty_names[method] if method in util.pretty_names else method media_type = "Movie" if self.is_movie else "Show" items = [] if method == "plex_all": - if status_message: - logger.info(f"Processing {pretty} {media_type}s") + logger.info(f"Processing {pretty} {media_type}s") items = self.get_all() - elif method == "plex_collection": - if status_message: - logger.info(f"Processing {pretty} {data}") - items = data.items() elif method == "plex_search": search_terms = {} has_processed = False @@ -298,9 +536,9 @@ class PlexAPI: final_mod = ">>" elif search in ["added", "originally_available"] and modifier == ".not": final_mod = "<<" - elif search in ["critic_rating", "audience_rating"] and modifier == ".greater": - 
final_mod = "__gte" - elif search in ["critic_rating", "audience_rating"] and modifier == ".less": + elif search in ["critic_rating", "audience_rating"] and modifier == ".gt": + final_mod = "__gt" + elif search in ["critic_rating", "audience_rating"] and modifier == ".lt": final_mod = "__lt" else: final_mod = modifiers[modifier] if modifier in modifiers else "" @@ -313,50 +551,57 @@ class PlexAPI: else: search_terms[final_method] = search_data - if status_message: - if search in ["added", "originally_available"] or modifier in [".greater", ".less", ".before", ".after"]: - ors = f"{search_method}({search_data}" - else: - ors = "" - conjunction = " AND " if final_mod == "&" else " OR " - for o, param in enumerate(search_data): - or_des = conjunction if o > 0 else f"{search_method}(" - ors += f"{or_des}{param}" - if has_processed: - logger.info(f"\t\t AND {ors})") - else: - logger.info(f"Processing {pretty}: {ors})") - has_processed = True - if status_message: - if search_sort: - logger.info(f"\t\t SORT BY {search_sort})") - if search_limit: - logger.info(f"\t\t LIMIT {search_limit})") - logger.debug(f"Search: {search_terms}") + if search in ["added", "originally_available"] or modifier in [".gt", ".gte", ".lt", ".lte", ".before", ".after"]: + ors = f"{search_method}({search_data}" + else: + ors = "" + conjunction = " AND " if final_mod == "&" else " OR " + for o, param in enumerate(search_data): + or_des = conjunction if o > 0 else f"{search_method}(" + ors += f"{or_des}{param}" + if has_processed: + logger.info(f"\t\t AND {ors})") + else: + logger.info(f"Processing {pretty}: {ors})") + has_processed = True + if search_sort: + logger.info(f"\t\t SORT BY {search_sort})") + if search_limit: + logger.info(f"\t\t LIMIT {search_limit})") + logger.debug(f"Search: {search_terms}") return self.search(sort=sorts[search_sort], maxresults=search_limit, **search_terms) elif method == "plex_collectionless": good_collections = [] + logger.info("Collections Excluded") for col in 
self.get_all_collections(): keep_collection = True for pre in data["exclude_prefix"]: if col.title.startswith(pre) or (col.titleSort and col.titleSort.startswith(pre)): keep_collection = False + logger.info(f"{col.title} excluded by prefix match {pre}") break if keep_collection: for ext in data["exclude"]: if col.title == ext or (col.titleSort and col.titleSort == ext): keep_collection = False + logger.info(f"{col.title} excluded by exact match") break if keep_collection: - good_collections.append(col.index) + logger.info(f"Collection Passed: {col.title}") + good_collections.append(col) + logger.info("") + logger.info("Collections Not Excluded (Items in these collections are not added to Collectionless)") + for col in good_collections: + logger.info(col.title) + collection_indexes = [c.index for c in good_collections] all_items = self.get_all() length = 0 for i, item in enumerate(all_items, 1): length = util.print_return(length, f"Processing: {i}/{len(all_items)} {item.title}") add_item = True - item.reload() + self.query(item.reload) for collection in item.collections: - if collection.id in good_collections: + if collection.id in collection_indexes: add_item = False break if add_item: @@ -384,113 +629,17 @@ class PlexAPI: except yaml.scanner.ScannerError as e: logger.error(f"YAML Error: {util.tab_new_lines(e)}") - def add_to_collection(self, collection, items, filters, show_filtered, rating_key_map, movie_map, show_map): - name = collection.title if isinstance(collection, Collections) else collection - collection_items = collection.items() if isinstance(collection, Collections) else [] - total = len(items) - max_length = len(str(total)) - length = 0 - for i, item in enumerate(items, 1): - try: - current = self.fetchItem(item.ratingKey if isinstance(item, (Movie, Show)) else int(item)) - if not isinstance(current, (Movie, Show)): - raise NotFound - except (BadRequest, NotFound): - logger.error(f"Plex Error: Item {item} not found") - continue - match = True - if 
filters: - length = util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}") - for filter_method, filter_data in filters: - modifier = filter_method[-4:] - method = filter_method[:-4] if modifier in [".not", ".lte", ".gte"] else filter_method - method_name = filter_alias[method] if method in filter_alias else method - if method_name == "max_age": - threshold_date = datetime.now() - timedelta(days=filter_data) - if current.originallyAvailableAt is None or current.originallyAvailableAt < threshold_date: - match = False - break - elif method_name == "original_language": - movie = None - for key, value in movie_map.items(): - if current.ratingKey in value: - try: - movie = self.TMDb.get_movie(key) - break - except Failed: - pass - if movie is None: - logger.warning(f"Filter Error: No TMDb ID found for {current.title}") - continue - if (modifier == ".not" and movie.original_language in filter_data) or (modifier != ".not" and movie.original_language not in filter_data): - match = False - break - elif method_name == "audio_track_title": - jailbreak = False - for media in current.media: - for part in media.parts: - for audio in part.audioStreams(): - for check_title in filter_data: - title = audio.title if audio.title else "" - if check_title.lower() in title.lower(): - jailbreak = True - break - if jailbreak: break - if jailbreak: break - if jailbreak: break - if (jailbreak and modifier == ".not") or (not jailbreak and modifier != ".not"): - match = False - break - elif modifier in [".gte", ".lte"]: - if method_name == "vote_count": - tmdb_item = None - for key, value in movie_map.items(): - if current.ratingKey in value: - try: - tmdb_item = self.TMDb.get_movie(key) if self.is_movie else self.TMDb.get_show(key) - break - except Failed: - pass - if tmdb_item is None: - logger.warning(f"Filter Error: No TMDb ID found for {current.title}") - continue - attr = tmdb_item.vote_count - else: - attr = getattr(current, 
method_name) / 60000 if method_name == "duration" else getattr(current, method_name) - if attr is None or (modifier == ".lte" and attr > filter_data) or (modifier == ".gte" and attr < filter_data): - match = False - break - else: - attrs = [] - if method_name in ["video_resolution", "audio_language", "subtitle_language"]: - for media in current.media: - if method_name == "video_resolution": - attrs.extend([media.videoResolution]) - for part in media.parts: - if method_name == "audio_language": - attrs.extend([a.language for a in part.audioStreams()]) - if method_name == "subtitle_language": - attrs.extend([s.language for s in part.subtitleStreams()]) - elif method_name in ["contentRating", "studio", "year", "rating", "originallyAvailableAt"]: - attrs = [str(getattr(current, method_name))] - elif method_name in ["actors", "countries", "directors", "genres", "writers", "collections"]: - attrs = [getattr(x, "tag") for x in getattr(current, method_name)] - else: - raise Failed(f"Filter Error: filter: {method_name} not supported") + def get_collection_items(self, collection, smart_label_collection): + if smart_label_collection: + return self.get_labeled_items(collection.title if isinstance(collection, Collections) else str(collection)) + elif isinstance(collection, Collections): + return self.query(collection.items) + else: + return [] - if (not list(set(filter_data) & set(attrs)) and modifier != ".not") or (list(set(filter_data) & set(attrs)) and modifier == ".not"): - match = False - break - length = util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}") - if match: - util.print_end(length, f"{name} Collection | {'=' if current in collection_items else '+'} | {current.title}") - if current in collection_items: rating_key_map[current.ratingKey] = None - else: self.add_collection(current, name) - elif show_filtered is True: - logger.info(f"{name} Collection | X | {current.title}") - media_type = f"{'Movie' if 
self.is_movie else 'Show'}{'s' if total > 1 else ''}" - util.print_end(length, f"{total} {media_type} Processed") - return rating_key_map + def get_collection_name_and_items(self, collection, smart_label_collection): + name = collection.title if isinstance(collection, Collections) else str(collection) + return name, self.get_collection_items(collection, smart_label_collection) def search_item(self, data, year=None): kwargs = {} @@ -502,326 +651,53 @@ class PlexAPI: if len(edits) > 0: logger.debug(f"Details Update: {edits}") try: - if advanced: - item.editAdvanced(**edits) - else: - item.edit(**edits) - item.reload() + self.edit_query(item, edits, advanced=advanced) if advanced and "languageOverride" in edits: - self.refresh_item(item.ratingKey) + self.query(item.refresh) logger.info(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Successful") except BadRequest: util.print_stacktrace() logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed") - def update_metadata(self, TMDb, test): - logger.info("") - util.separator(f"{self.name} Library Metadata") - logger.info("") - if not self.metadata: - raise Failed("No metadata to edit") - for mapping_name, meta in self.metadata.items(): - methods = {mm.lower(): mm for mm in meta} - if test and ("test" not in methods or meta[methods["test"]] is not True): - continue - - updated = False - edits = {} - advance_edits = {} - def add_edit(name, current, group, alias, key=None, value=None, var_type="str"): - if value or name in alias: - if value or group[alias[name]]: - if key is None: key = name - if value is None: value = group[alias[name]] - try: - if var_type == "date": - final_value = util.check_date(value, name, return_string=True, plex_date=True) - elif var_type == "float": - final_value = util.check_number(value, name, number_type="float", minimum=0, maximum=10) - else: - final_value = value - if str(current) != str(final_value): - edits[f"{key}.value"] = final_value - 
edits[f"{key}.locked"] = 1 - logger.info(f"Detail: {name} updated to {final_value}") - except Failed as ee: - logger.error(ee) - else: - logger.error(f"Metadata Error: {name} attribute is blank") - - def add_advanced_edit(attr, obj, group, alias, show_library=False, new_agent=False): - key, options = advance_keys[attr] - if attr in alias: - if new_agent and self.agent not in new_plex_agents: - logger.error(f"Metadata Error: {attr} attribute only works for with the New Plex Movie Agent and New Plex TV Agent") - elif show_library and not self.is_show: - logger.error(f"Metadata Error: {attr} attribute only works for show libraries") - elif group[alias[attr]]: - method_data = str(group[alias[attr]]).lower() - if method_data not in options: - logger.error(f"Metadata Error: {group[alias[attr]]} {attr} attribute invalid") - elif getattr(obj, key) != options[method_data]: - advance_edits[key] = options[method_data] - logger.info(f"Detail: {attr} updated to {method_data}") - else: - logger.error(f"Metadata Error: {attr} attribute is blank") - - def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False): - if key is None: - key = f"{attr}s" - if attr in alias and f"{attr}.sync" in alias: - logger.error(f"Metadata Error: Cannot use {attr} and {attr}.sync together") - elif attr in alias or f"{attr}.sync" in alias: - attr_key = attr if attr in alias else f"{attr}.sync" - if movie_library and not self.is_movie: - logger.error(f"Metadata Error: {attr_key} attribute only works for movie libraries") - elif group[alias[attr_key]] or extra: - item_tags = [item_tag.tag for item_tag in getattr(obj, key)] - input_tags = [] - if group[alias[attr_key]]: - input_tags.extend(util.get_list(group[alias[attr_key]])) - if extra: - input_tags.extend(extra) - if f"{attr}.sync" in alias: - remove_method = getattr(obj, f"remove{attr.capitalize()}") - for tag in (t for t in item_tags if t not in input_tags): - updated = True - remove_method(tag) - logger.info(f"Detail: 
{attr.capitalize()} {tag} removed") - add_method = getattr(obj, f"add{attr.capitalize()}") - for tag in (t for t in input_tags if t not in item_tags): - updated = True - add_method(tag) - logger.info(f"Detail: {attr.capitalize()} {tag} added") + def update_item_from_assets(self, item, dirs=None): + if dirs is None: + dirs = self.asset_directory + name = os.path.basename(os.path.dirname(item.locations[0]) if self.is_movie else item.locations[0]) + for ad in dirs: + if self.asset_folders: + if not os.path.isdir(os.path.join(ad, name)): + continue + poster_filter = os.path.join(ad, name, "poster.*") + background_filter = os.path.join(ad, name, "background.*") + else: + poster_filter = os.path.join(ad, f"{name}.*") + background_filter = os.path.join(ad, f"{name}_background.*") + matches = glob.glob(poster_filter) + if len(matches) > 0: + self.upload_image(item, os.path.abspath(matches[0]), url=False) + logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {os.path.abspath(matches[0])}") + matches = glob.glob(background_filter) + if len(matches) > 0: + self.upload_image(item, os.path.abspath(matches[0]), poster=False, url=False) + logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {os.path.abspath(matches[0])}") + if self.is_show: + for season in self.query(item.seasons): + if self.asset_folders: + season_filter = os.path.join(ad, name, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*") else: - logger.error(f"Metadata Error: {attr} attribute is blank") - - def set_image(attr, obj, group, alias, is_background=False): - if group[alias[attr]]: - message = f"{'background' if is_background else 'poster'} to [{'File' if attr.startswith('file') else 'URL'}] {group[alias[attr]]}" - if group[alias[attr]] and attr.startswith("url") and is_background: - obj.uploadArt(url=group[alias[attr]]) - elif group[alias[attr]] and attr.startswith("url"): - obj.uploadPoster(url=group[alias[attr]]) - elif 
group[alias[attr]] and attr.startswith("file") and is_background: - obj.uploadArt(filepath=group[alias[attr]]) - elif group[alias[attr]] and attr.startswith("file"): - obj.uploadPoster(filepath=group[alias[attr]]) - logger.info(f"Detail: {attr} updated {message}") - else: - logger.error(f"Metadata Error: {attr} attribute is blank") - - def set_images(obj, group, alias): - if "url_poster" in alias: - set_image("url_poster", obj, group, alias) - elif "file_poster" in alias: - set_image("file_poster", obj, group, alias) - if "url_background" in alias: - set_image("url_background", obj, group, alias, is_background=True) - elif "file_background" in alias: - set_image("file_background", obj, group, alias, is_background=True) - - logger.info("") - util.separator() - logger.info("") - year = None - if "year" in methods: - year = util.check_number(meta[methods["year"]], "year", minimum=1800, maximum=datetime.now().year + 1) - - title = mapping_name - if "title" in methods: - if meta[methods["title"]] is None: logger.error("Metadata Error: title attribute is blank") - else: title = meta[methods["title"]] - - item = self.search_item(title, year=year) - - if item is None: - item = self.search_item(f"{title} (SUB)", year=year) - - if item is None and "alt_title" in methods: - if meta[methods["alt_title"]] is None: - logger.error("Metadata Error: alt_title attribute is blank") - else: - alt_title = meta["alt_title"] - item = self.search_item(alt_title, year=year) - - if item is None: - logger.error(f"Plex Error: Item {mapping_name} not found") - logger.error(f"Skipping {mapping_name}") - continue - - item_type = "Movie" if self.is_movie else "Show" - logger.info(f"Updating {item_type}: {title}...") - - tmdb_item = None - tmdb_is_movie = None - if ("tmdb_show" in methods or "tmdb_id" in methods) and "tmdb_movie" in methods: - logger.error("Metadata Error: Cannot use tmdb_movie and tmdb_show when editing the same metadata item") - - if "tmdb_show" in methods or "tmdb_id" in 
methods or "tmdb_movie" in methods: - try: - if "tmdb_show" in methods or "tmdb_id" in methods: - data = meta[methods["tmdb_show" if "tmdb_show" in methods else "tmdb_id"]] - if data is None: - logger.error("Metadata Error: tmdb_show attribute is blank") - else: - tmdb_is_movie = False - tmdb_item = TMDb.get_show(util.regex_first_int(data, "Show")) - elif "tmdb_movie" in methods: - if meta[methods["tmdb_movie"]] is None: - logger.error("Metadata Error: tmdb_movie attribute is blank") + season_filter = os.path.join(ad, f"{name}_Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*") + matches = glob.glob(season_filter) + if len(matches) > 0: + season_path = os.path.abspath(matches[0]) + self.upload_image(season, season_path, url=False) + logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}") + for episode in self.query(season.episodes): + if self.asset_folders: + episode_filter = os.path.join(ad, name, f"{episode.seasonEpisode.upper()}.*") else: - tmdb_is_movie = True - tmdb_item = TMDb.get_movie(util.regex_first_int(meta[methods["tmdb_movie"]], "Movie")) - except Failed as e: - logger.error(e) - - originally_available = None - original_title = None - rating = None - studio = None - tagline = None - summary = None - genres = [] - if tmdb_item: - originally_available = tmdb_item.release_date if tmdb_is_movie else tmdb_item.first_air_date - if tmdb_item and tmdb_is_movie is True and tmdb_item.original_title != tmdb_item.title: - original_title = tmdb_item.original_title - elif tmdb_item and tmdb_is_movie is False and tmdb_item.original_name != tmdb_item.name: - original_title = tmdb_item.original_name - rating = tmdb_item.vote_average - if tmdb_is_movie is True and tmdb_item.production_companies: - studio = tmdb_item.production_companies[0].name - elif tmdb_is_movie is False and tmdb_item.networks: - studio = tmdb_item.networks[0].name - tagline = tmdb_item.tagline if 
len(tmdb_item.tagline) > 0 else None - summary = tmdb_item.overview - genres = [genre.name for genre in tmdb_item.genres] - - edits = {} - add_edit("title", item.title, meta, methods, value=title) - add_edit("sort_title", item.titleSort, meta, methods, key="titleSort") - add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date") - add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float") - add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float") - add_edit("content_rating", item.contentRating, meta, methods, key="contentRating") - add_edit("original_title", item.originalTitle, meta, methods, key="originalTitle", value=original_title) - add_edit("studio", item.studio, meta, methods, value=studio) - add_edit("tagline", item.tagline, meta, methods, value=tagline) - add_edit("summary", item.summary, meta, methods, value=summary) - self.edit_item(item, mapping_name, item_type, edits) - - advance_edits = {} - add_advanced_edit("episode_sorting", item, meta, methods, show_library=True) - add_advanced_edit("keep_episodes", item, meta, methods, show_library=True) - add_advanced_edit("delete_episodes", item, meta, methods, show_library=True) - add_advanced_edit("season_display", item, meta, methods, show_library=True) - add_advanced_edit("episode_ordering", item, meta, methods, show_library=True) - add_advanced_edit("metadata_language", item, meta, methods, new_agent=True) - add_advanced_edit("use_original_title", item, meta, methods, new_agent=True) - self.edit_item(item, mapping_name, item_type, advance_edits, advanced=True) - - edit_tags("genre", item, meta, methods, extra=genres) - edit_tags("label", item, meta, methods) - edit_tags("collection", item, meta, methods) - edit_tags("country", item, meta, methods, key="countries", movie_library=True) - edit_tags("director", item, meta, 
methods, movie_library=True) - edit_tags("producer", item, meta, methods, movie_library=True) - edit_tags("writer", item, meta, methods, movie_library=True) - - logger.info(f"{item_type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}") - - set_images(item, meta, methods) - - if "seasons" in methods and self.is_show: - if meta[methods["seasons"]]: - for season_id in meta[methods["seasons"]]: - updated = False - logger.info("") - logger.info(f"Updating season {season_id} of {mapping_name}...") - if isinstance(season_id, int): - season = None - for s in item.seasons(): - if s.index == season_id: - season = s - break - if season is None: - logger.error(f"Metadata Error: Season: {season_id} not found") - else: - season_dict = meta[methods["seasons"]][season_id] - season_methods = {sm.lower(): sm for sm in season_dict} - - if "title" in season_methods and season_dict[season_methods["title"]]: - title = season_dict[season_methods["title"]] - else: - title = season.title - if "sub" in season_methods: - if season_dict[season_methods["sub"]] is None: - logger.error("Metadata Error: sub attribute is blank") - elif season_dict[season_methods["sub"]] is True and "(SUB)" not in title: - title = f"{title} (SUB)" - elif season_dict[season_methods["sub"]] is False and title.endswith(" (SUB)"): - title = title[:-6] - else: - logger.error("Metadata Error: sub attribute must be True or False") - - edits = {} - add_edit("title", season.title, season_dict, season_methods, value=title) - add_edit("summary", season.summary, season_dict, season_methods) - self.edit_item(season, season_id, "Season", edits) - set_images(season, season_dict, season_methods) - else: - logger.error(f"Metadata Error: Season: {season_id} invalid, it must be an integer") - logger.info(f"Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}") - else: - logger.error("Metadata Error: seasons attribute is blank") - elif "seasons" in methods: - 
logger.error("Metadata Error: seasons attribute only works for show libraries") - - if "episodes" in methods and self.is_show: - if meta[methods["episodes"]]: - for episode_str in meta[methods["episodes"]]: - updated = False - logger.info("") - match = re.search("[Ss]\\d+[Ee]\\d+", episode_str) - if match: - output = match.group(0)[1:].split("E" if "E" in match.group(0) else "e") - season_id = int(output[0]) - episode_id = int(output[1]) - logger.info(f"Updating episode S{season_id}E{episode_id} of {mapping_name}...") - try: episode = item.episode(season=season_id, episode=episode_id) - except NotFound: logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found") - else: - episode_dict = meta[methods["episodes"]][episode_str] - episode_methods = {em.lower(): em for em in episode_dict} - - if "title" in episode_methods and episode_dict[episode_methods["title"]]: - title = episode_dict[episode_methods["title"]] - else: - title = episode.title - if "sub" in episode_dict: - if episode_dict[episode_methods["sub"]] is None: - logger.error("Metadata Error: sub attribute is blank") - elif episode_dict[episode_methods["sub"]] is True and "(SUB)" not in title: - title = f"{title} (SUB)" - elif episode_dict[episode_methods["sub"]] is False and title.endswith(" (SUB)"): - title = title[:-6] - else: - logger.error("Metadata Error: sub attribute must be True or False") - edits = {} - add_edit("title", episode.title, episode_dict, episode_methods, value=title) - add_edit("sort_title", episode.titleSort, episode_dict, episode_methods, key="titleSort") - add_edit("rating", episode.rating, episode_dict, episode_methods) - add_edit("originally_available", str(episode.originallyAvailableAt)[:-9], episode_dict, episode_methods, key="originallyAvailableAt") - add_edit("summary", episode.summary, episode_dict, episode_methods) - self.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits) - edit_tags("director", episode, episode_dict, 
episode_methods) - edit_tags("writer", episode, episode_dict, episode_methods) - set_images(episode, episode_dict, episode_methods) - logger.info(f"Episode S{episode_id}E{season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}") - else: - logger.error(f"Metadata Error: episode {episode_str} invalid must have S##E## format") - else: - logger.error("Metadata Error: episodes attribute is blank") - elif "episodes" in methods: - logger.error("Metadata Error: episodes attribute only works for show libraries") + episode_filter = os.path.join(ad, f"{name}_{episode.seasonEpisode.upper()}.*") + matches = glob.glob(episode_filter) + if len(matches) > 0: + episode_path = os.path.abspath(matches[0]) + self.upload_image(episode, episode_path, url=False) + logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}") \ No newline at end of file diff --git a/modules/radarr.py b/modules/radarr.py index b8b52e17..9f211251 100644 --- a/modules/radarr.py +++ b/modules/radarr.py @@ -38,7 +38,7 @@ class RadarrAPI: def get_profile_id(self, profile_name): profiles = "" - for profile in self.send_get("qualityProfile" if self.version == "v3" else "profile"): + for profile in self._get("qualityProfile" if self.version == "v3" else "profile"): if len(profiles) > 0: profiles += ", " profiles += profile["name"] @@ -47,19 +47,19 @@ class RadarrAPI: raise Failed(f"Radarr Error: quality_profile: {profile_name} does not exist in radarr. 
Profiles available: {profiles}") def get_tags(self): - return {tag["label"]: tag["id"] for tag in self.send_get("tag")} + return {tag["label"]: tag["id"] for tag in self._get("tag")} def add_tags(self, tags): added = False for label in tags: - if label not in self.tags: + if str(label).lower() not in self.tags: added = True - self.send_post("tag", {"label": str(label)}) + self._post("tag", {"label": str(label).lower()}) if added: self.tags = self.get_tags() def lookup(self, tmdb_id): - results = self.send_get("movie/lookup", params={"term": f"tmdb:{tmdb_id}"}) + results = self._get("movie/lookup", params={"term": f"tmdb:{tmdb_id}"}) if results: return results[0] else: @@ -78,7 +78,7 @@ class RadarrAPI: search = options["search"] if "search" in options else self.search if tags: self.add_tags(tags) - tag_nums = [self.tags[label] for label in tags if label in self.tags] + tag_nums = [self.tags[label.lower()] for label in tags if label.lower() in self.tags] for tmdb_id in tmdb_ids: try: movie_info = self.lookup(tmdb_id) @@ -105,7 +105,7 @@ class RadarrAPI: } if tag_nums: url_json["tags"] = tag_nums - response = self.send_post("movie", url_json) + response = self._post("movie", url_json) if response.status_code < 400: logger.info(f"Added to Radarr | {tmdb_id:<6} | {movie_info['title']}") add_count += 1 @@ -118,7 +118,7 @@ class RadarrAPI: logger.info(f"{add_count} Movie{'s' if add_count > 1 else ''} added to Radarr") @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_get(self, url, params=None): + def _get(self, url, params=None): url_params = {"apikey": f"{self.token}"} if params: for param in params: @@ -126,5 +126,5 @@ class RadarrAPI: return requests.get(f"{self.base_url}{url}", params=url_params).json() @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_post(self, url, url_json): + def _post(self, url, url_json): return requests.post(f"{self.base_url}{url}", json=url_json, params={"apikey": f"{self.token}"}) diff --git a/modules/sonarr.py 
b/modules/sonarr.py index 38ff9e89..de7c0c5b 100644 --- a/modules/sonarr.py +++ b/modules/sonarr.py @@ -58,7 +58,7 @@ class SonarrAPI: endpoint = "languageProfile" else: endpoint = "profile" - for profile in self.send_get(endpoint): + for profile in self._get(endpoint): if len(profiles) > 0: profiles += ", " profiles += profile["name"] @@ -67,19 +67,19 @@ class SonarrAPI: raise Failed(f"Sonarr Error: {profile_type}: {profile_name} does not exist in sonarr. Profiles available: {profiles}") def get_tags(self): - return {tag["label"]: tag["id"] for tag in self.send_get("tag")} + return {tag["label"]: tag["id"] for tag in self._get("tag")} def add_tags(self, tags): added = False for label in tags: - if label not in self.tags: + if str(label).lower() not in self.tags: added = True - self.send_post("tag", {"label": str(label)}) + self._post("tag", {"label": str(label).lower()}) if added: self.tags = self.get_tags() def lookup(self, tvdb_id): - results = self.send_get("series/lookup", params={"term": f"tvdb:{tvdb_id}"}) + results = self._get("series/lookup", params={"term": f"tvdb:{tvdb_id}"}) if results: return results[0] else: @@ -101,7 +101,7 @@ class SonarrAPI: cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search if tags: self.add_tags(tags) - tag_nums = [self.tags[label] for label in tags if label in self.tags] + tag_nums = [self.tags[label.lower()] for label in tags if label.lower() in self.tags] for tvdb_id in tvdb_ids: try: show_info = self.lookup(tvdb_id) @@ -135,7 +135,7 @@ class SonarrAPI: } if tag_nums: url_json["tags"] = tag_nums - response = self.send_post("series", url_json) + response = self._post("series", url_json) if response.status_code < 400: logger.info(f"Added to Sonarr | {tvdb_id:<6} | {show_info['title']}") add_count += 1 @@ -152,7 +152,7 @@ class SonarrAPI: logger.info(f"{add_count} Show{'s' if add_count > 1 else ''} added to Sonarr") @retry(stop_max_attempt_number=6, wait_fixed=10000) - def 
send_get(self, url, params=None): + def _get(self, url, params=None): url_params = {"apikey": f"{self.token}"} if params: for param in params: @@ -160,5 +160,5 @@ class SonarrAPI: return requests.get(f"{self.base_url}{url}", params=url_params).json() @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_post(self, url, url_json): + def _post(self, url, url_json): return requests.post(f"{self.base_url}{url}", json=url_json, params={"apikey": f"{self.token}"}) diff --git a/modules/tautulli.py b/modules/tautulli.py index c258c62e..cd2e17c5 100644 --- a/modules/tautulli.py +++ b/modules/tautulli.py @@ -1,6 +1,8 @@ import logging, requests from modules import util from modules.util import Failed +from plexapi.exceptions import BadRequest, NotFound +from plexapi.video import Movie, Show from retrying import retry logger = logging.getLogger("Plex Meta Manager") @@ -19,17 +21,11 @@ class TautulliAPI: self.url = params["url"] self.apikey = params["apikey"] - def get_popular(self, library, time_range=30, stats_count=20, stats_count_buffer=20, status_message=True): - return self.get_items(library, time_range=time_range, stats_count=stats_count, list_type="popular", stats_count_buffer=stats_count_buffer, status_message=status_message) - - def get_top(self, library, time_range=30, stats_count=20, stats_count_buffer=20, status_message=True): - return self.get_items(library, time_range=time_range, stats_count=stats_count, list_type="top", stats_count_buffer=stats_count_buffer, status_message=status_message) - - def get_items(self, library, time_range=30, stats_count=20, list_type="popular", stats_count_buffer=20, status_message=True): - if status_message: - logger.info(f"Processing Tautulli Most {'Popular' if list_type == 'popular' else 'Watched'}: {stats_count} {'Movies' if library.is_movie else 'Shows'}") - response = self.send_request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={time_range}&stats_count={int(stats_count) + 
int(stats_count_buffer)}") - stat_id = f"{'popular' if list_type == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}" + def get_items(self, library, params): + query_size = int(params["list_size"]) + int(params["list_buffer"]) + logger.info(f"Processing Tautulli Most {params['list_type'].capitalize()}: {params['list_size']} {'Movies' if library.is_movie else 'Shows'}") + response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={params['list_days']}&stats_count={query_size}") + stat_id = f"{'popular' if params['list_type'] == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}" items = None for entry in response["response"]["data"]: @@ -39,17 +35,26 @@ class TautulliAPI: if items is None: raise Failed("Tautulli Error: No Items found in the response") - section_id = self.get_section_id(library.name) + section_id = self._section_id(library.name) rating_keys = [] count = 0 for item in items: - if item["section_id"] == section_id and count < int(stats_count): - rating_keys.append(item["rating_key"]) + if item["section_id"] == section_id and count < int(params['list_size']): + try: + library.fetchItem(int(item["rating_key"])) + rating_keys.append(item["rating_key"]) + except (BadRequest, NotFound): + new_item = library.exact_search(item["title"], year=item["year"]) + if new_item: + rating_keys.append(new_item[0].ratingKey) + else: + logger.error(f"Plex Error: Item {item} not found") + continue count += 1 return rating_keys - def get_section_id(self, library_name): - response = self.send_request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names") + def _section_id(self, library_name): + response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names") section_id = None for entry in response["response"]["data"]: if entry["section_name"] == library_name: @@ -59,6 +64,6 @@ class TautulliAPI: else: raise Failed(f"Tautulli Error: No Library named {library_name} in the 
response") @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_request(self, url): + def _request(self, url): logger.debug(f"Tautulli URL: {url.replace(self.apikey, '################################')}") return requests.get(url).json() diff --git a/modules/tests.py b/modules/tests.py deleted file mode 100644 index 5212fd19..00000000 --- a/modules/tests.py +++ /dev/null @@ -1,346 +0,0 @@ -import logging -from modules import util -from modules.config import Config -from modules.util import Failed - -logger = logging.getLogger("Plex Meta Manager") - -def run_tests(default_dir): - try: - config = Config(default_dir) - logger.info("") - util.separator("Mapping Tests") - for library in config.libraries: - config.map_guids(library) - anidb_tests(config) - imdb_tests(config) - mal_tests(config) - tautulli_tests(config) - tmdb_tests(config) - trakt_tests(config) - tvdb_tests(config) - util.separator("Finished All Plex Meta Manager Tests") - except KeyboardInterrupt: - util.separator("Canceled Plex Meta Manager Tests") - -def anidb_tests(config): - if config.AniDB: - util.separator("AniDB Tests") - - try: - config.AniDB.get_items("anidb_id", 69, "en", status_message=False) - logger.info("Success | Get AniDB ID") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get AniDB ID: {e}") - - try: - config.AniDB.get_items("anidb_relation", 69, "en", status_message=False) - logger.info("Success | Get AniDB Relation") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get AniDB Relation: {e}") - - try: - config.AniDB.get_items("anidb_popular", 30, "en", status_message=False) - logger.info("Success | Get AniDB Popular") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get AniDB Popular: {e}") - - try: - config.AniDB.validate_anidb_list(["69", "112"], "en") - logger.info("Success | Validate AniDB List") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Validate AniDB List: 
{e}") - - else: - util.separator("AniDB Not Configured") - -def imdb_tests(config): - if config.IMDb: - util.separator("IMDb Tests") - - tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/search/title/?groups=top_1000", "limit": 0}, "en", status_message=False) - if len(tmdb_ids) == 1000: logger.info("Success | IMDb URL get TMDb IDs") - else: logger.error(f"Failure | IMDb URL get TMDb IDs: {len(tmdb_ids)} Should be 1000") - - tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/list/ls026173135/", "limit": 0}, "en", status_message=False) - if len(tmdb_ids) == 250: logger.info("Success | IMDb URL get TMDb IDs") - else: logger.error(f"Failure | IMDb URL get TMDb IDs: {len(tmdb_ids)} Should be 250") - - tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_id", "tt0814243", "en", status_message=False) - if len(tmdb_ids) == 1: logger.info("Success | IMDb ID get TMDb IDs") - else: logger.error(f"Failure | IMDb ID get TMDb IDs: {len(tmdb_ids)} Should be 1") - - else: - util.separator("IMDb Not Configured") - -def mal_tests(config): - if config.MyAnimeList: - util.separator("MyAnimeList Tests") - - mal_list_tests = [ - ("mal_all", 10), - ("mal_airing", 10), - ("mal_upcoming", 10), - ("mal_tv", 10), - ("mal_movie", 10), - ("mal_ova", 10), - ("mal_special", 10), - ("mal_popular", 10), - ("mal_favorite", 10), - ("mal_suggested", 10), - ("mal_userlist", {"limit": 10, "username": "@me", "status": "completed", "sort_by": "list_score"}), - ("mal_season", {"limit": 10, "season": "fall", "year": 2020, "sort_by": "anime_score"}) - ] - - for mal_list_test in mal_list_tests: - try: - config.MyAnimeList.get_items(mal_list_test[0], mal_list_test[1], status_message=False) - logger.info(f"Success | Get Anime using {util.pretty_names[mal_list_test[0]]}") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get Anime using {util.pretty_names[mal_list_test[0]]}: {e}") - else: - util.separator("MyAnimeList 
Not Configured") - -def tautulli_tests(config): - if config.libraries[0].Tautulli: - util.separator("Tautulli Tests") - - try: - config.libraries[0].Tautulli.get_section_id(config.libraries[0].name) - logger.info("Success | Get Section ID") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get Section ID: {e}") - - try: - config.libraries[0].Tautulli.get_popular(config.libraries[0], status_message=False) - logger.info("Success | Get Popular") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get Popular: {e}") - - try: - config.libraries[0].Tautulli.get_top(config.libraries[0], status_message=False) - logger.info("Success | Get Top") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get Top: {e}") - else: - util.separator("Tautulli Not Configured") - -def tmdb_tests(config): - if config.TMDb: - util.separator("TMDb Tests") - - try: - config.TMDb.convert_imdb_to_tmdb("tt0076759") - logger.info("Success | Convert IMDb to TMDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert IMDb to TMDb: {e}") - - try: - config.TMDb.convert_tmdb_to_imdb(11) - logger.info("Success | Convert TMDb to IMDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert TMDb to IMDb: {e}") - - try: - config.TMDb.convert_imdb_to_tvdb("tt0458290") - logger.info("Success | Convert IMDb to TVDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert IMDb to TVDb: {e}") - - try: - config.TMDb.convert_tvdb_to_imdb(83268) - logger.info("Success | Convert TVDb to IMDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert TVDb to IMDb: {e}") - - tmdb_list_tests = [ - ([11], "Movie"), - ([4194], "Show"), - ([10], "Collection"), - ([1], "Person"), - ([1], "Company"), - ([2739], "Network"), - ([8136], "List") - ] - - for tmdb_list_test in tmdb_list_tests: - try: - 
config.TMDb.validate_tmdb_list(tmdb_list_test[0], tmdb_type=tmdb_list_test[1]) - logger.info(f"Success | Get TMDb {tmdb_list_test[1]}") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get TMDb {tmdb_list_test[1]}: {e}") - - tmdb_list_tests = [ - ("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, True), - ("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, False), - ("tmdb_company", 1, True), - ("tmdb_company", 1, False), - ("tmdb_network", 2739, False), - ("tmdb_keyword", 180547, True), - ("tmdb_keyword", 180547, False), - ("tmdb_now_playing", 10, True), - ("tmdb_popular", 10, True), - ("tmdb_popular", 10, False), - ("tmdb_top_rated", 10, True), - ("tmdb_top_rated", 10, False), - ("tmdb_trending_daily", 10, True), - ("tmdb_trending_daily", 10, False), - ("tmdb_trending_weekly", 10, True), - ("tmdb_trending_weekly", 10, False), - ("tmdb_list", 7068209, True), - ("tmdb_list", 7068209, False), - ("tmdb_movie", 11, True), - ("tmdb_collection", 10, True), - ("tmdb_show", 4194, False) - ] - - for tmdb_list_test in tmdb_list_tests: - try: - config.TMDb.get_items(tmdb_list_test[0], tmdb_list_test[1], tmdb_list_test[2], status_message=False) - logger.info(f"Success | Get {'Movies' if tmdb_list_test[2] else 'Shows'} using {util.pretty_names[tmdb_list_test[0]]}") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get {'Movies' if tmdb_list_test[2] else 'Shows'} using {util.pretty_names[tmdb_list_test[0]]}: {e}") - else: - util.separator("TMDb Not Configured") - -def trakt_tests(config): - if config.Trakt: - util.separator("Trakt Tests") - - try: - config.Trakt.convert_imdb_to_tmdb("tt0076759") - logger.info("Success | Convert IMDb to TMDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert IMDb to TMDb: {e}") - - try: - config.Trakt.convert_tmdb_to_imdb(11) - logger.info("Success | Convert TMDb to IMDb") - except Failed as e: - util.print_stacktrace() - 
logger.error(f"Failure | Convert TMDb to IMDb: {e}") - - try: - config.Trakt.convert_imdb_to_tvdb("tt0458290") - logger.info("Success | Convert IMDb to TVDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert IMDb to TVDb: {e}") - - try: - config.Trakt.convert_tvdb_to_imdb(83268) - logger.info("Success | Convert TVDb to IMDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert TVDb to IMDb: {e}") - - try: - config.Trakt.convert_tmdb_to_tvdb(11) - logger.info("Success | Convert TMDb to TVDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert TMDb to TVDb: {e}") - - try: - config.Trakt.convert_tvdb_to_tmdb(83268) - logger.info("Success | Convert TVDb to TMDb") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Convert TVDb to TMDb: {e}") - - try: - config.Trakt.validate_trakt_list(["https://trakt.tv/users/movistapp/lists/christmas-movies"]) - logger.info("Success | Get List") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get List: {e}") - - try: - config.Trakt.validate_trakt_watchlist(["me"], True) - logger.info("Success | Get Watchlist Movies") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get Watchlist Movies: {e}") - - try: - config.Trakt.validate_trakt_watchlist(["me"], False) - logger.info("Success | Get Watchlist Shows") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get Watchlist Shows: {e}") - - trakt_list_tests = [ - ("trakt_list", "https://trakt.tv/users/movistapp/lists/christmas-movies", True), - ("trakt_trending", 10, True), - ("trakt_trending", 10, False), - ("trakt_watchlist", "me", True), - ("trakt_watchlist", "me", False) - ] - - for trakt_list_test in trakt_list_tests: - try: - config.Trakt.get_items(trakt_list_test[0], trakt_list_test[1], trakt_list_test[2], status_message=False) - logger.info(f"Success | Get {'Movies' if trakt_list_test[2] else 
'Shows'} using {util.pretty_names[trakt_list_test[0]]}") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | Get {'Movies' if trakt_list_test[2] else 'Shows'} using {util.pretty_names[trakt_list_test[0]]}: {e}") - else: - util.separator("Trakt Not Configured") - -def tvdb_tests(config): - if config.TVDb: - util.separator("TVDb Tests") - - tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/arrowverse", "en", status_message=False) - if len(tvdb_ids) == 10 and len(tmdb_ids) == 0: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs") - else: logger.error(f"Failure | TVDb URL get TVDb IDs and TMDb IDs: {len(tvdb_ids)} Should be 10 and {len(tmdb_ids)} Should be 0") - - tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/6957", "en", status_message=False) - if len(tvdb_ids) == 4 and len(tmdb_ids) == 2: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs") - else: logger.error(f"Failure | TVDb URL get TVDb IDs and TMDb IDs: {len(tvdb_ids)} Should be 4 and {len(tmdb_ids)} Should be 2") - - try: - config.TVDb.get_items("tvdb_show", "https://www.thetvdb.com/series/arrow", "en", status_message=False) - logger.info("Success | TVDb URL get TVDb Series ID") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | TVDb URL get TVDb Series ID: {e}") - - try: - config.TVDb.get_items("tvdb_show", 279121, "en", status_message=False) - logger.info("Success | TVDb ID get TVDb Series ID") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | TVDb ID get TVDb Series ID: {e}") - - try: - config.TVDb.get_items("tvdb_movie", "https://www.thetvdb.com/movies/the-lord-of-the-rings-the-fellowship-of-the-ring", "en", status_message=False) - logger.info("Success | TVDb URL get TVDb Movie ID") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | TVDb URL get TVDb Movie ID: {e}") - - try: - config.TVDb.get_items("tvdb_movie", 107, 
"en", status_message=False) - logger.info("Success | TVDb ID get TVDb Movie ID") - except Failed as e: - util.print_stacktrace() - logger.error(f"Failure | TVDb ID get TVDb Movie ID: {e}") - - else: - util.separator("TVDb Not Configured") diff --git a/modules/tmdb.py b/modules/tmdb.py index 4df51913..819583e6 100644 --- a/modules/tmdb.py +++ b/modules/tmdb.py @@ -109,7 +109,8 @@ discover_tv_sort = [ ] class TMDbAPI: - def __init__(self, params): + def __init__(self, config, params): + self.config = config self.TMDb = tmdbv3api.TMDb() self.TMDb.api_key = params["apikey"] self.TMDb.language = params["language"] @@ -131,29 +132,22 @@ class TMDbAPI: self.image_url = "https://image.tmdb.org/t/p/original" @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def convert_from_tmdb(self, tmdb_id, convert_to, is_movie): + def convert_from(self, tmdb_id, convert_to, is_movie): try: id_to_return = self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to] if not id_to_return or (convert_to == "tvdb_id" and id_to_return == 0): raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}") return id_to_return except TMDbException: - raise Failed(f"TMDb Error: {'Movie' if is_movie else 'Show'} TMDb ID: {tmdb_id} not found") + raise Failed(f"TMDb Error: TMDb {'Movie' if is_movie else 'Show'} ID: {tmdb_id} not found") @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def convert_to_tmdb(self, external_id, external_source, is_movie): + def convert_to(self, external_id, external_source, is_movie): search_results = self.Movie.external(external_id=external_id, external_source=external_source) search = search_results["movie_results" if is_movie else "tv_results"] if len(search) == 1: return int(search[0]["id"]) else: raise Failed(f"TMDb Error: No TMDb ID found for {external_source.upper().replace('B_', 'b ')} 
{external_id}") - def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_from_tmdb(tmdb_id, "imdb_id", is_movie) - def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_to_tmdb(imdb_id, "imdb_id", is_movie) - def convert_tmdb_to_tvdb(self, tmdb_id): return self.convert_from_tmdb(tmdb_id, "tvdb_id", False) - def convert_tvdb_to_tmdb(self, tvdb_id): return self.convert_to_tmdb(tvdb_id, "tvdb_id", False) - def convert_tvdb_to_imdb(self, tvdb_id): return self.convert_tmdb_to_imdb(self.convert_tvdb_to_tmdb(tvdb_id), False) - def convert_imdb_to_tvdb(self, imdb_id): return self.convert_tmdb_to_tvdb(self.convert_imdb_to_tmdb(imdb_id, False)) - def get_movie_show_or_collection(self, tmdb_id, is_movie): if is_movie: try: return self.get_collection(tmdb_id) @@ -183,22 +177,22 @@ class TMDbAPI: except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}") @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def get_person_credits(self, tmdb_id): + def _person_credits(self, tmdb_id): try: return self.Person.combined_credits(tmdb_id) except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}") @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def get_company(self, tmdb_id): + def _company(self, tmdb_id): try: return self.Company.details(tmdb_id) except TMDbException as e: raise Failed(f"TMDb Error: No Company found for TMDb ID {tmdb_id}: {e}") @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def get_network(self, tmdb_id): + def _network(self, tmdb_id): try: return self.Network.details(tmdb_id) except TMDbException as e: raise Failed(f"TMDb Error: No Network found for TMDb ID {tmdb_id}: {e}") @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def get_keyword(self, 
tmdb_id): + def _keyword(self, tmdb_id): try: return self.Keyword.details(tmdb_id) except TMDbException as e: raise Failed(f"TMDb Error: No Keyword found for TMDb ID {tmdb_id}: {e}") @@ -207,17 +201,17 @@ class TMDbAPI: try: return self.List.details(tmdb_id, all_details=True) except TMDbException as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}") - def get_credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False): + def _credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False): movie_ids = [] show_ids = [] - actor_credits = self.get_person_credits(tmdb_id) + actor_credits = self._person_credits(tmdb_id) if actor: for credit in actor_credits.cast: if credit.media_type == "movie": movie_ids.append(credit.id) elif credit.media_type == "tv": try: - show_ids.append(self.convert_tmdb_to_tvdb(credit.id)) + show_ids.append(self.config.Convert.tmdb_to_tvdb(credit.id, fail=True)) except Failed as e: logger.warning(e) for credit in actor_credits.crew: @@ -229,12 +223,12 @@ class TMDbAPI: movie_ids.append(credit.id) elif credit.media_type == "tv": try: - show_ids.append(self.convert_tmdb_to_tvdb(credit.id)) + show_ids.append(self.config.Convert.tmdb_to_tvdb(credit.id, fail=True)) except Failed as e: logger.warning(e) return movie_ids, show_ids - def get_pagenation(self, method, amount, is_movie): + def _pagenation(self, method, amount, is_movie): ids = [] count = 0 for x in range(int(amount / 20) + 1): @@ -246,15 +240,16 @@ class TMDbAPI: else: raise Failed(f"TMDb Error: {method} method not supported") for tmdb_item in tmdb_items: try: - ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id)) + ids.append(tmdb_item.id if is_movie else self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True)) count += 1 - except Failed: + except Failed as e: + logger.error(e) pass if count == amount: break if count == amount: break return ids - def get_discover(self, 
attrs, amount, is_movie): + def _discover(self, attrs, amount, is_movie): ids = [] count = 0 for date_attr in discover_dates: @@ -269,17 +264,35 @@ class TMDbAPI: tmdb_items = self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs) for tmdb_item in tmdb_items: try: - ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id)) + ids.append(tmdb_item.id if is_movie else self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True)) count += 1 - except Failed: + except Failed as e: + logger.error(e) pass if count == amount: break if count == amount: break return ids, amount - def get_items(self, method, data, is_movie, status_message=True): - if status_message: - logger.debug(f"Data: {data}") + def validate_tmdb_list(self, tmdb_list, tmdb_type): + tmdb_values = [] + for tmdb_id in tmdb_list: + try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_type)) + except Failed as e: logger.error(e) + if len(tmdb_values) == 0: raise Failed(f"TMDb Error: No valid TMDb IDs in {tmdb_list}") + return tmdb_values + + def validate_tmdb(self, tmdb_id, tmdb_type): + if tmdb_type == "Movie": self.get_movie(tmdb_id) + elif tmdb_type == "Show": self.get_show(tmdb_id) + elif tmdb_type == "Collection": self.get_collection(tmdb_id) + elif tmdb_type == "Person": self.get_person(tmdb_id) + elif tmdb_type == "Company": self._company(tmdb_id) + elif tmdb_type == "Network": self._network(tmdb_id) + elif tmdb_type == "List": self.get_list(tmdb_id) + return tmdb_id + + def get_items(self, method, data, is_movie): + logger.debug(f"Data: {data}") pretty = util.pretty_names[method] if method in util.pretty_names else method media_type = "Movie" if is_movie else "Show" movie_ids = [] @@ -291,32 +304,30 @@ class TMDbAPI: if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]: tmdb_id = int(data) if method == "tmdb_company": - tmdb_name = str(self.get_company(tmdb_id).name) + tmdb_name = str(self._company(tmdb_id).name) attrs = 
{"with_companies": tmdb_id} elif method == "tmdb_network": - tmdb_name = str(self.get_network(tmdb_id).name) + tmdb_name = str(self._network(tmdb_id).name) attrs = {"with_networks": tmdb_id} elif method == "tmdb_keyword": - tmdb_name = str(self.get_keyword(tmdb_id).name) + tmdb_name = str(self._keyword(tmdb_id).name) attrs = {"with_keywords": tmdb_id} limit = 0 else: attrs = data.copy() limit = int(attrs.pop("limit")) - if is_movie: movie_ids, amount = self.get_discover(attrs, limit, is_movie) - else: show_ids, amount = self.get_discover(attrs, limit, is_movie) - if status_message: - if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]: - logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})") - elif method == "tmdb_discover": - logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}") - for attr, value in attrs.items(): - logger.info(f" {attr}: {value}") + if is_movie: movie_ids, amount = self._discover(attrs, limit, is_movie) + else: show_ids, amount = self._discover(attrs, limit, is_movie) + if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]: + logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})") + elif method == "tmdb_discover": + logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}") + for attr, value in attrs.items(): + logger.info(f" {attr}: {value}") elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]: - if is_movie: movie_ids = self.get_pagenation(method, data, is_movie) - else: show_ids = self.get_pagenation(method, data, is_movie) - if status_message: - logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}") + if is_movie: movie_ids = self._pagenation(method, data, is_movie) + else: show_ids = self._pagenation(method, data, is_movie) + logger.info(f"Processing {pretty}: 
{data} {media_type}{'' if data == 1 else 's'}") else: tmdb_id = int(data) if method == "tmdb_list": @@ -326,7 +337,7 @@ class TMDbAPI: if tmdb_item.media_type == "movie": movie_ids.append(tmdb_item.id) elif tmdb_item.media_type == "tv": - try: show_ids.append(self.convert_tmdb_to_tvdb(tmdb_item.id)) + try: show_ids.append(self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True)) except Failed: pass elif method == "tmdb_movie": tmdb_name = str(self.get_movie(tmdb_id).title) @@ -338,38 +349,19 @@ class TMDbAPI: movie_ids.append(tmdb_item["id"]) elif method == "tmdb_show": tmdb_name = str(self.get_show(tmdb_id).name) - show_ids.append(self.convert_tmdb_to_tvdb(tmdb_id)) + show_ids.append(self.config.Convert.tmdb_to_tvdb(tmdb_id, fail=True)) else: tmdb_name = str(self.get_person(tmdb_id).name) - if method == "tmdb_actor": movie_ids, show_ids = self.get_credits(tmdb_id, actor=True) - elif method == "tmdb_director": movie_ids, show_ids = self.get_credits(tmdb_id, director=True) - elif method == "tmdb_producer": movie_ids, show_ids = self.get_credits(tmdb_id, producer=True) - elif method == "tmdb_writer": movie_ids, show_ids = self.get_credits(tmdb_id, writer=True) - elif method == "tmdb_crew": movie_ids, show_ids = self.get_credits(tmdb_id, crew=True) + if method == "tmdb_actor": movie_ids, show_ids = self._credits(tmdb_id, actor=True) + elif method == "tmdb_director": movie_ids, show_ids = self._credits(tmdb_id, director=True) + elif method == "tmdb_producer": movie_ids, show_ids = self._credits(tmdb_id, producer=True) + elif method == "tmdb_writer": movie_ids, show_ids = self._credits(tmdb_id, writer=True) + elif method == "tmdb_crew": movie_ids, show_ids = self._credits(tmdb_id, crew=True) else: raise Failed(f"TMDb Error: Method {method} not supported") - if status_message and len(movie_ids) > 0: + if len(movie_ids) > 0: logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})") - if status_message and 
not is_movie and len(show_ids) > 0: + if not is_movie and len(show_ids) > 0: logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})") - if status_message: - logger.debug(f"TMDb IDs Found: {movie_ids}") - logger.debug(f"TVDb IDs Found: {show_ids}") + logger.debug(f"TMDb IDs Found: {movie_ids}") + logger.debug(f"TVDb IDs Found: {show_ids}") return movie_ids, show_ids - - def validate_tmdb_list(self, tmdb_list, tmdb_type): - tmdb_values = [] - for tmdb_id in tmdb_list: - try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_type)) - except Failed as e: logger.error(e) - if len(tmdb_values) == 0: raise Failed(f"TMDb Error: No valid TMDb IDs in {tmdb_list}") - return tmdb_values - - def validate_tmdb(self, tmdb_id, tmdb_type): - if tmdb_type == "Movie": self.get_movie(tmdb_id) - elif tmdb_type == "Show": self.get_show(tmdb_id) - elif tmdb_type == "Collection": self.get_collection(tmdb_id) - elif tmdb_type == "Person": self.get_person(tmdb_id) - elif tmdb_type == "Company": self.get_company(tmdb_id) - elif tmdb_type == "Network": self.get_network(tmdb_id) - elif tmdb_type == "List": self.get_list(tmdb_id) - return tmdb_id diff --git a/modules/trakttv.py b/modules/trakttv.py index b24263ea..37d82b13 100644 --- a/modules/trakttv.py +++ b/modules/trakttv.py @@ -37,11 +37,11 @@ class TraktAPI: self.config_path = params["config_path"] self.authorization = authorization Trakt.configuration.defaults.client(self.client_id, self.client_secret) - if not self.save_authorization(self.authorization): - if not self.refresh_authorization(): - self.get_authorization() + if not self._save(self.authorization): + if not self._refresh(): + self._authorization() - def get_authorization(self): + def _authorization(self): url = Trakt["oauth"].authorize_url(self.redirect_uri) logger.info(f"Navigate to: {url}") logger.info("If you get an OAuth error your client_id or client_secret is invalid") @@ -52,10 +52,10 @@ class TraktAPI: 
new_authorization = Trakt["oauth"].token(pin, self.redirect_uri) if not new_authorization: raise Failed("Trakt Error: Invalid trakt pin. If you're sure you typed it in correctly your client_id or client_secret may be invalid") - if not self.save_authorization(new_authorization): + if not self._save(new_authorization): raise Failed("Trakt Error: New Authorization Failed") - def check_authorization(self, authorization): + def _check(self, authorization): try: with Trakt.configuration.oauth.from_response(authorization, refresh=True): if Trakt["users/settings"].get(): @@ -63,15 +63,15 @@ class TraktAPI: except ValueError: pass return False - def refresh_authorization(self): + def _refresh(self): if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]: logger.info("Refreshing Access Token...") refreshed_authorization = Trakt["oauth"].token_refresh(self.authorization["refresh_token"], self.redirect_uri) - return self.save_authorization(refreshed_authorization) + return self._save(refreshed_authorization) return False - def save_authorization(self, authorization): - if authorization and self.check_authorization(authorization): + def _save(self, authorization): + if authorization and self._check(authorization): if self.authorization != authorization: yaml.YAML().allow_duplicate_keys = True config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path)) @@ -90,30 +90,23 @@ class TraktAPI: return True return False - def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_id(tmdb_id, "tmdb", "imdb", "movie" if is_movie else "show") - def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_id(imdb_id, "imdb", "tmdb", "movie" if is_movie else "show") - def convert_tmdb_to_tvdb(self, tmdb_id): return self.convert_id(tmdb_id, "tmdb", "tvdb", "show") - def convert_tvdb_to_tmdb(self, tvdb_id): return self.convert_id(tvdb_id, "tvdb", "tmdb", "show") - def convert_tvdb_to_imdb(self, 
tvdb_id): return self.convert_id(tvdb_id, "tvdb", "imdb", "show") - def convert_imdb_to_tvdb(self, imdb_id): return self.convert_id(imdb_id, "imdb", "tvdb", "show") - @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def convert_id(self, external_id, from_source, to_source, media_type): + def convert(self, external_id, from_source, to_source, media_type): lookup = Trakt["search"].lookup(external_id, from_source, media_type) if lookup: lookup = lookup[0] if isinstance(lookup, list) else lookup if lookup.get_key(to_source): return lookup.get_key(to_source) if to_source == "imdb" else int(lookup.get_key(to_source)) - raise Failed(f"No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID {external_id}") + raise Failed(f"Trakt Error: No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID: {external_id}") def collection(self, data, is_movie): - return self.user_list("collection", data, is_movie) + return self._user_list("collection", data, is_movie) - def watchlist(self, data, is_movie): - return self.user_list("watchlist", data, is_movie) + def _watchlist(self, data, is_movie): + return self._user_list("watchlist", data, is_movie) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) - def user_list(self, list_type, data, is_movie): + def _user_list(self, list_type, data, is_movie): items = Trakt[f"users/{data}/{list_type}"].movies() if is_movie else Trakt[f"users/{data}/{list_type}"].shows() if items is None: raise Failed("Trakt Error: No List found") else: return [i for i in items] @@ -126,16 +119,16 @@ class TraktAPI: else: return trakt_list @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_request(self, url): + def _request(self, url): return requests.get(url, headers={"Content-Type": "application/json", "trakt-api-version": "2", "trakt-api-key": self.client_id}).json() - def 
get_collection(self, username, is_movie): - items = self.send_request(f"{self.base_url}/users/{username}/collection/{'movies' if is_movie else 'shows'}") + def _collection(self, username, is_movie): + items = self._request(f"{self.base_url}/users/{username}/collection/{'movies' if is_movie else 'shows'}") if is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], [] else: return [], [item["show"]["ids"]["tvdb"] for item in items] - def get_pagenation(self, pagenation, amount, is_movie): - items = self.send_request(f"{self.base_url}/{'movies' if is_movie else 'shows'}/{pagenation}?limit={amount}") + def _pagenation(self, pagenation, amount, is_movie): + items = self._request(f"{self.base_url}/{'movies' if is_movie else 'shows'}/{pagenation}?limit={amount}") if pagenation == "popular" and is_movie: return [item["ids"]["tmdb"] for item in items], [] elif pagenation == "popular": return [], [item["ids"]["tvdb"] for item in items] elif is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], [] @@ -146,9 +139,9 @@ class TraktAPI: for value in values: try: if trakt_type == "watchlist" and is_movie is not None: - self.watchlist(value, is_movie) + self._watchlist(value, is_movie) elif trakt_type == "collection" and is_movie is not None: - self.get_collection(value, is_movie) + self._collection(value, is_movie) else: self.standard_list(value) trakt_values.append(value) @@ -163,33 +156,31 @@ class TraktAPI: raise Failed(f"Trakt Error: No valid Trakt Lists in {values}") return trakt_values - def get_items(self, method, data, is_movie, status_message=True): - if status_message: - logger.debug(f"Data: {data}") + def get_items(self, method, data, is_movie): + logger.debug(f"Data: {data}") pretty = self.aliases[method] if method in self.aliases else method media_type = "Movie" if is_movie else "Show" if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]: - movie_ids, show_ids = 
self.get_pagenation(method[6:], data, is_movie) - if status_message: - logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}") + movie_ids, show_ids = self._pagenation(method[6:], data, is_movie) + logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}") elif method == "trakt_collection": - movie_ids, show_ids = self.get_collection(data, is_movie) - if status_message: - logger.info(f"Processing {pretty} {media_type}s for {data}") + movie_ids, show_ids = self._collection(data, is_movie) + logger.info(f"Processing {pretty} {media_type}s for {data}") else: show_ids = [] movie_ids = [] - if method == "trakt_watchlist": trakt_items = self.watchlist(data, is_movie) + if method == "trakt_watchlist": trakt_items = self._watchlist(data, is_movie) elif method == "trakt_list": trakt_items = self.standard_list(data).items() else: raise Failed(f"Trakt Error: Method {method} not supported") - if status_message: logger.info(f"Processing {pretty}: {data}") + logger.info(f"Processing {pretty}: {data}") for trakt_item in trakt_items: - if isinstance(trakt_item, Movie): movie_ids.append(int(trakt_item.get_key("tmdb"))) - elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids: show_ids.append(int(trakt_item.pk[1])) - elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids: show_ids.append(int(trakt_item.show.pk[1])) - if status_message: - logger.debug(f"Trakt {media_type} Found: {trakt_items}") - if status_message: - logger.debug(f"TMDb IDs Found: {movie_ids}") - logger.debug(f"TVDb IDs Found: {show_ids}") + if isinstance(trakt_item, Movie): + movie_ids.append(int(trakt_item.get_key("tmdb"))) + elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids: + show_ids.append(int(trakt_item.pk[1])) + elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids: + show_ids.append(int(trakt_item.show.pk[1])) + logger.debug(f"Trakt {media_type} 
Found: {trakt_items}") + logger.debug(f"TMDb IDs Found: {movie_ids}") + logger.debug(f"TVDb IDs Found: {show_ids}") return movie_ids, show_ids diff --git a/modules/tvdb.py b/modules/tvdb.py index bfba51cc..8024dac8 100644 --- a/modules/tvdb.py +++ b/modules/tvdb.py @@ -25,7 +25,7 @@ class TVDbObj: else: raise Failed(f"TVDb Error: {tvdb_url} must begin with {TVDb.movies_url if is_movie else TVDb.series_url}") - response = TVDb.send_request(tvdb_url, language) + response = TVDb._request(tvdb_url, language) results = response.xpath(f"//*[text()='TheTVDB.com {self.media_type} ID']/parent::node()/span/text()") if len(results) > 0: self.id = int(results[0]) @@ -57,15 +57,17 @@ class TVDbObj: if len(results) > 0: try: tmdb_id = util.regex_first_int(results[0], "TMDb ID") - except Failed as e: - logger.error(e) - if not tmdb_id: + except Failed: + pass + if tmdb_id is None: results = response.xpath("//*[text()='IMDB']/@href") if len(results) > 0: try: - tmdb_id, _ = TVDb.config.Arms.imdb_to_ids(util.get_id_from_imdb_url(results[0]), language) - except Failed as e: - logger.error(e) + tmdb_id = TVDb.config.Convert.imdb_to_tmdb(util.get_id_from_imdb_url(results[0]), fail=True) + except Failed: + pass + if tmdb_id is None: + raise Failed(f"TVDB Error: No TMDb ID found for {self.title}") self.tmdb_id = tmdb_id self.tvdb_url = tvdb_url self.language = language @@ -104,16 +106,16 @@ class TVDbAPI: return TVDbObj(tvdb_url, language, True, self) def get_list_description(self, tvdb_url, language): - description = self.send_request(tvdb_url, language).xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()") + description = self._request(tvdb_url, language).xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()") return description[0] if len(description) > 0 and len(description[0]) > 0 else "" - def get_tvdb_ids_from_url(self, tvdb_url, language): + def _ids_from_url(self, tvdb_url, language): show_ids = [] movie_ids = [] tvdb_url = tvdb_url.strip() if 
tvdb_url.startswith((self.list_url, self.alt_list_url)): try: - items = self.send_request(tvdb_url, language).xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']") + items = self._request(tvdb_url, language).xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']") for item in items: title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0] item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0] @@ -143,26 +145,24 @@ class TVDbAPI: raise Failed(f"TVDb Error: {tvdb_url} must begin with {self.list_url}") @retry(stop_max_attempt_number=6, wait_fixed=10000) - def send_request(self, url, language): + def _request(self, url, language): return html.fromstring(requests.get(url, headers={"Accept-Language": language}).content) - def get_items(self, method, data, language, status_message=True): + def get_items(self, method, data, language): pretty = util.pretty_names[method] if method in util.pretty_names else method show_ids = [] movie_ids = [] - if status_message: - logger.info(f"Processing {pretty}: {data}") + logger.info(f"Processing {pretty}: {data}") if method == "tvdb_show": show_ids.append(self.get_series(language, data).id) elif method == "tvdb_movie": - movie_ids.append(self.get_movie(language, data).id) + movie_ids.append(self.get_movie(language, data).tmdb_id) elif method == "tvdb_list": - tmdb_ids, tvdb_ids = self.get_tvdb_ids_from_url(data, language) + tmdb_ids, tvdb_ids = self._ids_from_url(data, language) movie_ids.extend(tmdb_ids) show_ids.extend(tvdb_ids) else: raise Failed(f"TVDb Error: Method {method} not supported") - if status_message: - logger.debug(f"TMDb IDs Found: {movie_ids}") - logger.debug(f"TVDb IDs Found: {show_ids}") + logger.debug(f"TMDb IDs Found: {movie_ids}") + logger.debug(f"TVDb IDs Found: {show_ids}") return movie_ids, show_ids diff --git a/modules/util.py b/modules/util.py index 46552a67..ad65760d 100644 --- 
a/modules/util.py +++ b/modules/util.py @@ -1,5 +1,6 @@ import logging, re, signal, sys, time, traceback from datetime import datetime +from plexapi.exceptions import BadRequest, NotFound, Unauthorized try: import msvcrt @@ -19,6 +20,9 @@ class Failed(Exception): def retry_if_not_failed(exception): return not isinstance(exception, Failed) +def retry_if_not_plex(exception): + return not isinstance(exception, (BadRequest, NotFound, Unauthorized)) + separating_character = "=" screen_width = 100 @@ -207,11 +211,22 @@ def get_bool(method_name, method_data): else: raise Failed(f"Collection Error: {method_name} attribute: {method_data} invalid must be either true or false") -def get_list(data, lower=False, split=True): +def compile_list(data): + if isinstance(data, list): + text = "" + for item in data: + text += f"{',' if len(text) > 0 else ''}{item}" + return text + else: + return data + + +def get_list(data, lower=False, split=True, int_list=False): if isinstance(data, list): return data elif isinstance(data, dict): return [data] elif split is False: return [str(data)] elif lower is True: return [d.strip().lower() for d in str(data).split(",")] + elif int_list is True: return [int(d.strip()) for d in str(data).split(",")] else: return [d.strip() for d in str(data).split(",")] def get_int_list(data, id_type): diff --git a/plex_meta_manager.py b/plex_meta_manager.py index 8607ef9f..713d5d9e 100644 --- a/plex_meta_manager.py +++ b/plex_meta_manager.py @@ -2,14 +2,16 @@ import argparse, logging, os, re, sys, time from datetime import datetime try: import schedule - from modules import tests, util + from modules import util + from modules.builder import CollectionBuilder from modules.config import Config + from modules.util import Failed + from plexapi.exceptions import BadRequest except ModuleNotFoundError: print("Error: Requirements are not installed") sys.exit(0) parser = argparse.ArgumentParser() -parser.add_argument("--my-tests", dest="tests", help=argparse.SUPPRESS, 
action="store_true", default=False) parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False) parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str) parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str) @@ -34,7 +36,6 @@ def check_bool(env_str, default): else: return default -my_tests = check_bool("PMM_TESTS", args.tests) test = check_bool("PMM_TEST", args.test) debug = check_bool("PMM_DEBUG", args.debug) run = check_bool("PMM_RUN", args.run) @@ -76,7 +77,7 @@ file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(lev cmd_handler = logging.StreamHandler() cmd_handler.setFormatter(logging.Formatter("| %(message)-100s |")) -cmd_handler.setLevel(logging.DEBUG if my_tests or test or debug else logging.INFO) +cmd_handler.setLevel(logging.DEBUG if test or debug else logging.INFO) logger.addHandler(cmd_handler) logger.addHandler(file_handler) @@ -91,35 +92,311 @@ util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ") util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ") util.centered(" |___/ ") -util.centered(" Version: 1.8.0 ") +util.centered(" Version: 1.9.0 ") util.separator() -if my_tests: - tests.run_tests(default_dir) - sys.exit(0) - -def start(config_path, is_test, daily, collections_to_run, libraries_to_run, resume_from): +def start(config_path, is_test, daily, requested_collections, requested_libraries, resume_from): if daily: start_type = "Daily " elif is_test: start_type = "Test " - elif collections_to_run: start_type = "Collections " - elif libraries_to_run: start_type = "Libraries " + elif requested_collections: start_type = "Collections " + elif requested_libraries: start_type = 
"Libraries " else: start_type = "" start_time = datetime.now() util.separator(f"Starting {start_type}Run") try: - config = Config(default_dir, config_path, libraries_to_run) - config.update_libraries(is_test, collections_to_run, resume_from) + config = Config(default_dir, config_path, requested_libraries) + update_libraries(config, is_test, requested_collections, resume_from) except Exception as e: util.print_stacktrace() logger.critical(e) logger.info("") util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}") +def update_libraries(config, is_test, requested_collections, resume_from): + for library in config.libraries: + os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) + logger.info("") + util.separator(f"{library.name} Library") + logger.info("") + util.separator(f"Mapping {library.name} Library") + logger.info("") + movie_map, show_map = map_guids(config, library) + if not is_test and not resume_from and library.mass_update: + mass_metadata(config, library, movie_map, show_map) + for metadata in library.metadata_files: + logger.info("") + util.separator(f"Running Metadata File\n{metadata.path}") + if not is_test and not resume_from: + try: + metadata.update_metadata(config.TMDb, is_test) + except Failed as e: + logger.error(e) + logger.info("") + util.separator(f"{'Test ' if is_test else ''}Collections") + collections_to_run = metadata.get_collections(requested_collections) + if resume_from and resume_from not in collections_to_run: + logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}") + continue + if collections_to_run: + resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map) + + if library.show_unmanaged is True and not is_test and not requested_collections: + logger.info("") + util.separator(f"Unmanaged Collections in {library.name} Library") + logger.info("") + unmanaged_count = 0 + collections_in_plex = 
[str(plex_col) for plex_col in library.collections] + for col in library.get_all_collections(): + if col.title not in collections_in_plex: + logger.info(col.title) + unmanaged_count += 1 + logger.info("{} Unmanaged Collections".format(unmanaged_count)) + + if library.assets_for_all is True and not is_test and not requested_collections: + logger.info("") + util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library") + logger.info("") + for item in library.get_all(): + library.update_item_from_assets(item) + has_run_again = False + for library in config.libraries: + if library.run_again: + has_run_again = True + break + + if has_run_again: + logger.info("") + util.separator("Run Again") + logger.info("") + length = 0 + for x in range(1, config.general["run_again_delay"] + 1): + length = util.print_return(length, f"Waiting to run again in {config.general['run_again_delay'] - x + 1} minutes") + for y in range(60): + time.sleep(1) + util.print_end(length) + for library in config.libraries: + if library.run_again: + os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout) + logger.info("") + util.separator(f"{library.name} Library Run Again") + logger.info("") + movie_map, show_map = map_guids(config, library) + for builder in library.run_again: + logger.info("") + util.separator(f"{builder.name} Collection") + logger.info("") + try: + builder.run_collections_again(movie_map, show_map) + except Failed as e: + util.print_stacktrace() + util.print_multiline(e, error=True) + + used_url = [] + for library in config.libraries: + if library.url not in used_url: + used_url.append(library.url) + if library.empty_trash: + library.query(library.PlexServer.library.emptyTrash) + if library.clean_bundles: + library.query(library.PlexServer.library.cleanBundles) + if library.optimize: + library.query(library.PlexServer.library.optimize) + +def map_guids(config, library): + movie_map = {} + show_map = {} + length = 0 + 
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}") + items = library.Plex.all() + for i, item in enumerate(items, 1): + length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}") + try: + id_type, main_id = config.Convert.get_id(item, library, length) + except BadRequest: + util.print_stacktrace() + util.print_end(length, f"Mapping Error: | {item.guid} for {item.title} not found") + continue + if not isinstance(main_id, list): + main_id = [main_id] + if id_type == "movie": + for m in main_id: + if m in movie_map: movie_map[m].append(item.ratingKey) + else: movie_map[m] = [item.ratingKey] + elif id_type == "show": + for m in main_id: + if m in show_map: show_map[m].append(item.ratingKey) + else: show_map[m] = [item.ratingKey] + util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}") + return movie_map, show_map + +def mass_metadata(config, library, movie_map, show_map): + length = 0 + logger.info("") + util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}") + logger.info("") + items = library.Plex.all() + for i, item in enumerate(items, 1): + length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}") + tmdb_id = None + tvdb_id = None + imdb_id = None + if config.Cache: + t_id, guid_media_type, _ = config.Cache.query_guid_map(item.guid) + if t_id: + if "movie" in guid_media_type: + tmdb_id = t_id + else: + tvdb_id = t_id + if not tmdb_id and not tvdb_id: + for tmdb, rating_keys in movie_map.items(): + if item.ratingKey in rating_keys: + tmdb_id = tmdb + break + if not tmdb_id and not tvdb_id and library.is_show: + for tvdb, rating_keys in show_map.items(): + if item.ratingKey in rating_keys: + tvdb_id = tvdb + break + if tmdb_id: + imdb_id = config.Convert.tmdb_to_imdb(tmdb_id) + elif tvdb_id: + tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id) + imdb_id = config.Convert.tvdb_to_imdb(tvdb_id) + + 
tmdb_item = None + if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb": + if tmdb_id: + try: + tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id) + except Failed as e: + util.print_end(length, str(e)) + else: + util.print_end(length, f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}") + + omdb_item = None + if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]: + if config.OMDb.limit is False: + if imdb_id: + try: + omdb_item = config.OMDb.get_omdb(imdb_id) + except Failed as e: + util.print_end(length, str(e)) + else: + util.print_end(length, f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}") + + if not tmdb_item and not omdb_item: + continue + + if library.mass_genre_update: + try: + if tmdb_item and library.mass_genre_update == "tmdb": + new_genres = [genre.name for genre in tmdb_item.genres] + elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]: + new_genres = omdb_item.genres + else: + raise Failed + item_genres = [genre.tag for genre in item.genres] + display_str = "" + for genre in (g for g in item_genres if g not in new_genres): + library.query_data(item.removeGenre, genre) + display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}" + for genre in (g for g in new_genres if g not in item_genres): + library.query_data(item.addGenre, genre) + display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}" + if len(display_str) > 0: + util.print_end(length, f"{item.title[:25]:<25} | Genres | {display_str}") + except Failed: + pass + if library.mass_audience_rating_update or library.mass_critic_rating_update: + try: + if tmdb_item and (library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb"): + new_rating = tmdb_item.vote_average + elif omdb_item and (library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]): + 
new_rating = omdb_item.imdb_rating + else: + raise Failed + if new_rating is None: + util.print_end(length, f"{item.title[:25]:<25} | No Rating Found") + else: + if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating): + library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1}) + util.print_end(length, f"{item.title[:25]:<25} | Audience Rating | {new_rating}") + if library.mass_critic_rating_update and str(item.rating) != str(new_rating): + library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1}) + util.print_end(length, f"{item.title[:25]:<25} | Critic Rating | {new_rating}") + except Failed: + pass + +def run_collection(config, library, metadata, requested_collections, is_test, resume_from, movie_map, show_map): + for mapping_name, collection_attrs in requested_collections.items(): + if is_test and ("test" not in collection_attrs or collection_attrs["test"] is not True): + no_template_test = True + if "template" in collection_attrs and collection_attrs["template"]: + for data_template in util.get_list(collection_attrs["template"], split=False): + if "name" in data_template \ + and data_template["name"] \ + and metadata.templates \ + and data_template["name"] in metadata.templates \ + and metadata.templates[data_template["name"]] \ + and "test" in metadata.templates[data_template["name"]] \ + and metadata.templates[data_template["name"]]["test"] is True: + no_template_test = False + if no_template_test: + continue + try: + if resume_from and resume_from != mapping_name: + continue + elif resume_from == mapping_name: + resume_from = None + logger.info("") + util.separator(f"Resuming Collections") + + logger.info("") + util.separator(f"{mapping_name} Collection") + logger.info("") + + builder = CollectionBuilder(config, library, metadata, mapping_name, collection_attrs) + + if len(builder.schedule) > 0: + util.print_multiline(builder.schedule, info=True) + + logger.info("") + 
logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}") + + if len(builder.filters) > 0: + logger.info("") + for filter_key, filter_value in builder.filters: + logger.info(f"Collection Filter {filter_key}: {filter_value}") + + if not builder.smart_url: + builder.collect_rating_keys(movie_map, show_map) + logger.info("") + if len(builder.rating_keys) > 0: + builder.add_to_collection(movie_map) + if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0: + builder.run_missing() + if builder.sync and len(builder.rating_keys) > 0: + builder.sync_collection() + logger.info("") + + builder.update_details() + + if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0): + library.run_again.append(builder) + + except Failed as e: + util.print_stacktrace() + util.print_multiline(e, error=True) + except Exception as e: + util.print_stacktrace() + logger.error(f"Unknown Error: {e}") + return resume_from + try: if run or test or collections or libraries or resume: start(config_file, test, False, collections, libraries, resume) else: - length = 0 + time_length = 0 schedule.every().day.at(time_to_run).do(start, config_file, False, True, None, None, None) while True: schedule.run_pending() @@ -132,7 +409,7 @@ try: time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else "" time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}" - length = util.print_return(length, f"Current Time: {current} | {time_str} until the daily run at {time_to_run}") + time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the daily run at {time_to_run}") time.sleep(1) except KeyboardInterrupt: util.separator("Exiting Plex Meta Manager")