Merge pull request #392 from meisnate12/develop

v1.12.2
meisnate12 committed via GitHub
commit 6413f2c694

@@ -0,0 +1,12 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "daily"
+    target-branch: "develop"

@@ -4,9 +4,12 @@ from modules.util import Failed
logger = logging.getLogger("Plex Meta Manager")
-builders = ["anilist_id", "anilist_popular", "anilist_relations", "anilist_studio", "anilist_top_rated", "anilist_search"]
+builders = ["anilist_id", "anilist_popular", "anilist_trending", "anilist_relations", "anilist_studio", "anilist_top_rated", "anilist_search"]
-pretty_names = {"score": "Average Score", "popular": "Popularity"}
+pretty_names = {"score": "Average Score", "popular": "Popularity", "trending": "Trending"}
-attr_translation = {"year": "seasonYear", "adult": "isAdult", "start": "startDate", "end": "endDate", "tag_category": "tagCategory", "score": "averageScore", "min_tag_percent": "minimumTagRank"}
+attr_translation = {
+    "year": "seasonYear", "adult": "isAdult", "start": "startDate", "end": "endDate", "tag_category": "tagCategory",
+    "score": "averageScore", "min_tag_percent": "minimumTagRank", "country": "countryOfOrigin",
+}
mod_translation = {"": "in", "not": "not_in", "before": "greater", "after": "lesser", "gt": "greater", "gte": "greater", "lt": "lesser", "lte": "lesser"}
mod_searches = [
    "start.before", "start.after", "end.before", "end.after",
@@ -14,20 +17,35 @@ mod_searches = [
    "episodes.gt", "episodes.gte", "episodes.lt", "episodes.lte", "duration.gt", "duration.gte", "duration.lt", "duration.lte",
    "score.gt", "score.gte", "score.lt", "score.lte", "popularity.gt", "popularity.gte", "popularity.lt", "popularity.lte"
]
-no_mod_searches = ["search", "season", "year", "adult", "min_tag_percent", "limit", "sort_by"]
+no_mod_searches = ["search", "season", "year", "adult", "min_tag_percent", "limit", "sort_by", "source", "country"]
searches = mod_searches + no_mod_searches
-search_types = {
-    "search": "String", "season": "MediaSeason", "seasonYear": "Int", "isAdult": "Boolean", "minimumTagRank": "Int",
-    "startDate": "FuzzyDateInt", "endDate": "FuzzyDateInt", "format": "[MediaFormat]", "status": "[MediaStatus]",
-    "genre": "[String]", "tag": "[String]", "tagCategory": "[String]",
-    "episodes": "Int", "duration": "Int", "averageScore": "Int", "popularity": "Int"
-}
+sort_options = {"score": "SCORE_DESC", "popular": "POPULARITY_DESC", "trending": "TRENDING_DESC"}
media_season = {"winter": "WINTER", "spring": "SPRING", "summer": "SUMMER", "fall": "FALL"}
media_format = {"tv": "TV", "short": "TV_SHORT", "movie": "MOVIE", "special": "SPECIAL", "ova": "OVA", "ona": "ONA", "music": "MUSIC"}
media_status = {"finished": "FINISHED", "airing": "RELEASING", "not_yet_aired": "NOT_YET_RELEASED", "cancelled": "CANCELLED", "hiatus": "HIATUS"}
+media_source = {
+    "original": "ORIGINAL", "manga": "MANGA", "light_novel": "LIGHT_NOVEL", "visual_novel": "VISUAL_NOVEL",
+    "video_game": "VIDEO_GAME", "other": "OTHER", "novel": "NOVEL", "doujinshi": "DOUJINSHI", "anime": "ANIME"
+}
base_url = "https://graphql.anilist.co"
tag_query = "query{MediaTagCollection {name, category}}"
genre_query = "query{GenreCollection}"
+country_codes = [
+    "af", "ax", "al", "dz", "as", "ad", "ao", "ai", "aq", "ag", "ar", "am", "aw", "au", "at", "az", "bs", "bh", "bd",
+    "bb", "by", "be", "bz", "bj", "bm", "bt", "bo", "bq", "ba", "bw", "bv", "br", "io", "bn", "bg", "bf", "bi", "cv",
+    "kh", "cm", "ca", "ky", "cf", "td", "cl", "cn", "cx", "cc", "co", "km", "cg", "cd", "ck", "cr", "ci", "hr", "cu",
+    "cw", "cy", "cz", "dk", "dj", "dm", "do", "ec", "eg", "sv", "gq", "er", "ee", "sz", "et", "fk", "fo", "fj", "fi",
+    "fr", "gf", "pf", "tf", "ga", "gm", "ge", "de", "gh", "gi", "gr", "gl", "gd", "gp", "gu", "gt", "gg", "gn", "gw",
+    "gy", "ht", "hm", "va", "hn", "hk", "hu", "is", "in", "id", "ir", "iq", "ie", "im", "il", "it", "jm", "jp", "je",
+    "jo", "kz", "ke", "ki", "kp", "kr", "kw", "kg", "la", "lv", "lb", "ls", "lr", "ly", "li", "lt", "lu", "mo", "mg",
+    "mw", "my", "mv", "ml", "mt", "mh", "mq", "mr", "mu", "yt", "mx", "fm", "md", "mc", "mn", "me", "ms", "ma", "mz",
+    "mm", "na", "nr", "np", "nl", "nc", "nz", "ni", "ne", "ng", "nu", "nf", "mk", "mp", "no", "om", "pk", "pw", "ps",
+    "pa", "pg", "py", "pe", "ph", "pn", "pl", "pt", "pr", "qa", "re", "ro", "ru", "rw", "bl", "sh", "kn", "lc", "mf",
+    "pm", "vc", "ws", "sm", "st", "sa", "sn", "rs", "sc", "sl", "sg", "sx", "sk", "si", "sb", "so", "za", "gs", "ss",
+    "es", "lk", "sd", "sr", "sj", "se", "ch", "sy", "tw", "tj", "tz", "th", "tl", "tg", "tk", "to", "tt", "tn", "tr",
+    "tm", "tc", "tv", "ug", "ua", "ae", "gb", "us", "um", "uy", "uz", "vu", "ve", "vn", "vg", "vi", "wf", "eh", "ye",
+    "zm", "zw",
+]
class AniList:
    def __init__(self, config):
@@ -35,7 +53,8 @@ class AniList:
        self.options = {
            "Tag": {}, "Tag Category": {},
            "Genre": {g.lower().replace(" ", "-"): g for g in self._request(genre_query, {})["data"]["GenreCollection"]},
-            "Season": media_season, "Format": media_format, "Status": media_status
+            "Country": {c: c.upper() for c in country_codes},
+            "Season": media_season, "Format": media_format, "Status": media_status, "Source": media_source,
        }
        for media_tag in self._request(tag_query, {})["data"]["MediaTagCollection"]:
            self.options["Tag"][media_tag["name"].lower().replace(" ", "-")] = media_tag["name"]
@@ -87,9 +106,8 @@
        return anilist_ids
    def _search(self, **kwargs):
-        query_vars = "$page: Int, $sort: [MediaSort]"
-        media_vars = "sort: $sort, type: ANIME"
-        variables = {"sort": "SCORE_DESC" if kwargs['sort_by'] == "score" else "POPULARITY_DESC"}
+        media_vars = f"sort: {sort_options[kwargs['sort_by']]}, type: ANIME"
+        variables = {"sort": sort_options[kwargs['sort_by']]}
        for key, value in kwargs.items():
            if key not in ["sort_by", "limit"]:
                if "." in key:
@@ -101,16 +119,21 @@
                    final = ani_attr if attr in no_mod_searches else f"{ani_attr}_{mod_translation[mod]}"
                    if attr in ["start", "end"]:
                        value = int(util.validate_date(value, f"anilist_search {key}", return_as="%Y%m%d"))
-                    elif attr in ["season", "format", "status", "genre", "tag", "tag_category"]:
-                        value = self.options[attr.replace("_", " ").title()][value.lower().replace(" / ", "-").replace(" ", "-")]
+                    elif attr in ["format", "status", "genre", "tag", "tag_category"]:
+                        temp_value = [self.options[attr.replace('_', ' ').title()][v.lower().replace(' / ', '-').replace(' ', '-')] for v in value]
+                        if attr in ["format", "status"]:
+                            value = f"[{', '.join(temp_value)}]"
+                        else:
+                            temp = '", "'.join(temp_value)
+                            value = f'["{temp}"]'
+                    elif attr in ["season", "source", "country"]:
+                        value = self.options[attr.replace("_", " ").title()][value]
                    if mod == "gte":
                        value -= 1
                    elif mod == "lte":
                        value += 1
-                    query_vars += f", ${final}: {search_types[ani_attr]}"
-                    media_vars += f", {final}: ${final}"
-                    variables[key] = value
+                    media_vars += f", {final}: {value}"
-        query = f"query ({query_vars}) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}"
+        query = f"query ($page: Int) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}"
        logger.debug(query)
        return self._pagenation(query, limit=kwargs["limit"], variables=variables)
@@ -178,8 +201,7 @@
    def validate(self, name, data):
        valid = []
        for d in util.get_list(data):
-            data_check = d.lower().replace(" / ", "-").replace(" ", "-")
-            if data_check in self.options[name]:
+            if d.lower().replace(" / ", "-").replace(" ", "-") in self.options[name]:
                valid.append(d)
        if len(valid) > 0:
            return valid
@@ -212,20 +234,24 @@
        else:
            if method == "anilist_popular":
                data = {"limit": data, "popularity.gt": 3, "sort_by": "popular"}
+            elif method == "anilist_trending":
+                data = {"limit": data, "sort_by": "trending"}
            elif method == "anilist_top_rated":
                data = {"limit": data, "score.gt": 3, "sort_by": "score"}
            elif method not in builders:
                raise Failed(f"AniList Error: Method {method} not supported")
-            message = f"Processing {method.replace('_', ' ').title().replace('Anilist', 'AniList')}:\nSort By: {pretty_names[data['sort_by']]}"
+            message = f"Processing {method.replace('_', ' ').title().replace('Anilist', 'AniList')}:\n\tSort By {pretty_names[data['sort_by']]}"
            if data['limit'] > 0:
-                message += f"\nLimit: {data['limit']}"
+                message += f"\n\tLimit to {data['limit']} Anime"
            for key, value in data.items():
+                if key not in ["limit", "sort_by"]:
                    if "." in key:
                        attr, mod = key.split(".")
+                        mod = f".{mod}"
                    else:
                        attr = key
                        mod = ""
-                message += f"\n{attr.replace('_', ' ').title()} {util.mod_displays[mod]} {value}"
+                    message += f"\n\t{attr.replace('_', ' ').title()} {util.mod_displays[mod]} {value}"
            util.print_multiline(message)
            anilist_ids = self._search(**data)
            logger.debug("")

@@ -76,13 +76,13 @@ summary_details = [
]
poster_details = ["url_poster", "tmdb_poster", "tmdb_profile", "tvdb_poster", "file_poster"]
background_details = ["url_background", "tmdb_background", "tvdb_background", "file_background"]
-boolean_details = ["visible_library", "visible_home", "visible_shared", "show_filtered", "show_missing", "save_missing", "item_assets", "missing_only_released", "revert_overlay"]
+boolean_details = ["visible_library", "visible_home", "visible_shared", "show_filtered", "show_missing", "save_missing", "item_assets", "missing_only_released", "revert_overlay", "delete_below_minimum"]
string_details = ["sort_title", "content_rating", "name_mapping"]
ignored_details = [
    "smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "test",
-    "tmdb_person", "build_collection", "collection_order", "collection_level", "validate_builders"
+    "tmdb_person", "build_collection", "collection_order", "collection_level", "validate_builders", "collection_name"
]
-details = ["collection_mode", "collection_order", "collection_level", "label"] + boolean_details + string_details
+details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details
collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \
                         poster_details + background_details + summary_details + string_details
item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay"] + list(plex.item_advance_keys.keys())
@@ -94,12 +94,12 @@ sonarr_details = [
all_filters = [
    "actor", "actor.not",
    "audio_language", "audio_language.not",
-    "audio_track_title", "audio_track_title.not", "audio_track_title.begins", "audio_track_title.ends", "audio_track_title.regex",
+    "audio_track_title", "audio_track_title.not", "audio_track_title.is", "audio_track_title.isnot", "audio_track_title.begins", "audio_track_title.ends", "audio_track_title.regex",
    "collection", "collection.not",
    "content_rating", "content_rating.not",
    "country", "country.not",
    "director", "director.not",
-    "filepath", "filepath.not", "filepath.begins", "filepath.ends", "filepath.regex",
+    "filepath", "filepath.not", "filepath.is", "filepath.isnot", "filepath.begins", "filepath.ends", "filepath.regex",
    "genre", "genre.not",
    "label", "label.not",
    "producer", "producer.not",
@@ -108,7 +108,7 @@ all_filters = [
    "last_played", "last_played.not", "last_played.before", "last_played.after", "last_played.regex",
    "first_episode_aired", "first_episode_aired.not", "first_episode_aired.before", "first_episode_aired.after", "first_episode_aired.regex",
    "last_episode_aired", "last_episode_aired.not", "last_episode_aired.before", "last_episode_aired.after", "last_episode_aired.regex",
-    "title", "title.not", "title.begins", "title.ends", "title.regex",
+    "title", "title.not", "title.is", "title.isnot", "title.begins", "title.ends", "title.regex",
    "plays.gt", "plays.gte", "plays.lt", "plays.lte",
    "tmdb_vote_count.gt", "tmdb_vote_count.gte", "tmdb_vote_count.lt", "tmdb_vote_count.lte",
    "duration.gt", "duration.gte", "duration.lt", "duration.lte",
@@ -116,7 +116,7 @@ all_filters = [
    "user_rating.gt", "user_rating.gte", "user_rating.lt", "user_rating.lte",
    "audience_rating.gt", "audience_rating.gte", "audience_rating.lt", "audience_rating.lte",
    "critic_rating.gt", "critic_rating.gte", "critic_rating.lt", "critic_rating.lte",
-    "studio", "studio.not", "studio.begins", "studio.ends", "studio.regex",
+    "studio", "studio.not", "studio.is", "studio.isnot", "studio.begins", "studio.ends", "studio.regex",
    "subtitle_language", "subtitle_language.not",
    "resolution", "resolution.not",
    "writer", "writer.not",
@@ -126,7 +126,7 @@ all_filters = [
tmdb_filters = ["original_language", "tmdb_vote_count", "tmdb_year", "first_episode_aired", "last_episode_aired"]
movie_only_filters = [
    "audio_language", "audio_language.not",
-    "audio_track_title", "audio_track_title.not", "audio_track_title.begins", "audio_track_title.ends", "audio_track_title.regex",
+    "audio_track_title", "audio_track_title.not", "audio_track_title.is", "audio_track_title.isnot", "audio_track_title.begins", "audio_track_title.ends", "audio_track_title.regex",
    "country", "country.not",
    "director", "director.not",
    "duration.gt", "duration.gte", "duration.lt", "duration.lte",
@@ -158,7 +158,7 @@ class CollectionBuilder:
        self.config = config
        self.library = library
        self.metadata = metadata
-        self.name = name
+        self.mapping_name = name
        self.no_missing = no_missing
        self.data = data
        self.language = self.library.Plex.language
@@ -187,11 +187,23 @@ class CollectionBuilder:
        self.backgrounds = {}
        self.summaries = {}
        self.schedule = ""
+        self.minimum = self.library.collection_minimum
+        self.delete_below_minimum = self.library.delete_below_minimum
        self.current_time = datetime.now()
        self.current_year = self.current_time.year
        methods = {m.lower(): m for m in self.data}
+        if "collection_name" in methods:
+            logger.debug("")
+            logger.debug("Validating Method: collection_name")
+            if not self.data[methods["collection_name"]]:
+                raise Failed("Collection Error: collection_name attribute is blank")
+            logger.debug(f"Value: {self.data[methods['collection_name']]}")
+            self.name = self.data[methods["collection_name"]]
+        else:
+            self.name = self.mapping_name
        if "template" in methods:
            logger.debug("")
            logger.debug("Validating Method: template")
@@ -669,6 +681,8 @@ class CollectionBuilder:
                    self.details[method_name] = plex.collection_mode_options[str(method_data).lower()]
                else:
                    raise Failed(f"Collection Error: {method_data} collection_mode invalid\n\tdefault (Library default)\n\thide (Hide Collection)\n\thide_items (Hide Items in this Collection)\n\tshow_items (Show this Collection and its Items)")
+            elif method_name == "collection_minimum":
+                self.minimum = util.parse(method_name, method_data, datatype="int", minimum=1)
            elif method_name == "label":
                if "label" in methods and "label.sync" in methods:
                    raise Failed("Collection Error: Cannot use label and label.sync together")
@@ -774,7 +788,7 @@ class CollectionBuilder:
            elif not dict_data[dict_methods["tag"]]:
                raise Failed("Collection Error: anidb_tag tag attribute is blank")
            else:
-                new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["username"]], "AniDB Tag ID")
+                new_dictionary["tag"] = util.regex_first_int(dict_data[dict_methods["tag"]], "AniDB Tag ID")
            new_dictionary["limit"] = util.parse("limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name, minimum=0)
            self.builders.append((method_name, new_dictionary))
@@ -782,7 +796,7 @@ class CollectionBuilder:
        if method_name in ["anilist_id", "anilist_relations", "anilist_studio"]:
            for anilist_id in self.config.AniList.validate_anilist_ids(method_data, studio=method_name == "anilist_studio"):
                self.builders.append((method_name, anilist_id))
-        elif method_name in ["anilist_popular", "anilist_top_rated"]:
+        elif method_name in ["anilist_popular", "anilist_trending", "anilist_top_rated"]:
            self.builders.append((method_name, util.parse(method_name, method_data, datatype="int", default=10)))
        elif method_name == "anilist_search":
            if self.current_time.month in [12, 1, 2]: current_season = "winter"
@@ -802,13 +816,14 @@ class CollectionBuilder:
                        new_dictionary["year"] = self.current_year
                    elif search_attr == "year":
                        new_dictionary[search_attr] = util.parse(search_attr, search_data, datatype="int", parent=method_name, default=self.current_year, minimum=1917, maximum=self.current_year + 1)
-                        if "season" not in dict_methods:
-                            logger.warning(f"Collection Warning: {method_name} season attribute not found using this season: {current_season} by default")
-                            new_dictionary["season"] = current_season
                    elif search_data is None:
                        raise Failed(f"Collection Error: {method_name} {search_final} attribute is blank")
                    elif search_attr == "adult":
                        new_dictionary[search_attr] = util.parse(search_attr, search_data, datatype="bool", parent=method_name)
+                    elif search_attr == "country":
+                        new_dictionary[search_attr] = util.parse(search_attr, search_data, options=anilist.country_codes, parent=method_name)
+                    elif search_attr == "source":
+                        new_dictionary[search_attr] = util.parse(search_attr, search_data, options=anilist.media_source, parent=method_name)
                    elif search_attr in ["episodes", "duration", "score", "popularity"]:
                        new_dictionary[search_final] = util.parse(search_final, search_data, datatype="int", parent=method_name)
                    elif search_attr in ["format", "status", "genre", "tag", "tag_category"]:
@@ -823,7 +838,7 @@ class CollectionBuilder:
                        raise Failed(f"Collection Error: {method_name} {search_final} attribute not supported")
                if len(new_dictionary) == 0:
                    raise Failed(f"Collection Error: {method_name} must have at least one valid search option")
-                new_dictionary["sort_by"] = util.parse("sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=["score", "popular"])
+                new_dictionary["sort_by"] = util.parse("sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options)
                new_dictionary["limit"] = util.parse("limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name)
                self.builders.append((method_name, new_dictionary))
@@ -1068,13 +1083,13 @@ class CollectionBuilder:
                rating_keys = self.library.Tautulli.get_rating_keys(self.library, value)
            elif "anidb" in method:
                anidb_ids = self.config.AniDB.get_anidb_ids(method, value, self.language)
-                ids = self.config.Convert.anidb_to_ids(anidb_ids)
+                ids = self.config.Convert.anidb_to_ids(anidb_ids, self.library)
            elif "anilist" in method:
                anilist_ids = self.config.AniList.get_anilist_ids(method, value)
-                ids = self.config.Convert.anilist_to_ids(anilist_ids)
+                ids = self.config.Convert.anilist_to_ids(anilist_ids, self.library)
            elif "mal" in method:
                mal_ids = self.config.MyAnimeList.get_mal_ids(method, value)
-                ids = self.config.Convert.myanimelist_to_ids(mal_ids)
+                ids = self.config.Convert.myanimelist_to_ids(mal_ids, self.library)
            elif "tvdb" in method:
                ids = self.config.TVDb.get_tvdb_ids(method, value, self.language)
            elif "imdb" in method:
@@ -1100,7 +1115,9 @@ class CollectionBuilder:
        for i, input_data in enumerate(ids, 1):
            input_id, id_type = input_data
            util.print_return(f"Parsing ID {i}/{total_ids}")
-            if id_type == "tmdb" and not self.parts_collection:
+            if id_type == "ratingKey":
+                rating_keys.append(input_id)
+            elif id_type == "tmdb" and not self.parts_collection:
                if input_id in self.library.movie_map:
                    rating_keys.append(self.library.movie_map[input_id][0])
                elif input_id not in self.missing_movies:
@@ -1122,12 +1139,12 @@ class CollectionBuilder:
                else:
                    if self.do_missing:
                        try:
-                            tmdb_id, tmdb_type = self.config.Convert.imdb_to_tmdb(input_id)
+                            tmdb_id, tmdb_type = self.config.Convert.imdb_to_tmdb(input_id, fail=True)
                            if tmdb_type == "movie":
                                if tmdb_id not in self.missing_movies:
                                    self.missing_movies.append(tmdb_id)
                            else:
-                                tvdb_id = self.config.Convert.tmdb_to_tvdb(tmdb_id)
+                                tvdb_id = self.config.Convert.tmdb_to_tvdb(tmdb_id, fail=True)
                                if tvdb_id not in self.missing_shows:
                                    self.missing_shows.append(tvdb_id)
                        except Failed as e:
@@ -1135,26 +1152,28 @@ class CollectionBuilder:
                            continue
            elif id_type == "tvdb_season" and self.collection_level == "season":
                show_id, season_num = input_id.split("_")
-                if int(show_id) in self.library.show_map:
-                    show_item = self.library.fetchItem(self.library.show_map[int(show_id)][0])
+                show_id = int(show_id)
+                if show_id in self.library.show_map:
+                    show_item = self.library.fetchItem(self.library.show_map[show_id][0])
                    try:
                        episode_item = show_item.season(season=int(season_num))
                        rating_keys.append(episode_item.ratingKey)
                    except NotFound:
                        self.missing_parts.append(f"{show_item.title} Season: {season_num} Missing")
-                elif int(show_id) not in self.missing_shows:
-                    self.missing_shows.append(int(show_id))
+                elif show_id not in self.missing_shows:
+                    self.missing_shows.append(show_id)
            elif id_type == "tvdb_episode" and self.collection_level == "episode":
                show_id, season_num, episode_num = input_id.split("_")
-                if int(show_id) in self.library.show_map:
-                    show_item = self.library.fetchItem(self.library.show_map[int(show_id)][0])
+                show_id = int(show_id)
+                if show_id in self.library.show_map:
+                    show_item = self.library.fetchItem(self.library.show_map[show_id][0])
                    try:
                        episode_item = show_item.episode(season=int(season_num), episode=int(episode_num))
                        rating_keys.append(episode_item.ratingKey)
                    except NotFound:
                        self.missing_parts.append(f"{show_item.title} Season: {season_num} Episode: {episode_num} Missing")
-                elif int(show_id) not in self.missing_shows:
-                    self.missing_shows.append(int(show_id))
+                elif show_id not in self.missing_shows:
+                    self.missing_shows.append(show_id)
        util.print_end()
        if len(rating_keys) > 0:
@@ -1364,7 +1383,7 @@ class CollectionBuilder:
                else:
                    logger.error(err)
            return valid_regex
-        elif attribute in ["title", "studio", "episode_title", "audio_track_title"] and modifier in ["", ".not", ".begins", ".ends"]:
+        elif attribute in ["title", "studio", "episode_title", "audio_track_title"] and modifier in ["", ".not", ".is", ".isnot", ".begins", ".ends"]:
            return smart_pair(util.get_list(data, split=False))
        elif attribute == "original_language":
            return util.get_list(data, lower=True)
@@ -1800,6 +1819,10 @@ class CollectionBuilder:
                    os.remove(og_image)
                self.config.Cache.update_image_map(item.ratingKey, self.library.image_table_name, "", "")
+    def delete_collection(self):
+        if self.obj:
+            self.library.query(self.obj.delete)
+
    def update_details(self):
        if not self.obj and self.smart_url:
            self.library.create_smart_collection(self.name, self.smart_type_key, self.smart_url)
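
The gather loop in the hunks above now also receives "ratingKey" tuples from the Convert helpers and adds those items directly, skipping the TMDb/TVDb lookup. A rough standalone illustration of that dispatch, with placeholder maps and ids (not the builder's actual data):

# Illustrative only: how (id, id_type) tuples are routed; the real code also tracks missing items.
movie_map = {603: [101]}     # tmdb_id -> [rating keys]   (placeholder data)
show_map = {81189: [202]}    # tvdb_id -> [rating keys]   (placeholder data)
ids = [(303, "ratingKey"), (603, "tmdb"), (81189, "tvdb")]

rating_keys = []
for input_id, id_type in ids:
    if id_type == "ratingKey":           # already a Plex item, e.g. matched via the HAMA/MAL agent
        rating_keys.append(input_id)
    elif id_type == "tmdb" and input_id in movie_map:
        rating_keys.append(movie_map[input_id][0])
    elif id_type == "tvdb" and input_id in show_map:
        rating_keys.append(show_map[input_id][0])

print(rating_keys)  # [303, 101, 202]
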

@@ -184,7 +184,9 @@ class Config:
            "show_missing": check_for_attribute(self.data, "show_missing", parent="settings", var_type="bool", default=True),
            "save_missing": check_for_attribute(self.data, "save_missing", parent="settings", var_type="bool", default=True),
            "missing_only_released": check_for_attribute(self.data, "missing_only_released", parent="settings", var_type="bool", default=False),
-            "create_asset_folders": check_for_attribute(self.data, "create_asset_folders", parent="settings", var_type="bool", default=False)
+            "create_asset_folders": check_for_attribute(self.data, "create_asset_folders", parent="settings", var_type="bool", default=False),
+            "collection_minimum": check_for_attribute(self.data, "collection_minimum", parent="settings", var_type="int", default=1),
+            "delete_below_minimum": check_for_attribute(self.data, "delete_below_minimum", parent="settings", var_type="bool", default=False)
        }
        if self.general["cache"]:
            util.separator()
@@ -353,6 +355,8 @@ class Config:
            params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
            params["missing_only_released"] = check_for_attribute(lib, "missing_only_released", parent="settings", var_type="bool", default=self.general["missing_only_released"], do_print=False, save=False)
            params["create_asset_folders"] = check_for_attribute(lib, "create_asset_folders", parent="settings", var_type="bool", default=self.general["create_asset_folders"], do_print=False, save=False)
+            params["collection_minimum"] = check_for_attribute(lib, "collection_minimum", parent="settings", var_type="int", default=self.general["collection_minimum"], do_print=False, save=False)
+            params["delete_below_minimum"] = check_for_attribute(lib, "delete_below_minimum", parent="settings", var_type="bool", default=self.general["delete_below_minimum"], do_print=False, save=False)
            params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_genre_update" in lib)
            if self.OMDb is None and params["mass_genre_update"] == "omdb":

@@ -5,108 +5,70 @@ from plexapi.exceptions import BadRequest
logger = logging.getLogger("Plex Meta Manager")
-arms_url = "https://relations.yuna.moe/api/ids"
-anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"
+anime_lists_url = "https://raw.githubusercontent.com/Fribb/anime-lists/master/anime-list-full.json"
class Convert:
    def __init__(self, config):
        self.config = config
-        self.AniDBIDs = self.config.get_html(anidb_url)
+        self.anidb_ids = {}
+        self.mal_to_anidb = {}
+        self.anilist_to_anidb = {}
+        self.anidb_to_imdb = {}
+        self.anidb_to_tvdb = {}
+        for anime_id in self.config.get_json(anime_lists_url):
+            if "anidb_id" in anime_id:
+                self.anidb_ids[anime_id["anidb_id"]] = anime_id
+                if "mal_id" in anime_id:
+                    self.mal_to_anidb[int(anime_id["mal_id"])] = int(anime_id["anidb_id"])
+                if "anilist_id" in anime_id:
+                    self.anilist_to_anidb[int(anime_id["anilist_id"])] = int(anime_id["anidb_id"])
+                if "imdb_id" in anime_id and str(anime_id["imdb_id"]).startswith("tt"):
+                    self.anidb_to_imdb[int(anime_id["anidb_id"])] = util.get_list(anime_id["imdb_id"])
+                if "thetvdb_id" in anime_id:
+                    self.anidb_to_tvdb[int(anime_id["anidb_id"])] = int(anime_id["thetvdb_id"])
-    def _anidb(self, anidb_id, fail=False):
-        tvdbid = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{anidb_id}')]/@tvdbid")
-        imdbid = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{anidb_id}')]/@imdbid")
-        if len(tvdbid) > 0:
-            if len(imdbid[0]) > 0:
-                imdb_ids = util.get_list(imdbid[0])
-                tmdb_ids = []
-                for imdb in imdb_ids:
-                    tmdb_id, tmdb_type = self.imdb_to_tmdb(imdb)
-                    if tmdb_id and tmdb_type == "movie":
-                        tmdb_ids.append(tmdb_id)
-                if tmdb_ids:
-                    return None, imdb_ids, tmdb_ids
-                else:
-                    fail_text = f"Convert Error: No TMDb ID found for AniDB ID: {anidb_id}"
-            else:
-                try:
-                    return int(tvdbid[0]), [], []
-                except ValueError:
-                    fail_text = f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}"
-        else:
-            fail_text = f"Convert Error: AniDB ID: {anidb_id} not found"
-        if fail:
-            raise Failed(fail_text)
-        return None, [], []
-    def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None):
-        all_ids = []
-        def collect_ids(ids, id_name):
-            if ids:
-                if isinstance(ids, list):
-                    all_ids.extend([{id_name: a_id} for a_id in ids])
-                else:
-                    all_ids.append({id_name: ids})
-        collect_ids(anilist_ids, "anilist")
-        collect_ids(anidb_ids, "anidb")
-        collect_ids(mal_ids, "myanimelist")
-        converted_ids = []
-        unconverted_ids = []
-        unconverted_id_sets = []
-        for anime_dict in all_ids:
-            for id_type, anime_id in anime_dict.items():
-                query_ids = None
-                expired = None
-                if self.config.Cache:
-                    query_ids, expired = self.config.Cache.query_anime_map(anime_id, id_type)
-                    if query_ids and not expired:
-                        converted_ids.append(query_ids)
-                if query_ids is None or expired:
-                    unconverted_ids.append(anime_dict)
-                    if len(unconverted_ids) == 100:
-                        unconverted_id_sets.append(unconverted_ids)
-                        unconverted_ids = []
-        if len(unconverted_ids) > 0:
-            unconverted_id_sets.append(unconverted_ids)
-        for unconverted_id_set in unconverted_id_sets:
-            for anime_ids in self.config.post_json(arms_url, json=unconverted_id_set):
-                if anime_ids:
-                    if self.config.Cache:
-                        self.config.Cache.update_anime_map(False, anime_ids)
-                    converted_ids.append(anime_ids)
-        return converted_ids
-    def anidb_to_ids(self, anidb_list):
+    def anidb_to_ids(self, anidb_ids, library):
        ids = []
+        anidb_list = anidb_ids if isinstance(anidb_ids, list) else [anidb_ids]
        for anidb_id in anidb_list:
-            try:
-                tvdb_id, _, tmdb_ids = self._anidb(anidb_id, fail=True)
-                if tvdb_id:
-                    ids.append((tvdb_id, "tvdb"))
-                if tmdb_ids:
-                    ids.extend([(t, "tmdb") for t in tmdb_ids])
-            except Failed as e:
-                logger.error(e)
+            if anidb_id in library.anidb_map:
+                ids.append((library.anidb_map[anidb_id], "ratingKey"))
+            elif anidb_id in self.anidb_to_imdb:
+                added = False
+                for imdb in self.anidb_to_imdb[anidb_id]:
+                    tmdb, tmdb_type = self.imdb_to_tmdb(imdb)
+                    if tmdb and tmdb_type == "movie":
+                        ids.append((tmdb, "tmdb"))
+                        added = True
+                if added is False and anidb_id in self.anidb_to_tvdb:
+                    ids.append((self.anidb_to_tvdb[anidb_id], "tvdb"))
+            elif anidb_id in self.anidb_to_tvdb:
+                ids.append((self.anidb_to_tvdb[anidb_id], "tvdb"))
+            elif anidb_id in self.anidb_ids:
+                logger.error(f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
+            else:
+                logger.error(f"Convert Error: AniDB ID: {anidb_id} not found")
        return ids
-    def anilist_to_ids(self, anilist_ids):
+    def anilist_to_ids(self, anilist_ids, library):
        anidb_ids = []
-        for id_set in self._arms_ids(anilist_ids=anilist_ids):
-            if id_set["anidb"] is not None:
-                anidb_ids.append(id_set["anidb"])
+        for anilist_id in anilist_ids:
+            if anilist_id in self.anilist_to_anidb:
+                anidb_ids.append(self.anilist_to_anidb[anilist_id])
            else:
-                logger.error(f"Convert Error: AniDB ID not found for AniList ID: {id_set['anilist']}")
-        return self.anidb_to_ids(anidb_ids)
+                logger.error(f"Convert Error: AniDB ID not found for AniList ID: {anilist_id}")
+        return self.anidb_to_ids(anidb_ids, library)
-    def myanimelist_to_ids(self, mal_ids):
-        anidb_ids = []
-        for id_set in self._arms_ids(mal_ids=mal_ids):
-            if id_set["anidb"] is not None:
-                anidb_ids.append(id_set["anidb"])
+    def myanimelist_to_ids(self, mal_ids, library):
+        ids = []
+        for mal_id in mal_ids:
+            if mal_id in library.mal_map:
+                ids.append((library.mal_map[mal_id], "ratingKey"))
+            elif mal_id in self.mal_to_anidb:
+                ids.extend(self.anidb_to_ids(self.mal_to_anidb[mal_id], library))
            else:
-                logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}")
-        return self.anidb_to_ids(anidb_ids)
+                logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {mal_id}")
+        return ids
    def tmdb_to_imdb(self, tmdb_id, is_movie=True, fail=False):
        media_type = "movie" if is_movie else "show"
@@ -259,24 +221,37 @@ class Convert:
                elif item_type == "thetvdb": tvdb_id.append(int(check_id))
                elif item_type == "themoviedb": tmdb_id.append(int(check_id))
                elif item_type == "hama":
-                    if check_id.startswith("tvdb"): tvdb_id.append(int(re.search("-(.*)", check_id).group(1)))
-                    elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1)
-                    else: raise Failed(f"Hama Agent ID: {check_id} not supported")
+                    if check_id.startswith("tvdb"):
+                        tvdb_id.append(int(re.search("-(.*)", check_id).group(1)))
+                    elif check_id.startswith("anidb"):
+                        anidb_id = int(re.search("-(.*)", check_id).group(1))
+                        library.anidb_map[anidb_id] = item.ratingKey
+                    else:
+                        raise Failed(f"Hama Agent ID: {check_id} not supported")
                elif item_type == "myanimelist":
-                    anime_ids = self._arms_ids(mal_ids=check_id)
-                    if anime_ids[0] and anime_ids[0]["anidb"]: anidb_id = anime_ids[0]["anidb"]
-                    else: raise Failed(f"Unable to convert MyAnimeList ID: {check_id} to AniDB ID")
+                    library.mal_map[int(check_id)] = item.ratingKey
+                    if check_id in self.mal_to_anidb:
+                        anidb_id = self.mal_to_anidb[check_id]
+                    else:
+                        raise Failed(f"Convert Error: AniDB ID not found for MyAnimeList ID: {check_id}")
                elif item_type == "local": raise Failed("No match in Plex")
                else: raise Failed(f"Agent {item_type} not supported")
            if anidb_id:
-                ani_tvdb, ani_imdb, ani_tmdb = self._anidb(anidb_id, fail=True)
-                if ani_imdb:
-                    imdb_id.extend(ani_imdb)
-                if ani_tmdb:
-                    tmdb_id.extend(ani_tmdb)
-                if ani_tvdb:
-                    tvdb_id.append(ani_tvdb)
+                if anidb_id in self.anidb_to_imdb:
+                    added = False
+                    for imdb in self.anidb_to_imdb[anidb_id]:
+                        tmdb, tmdb_type = self.imdb_to_tmdb(imdb)
+                        if tmdb and tmdb_type == "movie":
+                            imdb_id.append(imdb)
+                            tmdb_id.append(tmdb)
+                            added = True
+                    if added is False and anidb_id in self.anidb_to_tvdb:
+                        tvdb_id.append(self.anidb_to_tvdb[anidb_id])
+                elif anidb_id in self.anidb_to_tvdb:
+                    tvdb_id.append(self.anidb_to_tvdb[anidb_id])
+                else:
+                    raise Failed(f"AniDB: {anidb_id} not found")
            else:
                if not tmdb_id and imdb_id:
                    for imdb in imdb_id:
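
The hunks above replace the per-request relations API with a single download of the Fribb anime-lists JSON, flattened into dictionaries at startup so every later conversion is a plain lookup. A self-contained sketch of that flattening, with two invented entries standing in for the real file:

# Sketch of the lookup tables the new Convert.__init__ builds; sample entries are invented.
entries = [
    {"anidb_id": 1, "mal_id": 5, "anilist_id": 9, "imdb_id": "tt0112159", "thetvdb_id": 70845},
    {"anidb_id": 2, "mal_id": 6, "thetvdb_id": 70846},
]
mal_to_anidb, anilist_to_anidb, anidb_to_imdb, anidb_to_tvdb = {}, {}, {}, {}
for entry in entries:
    if "anidb_id" not in entry:
        continue                             # only entries with an AniDB ID are indexed
    anidb = int(entry["anidb_id"])
    if "mal_id" in entry:
        mal_to_anidb[int(entry["mal_id"])] = anidb
    if "anilist_id" in entry:
        anilist_to_anidb[int(entry["anilist_id"])] = anidb
    if "imdb_id" in entry and str(entry["imdb_id"]).startswith("tt"):
        anidb_to_imdb[anidb] = [entry["imdb_id"]]
    if "thetvdb_id" in entry:
        anidb_to_tvdb[anidb] = int(entry["thetvdb_id"])

print(mal_to_anidb, anidb_to_tvdb)  # {5: 1, 6: 2} {1: 70845, 2: 70846}
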

@@ -18,19 +18,18 @@ class Metadata:
        logger.info("")
        logger.info(f"Loading Metadata {file_type}: {path}")
        def get_dict(attribute, attr_data, check_list=None):
+            if check_list is None:
+                check_list = []
            if attr_data and attribute in attr_data:
                if attr_data[attribute]:
                    if isinstance(attr_data[attribute], dict):
-                        if check_list:
                        new_dict = {}
                        for a_name, a_data in attr_data[attribute].items():
                            if a_name in check_list:
                                logger.error(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {a_name}")
                            else:
-                                new_dict[a_name] = a_data
+                                new_dict[str(a_name)] = a_data
                        return new_dict
-                        else:
-                            return attr_data[attribute]
                    else:
                        logger.warning(f"Config Warning: {attribute} must be a dictionary")
                else:
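
With the change above, get_dict always walks the mapping, skips names already seen, and normalizes keys to strings (YAML keys such as 1984 arrive as ints). A trimmed, standalone sketch of that behaviour, assuming an invented sample mapping (not the real metadata loader):

import logging
logger = logging.getLogger("Plex Meta Manager")

def get_dict(attribute, attr_data, check_list=None):
    # Simplified from the hunk above: no type-validation branches.
    if check_list is None:
        check_list = []
    new_dict = {}
    for a_name, a_data in (attr_data.get(attribute) or {}).items():
        if a_name in check_list:
            logger.error(f"Config Warning: Skipping duplicate {attribute}: {a_name}")
        else:
            new_dict[str(a_name)] = a_data
    return new_dict

print(get_dict("collections", {"collections": {1984: {}, "New": {}}}, check_list=["New"]))
# {'1984': {}}
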

@@ -66,7 +66,7 @@ show_translation = {
    "label": "show.label",
}
modifier_translation = {
-    "": "", ".not": "!", ".gt": "%3E%3E", ".gte": "%3E", ".lt": "%3C%3C", ".lte": "%3C",
+    "": "", ".not": "!", ".is": "%3D", ".isnot": "!%3D", ".gt": "%3E%3E", ".gte": "%3E", ".lt": "%3C%3C", ".lte": "%3C",
    ".before": "%3C%3C", ".after": "%3E%3E", ".begins": "%3C", ".ends": "%3E"
}
episode_sorting_options = {"default": "-1", "oldest": "0", "newest": "1"}
@@ -101,8 +101,8 @@ item_advance_keys = {
}
new_plex_agents = ["tv.plex.agents.movie", "tv.plex.agents.series"]
searches = [
-    "title", "title.not", "title.begins", "title.ends",
+    "title", "title.not", "title.is", "title.isnot", "title.begins", "title.ends",
-    "studio", "studio.not", "studio.begins", "studio.ends",
+    "studio", "studio.not", "studio.is", "studio.isnot", "studio.begins", "studio.ends",
    "actor", "actor.not",
    "audio_language", "audio_language.not",
    "collection", "collection.not",
@@ -126,7 +126,7 @@ searches = [
    "audience_rating.gt", "audience_rating.gte", "audience_rating.lt", "audience_rating.lte",
    "year", "year.not", "year.gt", "year.gte", "year.lt", "year.lte",
    "unplayed_episodes", "episode_unplayed", "episode_duplicate", "episode_progress", "episode_unmatched",
-    "episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends",
+    "episode_title", "episode_title.not", "episode_title.is", "episode_title.isnot", "episode_title.begins", "episode_title.ends",
    "episode_added", "episode_added.not", "episode_added.before", "episode_added.after",
    "episode_air_date", "episode_air_date.not", "episode_air_date.before", "episode_air_date.after",
    "episode_last_played", "episode_last_played.not", "episode_last_played.before", "episode_last_played.after",
@@ -151,7 +151,7 @@ movie_only_searches = [
]
show_only_searches = [
    "network", "network.not",
-    "episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends",
+    "episode_title", "episode_title.not", "episode_title.is", "episode_title.isnot", "episode_title.begins", "episode_title.ends",
    "episode_added", "episode_added.not", "episode_added.before", "episode_added.after",
    "episode_air_date", "episode_air_date.not",
    "episode_air_date.before", "episode_air_date.after",
@@ -170,17 +170,6 @@ tmdb_attributes = ["actor", "director", "producer", "writer"]
date_attributes = ["added", "episode_added", "release", "episode_air_date", "last_played", "episode_last_played", "first_episode_aired", "last_episode_aired"]
number_attributes = ["plays", "episode_plays", "duration", "tmdb_vote_count"] + date_attributes
search_display = {"added": "Date Added", "release": "Release Date", "hdr": "HDR", "progress": "In Progress", "episode_progress": "Episode In Progress"}
-sorts = {
-    None: None,
-    "title.asc": "titleSort:asc", "title.desc": "titleSort:desc",
-    "originally_available.asc": "originallyAvailableAt:asc", "originally_available.desc": "originallyAvailableAt:desc",
-    "release.asc": "originallyAvailableAt:asc", "release.desc": "originallyAvailableAt:desc",
-    "critic_rating.asc": "rating:asc", "critic_rating.desc": "rating:desc",
-    "audience_rating.asc": "audienceRating:asc", "audience_rating.desc": "audienceRating:desc",
-    "duration.asc": "duration:asc", "duration.desc": "duration:desc",
-    "added.asc": "addedAt:asc", "added.desc": "addedAt:desc"
-}
-modifiers = {".not": "!", ".begins": "<", ".ends": ">", ".before": "<<", ".after": ">>", ".gt": ">>", ".gte": "__gte", ".lt": "<<", ".lte": "__lte"}
tags = [
    "actor", "audio_language", "collection", "content_rating", "country", "director", "genre", "label",
    "network", "producer", "resolution", "studio", "subtitle_language", "writer"
@@ -313,6 +302,8 @@ class Plex:
        logger.info(output)
        self.image_table_name = self.config.Cache.get_image_table_name(self.original_mapping_name) if self.config.Cache else None
        self.missing_path = os.path.join(params["default_dir"], f"{self.name}_missing.yml")
+        self.collection_minimum = params["collection_minimum"]
+        self.delete_below_minimum = params["delete_below_minimum"]
        self.metadata_path = params["metadata_path"]
        self.asset_directory = params["asset_directory"]
        self.asset_folders = params["asset_folders"]
@@ -340,6 +331,8 @@ class Plex:
        self.movie_map = {}
        self.show_map = {}
        self.imdb_map = {}
+        self.anidb_map = {}
+        self.mal_map = {}
        self.movie_rating_key_map = {}
        self.show_rating_key_map = {}
        self.run_again = []
@@ -763,7 +756,7 @@ class Plex:
            logger.debug(f"Details Update: {edits}")
            try:
                self.edit_query(item, edits, advanced=advanced)
-                if advanced and "languageOverride" in edits:
+                if advanced and ("languageOverride" in edits or "useOriginalTitle" in edits):
                    self.query(item.refresh)
                logger.info(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Successful")
                return True
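
The new ".is" / ".isnot" modifiers above map to the URL-encoded "=" and "!=" operators used in Plex smart-filter query strings. A small sketch of how such an encoded operator might be spliced into a filter fragment; the fragment shape and helper name are illustrative, not the library's exact URL builder:

modifier_translation = {
    "": "", ".not": "!", ".is": "%3D", ".isnot": "!%3D", ".gt": "%3E%3E", ".gte": "%3E",
    ".lt": "%3C%3C", ".lte": "%3C", ".before": "%3C%3C", ".after": "%3E%3E", ".begins": "%3C", ".ends": "%3E",
}

def smart_filter_fragment(field, modifier, value):
    # Assumed shape: field + encoded operator + "=" + value, as it would appear in a smart filter URL.
    return f"{field}{modifier_translation[modifier]}={value}"

print(smart_filter_fragment("title", ".is", "Dune"))     # title%3D=Dune
print(smart_filter_fragment("studio", ".isnot", "A24"))  # studio!%3D=A24
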

@@ -40,7 +40,7 @@ class Radarr:
        tags = options["tag"] if "tag" in options else self.tag
        search = options["search"] if "search" in options else self.search
        try:
-            added, exists, invalid = self.api.add_multiple_movies(tmdb_ids, folder, quality_profile, monitor, search, availability, tags)
+            added, exists, invalid = self.api.add_multiple_movies(tmdb_ids, folder, quality_profile, monitor, search, availability, tags, per_request=100)
        except Invalid as e:
            raise Failed(f"Radarr Error: {e}")
@@ -65,7 +65,7 @@ class Radarr:
            logger.info("")
            logger.info(f"{apply_tags_translation[apply_tags].capitalize()} Radarr Tags: {tags}")
-            edited, not_exists = self.api.edit_multiple_movies(tmdb_ids, tags=tags, apply_tags=apply_tags_translation[apply_tags])
+            edited, not_exists = self.api.edit_multiple_movies(tmdb_ids, tags=tags, apply_tags=apply_tags_translation[apply_tags], per_request=100)
        if len(edited) > 0:
            logger.info("")

@@ -66,7 +66,7 @@ class Sonarr:
        search = options["search"] if "search" in options else self.search
        cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search
        try:
-            added, exists, invalid = self.api.add_multiple_series(tvdb_ids, folder, quality_profile, language_profile, monitor, season, search, cutoff_search, series, tags)
+            added, exists, invalid = self.api.add_multiple_series(tvdb_ids, folder, quality_profile, language_profile, monitor, season, search, cutoff_search, series, tags, per_request=100)
        except Invalid as e:
            raise Failed(f"Sonarr Error: {e}")
@@ -91,7 +91,7 @@ class Sonarr:
            logger.info("")
            logger.info(f"{apply_tags_translation[apply_tags].capitalize()} Sonarr Tags: {tags}")
-            edited, not_exists = self.api.edit_multiple_series(tvdb_ids, tags=tags, apply_tags=apply_tags_translation[apply_tags])
+            edited, not_exists = self.api.edit_multiple_series(tvdb_ids, tags=tags, apply_tags=apply_tags_translation[apply_tags], per_request=100)
        if len(edited) > 0:
            logger.info("")

@ -49,7 +49,7 @@ days_alias = {
"sunday": 6, "sun": 6, "su": 6, "u": 6 "sunday": 6, "sun": 6, "su": 6, "u": 6
} }
mod_displays = { mod_displays = {
"": "is", ".not": "is not", ".begins": "begins with", ".ends": "ends with", ".before": "is before", ".after": "is after", "": "is", ".not": "is not", ".is": "is", ".isnot": "is not", ".begins": "begins with", ".ends": "ends with", ".before": "is before", ".after": "is after",
".gt": "is greater than", ".gte": "is greater than or equal", ".lt": "is less than", ".lte": "is less than or equal" ".gt": "is greater than", ".gte": "is greater than or equal", ".lt": "is less than", ".lte": "is less than or equal"
} }
pretty_days = {0: "Monday", 1: "Tuesday", 2: "Wednesday", 3: "Thursday", 4: "Friday", 5: "Saturday", 6: "Sunday"} pretty_days = {0: "Monday", 1: "Tuesday", 2: "Wednesday", 3: "Thursday", 4: "Friday", 5: "Saturday", 6: "Sunday"}
@@ -282,13 +282,14 @@ def is_string_filter(values, modifier, data):
     for value in values:
         for check_value in data:
             if (modifier in ["", ".not"] and check_value.lower() in value.lower()) \
+                    or (modifier in [".is", ".isnot"] and value.lower() == check_value.lower()) \
                     or (modifier == ".begins" and value.lower().startswith(check_value.lower())) \
                     or (modifier == ".ends" and value.lower().endswith(check_value.lower())) \
                     or (modifier == ".regex" and re.compile(check_value).match(value)):
                 jailbreak = True
                 break
         if jailbreak: break
-    return (jailbreak and modifier == ".not") or (not jailbreak and modifier in ["", ".begins", ".ends", ".regex"])
+    return (jailbreak and modifier in [".not", ".isnot"]) or (not jailbreak and modifier in ["", ".is", ".begins", ".ends", ".regex"])

 def parse(attribute, data, datatype=None, methods=None, parent=None, default=None, options=None, translation=None, minimum=1, maximum=None, regex=None):
     display = f"{parent + ' ' if parent else ''}{attribute} attribute"

@@ -1,4 +1,4 @@
-import argparse, logging, os, re, sys, time
+import argparse, logging, os, sys, time
 from datetime import datetime
 from logging.handlers import RotatingFileHandler
 try:
@@ -18,7 +18,7 @@ if sys.version_info[0] != 3 or sys.version_info[1] < 6:
 parser = argparse.ArgumentParser()
 parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
 parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
-parser.add_argument("-t", "--time", dest="time", help="Times to update each day use format HH:MM (Default: 03:00) (comma-separated list)", default="03:00", type=str)
+parser.add_argument("-t", "--time", "--times", dest="times", help="Times to update each day use format HH:MM (Default: 03:00) (comma-separated list)", default="03:00", type=str)
 parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str)
 parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
 parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False)
@@ -32,47 +32,54 @@ parser.add_argument("-d", "--divider", dest="divider", help="Character that divi
 parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
 args = parser.parse_args()

-def check_bool(env_str, default):
+def get_arg(env_str, default, arg_bool=False, arg_int=False):
     env_var = os.environ.get(env_str)
-    if env_var is not None:
-        if env_var is True or env_var is False:
-            return env_var
-        elif env_var.lower() in ["t", "true"]:
-            return True
-        else:
-            return False
+    if env_var:
+        if arg_bool:
+            if env_var is True or env_var is False:
+                return env_var
+            elif env_var.lower() in ["t", "true"]:
+                return True
+            else:
+                return False
+        elif arg_int:
+            return int(env_var)
+        else:
+            return str(env_var)
     else:
         return default

-test = check_bool("PMM_TEST", args.test)
-debug = check_bool("PMM_DEBUG", args.debug)
-run = check_bool("PMM_RUN", args.run)
-no_countdown = check_bool("PMM_NO_COUNTDOWN", args.no_countdown)
-no_missing = check_bool("PMM_NO_MISSING", args.no_missing)
-library_only = check_bool("PMM_LIBRARIES_ONLY", args.library_only)
-collection_only = check_bool("PMM_COLLECTIONS_ONLY", args.collection_only)
-collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections
-libraries = os.environ.get("PMM_LIBRARIES") if os.environ.get("PMM_LIBRARIES") else args.libraries
-resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume
-
-times_to_run = util.get_list(os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time)
-for time_to_run in times_to_run:
-    if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", time_to_run):
-        raise util.Failed(f"Argument Error: time argument invalid: {time_to_run} must be in the HH:MM format")
-
-util.separating_character = os.environ.get("PMM_DIVIDER")[0] if os.environ.get("PMM_DIVIDER") else args.divider[0]
-
-screen_width = int(os.environ.get("PMM_WIDTH")) if os.environ.get("PMM_WIDTH") else args.width
-if 90 <= screen_width <= 300:
-    util.screen_width = screen_width
-else:
-    raise util.Failed(f"Argument Error: width argument invalid: {screen_width} must be an integer between 90 and 300")
-
-config_file = os.environ.get("PMM_CONFIG") if os.environ.get("PMM_CONFIG") else args.config
+test = get_arg("PMM_TEST", args.test, arg_bool=True)
+debug = get_arg("PMM_DEBUG", args.debug, arg_bool=True)
+run = get_arg("PMM_RUN", args.run, arg_bool=True)
+no_countdown = get_arg("PMM_NO_COUNTDOWN", args.no_countdown, arg_bool=True)
+no_missing = get_arg("PMM_NO_MISSING", args.no_missing, arg_bool=True)
+library_only = get_arg("PMM_LIBRARIES_ONLY", args.library_only, arg_bool=True)
+collection_only = get_arg("PMM_COLLECTIONS_ONLY", args.collection_only, arg_bool=True)
+collections = get_arg("PMM_COLLECTIONS", args.collections)
+libraries = get_arg("PMM_LIBRARIES", args.libraries)
+resume = get_arg("PMM_RESUME", args.resume)
+times = get_arg("PMM_TIME", args.times)
+divider = get_arg("PMM_DIVIDER", args.divider)
+screen_width = get_arg("PMM_WIDTH", args.width)
+config_file = get_arg("PMM_CONFIG", args.config)
+
+util.separating_character = divider[0]
+
+if screen_width < 90 or screen_width > 300:
+    print(f"Argument Error: width argument invalid: {screen_width} must be an integer between 90 and 300 using the default 100")
+    screen_width = 100
+util.screen_width = screen_width

 default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
-if config_file and os.path.exists(config_file): default_dir = os.path.join(os.path.dirname(os.path.abspath(config_file)))
-elif config_file and not os.path.exists(config_file): raise util.Failed(f"Config Error: config not found at {os.path.abspath(config_file)}")
-elif not os.path.exists(os.path.join(default_dir, "config.yml")): raise util.Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
+if config_file and os.path.exists(config_file):
+    default_dir = os.path.join(os.path.dirname(os.path.abspath(config_file)))
+elif config_file and not os.path.exists(config_file):
+    print(f"Config Error: config not found at {os.path.abspath(config_file)}")
+    sys.exit(0)
+elif not os.path.exists(os.path.join(default_dir, "config.yml")):
+    print(f"Config Error: config not found at {os.path.abspath(default_dir)}")
+    sys.exit(0)

 os.makedirs(os.path.join(default_dir, "logs"), exist_ok=True)
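The check_bool helper is generalized into get_arg, which resolves every environment-variable override in one place and can coerce values to bool or int. A short, self-contained usage sketch, assuming the function body exactly as shown above (the environment values below are placeholders, not the script's real call sites):

import os

# get_arg copied from the diff above, for a runnable illustration.
def get_arg(env_str, default, arg_bool=False, arg_int=False):
    env_var = os.environ.get(env_str)
    if env_var:
        if arg_bool:
            if env_var is True or env_var is False:
                return env_var
            elif env_var.lower() in ["t", "true"]:
                return True
            else:
                return False
        elif arg_int:
            return int(env_var)
        else:
            return str(env_var)
    else:
        return default

os.environ["PMM_WIDTH"] = "120"                   # placeholder override
print(get_arg("PMM_WIDTH", 100, arg_int=True))    # 120: the environment wins
print(get_arg("PMM_RUN", False, arg_bool=True))   # False: unset, default used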
@@ -108,7 +115,7 @@ def start(config_path, is_test=False, time_scheduled=None, requested_collections
     logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
     logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
     logger.info(util.centered(" |___/ "))
-    logger.info(util.centered(" Version: 1.12.1 "))
+    logger.info(util.centered(" Version: 1.12.2 "))
     if time_scheduled:              start_type = f"{time_scheduled} "
     elif is_test:                   start_type = "Test "
     elif requested_collections:     start_type = "Collections "
@@ -490,11 +497,18 @@ def run_collection(config, library, metadata, requested_collections):
             builder.find_rating_keys()

-            if len(builder.rating_keys) > 0 and builder.build_collection:
+            if len(builder.rating_keys) >= builder.minimum and builder.build_collection:
                 logger.info("")
                 util.separator(f"Adding to {mapping_name} Collection", space=False, border=False)
                 logger.info("")
                 builder.add_to_collection()
+            elif len(builder.rating_keys) < builder.minimum and builder.build_collection:
+                logger.info("")
+                logger.info(f"Collection minimum: {builder.minimum} not met for {mapping_name} Collection")
+                logger.info("")
+                if library.delete_below_minimum and builder.obj:
+                    builder.delete_collection()
+                    logger.info(f"Collection {builder.obj.title} deleted")
             if builder.do_missing and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0):
                 if builder.details["show_missing"] is True:
                     logger.info("")
@@ -536,7 +550,17 @@ try:
     if run or test or collections or libraries or resume:
         start(config_file, is_test=test, requested_collections=collections, requested_libraries=libraries, resume_from=resume)
     else:
+        times_to_run = util.get_list(times)
+        valid_times = []
         for time_to_run in times_to_run:
+            try:
+                valid_times.append(datetime.strftime(datetime.strptime(time_to_run, "%H:%M"), "%H:%M"))
+            except ValueError:
+                if time_to_run:
+                    raise Failed(f"Argument Error: time argument invalid: {time_to_run} must be in the HH:MM format")
+                else:
+                    raise Failed(f"Argument Error: blank time argument")
+        for time_to_run in valid_times:
             schedule.every().day.at(time_to_run).do(start, config_file, time_scheduled=time_to_run)
         while True:
             schedule.run_pending()
@@ -544,7 +568,7 @@ try:
             current = datetime.now().strftime("%H:%M")
             seconds = None
             og_time_str = ""
-            for time_to_run in times_to_run:
+            for time_to_run in valid_times:
                 new_seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
                 if new_seconds < 0:
                     new_seconds += 86400
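Time handling now validates each entry by round-tripping it through datetime.strptime/strftime instead of a regex, and the countdown reuses the same parsed format. A minimal sketch of both steps, using only the standard library (the sample times are placeholders):

from datetime import datetime

# Validation round-trip as in the hunks above: "3:05" normalizes to "03:05",
# "25:00" raises ValueError and would be reported as an invalid time argument.
for candidate in ["03:00", "3:05", "25:00"]:
    try:
        print(candidate, "->", datetime.strftime(datetime.strptime(candidate, "%H:%M"), "%H:%M"))
    except ValueError:
        print(candidate, "-> invalid, must be in the HH:MM format")

# Countdown arithmetic from the hunk above: seconds until the scheduled time,
# wrapping past midnight by adding a full day.
current = datetime.now().strftime("%H:%M")
seconds = (datetime.strptime("03:00", "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
if seconds < 0:
    seconds += 86400
print(f"{seconds:.0f} seconds until the 03:00 run")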

@@ -1,10 +1,10 @@
 PlexAPI==4.7.0
 tmdbv3api==1.7.6
-arrapi==1.1.2
+arrapi==1.1.3
 lxml==4.6.3
 requests==2.26.0
 ruamel.yaml==0.17.10
 schedule==1.1.0
 retrying==1.3.3
 pathvalidate==2.4.1
-pillow==8.3.1
+pillow==8.3.2