Merge pull request #76 from meisnate12/develop

v1.3.0
meisnate12 committed by GitHub
commit 288fa7ecba

.gitignore

@@ -7,8 +7,9 @@ __pycache__/
*.so
# Distribution / packaging
.idea
.Python
/modules/test.py
/test.py
logs/
config/*
!config/*.template

@@ -1,5 +1,5 @@
# Plex Meta Manager
#### Version 1.2.2
#### Version 1.3.0
The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be run continuously, using YAML configuration files to update the metadata of the movies, shows, and collections in your libraries on a schedule, and to automatically build collections based on various methods, all detailed in the wiki. Some examples of collections the script can build and update daily include Plex-based searches such as actor, genre, or studio collections, or collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists and various other services.

@@ -16,6 +16,7 @@ settings: # Can be individually specified
show_filtered: false
show_missing: true
save_missing: true
run_again_delay: 2
plex: # Can be individually specified per library as well
url: http://192.168.1.12:32400
token: ####################

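The new run_again_delay setting pairs with the run_again collection attribute handled later in the builder: a collection flagged with run_again appears to be reprocessed after the main run so items that were still missing the first time around can be picked up, with run_again_delay controlling how long to wait (presumably in minutes). A rough sketch of the two together; the collection name and list URL are placeholders:

settings:
  run_again_delay: 2
collections:
  Example Collection:                                         # placeholder name
    trakt_list: https://trakt.tv/users/example/lists/example  # placeholder list URL
    run_again: true                                           # the builder only checks for the key's presence
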
@@ -23,14 +23,14 @@ class AniDBAPI:
def convert_tvdb_to_anidb(self, tvdb_id): return self.convert_anidb(tvdb_id, "tvdbid", "anidbid")
def convert_imdb_to_anidb(self, imdb_id): return self.convert_anidb(imdb_id, "imdbid", "anidbid")
def convert_anidb(self, input_id, from_id, to_id):
ids = self.id_list.xpath("//anime[contains(@{}, '{}')]/@{}".format(from_id, input_id, to_id))
ids = self.id_list.xpath(f"//anime[contains(@{from_id}, '{input_id}')]/@{to_id}")
if len(ids) > 0:
if from_id == "tvdbid": return [int(id) for id in ids]
if from_id == "tvdbid": return [int(i) for i in ids]
if len(ids[0]) > 0:
try: return ids[0].split(",") if to_id == "imdbid" else int(ids[0])
except ValueError: raise Failed("AniDB Error: No {} ID found for {} ID: {}".format(util.pretty_ids[to_id], util.pretty_ids[from_id], input_id))
else: raise Failed("AniDB Error: No {} ID found for {} ID: {}".format(util.pretty_ids[to_id], util.pretty_ids[from_id], input_id))
else: raise Failed("AniDB Error: {} ID: {} not found".format(util.pretty_ids[from_id], input_id))
except ValueError: raise Failed(f"AniDB Error: No {util.pretty_ids[to_id]} ID found for {util.pretty_ids[from_id]} ID: {input_id}")
else: raise Failed(f"AniDB Error: No {util.pretty_ids[to_id]} ID found for {util.pretty_ids[from_id]} ID: {input_id}")
else: raise Failed(f"AniDB Error: {util.pretty_ids[from_id]} ID: {input_id} not found")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url, language):
@@ -41,14 +41,14 @@ class AniDBAPI:
return util.get_int_list(response.xpath("//td[@class='name anime']/a/@href"), "AniDB ID")
def validate_anidb_id(self, anidb_id, language):
response = self.send_request("{}/{}".format(self.urls["anime"], anidb_id), language)
ids = response.xpath("//*[text()='a{}']/text()".format(anidb_id))
response = self.send_request(f"{self.urls['anime']}/{anidb_id}", language)
ids = response.xpath(f"//*[text()='a{anidb_id}']/text()")
if len(ids) > 0:
return util.regex_first_int(ids[0], "AniDB ID")
raise Failed("AniDB Error: AniDB ID: {} not found".format(anidb_id))
raise Failed(f"AniDB Error: AniDB ID: {anidb_id} not found")
def get_anidb_relations(self, anidb_id, language):
response = self.send_request("{}/{}{}".format(self.urls["anime"], anidb_id, self.urls["relation"]), language)
response = self.send_request(f"{self.urls['anime']}/{anidb_id}{self.urls['relation']}", language)
return util.get_int_list(response.xpath("//area/@href"), "AniDB ID")
def validate_anidb_list(self, anidb_list, language):
@@ -60,47 +60,47 @@ class AniDBAPI:
logger.error(e)
if len(anidb_values) > 0:
return anidb_values
raise Failed("AniDB Error: No valid AniDB IDs in {}".format(anidb_list))
raise Failed(f"AniDB Error: No valid AniDB IDs in {anidb_list}")
def get_items(self, method, data, language, status_message=True):
pretty = util.pretty_names[method] if method in util.pretty_names else method
if status_message:
logger.debug("Data: {}".format(data))
logger.debug(f"Data: {data}")
anime_ids = []
if method == "anidb_popular":
if status_message:
logger.info("Processing {}: {} Anime".format(pretty, data))
logger.info(f"Processing {pretty}: {data} Anime")
anime_ids.extend(self.get_popular(language)[:data])
else:
if status_message: logger.info("Processing {}: {}".format(pretty, data))
if status_message: logger.info(f"Processing {pretty}: {data}")
if method == "anidb_id": anime_ids.append(data)
elif method == "anidb_relation": anime_ids.extend(self.get_anidb_relations(data, language))
else: raise Failed("AniDB Error: Method {} not supported".format(method))
else: raise Failed(f"AniDB Error: Method {method} not supported")
show_ids = []
movie_ids = []
for anidb_id in anime_ids:
try:
tmdb_id = self.convert_from_imdb(self.convert_anidb_to_imdb(anidb_id), language)
tmdb_id = self.convert_from_imdb(self.convert_anidb_to_imdb(anidb_id))
if tmdb_id: movie_ids.append(tmdb_id)
else: raise Failed
except Failed:
try: show_ids.append(self.convert_anidb_to_tvdb(anidb_id))
except Failed: logger.error("AniDB Error: No TVDb ID or IMDb ID found for AniDB ID: {}".format(anidb_id))
except Failed: logger.error(f"AniDB Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
if status_message:
logger.debug("AniDB IDs Found: {}".format(anime_ids))
logger.debug("TMDb IDs Found: {}".format(movie_ids))
logger.debug("TVDb IDs Found: {}".format(show_ids))
logger.debug(f"AniDB IDs Found: {anime_ids}")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids
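get_items above is what the anidb_id, anidb_relation, and anidb_popular builders resolve to, with each AniDB ID converted to a TMDb or TVDb ID afterwards. A hedged example of those builders in a collections file; names and IDs are placeholders:

collections:
  AniDB Top 30:            # placeholder name
    anidb_popular: 30      # keeps the top N entries of AniDB's popular list
  Related Anime:           # placeholder name
    anidb_relation: 23     # placeholder AniDB ID; pulls that entry's related anime
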
def convert_from_imdb(self, imdb_id, language):
def convert_from_imdb(self, imdb_id):
output_tmdb_ids = []
if not isinstance(imdb_id, list):
imdb_id = [imdb_id]
for imdb in imdb_id:
expired = False
if self.Cache:
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb)
expired = False
if not tmdb_id:
tmdb_id, expired = self.Cache.get_tmdb_from_imdb(imdb)
if expired:
@@ -121,6 +121,6 @@ class AniDBAPI:
if tmdb_id: output_tmdb_ids.append(tmdb_id)
if self.Cache and tmdb_id and expired is not False:
self.Cache.update_imdb("movie", expired, imdb, tmdb_id)
if len(output_tmdb_ids) == 0: raise Failed("AniDB Error: No TMDb ID found for IMDb: {}".format(imdb_id))
if len(output_tmdb_ids) == 0: raise Failed(f"AniDB Error: No TMDb ID found for IMDb: {imdb_id}")
elif len(output_tmdb_ids) == 1: return output_tmdb_ids[0]
else: return output_tmdb_ids

@@ -2,6 +2,8 @@ import glob, logging, os, re
from datetime import datetime, timedelta
from modules import util
from modules.util import Failed
from plexapi.collection import Collections
from plexapi.exceptions import BadRequest, NotFound
logger = logging.getLogger("Plex Meta Manager")
@@ -17,11 +19,17 @@ class CollectionBuilder:
"show_missing": library.show_missing,
"save_missing": library.save_missing
}
self.missing_movies = []
self.missing_shows = []
self.methods = []
self.filters = []
self.posters = []
self.backgrounds = []
self.schedule = None
self.posters = {}
self.backgrounds = {}
self.summaries = {}
self.schedule = ""
self.rating_key_map = {}
current_time = datetime.now()
current_year = current_time.year
if "template" in data:
if not self.library.templates:
@@ -37,16 +45,17 @@ class CollectionBuilder:
elif not data_template["name"]:
raise Failed("Collection Error: template sub-attribute name is blank")
elif data_template["name"] not in self.library.templates:
raise Failed("Collection Error: template {} not found".format(data_template["name"]))
raise Failed(f"Collection Error: template {data_template['name']} not found")
elif not isinstance(self.library.templates[data_template["name"]], dict):
raise Failed("Collection Error: template {} is not a dictionary".format(data_template["name"]))
raise Failed(f"Collection Error: template {data_template['name']} is not a dictionary")
else:
for tm in data_template:
if not data_template[tm]:
raise Failed("Collection Error: template sub-attribute {} is blank".format(data_template[tm]))
raise Failed(f"Collection Error: template sub-attribute {data_template[tm]} is blank")
template_name = data_template["name"]
template = self.library.templates[template_name]
default = {}
if "default" in template:
if template["default"]:
@@ -55,33 +64,48 @@ class CollectionBuilder:
if template["default"][dv]:
default[dv] = template["default"][dv]
else:
raise Failed("Collection Error: template default sub-attribute {} is blank".format(dv))
raise Failed(f"Collection Error: template default sub-attribute {dv} is blank")
else:
raise Failed("Collection Error: template sub-attribute default is not a dictionary")
else:
raise Failed("Collection Error: template sub-attribute default is blank")
optional = []
if "optional" in template:
if template["optional"]:
if isinstance(template["optional"], list):
for op in template["optional"]:
if op not in default:
optional.append(op)
else:
logger.warning(f"Template Warning: variable {op} cannot be optional if it has a default")
else:
optional.append(str(template["optional"]))
else:
raise Failed("Collection Error: template sub-attribute optional is blank")
for m in template:
if m not in self.data and m != "default":
if m not in self.data and m not in ["default", "optional"]:
if template[m]:
attr = None
def replace_txt(txt):
txt = str(txt)
for tm in data_template:
if tm != "name" and "<<{}>>".format(tm) in txt:
txt = txt.replace("<<{}>>".format(tm), str(data_template[tm]))
for option in optional:
if option not in data_template and f"<<{option}>>" in txt:
raise Failed("remove attribute")
for template_method in data_template:
if template_method != "name" and f"<<{template_method}>>" in txt:
txt = txt.replace(f"<<{template_method}>>", str(data_template[template_method]))
if "<<collection_name>>" in txt:
txt = txt.replace("<<collection_name>>", str(self.name))
for dm in default:
if "<<{}>>".format(dm) in txt:
txt = txt.replace("<<{}>>".format(dm), str(default[dm]))
if f"<<{dm}>>" in txt:
txt = txt.replace(f"<<{dm}>>", str(default[dm]))
if txt in ["true", "True"]: return True
elif txt in ["false", "False"]: return False
else:
try: return int(txt)
except ValueError: return txt
try:
if isinstance(template[m], dict):
attr = {}
for sm in template[m]:
@@ -104,9 +128,11 @@ class CollectionBuilder:
attr.append(replace_txt(li))
else:
attr = replace_txt(template[m])
except Failed:
continue
self.data[m] = attr
else:
raise Failed("Collection Error: template attribute {} is blank".format(m))
raise Failed(f"Collection Error: template attribute {m} is blank")
skip_collection = True
if "schedule" not in data:
@@ -116,7 +142,6 @@ class CollectionBuilder:
skip_collection = False
else:
schedule_list = util.get_list(data["schedule"])
current_time = datetime.now()
next_month = current_time.replace(day=28) + timedelta(days=4)
last_day = next_month - timedelta(days=next_month.day)
for schedule in schedule_list:
@@ -130,49 +155,44 @@ class CollectionBuilder:
if run_time.startswith("week"):
if param.lower() in util.days_alias:
weekday = util.days_alias[param.lower()]
self.schedule += "\nScheduled weekly on {}".format(util.pretty_days[weekday])
self.schedule += f"\nScheduled weekly on {util.pretty_days[weekday]}"
if weekday == current_time.weekday():
skip_collection = False
else:
logger.error("Collection Error: weekly schedule attribute {} invalid must be a day of the weeek i.e. weekly(Monday)".format(schedule))
logger.error(f"Collection Error: weekly schedule attribute {schedule} invalid must be a day of the week i.e. weekly(Monday)")
elif run_time.startswith("month"):
try:
if 1 <= int(param) <= 31:
self.schedule += "\nScheduled monthly on the {}".format(util.make_ordinal(param))
self.schedule += f"\nScheduled monthly on the {util.make_ordinal(param)}"
if current_time.day == int(param) or (current_time.day == last_day.day and int(param) > last_day.day):
skip_collection = False
else:
logger.error("Collection Error: monthly schedule attribute {} invalid must be between 1 and 31".format(schedule))
logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be between 1 and 31")
except ValueError:
logger.error("Collection Error: monthly schedule attribute {} invalid must be an integer".format(schedule))
logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be an integer")
elif run_time.startswith("year"):
match = re.match("^(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])$", param)
if match:
month = int(match.group(1))
day = int(match.group(2))
self.schedule += "\nScheduled yearly on {} {}".format(util.pretty_months[month], util.make_ordinal(day))
self.schedule += f"\nScheduled yearly on {util.pretty_months[month]} {util.make_ordinal(day)}"
if current_time.month == month and (current_time.day == day or (current_time.day == last_day.day and day > last_day.day)):
skip_collection = False
else:
logger.error("Collection Error: yearly schedule attribute {} invalid must be in the MM/DD format i.e. yearly(11/22)".format(schedule))
logger.error(f"Collection Error: yearly schedule attribute {schedule} invalid must be in the MM/DD format i.e. yearly(11/22)")
else:
logger.error("Collection Error: failed to parse schedule: {}".format(schedule))
logger.error(f"Collection Error: failed to parse schedule: {schedule}")
else:
logger.error("Collection Error: schedule attribute {} invalid".format(schedule))
if self.schedule is None:
logger.error(f"Collection Error: schedule attribute {schedule} invalid")
if len(self.schedule) == 0:
skip_collection = False
if skip_collection:
raise Failed("Skipping Collection {}".format(c))
raise Failed(f"{self.schedule}\n\nCollection {self.name} not scheduled to run")
logger.info("Scanning {} Collection".format(self.name))
logger.info(f"Scanning {self.name} Collection")
self.collectionless = "plex_collectionless" in data
self.sync = self.library.sync_mode == "sync"
if "sync_mode" in data:
if not data["sync_mode"]: logger.warning("Collection Warning: sync_mode attribute is blank using general: {}".format(self.library.sync_mode))
elif data["sync_mode"] not in ["append", "sync"]: logger.warning("Collection Warning: {} sync_mode invalid using general: {}".format(self.library.sync_mode, data["sync_mode"]))
else: self.sync = data["sync_mode"] == "sync"
self.run_again = "run_again" in data
if "tmdb_person" in data:
if data["tmdb_person"]:
@@ -180,80 +200,85 @@ class CollectionBuilder:
for tmdb_id in util.get_int_list(data["tmdb_person"], "TMDb Person ID"):
person = config.TMDb.get_person(tmdb_id)
valid_names.append(person.name)
if "summary" not in self.details and hasattr(person, "biography") and person.biography:
self.details["summary"] = person.biography
if "poster" not in self.details and hasattr(person, "profile_path") and person.profile_path:
self.details["poster"] = ("url", "{}{}".format(config.TMDb.image_url, person.profile_path), "tmdb_person")
if hasattr(person, "biography") and person.biography:
self.summaries["tmdb_person"] = person.biography
if hasattr(person, "profile_path") and person.profile_path:
self.posters["tmdb_person"] = f"{config.TMDb.image_url}{person.profile_path}"
if len(valid_names) > 0: self.details["tmdb_person"] = valid_names
else: raise Failed("Collection Error: No valid TMDb Person IDs in {}".format(data["tmdb_person"]))
else: raise Failed(f"Collection Error: No valid TMDb Person IDs in {data['tmdb_person']}")
else:
raise Failed("Collection Error: tmdb_person attribute is blank")
for m in data:
if "tmdb" in m and not config.TMDb: raise Failed("Collection Error: {} requires TMDb to be configured".format(m))
elif "trakt" in m and not config.Trakt: raise Failed("Collection Error: {} requires Trakt todo be configured".format(m))
elif "imdb" in m and not config.IMDb: raise Failed("Collection Error: {} requires TMDb or Trakt to be configured".format(m))
elif "tautulli" in m and not self.library.Tautulli: raise Failed("Collection Error: {} requires Tautulli to be configured".format(m))
elif "mal" in m and not config.MyAnimeList: raise Failed("Collection Error: {} requires MyAnimeList to be configured".format(m))
if "tmdb" in m and not config.TMDb: raise Failed(f"Collection Error: {m} requires TMDb to be configured")
elif "trakt" in m and not config.Trakt: raise Failed(f"Collection Error: {m} requires Trakt todo be configured")
elif "imdb" in m and not config.IMDb: raise Failed(f"Collection Error: {m} requires TMDb or Trakt to be configured")
elif "tautulli" in m and not self.library.Tautulli: raise Failed(f"Collection Error: {m} requires Tautulli to be configured")
elif "mal" in m and not config.MyAnimeList: raise Failed(f"Collection Error: {m} requires MyAnimeList to be configured")
elif data[m] is not None:
logger.debug("")
logger.debug("Method: {}".format(m))
logger.debug("Value: {}".format(data[m]))
logger.debug(f"Method: {m}")
logger.debug(f"Value: {data[m]}")
if m in util.method_alias:
method_name = util.method_alias[m]
logger.warning("Collection Warning: {} attribute will run as {}".format(m, method_name))
logger.warning(f"Collection Warning: {m} attribute will run as {method_name}")
else:
method_name = m
if method_name in util.show_only_lists and self.library.is_movie:
raise Failed("Collection Error: {} attribute only works for show libraries".format(method_name))
raise Failed(f"Collection Error: {method_name} attribute only works for show libraries")
elif method_name in util.movie_only_lists and self.library.is_show:
raise Failed("Collection Error: {} attribute only works for movie libraries".format(method_name))
raise Failed(f"Collection Error: {method_name} attribute only works for movie libraries")
elif method_name in util.movie_only_searches and self.library.is_show:
raise Failed("Collection Error: {} plex search only works for movie libraries".format(method_name))
raise Failed(f"Collection Error: {method_name} plex search only works for movie libraries")
elif method_name not in util.collectionless_lists and self.collectionless:
raise Failed("Collection Error: {} attribute does not work for Collectionless collection".format(method_name))
raise Failed(f"Collection Error: {method_name} attribute does not work for Collectionless collection")
elif method_name == "summary":
self.summaries[method_name] = data[m]
elif method_name == "tmdb_summary":
self.details["summary"] = config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).overview
self.summaries[method_name] = config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).overview
elif method_name == "tmdb_description":
self.details["summary"] = config.TMDb.get_list(util.regex_first_int(data[m], "TMDb List ID")).description
self.summaries[method_name] = config.TMDb.get_list(util.regex_first_int(data[m], "TMDb List ID")).description
elif method_name == "tmdb_biography":
self.details["summary"] = config.TMDb.get_person(util.regex_first_int(data[m], "TMDb Person ID")).biography
self.summaries[method_name] = config.TMDb.get_person(util.regex_first_int(data[m], "TMDb Person ID")).biography
elif method_name == "collection_mode":
if data[m] in ["default", "hide", "hide_items", "show_items", "hideItems", "showItems"]:
if data[m] == "hide_items": self.details[method_name] = "hideItems"
elif data[m] == "show_items": self.details[method_name] = "showItems"
else: self.details[method_name] = data[m]
else:
raise Failed("Collection Error: {} collection_mode Invalid\n| \tdefault (Library default)\n| \thide (Hide Collection)\n| \thide_items (Hide Items in this Collection)\n| \tshow_items (Show this Collection and its Items)".format(data[m]))
raise Failed(f"Collection Error: {data[m]} collection_mode Invalid\n| \tdefault (Library default)\n| \thide (Hide Collection)\n| \thide_items (Hide Items in this Collection)\n| \tshow_items (Show this Collection and its Items)")
elif method_name == "collection_order":
if data[m] in ["release", "alpha"]:
self.details[method_name] = data[m]
else:
raise Failed("Collection Error: {} collection_order Invalid\n| \trelease (Order Collection by release dates)\n| \talpha (Order Collection Alphabetically)".format(data[m]))
raise Failed(f"Collection Error: {data[m]} collection_order Invalid\n| \trelease (Order Collection by release dates)\n| \talpha (Order Collection Alphabetically)")
elif method_name == "url_poster":
self.posters.append(("url", data[m], method_name))
self.posters[method_name] = data[m]
elif method_name == "tmdb_poster":
self.posters.append(("url", "{}{}".format(config.TMDb.image_url, config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).poster_path), method_name))
self.posters[method_name] = f"{config.TMDb.image_url}{config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], 'TMDb ID'), self.library.is_movie).poster_path}"
elif method_name == "tmdb_profile":
self.posters.append(("url", "{}{}".format(config.TMDb.image_url, config.TMDb.get_person(util.regex_first_int(data[m], "TMDb Person ID")).profile_path), method_name))
self.posters[method_name] = f"{config.TMDb.image_url}{config.TMDb.get_person(util.regex_first_int(data[m], 'TMDb Person ID')).profile_path}"
elif method_name == "file_poster":
if os.path.exists(data[m]): self.posters.append(("file", os.path.abspath(data[m]), method_name))
else: raise Failed("Collection Error: Poster Path Does Not Exist: {}".format(os.path.abspath(data[m])))
if os.path.exists(data[m]): self.posters[method_name] = os.path.abspath(data[m])
else: raise Failed(f"Collection Error: Poster Path Does Not Exist: {os.path.abspath(data[m])}")
elif method_name == "url_background":
self.backgrounds.append(("url", data[m], method_name))
self.backgrounds[method_name] = data[m]
elif method_name == "tmdb_background":
self.backgrounds.append(("url", "{}{}".format(config.TMDb.image_url, config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).poster_path), method_name))
self.backgrounds[method_name] = f"{config.TMDb.image_url}{config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], 'TMDb ID'), self.library.is_movie).poster_path}"
elif method_name == "file_background":
if os.path.exists(data[m]): self.backgrounds.append(("file", os.path.abspath(data[m]), method_name))
else: raise Failed("Collection Error: Background Path Does Not Exist: {}".format(os.path.abspath(data[m])))
if os.path.exists(data[m]): self.backgrounds[method_name] = os.path.abspath(data[m])
else: raise Failed(f"Collection Error: Background Path Does Not Exist: {os.path.abspath(data[m])}")
elif method_name == "label_sync_mode":
if data[m] in ["append", "sync"]: self.details[method_name] = data[m]
else: raise Failed("Collection Error: label_sync_mode attribute must be either 'append' or 'sync'")
elif method_name == "sync_mode":
if data[m] in ["append", "sync"]: self.details[method_name] = data[m]
else: raise Failed("Collection Error: sync_mode attribute must be either 'append' or 'sync'")
elif method_name in ["arr_tag", "label"]:
self.details[method_name] = util.get_list(data[m])
elif method_name in util.boolean_details:
if isinstance(data[m], bool): self.details[method_name] = data[m]
else: raise Failed("Collection Error: {} attribute must be either true or false".format(method_name))
else: raise Failed(f"Collection Error: {method_name} attribute must be either true or false")
elif method_name in util.all_details:
self.details[method_name] = data[m]
elif method_name in ["year", "year.not"]:
@@ -293,7 +318,6 @@ class CollectionBuilder:
elif method_name == "imdb_list":
new_list = []
for imdb_list in util.get_list(data[m], split=False):
new_dictionary = {}
if isinstance(imdb_list, dict):
if "url" in imdb_list and imdb_list["url"]: imdb_url = imdb_list["url"]
else: raise Failed("Collection Error: imdb_list attribute url is required")
@@ -305,25 +329,44 @@ class CollectionBuilder:
self.methods.append((method_name, new_list))
elif method_name in util.dictionary_lists:
if isinstance(data[m], dict):
def get_int(parent, method, data, default, min=1, max=None):
if method not in data: logger.warning("Collection Warning: {} {} attribute not found using {} as default".format(parent, method, default))
elif not data[method]: logger.warning("Collection Warning: {} {} attribute is blank using {} as default".format(parent, method, default))
elif isinstance(data[method], int) and data[method] >= min:
if max is None or data[method] <= max: return data[method]
else: logger.warning("Collection Warning: {} {} attribute {} invalid must an integer <= {} using {} as default".format(parent, method, data[method], max, default))
else: logger.warning("Collection Warning: {} {} attribute {} invalid must an integer >= {} using {} as default".format(parent, method, data[method], min, default))
return default
def get_int(parent, method, data_in, default_in, minimum=1, maximum=None):
if method not in data_in: logger.warning(f"Collection Warning: {parent} {method} attribute not found using {default_in} as default")
elif not data_in[method]: logger.warning(f"Collection Warning: {parent} {method} attribute is blank using {default_in} as default")
elif isinstance(data_in[method], int) and data_in[method] >= minimum:
if maximum is None or data_in[method] <= maximum: return data_in[method]
else: logger.warning(f"Collection Warning: {parent} {method} attribute {data_in[method]} invalid must be an integer <= {maximum} using {default_in} as default")
else: logger.warning(f"Collection Warning: {parent} {method} attribute {data_in[method]} invalid must be an integer >= {minimum} using {default_in} as default")
return default_in
if method_name == "filters":
for f in data[m]:
if f in util.method_alias or (f.endswith(".not") and f[:-4] in util.method_alias):
filter = (util.method_alias[f[:-4]] + f[-4:]) if f.endswith(".not") else util.method_alias[f]
logger.warning("Collection Warning: {} filter will run as {}".format(f, filter))
else:
filter = f
if filter in util.movie_only_filters and self.library.is_show: raise Failed("Collection Error: {} filter only works for movie libraries".format(filter))
elif data[m][f] is None: raise Failed("Collection Error: {} filter is blank".format(filter))
elif filter in util.all_filters: self.filters.append((filter, data[m][f]))
else: raise Failed("Collection Error: {} filter not supported".format(filter))
filter_method = (util.method_alias[f[:-4]] + f[-4:]) if f.endswith(".not") else util.method_alias[f]
logger.warning(f"Collection Warning: {f} filter will run as {filter_method}")
else:
filter_method = f
if filter_method in util.movie_only_filters and self.library.is_show:
raise Failed(f"Collection Error: {filter_method} filter only works for movie libraries")
elif data[m][f] is None:
raise Failed(f"Collection Error: {filter_method} filter is blank")
elif filter_method == "year":
filter_data = util.get_year_list(data[m][f], f"{filter_method} filter")
elif filter_method in ["max_age", "duration.gte", "duration.lte", "tmdb_vote_count.gte", "tmdb_vote_count.lte"]:
filter_data = util.check_number(data[m][f], f"{filter_method} filter", minimum=1)
elif filter_method in ["year.gte", "year.lte"]:
filter_data = util.check_number(data[m][f], f"{filter_method} filter", minimum=1800, maximum=current_year)
elif filter_method in ["rating.gte", "rating.lte"]:
filter_data = util.check_number(data[m][f], f"{filter_method} filter", number_type="float", minimum=0.1, maximum=10)
elif filter_method in ["originally_available.gte", "originally_available.lte"]:
filter_data = util.check_date(data[m][f], f"{filter_method} filter")
elif filter_method == "original_language":
filter_data = util.get_list(data[m][f], lower=True)
elif filter_method == "collection":
filter_data = data[m][f] if isinstance(data[m][f], list) else [data[m][f]]
elif filter_method in util.all_filters:
filter_data = util.get_list(data[m][f])
else:
raise Failed(f"Collection Error: {filter_method} filter not supported")
self.filters.append((filter_method, filter_data))
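Filter values are now validated up front instead of being passed through raw: year builds a year list, the numeric filters go through util.check_number with bounds, dates through util.check_date, and original_language is lower-cased into a list. Filters only trim what the builders return; they do not find items on their own. A hedged example; the name and list URL are placeholders:

collections:
  Modern Favorites:                        # placeholder name
    imdb_list: https://www.imdb.com/list/ls000000000/   # placeholder IMDb list URL
    filters:
      year.gte: 1990                       # accepted range is 1800 through the current year
      rating.gte: 7.5                      # floats between 0.1 and 10
      original_language: en, fr            # compared against the item's original language
      tmdb_vote_count.gte: 100
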
elif method_name == "plex_collectionless":
new_dictionary = {}
prefix_list = []
@@ -347,13 +390,13 @@ class CollectionBuilder:
for s in data[m]:
if s in util.method_alias or (s.endswith(".not") and s[:-4] in util.method_alias):
search = (util.method_alias[s[:-4]] + s[-4:]) if s.endswith(".not") else util.method_alias[s]
logger.warning("Collection Warning: {} plex search attribute will run as {}".format(s, search))
logger.warning(f"Collection Warning: {s} plex search attribute will run as {search}")
else:
search = s
if search in util.movie_only_searches and self.library.is_show:
raise Failed("Collection Error: {} plex search attribute only works for movie libraries".format(search))
raise Failed(f"Collection Error: {search} plex search attribute only works for movie libraries")
elif util.remove_not(search) in used:
raise Failed("Collection Error: Only one instance of {} can be used try using it as a filter instead".format(search))
raise Failed(f"Collection Error: Only one instance of {search} can be used try using it as a filter instead")
elif search in ["year", "year.not"]:
years = util.get_year_list(data[m][s], search)
if len(years) > 0:
@@ -363,7 +406,7 @@ class CollectionBuilder:
used.append(util.remove_not(search))
searches.append((search, util.get_list(data[m][s])))
else:
logger.error("Collection Error: {} plex search attribute not supported".format(search))
logger.error(f"Collection Error: {search} plex search attribute not supported")
self.methods.append((method_name, [searches]))
elif method_name == "tmdb_discover":
new_dictionary = {"limit": 100}
@@ -375,71 +418,58 @@ class CollectionBuilder:
if re.compile("([a-z]{2})-([A-Z]{2})").match(str(attr_data)):
new_dictionary[attr] = str(attr_data)
else:
raise Failed("Collection Error: {} attribute {}: {} must match pattern ([a-z]{2})-([A-Z]{2}) e.g. en-US".format(m, attr, attr_data))
raise Failed(f"Collection Error: {m} attribute {attr}: {attr_data} must match pattern ([a-z]{{2}})-([A-Z]{{2}}) e.g. en-US")
elif attr == "region":
if re.compile("^[A-Z]{2}$").match(str(attr_data)):
new_dictionary[attr] = str(attr_data)
else:
raise Failed("Collection Error: {} attribute {}: {} must match pattern ^[A-Z]{2}$ e.g. US".format(m, attr, attr_data))
raise Failed(f"Collection Error: {m} attribute {attr}: {attr_data} must match pattern ^[A-Z]{{2}}$ e.g. US")
elif attr == "sort_by":
if (self.library.is_movie and attr_data in util.discover_movie_sort) or (self.library.is_show and attr_data in util.discover_tv_sort):
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: {} is invalid".format(m, attr, attr_data))
raise Failed(f"Collection Error: {m} attribute {attr}: {attr_data} is invalid")
elif attr == "certification_country":
if "certification" in data[m] or "certification.lte" in data[m] or "certification.gte" in data[m]:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be used with either certification, certification.lte, or certification.gte".format(m, attr))
raise Failed(f"Collection Error: {m} attribute {attr}: must be used with either certification, certification.lte, or certification.gte")
elif attr in ["certification", "certification.lte", "certification.gte"]:
if "certification_country" in data[m]:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be used with certification_country".format(m, attr))
raise Failed(f"Collection Error: {m} attribute {attr}: must be used with certification_country")
elif attr in ["include_adult", "include_null_first_air_dates", "screened_theatrically"]:
if attr_data is True:
new_dictionary[attr] = attr_data
elif attr in ["primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte", "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte"]:
if re.compile("[0-1]?[0-9][/-][0-3]?[0-9][/-][1-2][890][0-9][0-9]").match(str(attr_data)):
the_date = str(attr_data).split("/") if "/" in str(attr_data) else str(attr_data).split("-")
new_dictionary[attr] = "{}-{}-{}".format(the_date[2], the_date[0], the_date[1])
elif re.compile("[1-2][890][0-9][0-9][/-][0-1]?[0-9][/-][0-3]?[0-9]").match(str(attr_data)):
the_date = str(attr_data).split("/") if "/" in str(attr_data) else str(attr_data).split("-")
new_dictionary[attr] = "{}-{}-{}".format(the_date[0], the_date[1], the_date[2])
else:
raise Failed("Collection Error: {} attribute {}: {} must match pattern MM/DD/YYYY e.g. 12/25/2020".format(m, attr, attr_data))
new_dictionary[attr] = util.check_date(attr_data, f"{m} attribute {attr}", return_string=True)
elif attr in ["primary_release_year", "year", "first_air_date_year"]:
if isinstance(attr_data, int) and 1800 < attr_data and attr_data < 2200:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be a valid year e.g. 1990".format(m, attr))
new_dictionary[attr] = util.check_number(attr_data, f"{m} attribute {attr}", minimum=1800, maximum=current_year + 1)
elif attr in ["vote_count.gte", "vote_count.lte", "vote_average.gte", "vote_average.lte", "with_runtime.gte", "with_runtime.lte"]:
if (isinstance(attr_data, int) or isinstance(attr_data, float)) and 0 < attr_data:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be a valid number greater then 0".format(m, attr))
new_dictionary[attr] = util.check_number(attr_data, f"{m} attribute {attr}", minimum=1)
elif attr in ["with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", "with_keywords", "without_keywords", "with_original_language", "timezone"]:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {} not supported".format(m, attr))
raise Failed(f"Collection Error: {m} attribute {attr} not supported")
elif attr == "limit":
if isinstance(attr_data, int) and attr_data > 0:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be a valid number greater then 0".format(m, attr))
raise Failed(f"Collection Error: {m} attribute {attr}: must be a valid number greater then 0")
else:
raise Failed("Collection Error: {} attribute {} not supported".format(m, attr))
raise Failed(f"Collection Error: {m} attribute {attr} not supported")
else:
raise Failed("Collection Error: {} parameter {} is blank".format(m, attr))
raise Failed(f"Collection Error: {m} parameter {attr} is blank")
if len(new_dictionary) > 1:
self.methods.append((method_name, [new_dictionary]))
else:
raise Failed("Collection Error: {} had no valid fields".format(m))
raise Failed(f"Collection Error: {m} had no valid fields")
elif "tautulli" in method_name:
new_dictionary = {}
if method_name == "tautulli_popular": new_dictionary["list_type"] = "popular"
elif method_name == "tautulli_watched": new_dictionary["list_type"] = "watched"
else: raise Failed("Collection Error: {} attribute not supported".format(method_name))
else: raise Failed(f"Collection Error: {method_name} attribute not supported")
new_dictionary["list_days"] = get_int(method_name, "list_days", data[m], 30)
new_dictionary["list_size"] = get_int(method_name, "list_size", data[m], 10)
@@ -449,22 +479,21 @@ class CollectionBuilder:
new_dictionary = {"sort_by": "anime_num_list_users"}
if "sort_by" not in data[m]: logger.warning("Collection Warning: mal_season sort_by attribute not found using members as default")
elif not data[m]["sort_by"]: logger.warning("Collection Warning: mal_season sort_by attribute is blank using members as default")
elif data[m]["sort_by"] not in util.mal_season_sort: logger.warning("Collection Warning: mal_season sort_by attribute {} invalid must be either 'members' or 'score' using members as default".format(data[m]["sort_by"]))
elif data[m]["sort_by"] not in util.mal_season_sort: logger.warning(f"Collection Warning: mal_season sort_by attribute {data[m]['sort_by']} invalid must be either 'members' or 'score' using members as default")
else: new_dictionary["sort_by"] = util.mal_season_sort[data[m]["sort_by"]]
current_time = datetime.now()
if current_time.month in [1, 2, 3]: new_dictionary["season"] = "winter"
elif current_time.month in [4, 5, 6]: new_dictionary["season"] = "spring"
elif current_time.month in [7, 8, 9]: new_dictionary["season"] = "summer"
elif current_time.month in [10, 11, 12]: new_dictionary["season"] = "fall"
if "season" not in data[m]: logger.warning("Collection Warning: mal_season season attribute not found using the current season: {} as default".format(new_dictionary["season"]))
elif not data[m]["season"]: logger.warning("Collection Warning: mal_season season attribute is blank using the current season: {} as default".format(new_dictionary["season"]))
elif data[m]["season"] not in util.pretty_seasons: logger.warning("Collection Warning: mal_season season attribute {} invalid must be either 'winter', 'spring', 'summer' or 'fall' using the current season: {} as default".format(data[m]["season"], new_dictionary["season"]))
if "season" not in data[m]: logger.warning(f"Collection Warning: mal_season season attribute not found using the current season: {new_dictionary['season']} as default")
elif not data[m]["season"]: logger.warning(f"Collection Warning: mal_season season attribute is blank using the current season: {new_dictionary['season']} as default")
elif data[m]["season"] not in util.pretty_seasons: logger.warning(f"Collection Warning: mal_season season attribute {data[m]['season']} invalid must be either 'winter', 'spring', 'summer' or 'fall' using the current season: {new_dictionary['season']} as default")
else: new_dictionary["season"] = data[m]["season"]
new_dictionary["year"] = get_int(method_name, "year", data[m], current_time.year, min=1917, max=current_time.year + 1)
new_dictionary["limit"] = get_int(method_name, "limit", data[m], 100, max=500)
new_dictionary["year"] = get_int(method_name, "year", data[m], current_time.year, minimum=1917, maximum=current_time.year + 1)
new_dictionary["limit"] = get_int(method_name, "limit", data[m], 100, maximum=500)
self.methods.append((method_name, [new_dictionary]))
elif method_name == "mal_userlist":
new_dictionary = {"status": "all", "sort_by": "list_score"}
@@ -474,48 +503,60 @@ class CollectionBuilder:
if "status" not in data[m]: logger.warning("Collection Warning: mal_season status attribute not found using all as default")
elif not data[m]["status"]: logger.warning("Collection Warning: mal_season status attribute is blank using all as default")
elif data[m]["status"] not in util.mal_userlist_status: logger.warning("Collection Warning: mal_season status attribute {} invalid must be either 'all', 'watching', 'completed', 'on_hold', 'dropped' or 'plan_to_watch' using all as default".format(data[m]["status"]))
elif data[m]["status"] not in util.mal_userlist_status: logger.warning(f"Collection Warning: mal_season status attribute {data[m]['status']} invalid must be either 'all', 'watching', 'completed', 'on_hold', 'dropped' or 'plan_to_watch' using all as default")
else: new_dictionary["status"] = util.mal_userlist_status[data[m]["status"]]
if "sort_by" not in data[m]: logger.warning("Collection Warning: mal_season sort_by attribute not found using score as default")
elif not data[m]["sort_by"]: logger.warning("Collection Warning: mal_season sort_by attribute is blank using score as default")
elif data[m]["sort_by"] not in util.mal_userlist_sort: logger.warning("Collection Warning: mal_season sort_by attribute {} invalid must be either 'score', 'last_updated', 'title' or 'start_date' using score as default".format(data[m]["sort_by"]))
elif data[m]["sort_by"] not in util.mal_userlist_sort: logger.warning(f"Collection Warning: mal_season sort_by attribute {data[m]['sort_by']} invalid must be either 'score', 'last_updated', 'title' or 'start_date' using score as default")
else: new_dictionary["sort_by"] = util.mal_userlist_sort[data[m]["sort_by"]]
new_dictionary["limit"] = get_int(method_name, "limit", data[m], 100, max=1000)
new_dictionary["limit"] = get_int(method_name, "limit", data[m], 100, maximum=1000)
self.methods.append((method_name, [new_dictionary]))
else:
raise Failed("Collection Error: {} attribute is not a dictionary: {}".format(m, data[m]))
raise Failed(f"Collection Error: {m} attribute is not a dictionary: {data[m]}")
elif method_name in util.count_lists:
list_count = util.regex_first_int(data[m], "List Size", default=20)
if list_count < 1:
logger.warning("Collection Warning: {} must be an integer greater then 0 defaulting to 20".format(method_name))
logger.warning(f"Collection Warning: {method_name} must be an integer greater then 0 defaulting to 20")
list_count = 20
self.methods.append((method_name, [list_count]))
elif method_name in util.tmdb_lists:
values = config.TMDb.validate_tmdb_list(util.get_int_list(data[m], "TMDb {} ID".format(util.tmdb_type[method_name])), util.tmdb_type[method_name])
values = config.TMDb.validate_tmdb_list(util.get_int_list(data[m], f"TMDb {util.tmdb_type[method_name]} ID"), util.tmdb_type[method_name])
if method_name[-8:] == "_details":
if method_name in ["tmdb_collection_details", "tmdb_movie_details", "tmdb_show_details"]:
item = config.TMDb.get_movie_show_or_collection(values[0], self.library.is_movie)
if "summary" not in self.details and hasattr(item, "overview") and item.overview:
self.details["summary"] = item.overview
if "background" not in self.details and hasattr(item, "backdrop_path") and item.backdrop_path:
self.details["background"] = ("url", "{}{}".format(config.TMDb.image_url, item.backdrop_path), method_name[:-8])
if "poster" not in self.details and hasattr(item, "poster_path") and item.poster_path:
self.details["poster"] = ("url", "{}{}".format(config.TMDb.image_url, item.poster_path), method_name[:-8])
if hasattr(item, "overview") and item.overview:
self.summaries[method_name] = item.overview
if hasattr(item, "backdrop_path") and item.backdrop_path:
self.backgrounds[method_name] = f"{config.TMDb.image_url}{item.backdrop_path}"
if hasattr(item, "poster_path") and item.poster_path:
self.posters[method_name] = f"{config.TMDb.image_url}{item.poster_path}"
elif method_name in ["tmdb_actor_details", "tmdb_crew_details", "tmdb_director_details", "tmdb_producer_details", "tmdb_writer_details"]:
item = config.TMDb.get_person(values[0])
if hasattr(item, "biography") and item.biography:
self.summaries[method_name] = item.biography
if hasattr(item, "profile_path") and item.profile_path:
self.posters[method_name] = f"{config.TMDb.image_url}{item.profile_path}"
else:
item = config.TMDb.get_list(values[0])
if "summary" not in self.details and hasattr(item, "description") and item.description:
self.details["summary"] = item.description
if hasattr(item, "description") and item.description:
self.summaries[method_name] = item.description
self.methods.append((method_name[:-8], values))
else:
self.methods.append((method_name, values))
elif method_name in util.all_lists:
self.methods.append((method_name, util.get_list(data[m])))
elif method_name not in util.other_attributes:
raise Failed("Collection Error: {} attribute not supported".format(method_name))
raise Failed(f"Collection Error: {method_name} attribute not supported")
else:
raise Failed("Collection Error: {} attribute is blank".format(m))
raise Failed(f"Collection Error: {m} attribute is blank")
self.sync = self.library.sync_mode == "sync"
if "sync_mode" in data:
if not data["sync_mode"]: logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {self.library.sync_mode}")
elif data["sync_mode"] not in ["append", "sync"]: logger.warning(f"Collection Warning: {self.library.sync_mode} sync_mode invalid using general: {data['sync_mode']}")
else: self.sync = data["sync_mode"] == "sync"
self.do_arr = False
if self.library.Radarr:
@@ -523,12 +564,12 @@ class CollectionBuilder:
if self.library.Sonarr:
self.do_arr = self.details["add_to_arr"] if "add_to_arr" in self.details else self.library.Sonarr.add
def run_methods(self, collection_obj, collection_name, map, movie_map, show_map):
def run_methods(self, collection_obj, collection_name, rating_key_map, movie_map, show_map):
items_found = 0
for method, values in self.methods:
logger.debug("")
logger.debug("Method: {}".format(method))
logger.debug("Values: {}".format(values))
logger.debug(f"Method: {method}")
logger.debug(f"Values: {values}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
for value in values:
items = []
@@ -549,9 +590,9 @@ class CollectionBuilder:
else: missing_shows.append(show_id)
return items_found_inside
logger.info("")
logger.debug("Value: {}".format(value))
logger.debug(f"Value: {value}")
if method == "plex_all":
logger.info("Processing {} {}".format(pretty, "Movies" if self.library.is_movie else "Shows"))
logger.info(f"Processing {pretty} {'Movies' if self.library.is_movie else 'Shows'}")
items = self.library.Plex.all()
items_found += len(items)
elif method == "plex_collection":
@@ -559,7 +600,6 @@ class CollectionBuilder:
items_found += len(items)
elif method == "plex_search":
search_terms = {}
output = ""
for i, attr_pair in enumerate(value):
search_list = attr_pair[1]
final_method = attr_pair[0][:-4] + "!" if attr_pair[0][-4:] == ".not" else attr_pair[0]
@@ -568,8 +608,9 @@ class CollectionBuilder:
search_terms[final_method] = search_list
ors = ""
for o, param in enumerate(attr_pair[1]):
ors += "{}{}".format(" OR " if o > 0 else "{}(".format(attr_pair[0]), param)
logger.info("\t\t AND {})".format(ors) if i > 0 else "Processing {}: {})".format(pretty, ors))
or_des = " OR " if o > 0 else f"{attr_pair[0]}("
ors += f"{or_des}{param}"
logger.info(f"\t\t AND {ors})" if i > 0 else f"Processing {pretty}: {ors})")
items = self.library.Plex.search(**search_terms)
items_found += len(items)
elif method == "plex_collectionless":
@@ -590,7 +631,7 @@ class CollectionBuilder:
all_items = self.library.Plex.all()
length = 0
for i, item in enumerate(all_items, 1):
length = util.print_return(length, "Processing: {}/{} {}".format(i, len(all_items), item.title))
length = util.print_return(length, f"Processing: {i}/{len(all_items)} {item.title}")
add_item = True
for collection in item.collections:
if collection.tag.lower() in good_collections:
@@ -599,7 +640,7 @@ class CollectionBuilder:
if add_item:
items.append(item)
items_found += len(items)
util.print_end(length, "Processed {} {}".format(len(all_items), "Movies" if self.library.is_movie else "Shows"))
util.print_end(length, f"Processed {len(all_items)} {'Movies' if self.library.is_movie else 'Shows'}")
elif "tautulli" in method:
items = self.library.Tautulli.get_items(self.library, time_range=value["list_days"], stats_count=value["list_size"], list_type=value["list_type"], stats_count_buffer=value["list_buffer"])
items_found += len(items)
@@ -609,98 +650,145 @@ class CollectionBuilder:
elif "imdb" in method: items_found += check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language))
elif "tmdb" in method: items_found += check_map(self.config.TMDb.get_items(method, value, self.library.is_movie))
elif "trakt" in method: items_found += check_map(self.config.Trakt.get_items(method, value, self.library.is_movie))
else: logger.error("Collection Error: {} method not supported".format(method))
else: logger.error(f"Collection Error: {method} method not supported")
if len(items) > 0: map = self.library.add_to_collection(collection_obj if collection_obj else collection_name, items, self.filters, self.details["show_filtered"], map, movie_map, show_map)
if len(items) > 0: rating_key_map = self.library.add_to_collection(collection_obj if collection_obj else collection_name, items, self.filters, self.details["show_filtered"], rating_key_map, movie_map, show_map)
else: logger.error("No items found to add to this collection ")
if len(missing_movies) > 0 or len(missing_shows) > 0:
logger.info("")
if len(missing_movies) > 0:
not_lang = None
terms = None
arr_filters = []
for filter_method, filter_data in self.filters:
if filter_method.startswith("original_language"):
terms = util.get_list(filter_data, lower=True)
not_lang = filter_method.endswith(".not")
break
if (filter_method.startswith("original_language") and self.library.is_movie) or filter_method.startswith("tmdb_vote_count"):
arr_filters.append((filter_method, filter_data))
if len(missing_movies) > 0:
missing_movies_with_names = []
for missing_id in missing_movies:
try:
movie = self.config.TMDb.get_movie(missing_id)
title = str(movie.title)
if not_lang is None or (not_lang is True and movie.original_language not in terms) or (not_lang is False and movie.original_language in terms):
missing_movies_with_names.append((title, missing_id))
if self.details["show_missing"] is True:
logger.info("{} Collection | ? | {} (TMDb: {})".format(collection_name, title, missing_id))
elif self.details["show_filtered"] is True:
logger.info("{} Collection | X | {} (TMDb: {})".format(collection_name, title, missing_id))
except Failed as e:
logger.error(e)
logger.info("{} Movie{} Missing".format(len(missing_movies_with_names), "s" if len(missing_movies_with_names) > 1 else ""))
continue
match = True
for filter_method, filter_data in arr_filters:
if (filter_method == "original_language" and movie.original_language not in filter_data) \
or (filter_method == "original_language.not" and movie.original_language in filter_data) \
or (filter_method == "tmdb_vote_count.gte" and movie.vote_count < filter_data) \
or (filter_method == "tmdb_vote_count.lte" and movie.vote_count > filter_data):
match = False
break
if match:
missing_movies_with_names.append((movie.title, missing_id))
if self.details["show_missing"] is True:
logger.info(f"{collection_name} Collection | ? | {movie.title} (TMDb: {missing_id})")
elif self.details["show_filtered"] is True:
logger.info(f"{collection_name} Collection | X | {movie.title} (TMDb: {missing_id})")
logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing")
if self.details["save_missing"] is True:
self.library.add_missing(collection_name, missing_movies_with_names, True)
if self.do_arr and self.library.Radarr:
self.library.Radarr.add_tmdb([missing_id for title, missing_id in missing_movies_with_names], tag=self.details["arr_tag"])
if self.run_again:
self.missing_movies.extend([missing_id for title, missing_id in missing_movies_with_names])
if len(missing_shows) > 0 and self.library.is_show:
missing_shows_with_names = []
for missing_id in missing_shows:
try:
title = str(self.config.TVDb.get_series(self.library.Plex.language, tvdb_id=missing_id).title.encode("ascii", "replace").decode())
missing_shows_with_names.append((title, missing_id))
if self.details["show_missing"] is True:
logger.info("{} Collection | ? | {} (TVDB: {})".format(collection_name, title, missing_id))
except Failed as e:
logger.error(e)
logger.info("{} Show{} Missing".format(len(missing_shows_with_names), "s" if len(missing_shows_with_names) > 1 else ""))
continue
match = True
if arr_filters:
show = self.config.TMDb.get_show(self.config.TMDb.convert_tvdb_to_tmdb(missing_id))
for filter_method, filter_data in arr_filters:
if (filter_method == "tmdb_vote_count.gte" and show.vote_count < filter_data) \
or (filter_method == "tmdb_vote_count.lte" and show.vote_count > filter_data):
match = False
break
if match:
missing_shows_with_names.append((title, missing_id))
if self.details["show_missing"] is True:
logger.info(f"{collection_name} Collection | ? | {title} (TVDB: {missing_id})")
elif self.details["show_filtered"] is True:
logger.info(f"{collection_name} Collection | X | {title} (TVDb: {missing_id})")
logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing")
if self.details["save_missing"] is True:
self.library.add_missing(collection_name, missing_shows_with_names, False)
if self.do_arr and self.library.Sonarr:
self.library.Sonarr.add_tvdb([missing_id for title, missing_id in missing_shows_with_names], tag=self.details["arr_tag"])
if self.run_again:
self.missing_shows.extend([missing_id for title, missing_id in missing_shows_with_names])
if self.sync and items_found > 0:
logger.info("")
count_removed = 0
for ratingKey, item in map.items():
for ratingKey, item in rating_key_map.items():
if item is not None:
logger.info("{} Collection | - | {}".format(collection_name, item.title))
logger.info(f"{collection_name} Collection | - | {item.title}")
item.removeCollection(collection_name)
count_removed += 1
logger.info("{} {}{} Removed".format(count_removed, "Movie" if self.library.is_movie else "Show", "s" if count_removed == 1 else ""))
logger.info(f"{count_removed} {'Movie' if self.library.is_movie else 'Show'}{'s' if count_removed == 1 else ''} Removed")
logger.info("")
def update_details(self, collection):
edits = {}
def get_summary(summary_method, summaries):
logger.info(f"Detail: {summary_method} updated Collection Summary")
return summaries[summary_method]
if "summary" in self.summaries: summary = get_summary("summary", self.summaries)
elif "tmdb_description" in self.summaries: summary = get_summary("tmdb_description", self.summaries)
elif "tmdb_summary" in self.summaries: summary = get_summary("tmdb_summary", self.summaries)
elif "tmdb_biography" in self.summaries: summary = get_summary("tmdb_biography", self.summaries)
elif "tmdb_person" in self.summaries: summary = get_summary("tmdb_person", self.summaries)
elif "tmdb_collection_details" in self.summaries: summary = get_summary("tmdb_collection_details", self.summaries)
elif "tmdb_list_details" in self.summaries: summary = get_summary("tmdb_list_details", self.summaries)
elif "tmdb_actor_details" in self.summaries: summary = get_summary("tmdb_actor_details", self.summaries)
elif "tmdb_crew_details" in self.summaries: summary = get_summary("tmdb_crew_details", self.summaries)
elif "tmdb_director_details" in self.summaries: summary = get_summary("tmdb_director_details", self.summaries)
elif "tmdb_producer_details" in self.summaries: summary = get_summary("tmdb_producer_details", self.summaries)
elif "tmdb_writer_details" in self.summaries: summary = get_summary("tmdb_writer_details", self.summaries)
elif "tmdb_movie_details" in self.summaries: summary = get_summary("tmdb_movie_details", self.summaries)
elif "tmdb_show_details" in self.summaries: summary = get_summary("tmdb_show_details", self.summaries)
else: summary = None
if summary:
edits["summary.value"] = summary
edits["summary.locked"] = 1
if "sort_title" in self.details:
edits["titleSort.value"] = self.details["sort_title"]
edits["titleSort.locked"] = 1
logger.info(f"Detail: sort_title updated Collection Sort Title to {self.details['sort_title']}")
if "content_rating" in self.details:
edits["contentRating.value"] = self.details["content_rating"]
edits["contentRating.locked"] = 1
if "summary" in self.details:
edits["summary.value"] = self.details["summary"]
edits["summary.locked"] = 1
if len(edits) > 0:
logger.debug(edits)
collection.edit(**edits)
collection.reload()
logger.info("Details: have been updated")
logger.info(f"Detail: content_rating updated Collection Content Rating to {self.details['content_rating']}")
if "collection_mode" in self.details:
collection.modeUpdate(mode=self.details["collection_mode"])
logger.info(f"Detail: collection_mode updated Collection Mode to {self.details['collection_mode']}")
if "collection_order" in self.details:
collection.sortUpdate(sort=self.details["collection_order"])
logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}")
if "label" in self.details:
item_labels = [label.tag for label in collection.labels]
labels = util.get_list(self.details["label"])
if "label_sync_mode" in self.details and self.details["label_sync_mode"] == "sync":
for label in (l for l in item_labels if l not in labels):
for label in (la for la in item_labels if la not in labels):
collection.removeLabel(label)
logger.info("Detail: Label {} removed".format(label))
for label in (l for l in labels if l not in item_labels):
logger.info(f"Detail: Label {label} removed")
for label in (la for la in labels if la not in item_labels):
collection.addLabel(label)
logger.info("Detail: Label {} added".format(label))
logger.info(f"Detail: Label {label} added")
if len(edits) > 0:
logger.debug(edits)
collection.edit(**edits)
collection.reload()
logger.info("Details: have been updated")
if self.library.asset_directory:
name_mapping = self.name
@@ -708,17 +796,17 @@ class CollectionBuilder:
if self.details["name_mapping"]: name_mapping = self.details["name_mapping"]
else: logger.error("Collection Error: name_mapping attribute is blank")
for ad in self.library.asset_directory:
path = os.path.join(ad, "{}".format(name_mapping))
path = os.path.join(ad, f"{name_mapping}")
if not os.path.isdir(path):
continue
matches = glob.glob(os.path.join(ad, "{}".format(name_mapping), "poster.*"))
matches = glob.glob(os.path.join(ad, f"{name_mapping}", "poster.*"))
if len(matches) > 0:
for match in matches:
self.posters.append(("file", os.path.abspath(match), "asset_directory"))
matches = glob.glob(os.path.join(ad, "{}".format(name_mapping), "background.*"))
self.posters["asset_directory"] = os.path.abspath(match)
matches = glob.glob(os.path.join(ad, f"{name_mapping}", "background.*"))
if len(matches) > 0:
for match in matches:
self.backgrounds.append(("file", os.path.abspath(match), "asset_directory"))
self.backgrounds["asset_directory"] = os.path.abspath(match)
dirs = [folder for folder in os.listdir(path) if os.path.isdir(os.path.join(path, folder))]
if len(dirs) > 0:
for item in collection.items():
@ -730,25 +818,107 @@ class CollectionBuilder:
background_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
if poster_path:
item.uploadPoster(filepath=poster_path)
logger.info("Detail: asset_directory updated {}'s poster to [file] {}".format(item.title, poster_path))
logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {poster_path}")
if background_path:
item.uploadArt(filepath=background_path)
logger.info("Detail: asset_directory updated {}'s background to [file] {}".format(item.title, background_path))
logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {background_path}")
if poster_path is None and background_path is None:
logger.warning("No Files Found: {}".format(os.path.join(path, folder)))
else:
logger.warning("No Folder: {}".format(os.path.join(path, folder)))
poster = util.choose_from_list(self.posters, "poster", list_type="tuple")
if not poster and "poster" in self.details: poster = self.details["poster"]
if poster:
if poster[0] == "url": collection.uploadPoster(url=poster[1])
else: collection.uploadPoster(filepath=poster[1])
logger.info("Detail: {} updated collection poster to [{}] {}".format(poster[2], poster[0], poster[1]))
background = util.choose_from_list(self.backgrounds, "background", list_type="tuple")
if not background and "background" in self.details: background = self.details["background"]
if background:
if background[0] == "url": collection.uploadArt(url=background[1])
else: collection.uploadArt(filepath=background[1])
logger.info("Detail: {} updated collection background to [{}] {}".format(background[2], background[0], background[1]))
logger.warning(f"No Files Found: {os.path.join(path, folder)}")
else:
logger.warning(f"No Folder: {os.path.join(path, folder)}")
def set_image(image_method, images, is_background=False):
if image_method in ['file_poster', 'file_background', 'asset_directory']:
if is_background: collection.uploadArt(filepath=images[image_method])
else: collection.uploadPoster(filepath=images[image_method])
image_location = "File"
else:
if is_background: collection.uploadArt(url=images[image_method])
else: collection.uploadPoster(url=images[image_method])
image_location = "URL"
logger.info(f"Detail: {image_method} updated collection {'background' if is_background else 'poster'} to [{image_location}] {images[image_method]}")
if len(self.posters) > 1:
logger.info(f"{len(self.posters)} posters found:")
for p in self.posters:
logger.info(f"Method: {p} Poster: {self.posters[p]}")
if "url_poster" in self.posters: set_image("url_poster", self.posters)
elif "file_poster" in self.posters: set_image("file_poster", self.posters)
elif "tmdb_poster" in self.posters: set_image("tmdb_poster", self.posters)
elif "tmdb_profile" in self.posters: set_image("tmdb_profile", self.posters)
elif "asset_directory" in self.posters: set_image("asset_directory", self.posters)
elif "tmdb_person" in self.posters: set_image("tmdb_person", self.posters)
elif "tmdb_collection_details" in self.posters: set_image("tmdb_collection", self.posters)
elif "tmdb_actor_details" in self.posters: set_image("tmdb_actor_details", self.posters)
elif "tmdb_crew_details" in self.posters: set_image("tmdb_crew_details", self.posters)
elif "tmdb_director_details" in self.posters: set_image("tmdb_director_details", self.posters)
elif "tmdb_producer_details" in self.posters: set_image("tmdb_producer_details", self.posters)
elif "tmdb_writer_details" in self.posters: set_image("tmdb_writer_details", self.posters)
elif "tmdb_movie_details" in self.posters: set_image("tmdb_movie", self.posters)
elif "tmdb_show_details" in self.posters: set_image("tmdb_show", self.posters)
else: logger.info("No poster to update")
logger.info("")
if len(self.backgrounds) > 1:
logger.info(f"{len(self.backgrounds)} backgrounds found:")
for b in self.backgrounds:
logger.info(f"Method: {b} Background: {self.backgrounds[b]}")
if "url_background" in self.backgrounds: set_image("url_background", self.backgrounds, is_background=True)
elif "file_background" in self.backgrounds: set_image("file_poster", self.backgrounds, is_background=True)
elif "tmdb_background" in self.backgrounds: set_image("tmdb_poster", self.backgrounds, is_background=True)
elif "asset_directory" in self.backgrounds: set_image("asset_directory", self.backgrounds, is_background=True)
elif "tmdb_collection_details" in self.backgrounds: set_image("tmdb_collection", self.backgrounds, is_background=True)
elif "tmdb_movie_details" in self.backgrounds: set_image("tmdb_movie", self.backgrounds, is_background=True)
elif "tmdb_show_details" in self.backgrounds: set_image("tmdb_show", self.backgrounds, is_background=True)
else: logger.info("No background to update")
def run_collections_again(self, library, collection_obj, movie_map, show_map):
collection_items = collection_obj.items() if isinstance(collection_obj, Collections) else []
name = collection_obj.title if isinstance(collection_obj, Collections) else collection_obj
rating_keys = [movie_map[mm] for mm in self.missing_movies if mm in movie_map]
if library.is_show:
rating_keys.extend([show_map[sm] for sm in self.missing_shows if sm in show_map])
if len(rating_keys) > 0:
for rating_key in rating_keys:
try:
current = library.fetchItem(int(rating_key))
except (BadRequest, NotFound):
logger.error(f"Plex Error: Item {rating_key} not found")
continue
if current in collection_items:
logger.info(f"{name} Collection | = | {current.title}")
else:
current.addCollection(name)
logger.info(f"{name} Collection | + | {current.title}")
logger.info(f"{len(rating_keys)} {'Movie' if library.is_movie else 'Show'}{'s' if len(rating_keys) > 1 else ''} Processed")
if len(self.missing_movies) > 0:
logger.info("")
for missing_id in self.missing_movies:
if missing_id not in movie_map:
try:
movie = self.config.TMDb.get_movie(missing_id)
except Failed as e:
logger.error(e)
continue
if self.details["show_missing"] is True:
logger.info(f"{name} Collection | ? | {movie.title} (TMDb: {missing_id})")
logger.info("")
logger.info(f"{len(self.missing_movies)} Movie{'s' if len(self.missing_movies) > 1 else ''} Missing")
if len(self.missing_shows) > 0 and library.is_show:
logger.info("")
for missing_id in self.missing_shows:
if missing_id not in show_map:
try:
title = str(self.config.TVDb.get_series(self.library.Plex.language, tvdb_id=missing_id).title.encode("ascii", "replace").decode())
except Failed as e:
logger.error(e)
continue
if self.details["show_missing"] is True:
logger.info(f"{name} Collection | ? | {title} (TVDb: {missing_id})")
logger.info(f"{len(self.missing_shows)} Show{'s' if len(self.missing_shows) > 1 else ''} Missing")

@ -6,13 +6,13 @@ logger = logging.getLogger("Plex Meta Manager")
class Cache:
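# SQLite-backed ID cache stored alongside the config file; the guids table is created on first run and reused afterwards.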
def __init__(self, config_path, expiration):
cache = "{}.cache".format(os.path.splitext(config_path)[0])
cache = f"{os.path.splitext(config_path)[0]}.cache"
with sqlite3.connect(cache) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guids'")
if cursor.fetchone()[0] == 0:
logger.info("Initializing cache database at {}".format(cache))
logger.info(f"Initializing cache database at {cache}")
cursor.execute(
"""CREATE TABLE IF NOT EXISTS guids (
INTEGER PRIMARY KEY,
@ -34,7 +34,7 @@ class Cache:
media_type TEXT)"""
)
else:
logger.info("Using cache database at {}".format(cache))
logger.info(f"Using cache database at {cache}")
self.expiration = expiration
self.cache_path = cache
@ -73,7 +73,7 @@ class Cache:
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT * FROM guids WHERE {} = ? AND media_type = ?".format(from_id), (key, media_type))
cursor.execute(f"SELECT * FROM guids WHERE {from_id} = ? AND media_type = ?", (key, media_type))
row = cursor.fetchone()
if row and row[to_id]:
datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")

@ -1,4 +1,4 @@
import glob, logging, os, re, requests
import logging, os, re, requests, time
from modules import util
from modules.anidb import AniDBAPI
from modules.builder import CollectionBuilder
@ -11,7 +11,7 @@ from modules.radarr import RadarrAPI
from modules.sonarr import SonarrAPI
from modules.tautulli import TautulliAPI
from modules.tmdb import TMDbAPI
from modules.trakt import TraktAPI
from modules.trakttv import TraktAPI
from modules.tvdb import TVDbAPI
from modules.util import Failed
from plexapi.exceptions import BadRequest
@ -23,10 +23,10 @@ class Config:
def __init__(self, default_dir, config_path=None):
logger.info("Locating config...")
if config_path and os.path.exists(config_path): self.config_path = os.path.abspath(config_path)
elif config_path and not os.path.exists(config_path): raise Failed("Config Error: config not found at {}".format(os.path.abspath(config_path)))
elif config_path and not os.path.exists(config_path): raise Failed(f"Config Error: config not found at {os.path.abspath(config_path)}")
elif os.path.exists(os.path.join(default_dir, "config.yml")): self.config_path = os.path.abspath(os.path.join(default_dir, "config.yml"))
else: raise Failed("Config Error: config not found at {}".format(os.path.abspath(default_dir)))
logger.info("Using {} as config".format(self.config_path))
else: raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
logger.info(f"Using {self.config_path} as config")
yaml.YAML().allow_duplicate_keys = True
try:
@ -74,10 +74,9 @@ class Config:
yaml.round_trip_dump(new_config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
self.data = new_config
except yaml.scanner.ScannerError as e:
raise Failed("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
raise Failed(f"YAML Error: {util.tab_new_lines(e)}")
def check_for_attribute(data, attribute, parent=None, test_list=None, options="", default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", throw=False, save=True):
message = ""
endline = ""
if parent is not None:
if parent in data:
@ -86,53 +85,55 @@ class Config:
data = None
do_print = False
save = False
text = "{} attribute".format(attribute) if parent is None else "{} sub-attribute {}".format(parent, attribute)
text = f"{attribute} attribute" if parent is None else f"{parent} sub-attribute {attribute}"
if data is None or attribute not in data:
message = "{} not found".format(text)
message = f"{text} not found"
if parent and save is True:
new_config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
endline = "\n{} sub-attribute {} added to config".format(parent, attribute)
if parent not in new_config: new_config = {parent: {attribute: default}}
elif not new_config[parent]: new_config[parent] = {attribute: default}
elif attribute not in new_config[parent]: new_config[parent][attribute] = default
else: endLine = ""
yaml.round_trip_dump(new_config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
elif not data[attribute] and data[attribute] != False:
loaded_config, ind_in, bsi_in = yaml.util.load_yaml_guess_indent(open(self.config_path))
endline = f"\n{parent} sub-attribute {attribute} added to config"
if parent not in loaded_config or not loaded_config[parent]: loaded_config[parent] = {attribute: default}
elif attribute not in loaded_config[parent]: loaded_config[parent][attribute] = default
else: endline = ""
yaml.round_trip_dump(loaded_config, open(self.config_path, "w"), indent=ind_in, block_seq_indent=bsi_in)
elif not data[attribute] and data[attribute] is not False:
if default_is_none is True: return None
else: message = "{} is blank".format(text)
else: message = f"{text} is blank"
elif var_type == "bool":
if isinstance(data[attribute], bool): return data[attribute]
else: message = "{} must be either true or false".format(text)
else: message = f"{text} must be either true or false"
elif var_type == "int":
if isinstance(data[attribute], int) and data[attribute] > 0: return data[attribute]
else: message = "{} must an integer > 0".format(text)
else: message = f"{text} must an integer > 0"
elif var_type == "path":
if os.path.exists(os.path.abspath(data[attribute])): return data[attribute]
else: message = "Path {} does not exist".format(os.path.abspath(data[attribute]))
else: message = f"Path {os.path.abspath(data[attribute])} does not exist"
elif var_type == "list": return util.get_list(data[attribute])
elif var_type == "listpath":
elif var_type == "list_path":
temp_list = [path for path in util.get_list(data[attribute], split=True) if os.path.exists(os.path.abspath(path))]
if len(temp_list) > 0: return temp_list
else: message = "No Paths exist"
elif var_type == "lowerlist": return util.get_list(data[attribute], lower=True)
elif var_type == "lower_list": return util.get_list(data[attribute], lower=True)
elif test_list is None or data[attribute] in test_list: return data[attribute]
else: message = "{}: {} is an invalid input".format(text, data[attribute])
else: message = f"{text}: {data[attribute]} is an invalid input"
if var_type == "path" and default and os.path.exists(os.path.abspath(default)):
return default
elif var_type == "path" and default:
default = None
message = "neither {} or the default path {} could be found".format(data[attribute], default)
if attribute in data and data[attribute]:
message = f"neither {data[attribute]} or the default path {default} could be found"
else:
message = f"no {text} found and the default path {default} could be found"
if default is not None or default_is_none:
message = message + " using {} as default".format(default)
message = message + f" using {default} as default"
message = message + endline
if req_default and default is None:
raise Failed("Config Error: {} attribute must be set under {} globally or under this specific Library".format(attribute, parent))
raise Failed(f"Config Error: {attribute} attribute must be set under {parent} globally or under this specific Library")
if (default is None and not default_is_none) or throw:
if len(options) > 0:
message = message + "\n" + options
raise Failed("Config Error: {}".format(message))
raise Failed(f"Config Error: {message}")
if do_print:
util.print_multiline("Config Warning: {}".format(message))
util.print_multiline(f"Config Warning: {message}")
if attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
util.print_multiline(options)
return default
@ -141,18 +142,19 @@ class Config:
self.general["cache"] = check_for_attribute(self.data, "cache", parent="settings", options=" true (Create a cache to store ids)\n false (Do not create a cache to store ids)", var_type="bool", default=True)
self.general["cache_expiration"] = check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60)
if self.general["cache"]:
util.seperator()
util.separator()
self.Cache = Cache(self.config_path, self.general["cache_expiration"])
else:
self.Cache = None
self.general["asset_directory"] = check_for_attribute(self.data, "asset_directory", parent="settings", var_type="listpath", default=[os.path.join(default_dir, "assets")])
self.general["asset_directory"] = check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")])
self.general["sync_mode"] = check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=["append", "sync"], options=" append (Only Add Items to the Collection)\n sync (Add & Remove Items from the Collection)")
self.general["run_again_delay"] = check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0)
self.general["show_unmanaged"] = check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True)
self.general["show_filtered"] = check_for_attribute(self.data, "show_filtered", parent="settings", var_type="bool", default=False)
self.general["show_missing"] = check_for_attribute(self.data, "show_missing", parent="settings", var_type="bool", default=True)
self.general["save_missing"] = check_for_attribute(self.data, "save_missing", parent="settings", var_type="bool", default=True)
util.seperator()
util.separator()
self.TMDb = None
if "tmdb" in self.data:
@ -162,11 +164,11 @@ class Config:
except Failed as e: raise Failed(e)
self.tmdb["language"] = check_for_attribute(self.data, "language", parent="tmdb", default="en")
self.TMDb = TMDbAPI(self.tmdb)
logger.info("TMDb Connection {}".format("Failed" if self.TMDb is None else "Successful"))
logger.info(f"TMDb Connection {'Failed' if self.TMDb is None else 'Successful'}")
else:
raise Failed("Config Error: tmdb attribute not found")
util.seperator()
util.separator()
self.Trakt = None
if "trakt" in self.data:
@ -180,11 +182,11 @@ class Config:
self.Trakt = TraktAPI(self.trakt, authorization)
except Failed as e:
logger.error(e)
logger.info("Trakt Connection {}".format("Failed" if self.Trakt is None else "Successful"))
logger.info(f"Trakt Connection {'Failed' if self.Trakt is None else 'Successful'}")
else:
logger.warning("trakt attribute not found")
util.seperator()
util.separator()
self.MyAnimeList = None
self.MyAnimeListIDList = MyAnimeListIDList()
@ -199,7 +201,7 @@ class Config:
self.MyAnimeList = MyAnimeListAPI(self.mal, self.MyAnimeListIDList, authorization)
except Failed as e:
logger.error(e)
logger.info("My Anime List Connection {}".format("Failed" if self.MyAnimeList is None else "Successful"))
logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}")
else:
logger.warning("mal attribute not found")
@ -207,7 +209,7 @@ class Config:
self.IMDb = IMDbAPI(Cache=self.Cache, TMDb=self.TMDb, Trakt=self.Trakt, TVDb=self.TVDb) if self.TMDb or self.Trakt else None
self.AniDB = AniDBAPI(Cache=self.Cache, TMDb=self.TMDb, Trakt=self.Trakt)
util.seperator()
util.separator()
logger.info("Connecting to Plex Libraries...")
@ -224,7 +226,7 @@ class Config:
self.general["radarr"]["root_folder_path"] = check_for_attribute(self.data, "root_folder_path", parent="radarr", default_is_none=True)
self.general["radarr"]["add"] = check_for_attribute(self.data, "add", parent="radarr", var_type="bool", default=False)
self.general["radarr"]["search"] = check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
self.general["radarr"]["tag"] = check_for_attribute(self.data, "tag", parent="radarr", var_type="lowerlist", default_is_none=True)
self.general["radarr"]["tag"] = check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True)
self.general["sonarr"] = {}
self.general["sonarr"]["url"] = check_for_attribute(self.data, "url", parent="sonarr", default_is_none=True)
@ -234,7 +236,8 @@ class Config:
self.general["sonarr"]["root_folder_path"] = check_for_attribute(self.data, "root_folder_path", parent="sonarr", default_is_none=True)
self.general["sonarr"]["add"] = check_for_attribute(self.data, "add", parent="sonarr", var_type="bool", default=False)
self.general["sonarr"]["search"] = check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False)
self.general["sonarr"]["tag"] = check_for_attribute(self.data, "tag", parent="sonarr", var_type="lowerlist", default_is_none=True)
self.general["sonarr"]["season_folder"] = check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True)
self.general["sonarr"]["tag"] = check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True)
self.general["tautulli"] = {}
self.general["tautulli"]["url"] = check_for_attribute(self.data, "url", parent="tautulli", default_is_none=True)
@ -244,17 +247,16 @@ class Config:
try: libs = check_for_attribute(self.data, "libraries", throw=True)
except Failed as e: raise Failed(e)
for lib in libs:
util.seperator()
util.separator()
params = {}
if "library_name" in libs[lib] and libs[lib]["library_name"]:
params["name"] = str(libs[lib]["library_name"])
logger.info("Connecting to {} ({}) Library...".format(params["name"], lib))
logger.info(f"Connecting to {params['name']} ({lib}) Library...")
else:
params["name"] = str(lib)
logger.info("Connecting to {} Library...".format(params["name"]))
default_lib = os.path.join(default_dir, "{}.yml".format(lib))
logger.info(f"Connecting to {params['name']} Library...")
params["asset_directory"] = check_for_attribute(libs[lib], "asset_directory", parent="settings", var_type="listpath", default=self.general["asset_directory"], default_is_none=True, save=False)
params["asset_directory"] = check_for_attribute(libs[lib], "asset_directory", parent="settings", var_type="list_path", default=self.general["asset_directory"], default_is_none=True, save=False)
if params["asset_directory"] is None:
logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library")
@ -265,21 +267,21 @@ class Config:
params["save_missing"] = check_for_attribute(libs[lib], "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], save=False)
try:
params["metadata_path"] = check_for_attribute(libs[lib], "metadata_path", var_type="path", default=os.path.join(default_dir, "{}.yml".format(lib)), throw=True)
params["metadata_path"] = check_for_attribute(libs[lib], "metadata_path", var_type="path", default=os.path.join(default_dir, f"{lib}.yml"), throw=True)
params["library_type"] = check_for_attribute(libs[lib], "library_type", test_list=["movie", "show"], options=" movie (For Movie Libraries)\n show (For Show Libraries)", throw=True)
params["plex"] = {}
params["plex"]["url"] = check_for_attribute(libs[lib], "url", parent="plex", default=self.general["plex"]["url"], req_default=True, save=False)
params["plex"]["token"] = check_for_attribute(libs[lib], "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False)
params["plex"]["timeout"] = check_for_attribute(libs[lib], "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False)
library = PlexAPI(params, self.TMDb, self.TVDb)
logger.info("{} Library Connection Successful".format(params["name"]))
logger.info(f"{params['name']} Library Connection Successful")
except Failed as e:
util.print_multiline(e)
logger.info("{} Library Connection Failed".format(params["name"]))
logger.info(f"{params['name']} Library Connection Failed")
continue
if self.general["radarr"]["url"] or "radarr" in libs[lib]:
logger.info("Connecting to {} library's Radarr...".format(params["name"]))
logger.info(f"Connecting to {params['name']} library's Radarr...")
radarr_params = {}
try:
radarr_params["url"] = check_for_attribute(libs[lib], "url", parent="radarr", default=self.general["radarr"]["url"], req_default=True, save=False)
@ -289,14 +291,14 @@ class Config:
radarr_params["root_folder_path"] = check_for_attribute(libs[lib], "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False)
radarr_params["add"] = check_for_attribute(libs[lib], "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False)
radarr_params["search"] = check_for_attribute(libs[lib], "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
radarr_params["tag"] = check_for_attribute(libs[lib], "search", parent="radarr", var_type="lowerlist", default=self.general["radarr"]["tag"], default_is_none=True, save=False)
radarr_params["tag"] = check_for_attribute(libs[lib], "search", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False)
library.add_Radarr(RadarrAPI(self.TMDb, radarr_params))
except Failed as e:
util.print_multiline(e)
logger.info("{} library's Radarr Connection {}".format(params["name"], "Failed" if library.Radarr is None else "Successful"))
logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
if self.general["sonarr"]["url"] or "sonarr" in libs[lib]:
logger.info("Connecting to {} library's Sonarr...".format(params["name"]))
logger.info(f"Connecting to {params['name']} library's Sonarr...")
sonarr_params = {}
try:
sonarr_params["url"] = check_for_attribute(libs[lib], "url", parent="sonarr", default=self.general["sonarr"]["url"], req_default=True, save=False)
@ -306,14 +308,15 @@ class Config:
sonarr_params["root_folder_path"] = check_for_attribute(libs[lib], "root_folder_path", parent="sonarr", default=self.general["sonarr"]["root_folder_path"], req_default=True, save=False)
sonarr_params["add"] = check_for_attribute(libs[lib], "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False)
sonarr_params["search"] = check_for_attribute(libs[lib], "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False)
sonarr_params["tag"] = check_for_attribute(libs[lib], "search", parent="sonarr", var_type="lowerlist", default=self.general["sonarr"]["tag"], default_is_none=True, save=False)
sonarr_params["season_folder"] = check_for_attribute(libs[lib], "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False)
sonarr_params["tag"] = check_for_attribute(libs[lib], "search", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False)
library.add_Sonarr(SonarrAPI(self.TVDb, sonarr_params, library.Plex.language))
except Failed as e:
util.print_multiline(e)
logger.info("{} library's Sonarr Connection {}".format(params["name"], "Failed" if library.Sonarr is None else "Successful"))
logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
if self.general["tautulli"]["url"] or "tautulli" in libs[lib]:
logger.info("Connecting to {} library's Tautulli...".format(params["name"]))
logger.info(f"Connecting to {params['name']} library's Tautulli...")
tautulli_params = {}
try:
tautulli_params["url"] = check_for_attribute(libs[lib], "url", parent="tautulli", default=self.general["tautulli"]["url"], req_default=True, save=False)
@ -321,32 +324,32 @@ class Config:
library.add_Tautulli(TautulliAPI(tautulli_params))
except Failed as e:
util.print_multiline(e)
logger.info("{} library's Tautulli Connection {}".format(params["name"], "Failed" if library.Tautulli is None else "Successful"))
logger.info(f"{params['name']} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
self.libraries.append(library)
util.seperator()
util.separator()
if len(self.libraries) > 0:
logger.info("{} Plex Library Connection{} Successful".format(len(self.libraries), "s" if len(self.libraries) > 1 else ""))
logger.info(f"{len(self.libraries)} Plex Library Connection{'s' if len(self.libraries) > 1 else ''} Successful")
else:
raise Failed("Plex Error: No Plex libraries were found")
util.seperator()
util.separator()
def update_libraries(self, test, requested_collections):
for library in self.libraries:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.seperator("{} Library".format(library.name))
util.separator(f"{library.name} Library")
try: library.update_metadata(self.TMDb, test)
except Failed as e: logger.error(e)
logger.info("")
util.seperator("{} Library {}Collections".format(library.name, "Test " if test else ""))
util.separator(f"{library.name} Library {'Test ' if test else ''}Collections")
collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections
if collections:
logger.info("")
util.seperator("Mapping {} Library".format(library.name))
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = self.map_guids(library)
for c in collections:
@ -357,51 +360,54 @@ class Config:
if "name" in data_template \
and data_template["name"] \
and library.templates \
and data_template["name"] in self.library.templates \
and self.library.templates[data_template["name"]] \
and "test" in self.library.templates[data_template["name"]] \
and self.library.templates[data_template["name"]]["test"] == True:
and data_template["name"] in library.templates \
and library.templates[data_template["name"]] \
and "test" in library.templates[data_template["name"]] \
and library.templates[data_template["name"]]["test"] is True:
no_template_test = False
if no_template_test:
continue
try:
logger.info("")
util.seperator("{} Collection".format(c))
util.separator(f"{c} Collection")
logger.info("")
map = {}
rating_key_map = {}
try:
builder = CollectionBuilder(self, library, c, collections[c])
except Exception as e:
except Failed as ef:
util.print_multiline(ef, error=True)
continue
except Exception as ee:
util.print_stacktrace()
logger.error(e)
logger.error(ee)
continue
try:
collection_obj = library.get_collection(c)
collection_name = collection_obj.title
except Failed as e:
except Failed:
collection_obj = None
collection_name = c
if builder.schedule is not None:
print_multiline(builder.schedule, info=True)
if len(builder.schedule) > 0:
util.print_multiline(builder.schedule, info=True)
logger.info("")
if builder.sync:
logger.info("Sync Mode: sync")
if collection_obj:
for item in collection_obj.items():
map[item.ratingKey] = item
rating_key_map[item.ratingKey] = item
else:
logger.info("Sync Mode: append")
for i, f in enumerate(builder.filters):
if i == 0:
logger.info("")
logger.info("Collection Filter {}: {}".format(f[0], f[1]))
logger.info(f"Collection Filter {f[0]}: {f[1]}")
builder.run_methods(collection_obj, collection_name, map, movie_map, show_map)
builder.run_methods(collection_obj, collection_name, rating_key_map, movie_map, show_map)
try:
plex_collection = library.get_collection(collection_name)
@ -411,15 +417,19 @@ class Config:
builder.update_details(plex_collection)
if builder.run_again and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0):
library.run_again.append(builder)
except Exception as e:
util.print_stacktrace()
logger.error("Unknown Error: {}".format(e))
logger.error(f"Unknown Error: {e}")
if library.show_unmanaged is True and not test and not requested_collections:
logger.info("")
util.seperator("Unmanaged Collections in {} Library".format(library.name))
util.separator(f"Unmanaged Collections in {library.name} Library")
logger.info("")
unmanaged_count = 0
collections_in_plex = [str(pcol) for pcol in collections]
collections_in_plex = [str(plex_col) for plex_col in collections]
for col in library.get_all_collections():
if col.title not in collections_in_plex:
logger.info(col.title)
@ -429,20 +439,57 @@ class Config:
logger.info("")
logger.error("No collection to update")
has_run_again = False
for library in self.libraries:
if library.run_again:
has_run_again = True
break
if has_run_again:
logger.info("")
util.separator("Run Again")
logger.info("")
length = 0
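# Count down run_again_delay minutes before the second pass, refreshing a single status line once per minute.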
for x in range(1, self.general["run_again_delay"] + 1):
length = util.print_return(length, f"Waiting to run again in {self.general['run_again_delay'] - x + 1} minutes")
for y in range(60):
time.sleep(1)
util.print_end(length)
for library in self.libraries:
if library.run_again:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.separator(f"{library.name} Library Run Again")
logger.info("")
collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections
if collections:
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = self.map_guids(library)
for builder in library.run_again:
logger.info("")
util.separator(f"{builder.name} Collection")
logger.info("")
try:
collection_obj = library.get_collection(builder.name)
except Failed as e:
util.print_multiline(e, error=True)
continue
builder.run_collections_again(library, collection_obj, movie_map, show_map)
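# Builds lookup maps for the whole library: TMDb ID -> ratingKey for movies and TVDb ID -> ratingKey for shows.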
def map_guids(self, library):
movie_map = {}
show_map = {}
length = 0
count = 0
logger.info("Mapping {} Library: {}".format("Movie" if library.is_movie else "Show", library.name))
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, "Processing: {}/{} {}".format(i, len(items), item.title))
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
try:
id_type, main_id = self.get_id(item, library, length)
except BadRequest:
util.print_stacktrace()
util.print_end(length, "{} {:<46} | {} for {}".format("Cache | ! |" if self.Cache else "Mapping Error:", item.guid, error_message, item.title))
util.print_end(length, f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} | {item.guid} for {item.title} not found")
continue
if isinstance(main_id, list):
if id_type == "movie":
@ -452,7 +499,7 @@ class Config:
else:
if id_type == "movie": movie_map[main_id] = item.ratingKey
elif id_type == "show": show_map[main_id] = item.ratingKey
util.print_end(length, "Processed {} {}".format(len(items), "Movies" if library.is_movie else "Shows"))
util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}")
return movie_map, show_map
def get_id(self, item, library, length):
@ -485,10 +532,10 @@ class Config:
elif item_type == "hama":
if check_id.startswith("tvdb"): tvdb_id = int(re.search("-(.*)", check_id).group(1))
elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1)
else: error_message = "Hama Agent ID: {} not supported".format(check_id)
else: error_message = f"Hama Agent ID: {check_id} not supported"
elif item_type == "myanimelist": mal_id = check_id
elif item_type == "local": error_message = "No match in Plex"
else: error_message = "Agent {} not supported".format(item_type)
else: error_message = f"Agent {item_type} not supported"
if not error_message:
if anidb_id and not tvdb_id:
@ -502,7 +549,7 @@ class Config:
ids = self.MyAnimeListIDList.find_mal_ids(mal_id)
if "thetvdb_id" in ids and int(ids["thetvdb_id"]) > 0: tvdb_id = int(ids["thetvdb_id"])
elif "themoviedb_id" in ids and int(ids["themoviedb_id"]) > 0: tmdb_id = int(ids["themoviedb_id"])
else: raise Failed("MyAnimeList Error: MyAnimeList ID: {} has no other IDs associated with it".format(mal_id))
else: raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} has no other IDs associated with it")
except Failed:
pass
if mal_id and not tvdb_id:
@ -561,29 +608,29 @@ class Config:
elif self.Trakt: api_name = "Trakt"
else: api_name = None
if tmdb_id and imdb_id: id_name = "TMDb ID: {} or IMDb ID: {}".format(tmdb_id, imdb_id)
elif imdb_id and tvdb_id: id_name = "IMDb ID: {} or TVDb ID: {}".format(imdb_id, tvdb_id)
elif tmdb_id: id_name = "TMDb ID: {}".format(tmdb_id)
elif imdb_id: id_name = "IMDb ID: {}".format(imdb_id)
elif tvdb_id: id_name = "TVDb ID: {}".format(tvdb_id)
if tmdb_id and imdb_id: id_name = f"TMDb ID: {tmdb_id} or IMDb ID: {imdb_id}"
elif imdb_id and tvdb_id: id_name = f"IMDb ID: {imdb_id} or TVDb ID: {tvdb_id}"
elif tmdb_id: id_name = f"TMDb ID: {tmdb_id}"
elif imdb_id: id_name = f"IMDb ID: {imdb_id}"
elif tvdb_id: id_name = f"TVDb ID: {tvdb_id}"
else: id_name = None
if anidb_id and not tmdb_id and not tvdb_id: error_message = "Unable to convert AniDb ID: {} to TMDb ID or TVDb ID".format(anidb_id)
elif mal_id and not tmdb_id and not tvdb_id: error_message = "Unable to convert MyAnimeList ID: {} to TMDb ID or TVDb ID".format(mal_id)
elif id_name and api_name: error_message = "Unable to convert {} to {} using {}".format(id_name, service_name, api_name)
elif id_name: error_message = "Configure TMDb or Trakt to covert {} to {}".format(id_name, service_name)
else: error_message = "No ID to convert to {}".format(service_name)
if anidb_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert AniDb ID: {anidb_id} to TMDb ID or TVDb ID"
elif mal_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert MyAnimeList ID: {mal_id} to TMDb ID or TVDb ID"
elif id_name and api_name: error_message = f"Unable to convert {id_name} to {service_name} using {api_name}"
elif id_name: error_message = f"Configure TMDb or Trakt to covert {id_name} to {service_name}"
else: error_message = f"No ID to convert to {service_name}"
if self.Cache and (tmdb_id and library.is_movie) or ((tvdb_id or ((anidb_id or mal_id) and tmdb_id)) and library.is_show):
if isinstance(tmdb_id, list):
for i in range(len(tmdb_id)):
util.print_end(length, "Cache | {} | {:<46} | {:<6} | {:<10} | {:<6} | {:<5} | {:<5} | {}".format("^" if expired is True else "+", item.guid, tmdb_id[i] if tmdb_id[i] else "None", imdb_id[i] if imdb_id[i] else "None", tvdb_id if tvdb_id else "None", anidb_id if anidb_id else "None", mal_id if mal_id else "None", item.title))
util.print_end(length, f"Cache | {'^' if expired is True else '+'} | {item.guid:<46} | {tmdb_id[i] if tmdb_id[i] else 'None':<6} | {imdb_id[i] if imdb_id[i] else 'None':<10} | {tvdb_id if tvdb_id else 'None':<6} | {anidb_id if anidb_id else 'None':<5} | {mal_id if mal_id else 'None':<5} | {item.title}")
self.Cache.update_guid("movie" if library.is_movie else "show", item.guid, tmdb_id[i], imdb_id[i], tvdb_id, anidb_id, mal_id, expired)
else:
util.print_end(length, "Cache | {} | {:<46} | {:<6} | {:<10} | {:<6} | {:<5} | {:<5} | {}".format("^" if expired is True else "+", item.guid, tmdb_id if tmdb_id else "None", imdb_id if imdb_id else "None", tvdb_id if tvdb_id else "None", anidb_id if anidb_id else "None", mal_id if mal_id else "None", item.title))
util.print_end(length, f"Cache | {'^' if expired is True else '+'} | {item.guid:<46} | {tmdb_id if tmdb_id else 'None':<6} | {imdb_id if imdb_id else 'None':<10} | {tvdb_id if tvdb_id else 'None':<6} | {anidb_id if anidb_id else 'None':<5} | {mal_id if mal_id else 'None':<5} | {item.title}")
self.Cache.update_guid("movie" if library.is_movie else "show", item.guid, tmdb_id, imdb_id, tvdb_id, anidb_id, mal_id, expired)
if tmdb_id and library.is_movie: return "movie", tmdb_id
elif tvdb_id and library.is_show: return "show", tvdb_id
elif (anidb_id or mal_id) and tmdb_id: return "movie", tmdb_id
else:
util.print_end(length, "{} {:<46} | {} for {}".format("Cache | ! |" if self.Cache else "Mapping Error:", item.guid, error_message, item.title))
util.print_end(length, f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | {error_message} for {item.title}")
return None, None

@ -1,4 +1,4 @@
import logging, math, re, requests, time
import logging, math, re, requests
from lxml import html
from modules import util
from modules.util import Failed
@ -18,35 +18,35 @@ class IMDbAPI:
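# Scrapes an IMDb list or search URL 250 titles at a time and returns the collected IMDb IDs (tconsts).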
def get_imdb_ids_from_url(self, imdb_url, language, limit):
imdb_url = imdb_url.strip()
if not imdb_url.startswith("https://www.imdb.com/list/ls") and not imdb_url.startswith("https://www.imdb.com/search/title/?"):
raise Failed("IMDb Error: {} must begin with either:\n| https://www.imdb.com/list/ls (For Lists)\n| https://www.imdb.com/search/title/? (For Searches)".format(imdb_url))
raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n| https://www.imdb.com/list/ls (For Lists)\n| https://www.imdb.com/search/title/? (For Searches)")
if imdb_url.startswith("https://www.imdb.com/list/ls"):
try: list_id = re.search("(\\d+)", str(imdb_url)).group(1)
except AttributeError: raise Failed("IMDb Error: Failed to parse List ID from {}".format(imdb_url))
current_url = "https://www.imdb.com/search/title/?lists=ls{}".format(list_id)
except AttributeError: raise Failed(f"IMDb Error: Failed to parse List ID from {imdb_url}")
current_url = f"https://www.imdb.com/search/title/?lists=ls{list_id}"
else:
current_url = imdb_url
header = {"Accept-Language": language}
length = 0
imdb_ids = []
try: results = self.send_request(current_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "")
except IndexError: raise Failed("IMDb Error: Failed to parse URL: {}".format(imdb_url))
except IndexError: raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}")
try: total = int(re.findall("(\\d+) title", results)[0])
except IndexError: raise Failed("IMDb Error: No Results at URL: {}".format(imdb_url))
if "&start=" in current_url: current_url = re.sub("&start=\d+", "", current_url)
if "&count=" in current_url: current_url = re.sub("&count=\d+", "", current_url)
except IndexError: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
if "&start=" in current_url: current_url = re.sub("&start=\\d+", "", current_url)
if "&count=" in current_url: current_url = re.sub("&count=\\d+", "", current_url)
if limit < 1 or total < limit: limit = total
remainder = limit % 250
if remainder == 0: remainder = 250
num_of_pages = math.ceil(int(limit) / 250)
for i in range(1, num_of_pages + 1):
start_num = (i - 1) * 250 + 1
length = util.print_return(length, "Parsing Page {}/{} {}-{}".format(i, num_of_pages, start_num, limit if i == num_of_pages else i * 250))
response = self.send_request("{}&count={}&start={}".format(current_url, remainder if i == num_of_pages else 250, start_num), header)
length = util.print_return(length, f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * 250}")
response = self.send_request(f"{current_url}&count={remainder if i == num_of_pages else 250}&start={start_num}", header)
imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst"))
util.print_end(length)
if imdb_ids: return imdb_ids
else: raise Failed("IMDb Error: No Movies Found at {}".format(imdb_url))
else: raise Failed(f"IMDb Error: No Movies Found at {imdb_url}")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url, header):
@ -55,37 +55,40 @@ class IMDbAPI:
def get_items(self, method, data, language, status_message=True):
pretty = util.pretty_names[method] if method in util.pretty_names else method
if status_message:
logger.debug("Data: {}".format(data))
logger.debug(f"Data: {data}")
show_ids = []
movie_ids = []
if method == "imdb_id":
if status_message:
logger.info("Processing {}: {}".format(pretty, data))
logger.info(f"Processing {pretty}: {data}")
tmdb_id, tvdb_id = self.convert_from_imdb(data, language)
if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id)
elif method == "imdb_list":
if status_message:
logger.info("Processing {}: {}".format(pretty,"{} Items at {}".format(data["limit"], data["url"]) if data["limit"] > 0 else data["url"]))
status = f"{data['limit']} Items at " if data['limit'] > 0 else ''
logger.info(f"Processing {pretty}: {status}{data['url']}")
imdb_ids = self.get_imdb_ids_from_url(data["url"], language, data["limit"])
total_ids = len(imdb_ids)
length = 0
for i, imdb_id in enumerate(imdb_ids, 1):
length = util.print_return(length, "Converting IMDb ID {}/{}".format(i, total_ids))
length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}")
try:
tmdb_id, tvdb_id = self.convert_from_imdb(imdb_id, language)
if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id)
except Failed as e: logger.warning(e)
util.print_end(length, "Processed {} IMDb IDs".format(total_ids))
util.print_end(length, f"Processed {total_ids} IMDb IDs")
else:
raise Failed("IMDb Error: Method {} not supported".format(method))
raise Failed(f"IMDb Error: Method {method} not supported")
if status_message:
logger.debug("TMDb IDs Found: {}".format(movie_ids))
logger.debug("TVDb IDs Found: {}".format(show_ids))
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids
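# Resolves an IMDb ID to a TMDb and/or TVDb ID, checking the cache first and writing fresh lookups back to it.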
def convert_from_imdb(self, imdb_id, language):
update_tmdb = False
update_tvdb = False
if self.Cache:
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb_id)
update_tmdb = False
@ -121,7 +124,7 @@ class IMDbAPI:
try:
if tvdb_id and not from_cache: self.TVDb.get_series(language, tvdb_id=tvdb_id)
except Failed: tvdb_id = None
if not tmdb_id and not tvdb_id : raise Failed("IMDb Error: No TMDb ID or TVDb ID found for IMDb: {}".format(imdb_id))
if not tmdb_id and not tvdb_id: raise Failed(f"IMDb Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
if self.Cache:
if tmdb_id and update_tmdb is not False:
self.Cache.update_imdb("movie", update_tmdb, imdb_id, tmdb_id)

@ -18,16 +18,16 @@ class MyAnimeListIDList:
for attrs in self.ids:
if from_id in attrs and int(attrs[from_id]) == int(input_id) and to_id in attrs and int(attrs[to_id]) > 0:
return int(attrs[to_id])
raise Failed("MyAnimeList Error: {} ID not found for {}: {}".format(util.pretty_ids[to_id], util.pretty_ids[from_id], input_id))
raise Failed(f"MyAnimeList Error: {util.pretty_ids[to_id]} ID not found for {util.pretty_ids[from_id]}: {input_id}")
def find_mal_ids(self, mal_id):
for mal in self.ids:
if "mal_id" in mal and int(mal["mal_id"]) == int(mal_id):
return mal
raise Failed("MyAnimeList Error: MyAnimeList ID: {} not found".format(mal_id))
raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} not found")
class MyAnimeListAPI:
def __init__(self, params, MyAnimeListIDList, authorization=None):
def __init__(self, params, MyAnimeListIDList_in, authorization=None):
self.urls = {
"oauth_token": "https://myanimelist.net/v1/oauth2/token",
"oauth_authorize": "https://myanimelist.net/v1/oauth2/authorize",
@ -40,16 +40,16 @@ class MyAnimeListAPI:
self.client_secret = params["client_secret"]
self.config_path = params["config_path"]
self.authorization = authorization
self.MyAnimeListIDList = MyAnimeListIDList
self.MyAnimeListIDList = MyAnimeListIDList_in
if not self.save_authorization(self.authorization):
if not self.refresh_authorization():
self.get_authorization()
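# MyAnimeList OAuth2 flow: builds the authorize URL (the code_verifier doubles as the plain code_challenge) and asks the user to paste back the redirect URL.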
def get_authorization(self):
code_verifier = secrets.token_urlsafe(100)[:128]
url = "{}?response_type=code&client_id={}&code_challenge={}".format(self.urls["oauth_authorize"], self.client_id, code_verifier)
url = f"{self.urls['oauth_authorize']}?response_type=code&client_id={self.client_id}&code_challenge={code_verifier}"
logger.info("")
logger.info("Navigate to: {}".format(url))
logger.info(f"Navigate to: {url}")
logger.info("")
logger.info("Login and click the Allow option. You will then be redirected to a localhost")
logger.info("url that most likely won't load, which is fine. Copy the URL and paste it below")
@ -106,7 +106,7 @@ class MyAnimeListAPI:
"expires_in": authorization["expires_in"],
"refresh_token": authorization["refresh_token"]
}
logger.info("Saving authorization information to {}".format(self.config_path))
logger.info(f"Saving authorization information to {self.config_path}")
yaml.round_trip_dump(config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
self.authorization = authorization
return True
@ -119,8 +119,8 @@ class MyAnimeListAPI:
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def send_request(self, url, authorization=None):
new_authorization = authorization if authorization else self.authorization
response = requests.get(url, headers={"Authorization": "Bearer {}".format(new_authorization["access_token"])}).json()
if "error" in response: raise Failed("MyAnimeList Error: {}".format(response["error"]))
response = requests.get(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"}).json()
if "error" in response: raise Failed(f"MyAnimeList Error: {response['error']}")
else: return response
def parse_mal_ids(self, data):
@ -131,50 +131,51 @@ class MyAnimeListAPI:
return mal_ids
def get_username(self):
return self.send_request("{}/@me".format(self.urls["user"]))["name"]
return self.send_request(f"{self.urls['user']}/@me")["name"]
def get_ranked(self, ranking_type, limit):
url = "{}?ranking_type={}&limit={}".format(self.urls["ranking"], ranking_type, limit)
url = f"{self.urls['ranking']}?ranking_type={ranking_type}&limit={limit}"
return self.parse_mal_ids(self.send_request(url))
def get_season(self, season, year, sort_by, limit):
url = "{}/{}/{}?sort={}&limit={}".format(self.urls["season"], year, season, sort_by, limit)
url = f"{self.urls['season']}/{year}/{season}?sort={sort_by}&limit={limit}"
return self.parse_mal_ids(self.send_request(url))
def get_suggestions(self, limit):
url = "{}?limit={}".format(self.urls["suggestions"], limit)
url = f"{self.urls['suggestions']}?limit={limit}"
return self.parse_mal_ids(self.send_request(url))
def get_userlist(self, username, status, sort_by, limit):
url = "{}/{}/animelist?{}sort={}&limit={}".format(self.urls["user"], username, "" if status == "all" else "status={}&".format(status), sort_by, limit)
final_status = "" if status == "all" else f"status={status}&"
url = f"{self.urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}"
return self.parse_mal_ids(self.send_request(url))
def get_items(self, method, data, status_message=True):
if status_message:
logger.debug("Data: {}".format(data))
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
if method == "mal_id":
mal_ids = [data]
if status_message:
logger.info("Processing {}: {}".format(pretty, data))
logger.info(f"Processing {pretty}: {data}")
elif method in util.mal_ranked_name:
mal_ids = self.get_ranked(util.mal_ranked_name[method], data)
if status_message:
logger.info("Processing {}: {} Anime".format(pretty, data))
logger.info(f"Processing {pretty}: {data} Anime")
elif method == "mal_season":
mal_ids = self.get_season(data["season"], data["year"], data["sort_by"], data["limit"])
if status_message:
logger.info("Processing {}: {} Anime from {} {} sorted by {}".format(pretty, data["limit"], util.pretty_seasons[data["season"]], data["year"], util.mal_pretty[data["sort_by"]]))
logger.info(f"Processing {pretty}: {data['limit']} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {util.mal_pretty[data['sort_by']]}")
elif method == "mal_suggested":
mal_ids = self.get_suggestions(data)
if status_message:
logger.info("Processing {}: {} Anime".format(pretty, data))
logger.info(f"Processing {pretty}: {data} Anime")
elif method == "mal_userlist":
mal_ids = self.get_userlist(data["username"], data["status"], data["sort_by"], data["limit"])
if status_message:
logger.info("Processing {}: {} Anime from {}'s {} list sorted by {}".format(pretty, data["limit"], self.get_username() if data["username"] == "@me" else data["username"], util.mal_pretty[data["status"]], util.mal_pretty[data["sort_by"]]))
logger.info(f"Processing {pretty}: {data['limit']} Anime from {self.get_username() if data['username'] == '@me' else data['username']}'s {util.mal_pretty[data['status']]} list sorted by {util.mal_pretty[data['sort_by']]}")
else:
raise Failed("MyAnimeList Error: Method {} not supported".format(method))
raise Failed(f"MyAnimeList Error: Method {method} not supported")
show_ids = []
movie_ids = []
for mal_id in mal_ids:
@ -182,12 +183,12 @@ class MyAnimeListAPI:
ids = self.MyAnimeListIDList.find_mal_ids(mal_id)
if "thetvdb_id" in ids and int(ids["thetvdb_id"]) > 0: show_ids.append(int(ids["thetvdb_id"]))
elif "themoviedb_id" in ids and int(ids["themoviedb_id"]) > 0: movie_ids.append(int(ids["themoviedb_id"]))
else: raise Failed("MyAnimeList Error: MyAnimeList ID: {} has no other IDs associated with it".format(mal_id))
else: raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} has no other IDs associated with it")
except Failed as e:
if status_message:
logger.error(e)
if status_message:
logger.debug("MyAnimeList IDs Found: {}".format(mal_ids))
logger.debug("Shows Found: {}".format(show_ids))
logger.debug("Movies Found: {}".format(movie_ids))
logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
logger.debug(f"Shows Found: {show_ids}")
logger.debug(f"Movies Found: {movie_ids}")
return movie_ids, show_ids

@ -1,9 +1,10 @@
import datetime, logging, os, requests
from lxml import html
import logging, os, re, requests
from datetime import datetime, timedelta
from modules import util
from modules.util import Failed
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
from plexapi.library import Collections, MovieSection, ShowSection
from plexapi.library import MovieSection, ShowSection
from plexapi.collection import Collections
from plexapi.server import PlexServer
from plexapi.video import Movie, Show
from retrying import retry
@ -15,23 +16,23 @@ class PlexAPI:
def __init__(self, params, TMDb, TVDb):
try: self.PlexServer = PlexServer(params["plex"]["url"], params["plex"]["token"], timeout=params["plex"]["timeout"])
except Unauthorized: raise Failed("Plex Error: Plex token is invalid")
except ValueError as e: raise Failed("Plex Error: {}".format(e))
except requests.exceptions.ConnectionError as e:
except ValueError as e: raise Failed(f"Plex Error: {e}")
except requests.exceptions.ConnectionError:
util.print_stacktrace()
raise Failed("Plex Error: Plex url is invalid")
self.is_movie = params["library_type"] == "movie"
self.is_show = params["library_type"] == "show"
self.Plex = next((s for s in self.PlexServer.library.sections() if s.title == params["name"] and ((self.is_movie and isinstance(s, MovieSection)) or (self.is_show and isinstance(s, ShowSection)))), None)
if not self.Plex: raise Failed("Plex Error: Plex Library {} not found".format(params["name"]))
if not self.Plex: raise Failed(f"Plex Error: Plex Library {params['name']} not found")
try: self.data, ind, bsi = yaml.util.load_yaml_guess_indent(open(params["metadata_path"], encoding="utf-8"))
except yaml.scanner.ScannerError as e: raise Failed("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
except yaml.scanner.ScannerError as e: raise Failed(f"YAML Error: {util.tab_new_lines(e)}")
def get_dict(attribute):
if attribute in self.data:
if self.data[attribute]:
if isinstance(self.data[attribute], dict): return self.data[attribute]
else: logger.waring("Config Warning: {} must be a dictionary".format(attribute))
else: logger.warning("Config Warning: {} attribute is blank".format(attribute))
else: logger.warning(f"Config Warning: {attribute} must be a dictionary")
else: logger.warning(f"Config Warning: {attribute} attribute is blank")
return None
self.metadata = get_dict("metadata")
@ -43,7 +44,7 @@ class PlexAPI:
if params["asset_directory"]:
for ad in params["asset_directory"]:
logger.info("Using Asset Directory: {}".format(ad))
logger.info(f"Using Asset Directory: {ad}")
self.TMDb = TMDb
self.TVDb = TVDb
@ -51,7 +52,7 @@ class PlexAPI:
self.Sonarr = None
self.Tautulli = None
self.name = params["name"]
self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), "{}_missing.yml".format(os.path.splitext(os.path.basename(params["metadata_path"]))[0]))
self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), f"{os.path.splitext(os.path.basename(params['metadata_path']))[0]}_missing.yml")
self.metadata_path = params["metadata_path"]
self.asset_directory = params["asset_directory"]
self.sync_mode = params["sync_mode"]
@ -62,6 +63,7 @@ class PlexAPI:
self.plex = params["plex"]
self.timeout = params["plex"]["timeout"]
self.missing = {}
self.run_again = []
def add_Radarr(self, Radarr):
self.Radarr = Radarr
@ -93,7 +95,7 @@ class PlexAPI:
def get_collection(self, data):
collection = util.choose_from_list(self.search(str(data), libtype="collection"), "collection", str(data), exact=True)
if collection: return collection
else: raise Failed("Plex Error: Collection {} not found".format(data))
else: raise Failed(f"Plex Error: Collection {data} not found")
def validate_collections(self, collections):
valid_collections = []
@ -101,7 +103,7 @@ class PlexAPI:
try: valid_collections.append(self.get_collection(collection))
except Failed as e: logger.error(e)
if len(valid_collections) == 0:
raise Failed("Collection Error: No valid Plex Collections in {}".format(collections[c][m]))
raise Failed(f"Collection Error: No valid Plex Collections in {collections}")
return valid_collections
def add_missing(self, collection, items, is_movie):
@ -117,9 +119,9 @@ class PlexAPI:
try:
yaml.round_trip_dump(self.missing, open(self.missing_path, "w"))
except yaml.scanner.ScannerError as e:
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
logger.error(f"YAML Error: {util.tab_new_lines(e)}")
def add_to_collection(self, collection, items, filters, show_filtered, map, movie_map, show_map):
def add_to_collection(self, collection, items, filters, show_filtered, rating_key_map, movie_map, show_map):
name = collection.title if isinstance(collection, Collections) else collection
collection_items = collection.items() if isinstance(collection, Collections) else []
total = len(items)
@ -129,23 +131,20 @@ class PlexAPI:
try:
current = self.fetchItem(item.ratingKey if isinstance(item, (Movie, Show)) else int(item))
except (BadRequest, NotFound):
logger.error("Plex Error: Item {} not found".format(item))
logger.error(f"Plex Error: Item {item} not found")
continue
match = True
if filters:
length = util.print_return(length, "Filtering {}/{} {}".format((" " * (max_length - len(str(i)))) + str(i), total, current.title))
for f in filters:
modifier = f[0][-4:]
method = util.filter_alias[f[0][:-4]] if modifier in [".not", ".lte", ".gte"] else util.filter_alias[f[0]]
length = util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}")
for filter_method, filter_data in filters:
modifier = filter_method[-4:]
method = util.filter_alias[filter_method[:-4]] if modifier in [".not", ".lte", ".gte"] else util.filter_alias[filter_method]
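# The trailing four characters of the filter name pick a modifier (".not", ".lte", ".gte"); the remainder is mapped to an attribute through util.filter_alias.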
if method == "max_age":
threshold_date = datetime.now() - timedelta(days=f[1])
attr = getattr(current, "originallyAvailableAt")
if attr is None or attr < threshold_date:
threshold_date = datetime.now() - timedelta(days=filter_data)
if current.originallyAvailableAt is None or current.originallyAvailableAt < threshold_date:
match = False
break
elif method == "original_language":
terms = util.get_list(f[1], lower=True)
tmdb_id = None
movie = None
for key, value in movie_map.items():
if current.ratingKey == value:
@ -155,25 +154,32 @@ class PlexAPI:
except Failed:
pass
if movie is None:
logger.warning("Filter Error: No TMDb ID found for {}".format(current.title))
logger.warning(f"Filter Error: No TMDb ID found for {current.title}")
continue
if (modifier == ".not" and movie.original_language in terms) or (modifier != ".not" and movie.original_language not in terms):
if (modifier == ".not" and movie.original_language in filter_data) or (modifier != ".not" and movie.original_language not in filter_data):
match = False
break
elif modifier in [".gte", ".lte"]:
if method == "originallyAvailableAt":
threshold_date = datetime.strptime(f[1], "%m/%d/%y")
attr = getattr(current, "originallyAvailableAt")
if (modifier == ".lte" and attr > threshold_date) or (modifier == ".gte" and attr < threshold_date):
match = False
if method == "vote_count":
tmdb_item = None
for key, value in movie_map.items():
if current.ratingKey == value:
try:
tmdb_item = self.TMDb.get_movie(key) if self.is_movie else self.TMDb.get_show(key)
break
elif method in ["year", "rating"]:
attr = getattr(current, method)
if (modifier == ".lte" and attr > f[1]) or (modifier == ".gte" and attr < f[1]):
except Failed:
pass
if tmdb_item is None:
logger.warning(f"Filter Error: No TMDb ID found for {current.title}")
continue
attr = tmdb_item.vote_count
else:
attr = getattr(current, method) / 60000 if method == "duration" else getattr(current, method)
if (modifier == ".lte" and attr > filter_data) or (modifier == ".gte" and attr < filter_data):
match = False
break
else:
terms = util.get_list(f[1])
attrs = []
if method in ["video_resolution", "audio_language", "subtitle_language"]:
for media in current.media:
if method == "video_resolution": attrs = [media.videoResolution]
@ -183,26 +189,26 @@ class PlexAPI:
elif method in ["contentRating", "studio", "year", "rating", "originallyAvailableAt"]: attrs = [str(getattr(current, method))]
elif method in ["actors", "countries", "directors", "genres", "writers", "collections"]: attrs = [getattr(x, "tag") for x in getattr(current, method)]
if (not list(set(terms) & set(attrs)) and modifier != ".not") or (list(set(terms) & set(attrs)) and modifier == ".not"):
if (not list(set(filter_data) & set(attrs)) and modifier != ".not") or (list(set(filter_data) & set(attrs)) and modifier == ".not"):
match = False
break
length = util.print_return(length, "Filtering {}/{} {}".format((" " * (max_length - len(str(i)))) + str(i), total, current.title))
length = util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}")
if match:
util.print_end(length, "{} Collection | {} | {}".format(name, "=" if current in collection_items else "+", current.title))
if current in collection_items: map[current.ratingKey] = None
util.print_end(length, f"{name} Collection | {'=' if current in collection_items else '+'} | {current.title}")
if current in collection_items: rating_key_map[current.ratingKey] = None
else: current.addCollection(name)
elif show_filtered is True:
logger.info("{} Collection | X | {}".format(name, current.title))
media_type = "{}{}".format("Movie" if self.is_movie else "Show", "s" if total > 1 else "")
util.print_end(length, "{} {} Processed".format(total, media_type))
return map
logger.info(f"{name} Collection | X | {current.title}")
media_type = f"{'Movie' if self.is_movie else 'Show'}{'s' if total > 1 else ''}"
util.print_end(length, f"{total} {media_type} Processed")
return rating_key_map
def search_item(self, data, year=None):
return util.choose_from_list(self.search(data, year=year), "movie" if self.is_movie else "show", str(data), exact=True)
def update_metadata(self, TMDb, test):
logger.info("")
util.seperator("{} Library Metadata".format(self.name))
util.separator(f"{self.name} Library Metadata")
logger.info("")
if not self.metadata:
raise Failed("No metadata to edit")
@ -210,15 +216,11 @@ class PlexAPI:
if test and ("test" not in self.metadata[m] or self.metadata[m]["test"] is not True):
continue
logger.info("")
util.seperator()
util.separator()
logger.info("")
year = None
if "year" in self.metadata[m]:
now = datetime.datetime.now()
if self.metadata[m]["year"] is None: logger.error("Metadata Error: year attribute is blank")
elif not isinstance(self.metadata[m]["year"], int): logger.error("Metadata Error: year attribute must be an integer")
elif self.metadata[m]["year"] not in range(1800, now.year + 2): logger.error("Metadata Error: year attribute must be between 1800-{}".format(now.year + 1))
else: year = self.metadata[m]["year"]
year = util.check_number(self.metadata[m]["year"], "year", minimum=1800, maximum=datetime.now().year + 1)
title = m
if "title" in self.metadata[m]:
@ -228,7 +230,7 @@ class PlexAPI:
item = self.search_item(title, year=year)
if item is None:
item = self.search_item("{} (SUB)".format(title), year=year)
item = self.search_item(f"{title} (SUB)", year=year)
if item is None and "alt_title" in self.metadata[m]:
if self.metadata[m]["alt_title"] is None:
@ -238,11 +240,12 @@ class PlexAPI:
item = self.search_item(alt_title, year=year)
if item is None:
logger.error("Plex Error: Item {} not found".format(m))
logger.error("Skipping {}".format(m))
logger.error(f"Plex Error: Item {m} not found")
logger.error(f"Skipping {m}")
continue
logger.info("Updating {}: {}...".format("Movie" if self.is_movie else "Show", title))
item_type = "Movie" if self.is_movie else "Show"
logger.info(f"Updating {item_type}: {title}...")
tmdb_item = None
try:
@ -253,7 +256,6 @@ class PlexAPI:
except Failed as e:
logger.error(e)
originally_available = tmdb_item.first_air_date if tmdb_item else None
rating = tmdb_item.vote_average if tmdb_item else None
original_title = tmdb_item.original_name if tmdb_item and tmdb_item.original_name != tmdb_item.name else None
@ -268,32 +270,32 @@ class PlexAPI:
if key is None: key = name
if value is None: value = group[name]
if str(current) != str(value):
edits["{}.value".format(key)] = value
edits["{}.locked".format(key)] = 1
edits[f"{key}.value"] = value
edits[f"{key}.locked"] = 1
logger.info(f"Detail: {name} updated to {value}")
else:
logger.error("Metadata Error: {} attribute is blank".format(name))
logger.error(f"Metadata Error: {name} attribute is blank")
add_edit("title", item.title, self.metadata[m], value=title)
add_edit("sort_title", item.titleSort, self.metadata[m], key="titleSort")
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], self.metadata[m], key="originallyAvailableAt", value=originally_available)
add_edit("rating", item.rating, self.metadata[m], value=rating)
add_edit("content_rating", item.contentRating, self.metadata[m], key="contentRating")
item_original_title = item.originalTitle if self.is_movie else item._data.attrib.get("originalTitle")
add_edit("original_title", item_original_title, self.metadata[m], key="originalTitle", value=original_title)
add_edit("original_title", item.originalTitle, self.metadata[m], key="originalTitle", value=original_title)
add_edit("studio", item.studio, self.metadata[m], value=studio)
item_tagline = item.tagline if self.is_movie else item._data.attrib.get("tagline")
add_edit("tagline", item_tagline, self.metadata[m], value=tagline)
add_edit("summary", item.summary, self.metadata[m], value=summary)
if len(edits) > 0:
logger.debug("Details Update: {}".format(edits))
logger.debug(f"Details Update: {edits}")
try:
item.edit(**edits)
item.reload()
logger.info("{}: {} Details Update Successful".format("Movie" if self.is_movie else "Show", m))
logger.info(f"{item_type}: {m} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error("{}: {} Details Update Failed".format("Movie" if self.is_movie else "Show", m))
logger.error(f"{item_type}: {m} Details Update Failed")
else:
logger.info("{}: {} Details Update Not Needed".format("Movie" if self.is_movie else "Show", m))
logger.info(f"{item_type}: {m} Details Update Not Needed")
genres = []
@ -312,10 +314,10 @@ class PlexAPI:
elif self.metadata[m]["genre_sync_mode"] == "sync":
for genre in (g for g in item_genres if g not in genres):
item.removeGenre(genre)
logger.info("Detail: Genre {} removed".format(genre))
logger.info(f"Detail: Genre {genre} removed")
for genre in (g for g in genres if g not in item_genres):
item.addGenre(genre)
logger.info("Detail: Genre {} added".format(genre))
logger.info(f"Detail: Genre {genre} added")
if "label" in self.metadata[m]:
if self.metadata[m]["label"]:
@ -325,12 +327,12 @@ class PlexAPI:
if self.metadata[m]["label_sync_mode"] is None: logger.error("Metadata Error: label_sync_mode attribute is blank defaulting to append")
elif self.metadata[m]["label_sync_mode"] not in ["append", "sync"]: logger.error("Metadata Error: label_sync_mode attribute must be either 'append' or 'sync' defaulting to append")
elif self.metadata[m]["label_sync_mode"] == "sync":
for label in (l for l in item_labels if l not in labels):
for label in (la for la in item_labels if la not in labels):
item.removeLabel(label)
logger.info("Detail: Label {} removed".format(label))
for label in (l for l in labels if l not in item_labels):
logger.info(f"Detail: Label {label} removed")
for label in (la for la in labels if la not in item_labels):
item.addLabel(label)
logger.info("Detail: Label {} added".format(label))
logger.info(f"Detail: Label {label} added")
else:
logger.error("Metadata Error: label attribute is blank")
@ -338,10 +340,10 @@ class PlexAPI:
if self.metadata[m]["seasons"]:
for season_id in self.metadata[m]["seasons"]:
logger.info("")
logger.info("Updating season {} of {}...".format(season_id, m))
logger.info(f"Updating season {season_id} of {m}...")
if isinstance(season_id, int):
try: season = item.season(season_id)
except NotFound: logger.error("Metadata Error: Season: {} not found".format(season_id))
except NotFound: logger.error(f"Metadata Error: Season: {season_id} not found")
else:
if "title" in self.metadata[m]["seasons"][season_id] and self.metadata[m]["seasons"][season_id]["title"]:
@ -352,7 +354,7 @@ class PlexAPI:
if self.metadata[m]["seasons"][season_id]["sub"] is None:
logger.error("Metadata Error: sub attribute is blank")
elif self.metadata[m]["seasons"][season_id]["sub"] is True and "(SUB)" not in title:
title = "{} (SUB)".format(title)
title = f"{title} (SUB)"
elif self.metadata[m]["seasons"][season_id]["sub"] is False and title.endswith(" (SUB)"):
title = title[:-6]
else:
@ -362,18 +364,18 @@ class PlexAPI:
add_edit("title", season.title, self.metadata[m]["seasons"][season_id], value=title)
add_edit("summary", season.summary, self.metadata[m]["seasons"][season_id])
if len(edits) > 0:
logger.debug("Season: {} Details Update: {}".format(season_id, edits))
logger.debug(f"Season: {season_id} Details Update: {edits}")
try:
season.edit(**edits)
season.reload()
logger.info("Season: {} Details Update Successful".format(season_id))
logger.info(f"Season: {season_id} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error("Season: {} Details Update Failed".format(season_id))
logger.error(f"Season: {season_id} Details Update Failed")
else:
logger.info("Season: {} Details Update Not Needed".format(season_id))
logger.info(f"Season: {season_id} Details Update Not Needed")
else:
logger.error("Metadata Error: Season: {} invalid, it must be an integer".format(season_id))
logger.error(f"Metadata Error: Season: {season_id} invalid, it must be an integer")
else:
logger.error("Metadata Error: seasons attribute is blank")
@ -381,14 +383,14 @@ class PlexAPI:
if self.metadata[m]["episodes"]:
for episode_str in self.metadata[m]["episodes"]:
logger.info("")
match = re.search("[Ss]{1}\d+[Ee]{1}\d+", episode_str)
match = re.search("[Ss]\\d+[Ee]\\d+", episode_str)
if match:
output = match.group(0)[1:].split("E" if "E" in match.group(0) else "e")
season_id = int(output[0])
episode_id = int(output[1])
logger.info("Updating episode S{}E{} of {}...".format(episode_id, season_id, m))
logger.info(f"Updating episode S{episode_id}E{season_id} of {m}...")
try: episode = item.episode(season=season_id, episode=episode_id)
except NotFound: logger.error("Metadata Error: episode {} of season {} not found".format(episode_id, season_id))
except NotFound: logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found")
else:
if "title" in self.metadata[m]["episodes"][episode_str] and self.metadata[m]["episodes"][episode_str]["title"]:
title = self.metadata[m]["episodes"][episode_str]["title"]
@ -398,7 +400,7 @@ class PlexAPI:
if self.metadata[m]["episodes"][episode_str]["sub"] is None:
logger.error("Metadata Error: sub attribute is blank")
elif self.metadata[m]["episodes"][episode_str]["sub"] is True and "(SUB)" not in title:
title = "{} (SUB)".format(title)
title = f"{title} (SUB)"
elif self.metadata[m]["episodes"][episode_str]["sub"] is False and title.endswith(" (SUB)"):
title = title[:-6]
else:
@ -410,17 +412,18 @@ class PlexAPI:
add_edit("originally_available", str(episode.originallyAvailableAt)[:-9], self.metadata[m]["episodes"][episode_str], key="originallyAvailableAt")
add_edit("summary", episode.summary, self.metadata[m]["episodes"][episode_str])
if len(edits) > 0:
logger.debug("Season: {} Episode: {} Details Update: {}".format(season_id, episode_id, edits))
logger.debug(f"Season: {season_id} Episode: {episode_id} Details Update: {edits}")
try:
episode.edit(**edits)
episode.reload()
logger.info("Season: {} Episode: {} Details Update Successful".format(season_id, episode_id))
logger.info(f"Season: {season_id} Episode: {episode_id} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error("Season: {} Episode: {} Details Update Failed".format(season_id, episode_id))
logger.error(f"Season: {season_id} Episode: {episode_id} Details Update Failed")
else:
logger.info("Season: {} Episode: {} Details Update Not Needed".format(season_id, episode_id))
logger.info(f"Season: {season_id} Episode: {episode_id} Details Update Not Needed")
else:
logger.error("Metadata Error: episode {} invlaid must have S##E## format".format(episode_str))
logger.error(f"Metadata Error: episode {episode_str} invalid must have S##E## format")
else:
logger.error("Metadata Error: episodes attribute is blank")

@ -7,27 +7,27 @@ logger = logging.getLogger("Plex Meta Manager")
class RadarrAPI:
def __init__(self, tmdb, params):
self.url_params = {"apikey": "{}".format(params["token"])}
self.base_url = "{}/api{}".format(params["url"], "/v3/" if params["version"] == "v3" else "/")
self.url_params = {"apikey": f"{params['token']}"}
self.base_url = f"{params['url']}/api{'/v3' if params['version'] == 'v3' else ''}/"
try:
result = requests.get("{}system/status".format(self.base_url), params=self.url_params).json()
except Exception as e:
result = requests.get(f"{self.base_url}system/status", params=self.url_params).json()
except Exception:
util.print_stacktrace()
raise Failed("Radarr Error: Could not connect to Radarr at {}".format(params["url"]))
raise Failed(f"Radarr Error: Could not connect to Radarr at {params['url']}")
if "error" in result and result["error"] == "Unauthorized":
raise Failed("Radarr Error: Invalid API Key")
if "version" not in result:
raise Failed("Radarr Error: Unexpected Response Check URL")
self.quality_profile_id = None
profiles = ""
for profile in self.send_get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile")):
for profile in self.send_get(f"{self.base_url}{'qualityProfile' if params['version'] == 'v3' else 'profile'}"):
if len(profiles) > 0:
profiles += ", "
profiles += profile["name"]
if profile["name"] == params["quality_profile"]:
self.quality_profile_id = profile["id"]
if not self.quality_profile_id:
raise Failed("Radarr Error: quality_profile: {} does not exist in radarr. Profiles available: {}".format(params["quality_profile"], profiles))
raise Failed(f"Radarr Error: quality_profile: {params['quality_profile']} does not exist in radarr. Profiles available: {profiles}")
self.tmdb = tmdb
self.url = params["url"]
self.version = params["version"]
@ -39,7 +39,7 @@ class RadarrAPI:
def add_tmdb(self, tmdb_ids, tag=None):
logger.info("")
logger.debug("TMDb IDs: {}".format(tmdb_ids))
logger.debug(f"TMDb IDs: {tmdb_ids}")
tag_nums = []
add_count = 0
if tag is None:
@ -47,8 +47,8 @@ class RadarrAPI:
if tag:
tag_cache = {}
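# Create any requested tags that do not exist yet, then cache label -> id so the add request can reference them numerically.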
for label in tag:
self.send_post("{}tag".format(self.base_url), {"label": str(label)})
for t in self.send_get("{}tag".format(self.base_url)):
self.send_post(f"{self.base_url}tag", {"label": str(label)})
for t in self.send_get(f"{self.base_url}tag"):
tag_cache[t["label"]] = t["id"]
for label in tag:
if label in tag_cache:
@ -63,20 +63,20 @@ class RadarrAPI:
try:
year = movie.release_date.split("-")[0]
except AttributeError:
logger.error("TMDb Error: No year for ({}) {}".format(tmdb_id, movie.title))
logger.error(f"TMDb Error: No year for ({tmdb_id}) {movie.title}")
continue
if year.isdigit() is False:
logger.error("TMDb Error: No release date yet for ({}) {}".format(tmdb_id, movie.title))
logger.error(f"TMDb Error: No release date yet for ({tmdb_id}) {movie.title}")
continue
poster = "https://image.tmdb.org/t/p/original{}".format(movie.poster_path)
poster = f"https://image.tmdb.org/t/p/original{movie.poster_path}"
titleslug = re.sub(r"([^\s\w]|_)+", "", "{} {}".format(movie.title, year)).replace(" ", "-").lower()
titleslug = re.sub(r"([^\s\w]|_)+", "", f"{movie.title} {year}").replace(" ", "-").lower()
url_json = {
"title": movie.title,
"{}".format("qualityProfileId" if self.version == "v3" else "profileId"): self.quality_profile_id,
f"{'qualityProfileId' if self.version == 'v3' else 'profileId'}": self.quality_profile_id,
"year": int(year),
"tmdbid": int(tmdb_id),
"titleslug": titleslug,
@ -87,17 +87,17 @@ class RadarrAPI:
}
if tag_nums:
url_json["tags"] = tag_nums
response = self.send_post("{}movie".format(self.base_url), url_json)
response = self.send_post(f"{self.base_url}movie", url_json)
if response.status_code < 400:
logger.info("Added to Radarr | {:<6} | {}".format(tmdb_id, movie.title))
logger.info(f"Added to Radarr | {tmdb_id:<6} | {movie.title}")
add_count += 1
else:
try:
logger.error("Radarr Error: ({}) {}: ({}) {}".format(tmdb_id, movie.title, response.status_code, response.json()[0]["errorMessage"]))
except KeyError as e:
logger.error(f"Radarr Error: ({tmdb_id}) {movie.title}: ({response.status_code}) {response.json()[0]['errorMessage']}")
except KeyError:
logger.debug(url_json)
logger.error("Radarr Error: {}".format(response.json()))
logger.info("{} Movie{} added to Radarr".format(add_count, "s" if add_count > 1 else ""))
logger.error(f"Radarr Error: {response.json()}")
logger.info(f"{add_count} Movie{'s' if add_count > 1 else ''} added to Radarr")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_get(self, url):

@ -7,27 +7,27 @@ logger = logging.getLogger("Plex Meta Manager")
class SonarrAPI:
def __init__(self, tvdb, params, language):
self.url_params = {"apikey": "{}".format(params["token"])}
self.base_url = "{}/api{}".format(params["url"], "/v3/" if params["version"] == "v3" else "/")
self.url_params = {"apikey": f"{params['token']}"}
self.base_url = f"{params['url']}/api{'/v3/' if params['version'] == 'v3' else '/'}"
try:
result = requests.get("{}system/status".format(self.base_url), params=self.url_params).json()
except Exception as e:
result = requests.get(f"{self.base_url}system/status", params=self.url_params).json()
except Exception:
util.print_stacktrace()
raise Failed("Sonarr Error: Could not connect to Sonarr at {}".format(params["url"]))
raise Failed(f"Sonarr Error: Could not connect to Sonarr at {params['url']}")
if "error" in result and result["error"] == "Unauthorized":
raise Failed("Sonarr Error: Invalid API Key")
if "version" not in result:
raise Failed("Sonarr Error: Unexpected Response Check URL")
self.quality_profile_id = None
profiles = ""
for profile in self.send_get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile")):
for profile in self.send_get(f"{self.base_url}{'qualityProfile' if params['version'] == 'v3' else 'profile'}"):
if len(profiles) > 0:
profiles += ", "
profiles += profile["name"]
if profile["name"] == params["quality_profile"]:
self.quality_profile_id = profile["id"]
if not self.quality_profile_id:
raise Failed("Sonarr Error: quality_profile: {} does not exist in sonarr. Profiles available: {}".format(params["quality_profile"], profiles))
raise Failed(f"Sonarr Error: quality_profile: {params['quality_profile']} does not exist in sonarr. Profiles available: {profiles}")
self.tvdb = tvdb
self.language = language
self.url = params["url"]
@ -36,11 +36,12 @@ class SonarrAPI:
self.root_folder_path = params["root_folder_path"]
self.add = params["add"]
self.search = params["search"]
self.season_folder = params["season_folder"]
self.tag = params["tag"]
def add_tvdb(self, tvdb_ids, tag=None):
logger.info("")
logger.debug("TVDb IDs: {}".format(tvdb_ids))
logger.debug(f"TVDb IDs: {tvdb_ids}")
tag_nums = []
add_count = 0
if tag is None:
@ -48,8 +49,8 @@ class SonarrAPI:
if tag:
tag_cache = {}
for label in tag:
self.send_post("{}tag".format(self.base_url), {"label": str(label)})
for t in self.send_get("{}tag".format(self.base_url)):
self.send_post(f"{self.base_url}tag", {"label": str(label)})
for t in self.send_get(f"{self.base_url}tag"):
tag_cache[t["label"]] = t["id"]
for label in tag:
if label in tag_cache:
@ -65,12 +66,13 @@ class SonarrAPI:
url_json = {
"title": show.title,
"{}".format("qualityProfileId" if self.version == "v3" else "profileId"): self.quality_profile_id,
f"{'qualityProfileId' if self.version == 'v3' else 'profileId'}": self.quality_profile_id,
"languageProfileId": 1,
"tvdbId": int(tvdb_id),
"titleslug": titleslug,
"language": self.language,
"monitored": True,
"seasonFolder": self.season_folder,
"rootFolderPath": self.root_folder_path,
"seasons": [],
"images": [{"covertype": "poster", "url": show.poster_path}],
@ -78,17 +80,17 @@ class SonarrAPI:
}
if tag_nums:
url_json["tags"] = tag_nums
response = self.send_post("{}series".format(self.base_url), url_json)
response = self.send_post(f"{self.base_url}series", url_json)
if response.status_code < 400:
logger.info("Added to Sonarr | {:<6} | {}".format(tvdb_id, show.title))
logger.info(f"Added to Sonarr | {tvdb_id:<6} | {show.title}")
add_count += 1
else:
try:
logger.error("Sonarr Error: ({}) {}: ({}) {}".format(tvdb_id, show.title, response.status_code, response.json()[0]["errorMessage"]))
except KeyError as e:
logger.error(f"Sonarr Error: ({tvdb_id}) {show.title}: ({response.status_code}) {response.json()[0]['errorMessage']}")
except KeyError:
logger.debug(url_json)
logger.error("Sonarr Error: {}".format(response.json()))
logger.info("{} Show{} added to Sonarr".format(add_count, "s" if add_count > 1 else ""))
logger.error(f"Sonarr Error: {response.json()}")
logger.info(f"{add_count} Show{'s' if add_count > 1 else ''} added to Sonarr")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_get(self, url):

@ -8,12 +8,12 @@ logger = logging.getLogger("Plex Meta Manager")
class TautulliAPI:
def __init__(self, params):
try:
response = requests.get("{}/api/v2?apikey={}&cmd=get_library_names".format(params["url"], params["apikey"])).json()
except Exception as e:
response = requests.get(f"{params['url']}/api/v2?apikey={params['apikey']}&cmd=get_library_names").json()
except Exception:
util.print_stacktrace()
raise Failed("Tautulli Error: Invalid url")
if response["response"]["result"] != "success":
raise Failed("Tautulli Error: {}".format(response["response"]["message"]))
raise Failed(f"Tautulli Error: {response['response']['message']}")
self.url = params["url"]
self.apikey = params["apikey"]
@ -25,9 +25,9 @@ class TautulliAPI:
def get_items(self, library, time_range=30, stats_count=20, list_type="popular", stats_count_buffer=20, status_message=True):
if status_message:
logger.info("Processing Tautulli Most {}: {} {}".format("Popular" if list_type == "popular" else "Watched", stats_count, "Movies" if library.is_movie else "Shows"))
response = self.send_request("{}/api/v2?apikey={}&cmd=get_home_stats&time_range={}&stats_count={}".format(self.url, self.apikey, time_range, int(stats_count) + int(stats_count_buffer)))
stat_id = "{}_{}".format("popular" if list_type == "popular" else "top", "movies" if library.is_movie else "tv")
logger.info(f"Processing Tautulli Most {'Popular' if list_type == 'popular' else 'Watched'}: {stats_count} {'Movies' if library.is_movie else 'Shows'}")
response = self.send_request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={time_range}&stats_count={int(stats_count) + int(stats_count_buffer)}")
stat_id = f"{'popular' if list_type == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}"
items = None
for entry in response["response"]["data"]:
@ -47,16 +47,16 @@ class TautulliAPI:
return rating_keys
def get_section_id(self, library_name):
response = self.send_request("{}/api/v2?apikey={}&cmd=get_library_names".format(self.url, self.apikey))
response = self.send_request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names")
section_id = None
for entry in response["response"]["data"]:
if entry["section_name"] == library_name:
section_id = entry["section_id"]
break
if section_id: return section_id
else: raise Failed("Tautulli Error: No Library named {} in the response".format(library_name))
else: raise Failed(f"Tautulli Error: No Library named {library_name} in the response")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url):
logger.debug("Tautulli URL: {}".format(url.replace(self.apikey, "################################")))
logger.debug(f"Tautulli URL: {url.replace(self.apikey, '################################')}")
return requests.get(url).json()

@ -9,7 +9,7 @@ def run_tests(default_dir):
try:
config = Config(default_dir)
logger.info("")
util.seperator("Mapping Tests")
util.separator("Mapping Tests")
for library in config.libraries:
config.map_guids(library)
anidb_tests(config)
@ -19,136 +19,136 @@ def run_tests(default_dir):
tmdb_tests(config)
trakt_tests(config)
tvdb_tests(config)
util.seperator("Finished All Plex Meta Manager Tests")
util.separator("Finished All Plex Meta Manager Tests")
except KeyboardInterrupt:
util.seperator("Canceled Plex Meta Manager Tests")
util.separator("Canceled Plex Meta Manager Tests")
def anidb_tests(config):
if config.AniDB:
util.seperator("AniDB Tests")
util.separator("AniDB Tests")
try:
config.AniDB.convert_anidb_to_tvdb(69)
logger.info("Success | Convert AniDB to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert AniDB to TVDb: {}".format(e))
logger.error(f"Failure | Convert AniDB to TVDb: {e}")
try:
config.AniDB.convert_anidb_to_imdb(112)
logger.info("Success | Convert AniDB to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert AniDB to IMDb: {}".format(e))
logger.error(f"Failure | Convert AniDB to IMDb: {e}")
try:
config.AniDB.convert_tvdb_to_anidb(81797)
logger.info("Success | Convert TVDb to AniDB")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TVDb to AniDB: {}".format(e))
logger.error(f"Failure | Convert TVDb to AniDB: {e}")
try:
config.AniDB.convert_imdb_to_anidb("tt0245429")
logger.info("Success | Convert IMDb to AniDB")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert IMDb to AniDB: {}".format(e))
logger.error(f"Failure | Convert IMDb to AniDB: {e}")
try:
config.AniDB.get_items("anidb_id", 69, "en", status_message=False)
logger.info("Success | Get AniDB ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get AniDB ID: {}".format(e))
logger.error(f"Failure | Get AniDB ID: {e}")
try:
config.AniDB.get_items("anidb_relation", 69, "en", status_message=False)
logger.info("Success | Get AniDB Relation")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get AniDB Relation: {}".format(e))
logger.error(f"Failure | Get AniDB Relation: {e}")
try:
config.AniDB.get_items("anidb_popular", 30, "en", status_message=False)
logger.info("Success | Get AniDB Popular")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get AniDB Popular: {}".format(e))
logger.error(f"Failure | Get AniDB Popular: {e}")
try:
config.AniDB.validate_anidb_list(["69", "112"], "en")
logger.info("Success | Validate AniDB List")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Validate AniDB List: {}".format(e))
logger.error(f"Failure | Validate AniDB List: {e}")
else:
util.seperator("AniDB Not Configured")
util.separator("AniDB Not Configured")
def imdb_tests(config):
if config.IMDb:
util.seperator("IMDb Tests")
util.separator("IMDb Tests")
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/search/title/?groups=top_1000", "limit": 0}, "en", status_message=False)
if len(tmdb_ids) == 1000: logger.info("Success | IMDb URL get TMDb IDs")
else: logger.error("Failure | IMDb URL get TMDb IDs: {} Should be 1000".format(len(tmdb_ids)))
else: logger.error(f"Failure | IMDb URL get TMDb IDs: {len(tmdb_ids)} Should be 1000")
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/list/ls026173135/", "limit": 0}, "en", status_message=False)
if len(tmdb_ids) == 250: logger.info("Success | IMDb URL get TMDb IDs")
else: logger.error("Failure | IMDb URL get TMDb IDs: {} Should be 250".format(len(tmdb_ids)))
else: logger.error(f"Failure | IMDb URL get TMDb IDs: {len(tmdb_ids)} Should be 250")
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_id", "tt0814243", "en", status_message=False)
if len(tmdb_ids) == 1: logger.info("Success | IMDb ID get TMDb IDs")
else: logger.error("Failure | IMDb ID get TMDb IDs: {} Should be 1".format(len(tmdb_ids)))
else: logger.error(f"Failure | IMDb ID get TMDb IDs: {len(tmdb_ids)} Should be 1")
else:
util.seperator("IMDb Not Configured")
util.separator("IMDb Not Configured")
def mal_tests(config):
if config.MyAnimeListIDList:
util.seperator("MyAnimeListXML Tests")
util.separator("MyAnimeListXML Tests")
try:
config.MyAnimeListIDList.convert_mal_to_tvdb(21)
logger.info("Success | Convert MyAnimeList to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert MyAnimeList to TVDb: {}".format(e))
logger.error(f"Failure | Convert MyAnimeList to TVDb: {e}")
try:
config.MyAnimeListIDList.convert_mal_to_tmdb(199)
logger.info("Success | Convert MyAnimeList to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert MyAnimeList to TMDb: {}".format(e))
logger.error(f"Failure | Convert MyAnimeList to TMDb: {e}")
try:
config.MyAnimeListIDList.convert_tvdb_to_mal(81797)
logger.info("Success | Convert TVDb to MyAnimeList")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TVDb to MyAnimeList: {}".format(e))
logger.error(f"Failure | Convert TVDb to MyAnimeList: {e}")
try:
config.MyAnimeListIDList.convert_tmdb_to_mal(129)
logger.info("Success | Convert TMDb to MyAnimeList")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TMDb to MyAnimeList: {}".format(e))
logger.error(f"Failure | Convert TMDb to MyAnimeList: {e}")
try:
config.MyAnimeListIDList.find_mal_ids(21)
logger.info("Success | Find MyAnimeList ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Find MyAnimeList ID: {}".format(e))
logger.error(f"Failure | Find MyAnimeList ID: {e}")
else:
util.seperator("MyAnimeListXML Not Configured")
util.separator("MyAnimeListXML Not Configured")
if config.MyAnimeList:
util.seperator("MyAnimeList Tests")
util.separator("MyAnimeList Tests")
mal_list_tests = [
("mal_all", 10),
@ -168,71 +168,71 @@ def mal_tests(config):
for mal_list_test in mal_list_tests:
try:
config.MyAnimeList.get_items(mal_list_test[0], mal_list_test[1], status_message=False)
logger.info("Success | Get Anime using {}".format(util.pretty_names[mal_list_test[0]]))
logger.info(f"Success | Get Anime using {util.pretty_names[mal_list_test[0]]}")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get Anime using {}: {}".format(util.pretty_names[mal_list_test[0]], e))
logger.error(f"Failure | Get Anime using {util.pretty_names[mal_list_test[0]]}: {e}")
else:
util.seperator("MyAnimeList Not Configured")
util.separator("MyAnimeList Not Configured")
def tautulli_tests(config):
if config.libraries[0].Tautulli:
util.seperator("Tautulli Tests")
util.separator("Tautulli Tests")
try:
config.libraries[0].Tautulli.get_section_id(config.libraries[0].name)
logger.info("Success | Get Section ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get Section ID: {}".format(e))
logger.error(f"Failure | Get Section ID: {e}")
try:
config.libraries[0].Tautulli.get_popular(config.libraries[0], status_message=False)
logger.info("Success | Get Popular")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get Popular: {}".format(e))
logger.error(f"Failure | Get Popular: {e}")
try:
config.libraries[0].Tautulli.get_top(config.libraries[0], status_message=False)
logger.info("Success | Get Top")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get Top: {}".format(e))
logger.error(f"Failure | Get Top: {e}")
else:
util.seperator("Tautulli Not Configured")
util.separator("Tautulli Not Configured")
def tmdb_tests(config):
if config.TMDb:
util.seperator("TMDb Tests")
util.separator("TMDb Tests")
try:
config.TMDb.convert_imdb_to_tmdb("tt0076759")
logger.info("Success | Convert IMDb to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert IMDb to TMDb: {}".format(e))
logger.error(f"Failure | Convert IMDb to TMDb: {e}")
try:
config.TMDb.convert_tmdb_to_imdb(11)
logger.info("Success | Convert TMDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TMDb to IMDb: {}".format(e))
logger.error(f"Failure | Convert TMDb to IMDb: {e}")
try:
config.TMDb.convert_imdb_to_tvdb("tt0458290")
logger.info("Success | Convert IMDb to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert IMDb to TVDb: {}".format(e))
logger.error(f"Failure | Convert IMDb to TVDb: {e}")
try:
config.TMDb.convert_tvdb_to_imdb(83268)
logger.info("Success | Convert TVDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TVDb to IMDb: {}".format(e))
logger.error(f"Failure | Convert TVDb to IMDb: {e}")
tmdb_list_tests = [
([11], "Movie"),
@ -247,10 +247,10 @@ def tmdb_tests(config):
for tmdb_list_test in tmdb_list_tests:
try:
config.TMDb.validate_tmdb_list(tmdb_list_test[0], tmdb_type=tmdb_list_test[1])
logger.info("Success | Get TMDb {}".format(tmdb_list_test[1]))
logger.info(f"Success | Get TMDb {tmdb_list_test[1]}")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get TMDb {}: {}".format(tmdb_list_test[1], e))
logger.error(f"Failure | Get TMDb {tmdb_list_test[1]}: {e}")
tmdb_list_tests = [
("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, True),
@ -279,79 +279,79 @@ def tmdb_tests(config):
for tmdb_list_test in tmdb_list_tests:
try:
config.TMDb.get_items(tmdb_list_test[0], tmdb_list_test[1], tmdb_list_test[2], status_message=False)
logger.info("Success | Get {} using {}".format("Movies" if tmdb_list_test[2] else "Shows", util.pretty_names[tmdb_list_test[0]]))
logger.info(f"Success | Get {'Movies' if tmdb_list_test[2] else 'Shows'} using {util.pretty_names[tmdb_list_test[0]]}")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get {} using {}: {}".format("Movies" if tmdb_list_test[2] else "Shows", util.pretty_names[tmdb_list_test[0]], e))
logger.error(f"Failure | Get {'Movies' if tmdb_list_test[2] else 'Shows'} using {util.pretty_names[tmdb_list_test[0]]}: {e}")
else:
util.seperator("TMDb Not Configured")
util.separator("TMDb Not Configured")
def trakt_tests(config):
if config.Trakt:
util.seperator("Trakt Tests")
util.separator("Trakt Tests")
try:
config.Trakt.convert_imdb_to_tmdb("tt0076759")
logger.info("Success | Convert IMDb to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert IMDb to TMDb: {}".format(e))
logger.error(f"Failure | Convert IMDb to TMDb: {e}")
try:
config.Trakt.convert_tmdb_to_imdb(11)
logger.info("Success | Convert TMDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TMDb to IMDb: {}".format(e))
logger.error(f"Failure | Convert TMDb to IMDb: {e}")
try:
config.Trakt.convert_imdb_to_tvdb("tt0458290")
logger.info("Success | Convert IMDb to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert IMDb to TVDb: {}".format(e))
logger.error(f"Failure | Convert IMDb to TVDb: {e}")
try:
config.Trakt.convert_tvdb_to_imdb(83268)
logger.info("Success | Convert TVDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TVDb to IMDb: {}".format(e))
logger.error(f"Failure | Convert TVDb to IMDb: {e}")
try:
config.Trakt.convert_tmdb_to_tvdb(11)
logger.info("Success | Convert TMDb to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TMDb to TVDb: {}".format(e))
logger.error(f"Failure | Convert TMDb to TVDb: {e}")
try:
config.Trakt.convert_tvdb_to_tmdb(83268)
logger.info("Success | Convert TVDb to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Convert TVDb to TMDb: {}".format(e))
logger.error(f"Failure | Convert TVDb to TMDb: {e}")
try:
config.Trakt.validate_trakt_list(["https://trakt.tv/users/movistapp/lists/christmas-movies"])
logger.info("Success | Get List")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get List: {}".format(e))
logger.error(f"Failure | Get List: {e}")
try:
config.Trakt.validate_trakt_watchlist(["me"], True)
logger.info("Success | Get Watchlist Movies")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get Watchlist Movies: {}".format(e))
logger.error(f"Failure | Get Watchlist Movies: {e}")
try:
config.Trakt.validate_trakt_watchlist(["me"], False)
logger.info("Success | Get Watchlist Shows")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get Watchlist Shows: {}".format(e))
logger.error(f"Failure | Get Watchlist Shows: {e}")
trakt_list_tests = [
("trakt_list", "https://trakt.tv/users/movistapp/lists/christmas-movies", True),
@ -364,52 +364,52 @@ def trakt_tests(config):
for trakt_list_test in trakt_list_tests:
try:
config.Trakt.get_items(trakt_list_test[0], trakt_list_test[1], trakt_list_test[2], status_message=False)
logger.info("Success | Get {} using {}".format("Movies" if trakt_list_test[2] else "Shows", util.pretty_names[trakt_list_test[0]]))
logger.info(f"Success | Get {'Movies' if trakt_list_test[2] else 'Shows'} using {util.pretty_names[trakt_list_test[0]]}")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | Get {} using {}: {}".format("Movies" if trakt_list_test[2] else "Shows", util.pretty_names[trakt_list_test[0]], e))
logger.error(f"Failure | Get {'Movies' if trakt_list_test[2] else 'Shows'} using {util.pretty_names[trakt_list_test[0]]}: {e}")
else:
util.seperator("Trakt Not Configured")
util.separator("Trakt Not Configured")
def tvdb_tests(config):
if config.TVDb:
util.seperator("TVDb Tests")
util.separator("TVDb Tests")
tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/arrowverse", "en", status_message=False)
if len(tvdb_ids) == 10 and len(tmdb_ids) == 0: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs")
else: logger.error("Failure | TVDb URL get TVDb IDs and TMDb IDs: {} Should be 10 and {} Should be 0".format(len(tvdb_ids), len(tmdb_ids)))
else: logger.error(f"Failure | TVDb URL get TVDb IDs and TMDb IDs: {len(tvdb_ids)} Should be 10 and {len(tmdb_ids)} Should be 0")
tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/6957", "en", status_message=False)
if len(tvdb_ids) == 4 and len(tmdb_ids) == 2: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs")
else: logger.error("Failure | TVDb URL get TVDb IDs and TMDb IDs: {} Should be 4 and {} Should be 2".format(len(tvdb_ids), len(tmdb_ids)))
else: logger.error(f"Failure | TVDb URL get TVDb IDs and TMDb IDs: {len(tvdb_ids)} Should be 4 and {len(tmdb_ids)} Should be 2")
try:
config.TVDb.get_items("tvdb_show", "https://www.thetvdb.com/series/arrow", "en", status_message=False)
logger.info("Success | TVDb URL get TVDb Series ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | TVDb URL get TVDb Series ID: {}".format(e))
logger.error(f"Failure | TVDb URL get TVDb Series ID: {e}")
try:
config.TVDb.get_items("tvdb_show", 279121, "en", status_message=False)
logger.info("Success | TVDb ID get TVDb Series ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | TVDb ID get TVDb Series ID: {}".format(e))
logger.error(f"Failure | TVDb ID get TVDb Series ID: {e}")
try:
config.TVDb.get_items("tvdb_movie", "https://www.thetvdb.com/movies/the-lord-of-the-rings-the-fellowship-of-the-ring", "en", status_message=False)
logger.info("Success | TVDb URL get TVDb Movie ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | TVDb URL get TVDb Movie ID: {}".format(e))
logger.error(f"Failure | TVDb URL get TVDb Movie ID: {e}")
try:
config.TVDb.get_items("tvdb_movie", 107, "en", status_message=False)
logger.info("Success | TVDb ID get TVDb Movie ID")
except Failed as e:
util.print_stacktrace()
logger.error("Failure | TVDb ID get TVDb Movie ID: {}".format(e))
logger.error(f"Failure | TVDb ID get TVDb Movie ID: {e}")
else:
util.seperator("TVDb Not Configured")
util.separator("TVDb Not Configured")

@ -1,4 +1,4 @@
import logging, os, tmdbv3api
import logging, tmdbv3api
from modules import util
from modules.util import Failed
from retrying import retry
@ -13,7 +13,7 @@ class TMDbAPI:
self.TMDb.language = params["language"]
response = tmdbv3api.Configuration().info()
if hasattr(response, "status_message"):
raise Failed("TMDb Error: {}".format(response.status_message))
raise Failed(f"TMDb Error: {response.status_message}")
self.apikey = params["apikey"]
self.language = params["language"]
self.Movie = tmdbv3api.Movie()
@ -30,16 +30,20 @@ class TMDbAPI:
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_from_tmdb(self, tmdb_id, convert_to, is_movie):
try: return self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to]
except TMDbException: raise Failed("TMDb Error: No {} found for TMDb ID {}".format(convert_to.upper().replace("B_", "b "), tmdb_id))
try:
id_to_return = self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to]
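# TMDb can report a missing external ID as an empty value (or 0 for tvdb_id), so treat those the same as "not found".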
if not id_to_return or (convert_to == "tvdb_id" and id_to_return == 0):
raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}")
return id_to_return
except TMDbException:
raise Failed(f"TMDb Error: {'Movie' if is_movie else 'Show'} TMDb ID: {tmdb_id} not found")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_to_tmdb(self, external_id, external_source, is_movie):
search_results = self.Movie.external(external_id=external_id, external_source=external_source)
search = search_results["movie_results" if is_movie else "tv_results"]
if len(search) == 1: return int(search[0]["id"])
else: raise Failed("TMDb Error: No TMDb ID found for {} {}".format(external_source.upper().replace("B_", "b "), external_id))
else: raise Failed(f"TMDb Error: No TMDb ID found for {external_source.upper().replace('B_', 'b ')} {external_id}")
def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_from_tmdb(tmdb_id, "imdb_id", is_movie)
def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_to_tmdb(imdb_id, "imdb_id", is_movie)
@ -53,48 +57,74 @@ class TMDbAPI:
try: return self.get_collection(tmdb_id)
except Failed:
try: return self.get_movie(tmdb_id)
except Failed: raise Failed("TMDb Error: No Movie or Collection found for TMDb ID {}".format(tmdb_id))
except Failed: raise Failed(f"TMDb Error: No Movie or Collection found for TMDb ID {tmdb_id}")
else: return self.get_show(tmdb_id)
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_movie(self, tmdb_id):
try: return self.Movie.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Movie found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Movie found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_show(self, tmdb_id):
try: return self.TV.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Show found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Show found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_collection(self, tmdb_id):
try: return self.Collection.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Collection found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Collection found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_person(self, tmdb_id):
try: return self.Person.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Person found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_person_credits(self, tmdb_id):
try: return self.Person.combined_credits(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_company(self, tmdb_id):
try: return self.Company.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Company found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Company found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_network(self, tmdb_id):
try: return self.Network.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Network found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Network found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_keyword(self, tmdb_id):
try: return self.Keyword.details(tmdb_id)
except TMDbException as e: raise Failed("TMDb Error: No Keyword found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No Keyword found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_list(self, tmdb_id):
try: return self.List.details(tmdb_id, all_details=True)
except TMDbException as e: raise Failed("TMDb Error: No List found for TMDb ID {}: {}".format(tmdb_id, e))
except TMDbException as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}")
def get_credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False):
movie_ids = []
show_ids = []
actor_credits = self.get_person_credits(tmdb_id)
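# Cast credits are only collected when actor=True; crew credits below are taken when crew=True or filtered by department for director/producer/writer.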
if actor:
for credit in actor_credits.cast:
if credit.media_type == "movie":
movie_ids.append(credit.id)
elif credit.media_type == "tv":
show_ids.append(credit.id)
for credit in actor_credits.crew:
if crew or \
(director and credit.department == "Directing") or \
(producer and credit.department == "Production") or \
(writer and credit.department == "Writing"):
if credit.media_type == "movie":
movie_ids.append(credit.id)
elif credit.media_type == "tv":
show_ids.append(credit.id)
return movie_ids, show_ids
def get_pagenation(self, method, amount, is_movie):
ids = []
@ -105,6 +135,7 @@ class TMDbAPI:
elif method == "tmdb_now_playing" and is_movie: tmdb_items = self.Movie.now_playing(x + 1)
elif method == "tmdb_trending_daily": tmdb_items = self.Trending.movie_day(x + 1) if is_movie else self.Trending.tv_day(x + 1)
elif method == "tmdb_trending_weekly": tmdb_items = self.Trending.movie_week(x + 1) if is_movie else self.Trending.tv_week(x + 1)
else: raise Failed(f"TMDb Error: {method} method not supported")
for tmdb_item in tmdb_items:
try:
ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id))
@ -137,12 +168,15 @@ class TMDbAPI:
def get_items(self, method, data, is_movie, status_message=True):
if status_message:
logger.debug("Data: {}".format(data))
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
media_type = "Movie" if is_movie else "Show"
movie_ids = []
show_ids = []
if method in ["tmdb_discover", "tmdb_company", "tmdb_keyword"] or (method == "tmdb_network" and not is_movie):
attrs = None
tmdb_id = ""
tmdb_name = ""
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
tmdb_id = int(data)
if method == "tmdb_company":
@ -162,16 +196,16 @@ class TMDbAPI:
else: show_ids, amount = self.get_discover(attrs, limit, is_movie)
if status_message:
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
logger.info("Processing {}: ({}) {} ({} {}{})".format(pretty, tmdb_id, tmdb_name, amount, media_type, "" if amount == 1 else "s"))
else:
logger.info("Processing {}: {} {}{}".format(pretty, amount, media_type, "" if amount == 1 else "s"))
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})")
elif method == "tmdb_discover":
logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}")
for attr, value in attrs.items():
logger.info(" {}: {}".format(attr, value))
logger.info(f" {attr}: {value}")
elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]:
if is_movie: movie_ids = self.get_pagenation(method, data, is_movie)
else: show_ids = self.get_pagenation(method, data, is_movie)
if status_message:
logger.info("Processing {}: {} {}{}".format(pretty, data, media_type, "" if data == 1 else "s"))
logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
else:
tmdb_id = int(data)
if method == "tmdb_list":
@ -193,17 +227,22 @@ class TMDbAPI:
movie_ids.append(tmdb_item["id"])
elif method == "tmdb_show":
tmdb_name = str(self.get_show(tmdb_id).name)
try: show_ids.append(self.convert_tmdb_to_tvdb(tmdb_id))
except Failed: pass
show_ids.append(self.convert_tmdb_to_tvdb(tmdb_id))
else:
raise Failed("TMDb Error: Method {} not supported".format(method))
tmdb_name = str(self.get_person(tmdb_id).name)
if method == "tmdb_actor": movie_ids, show_ids = self.get_credits(tmdb_id, actor=True)
elif method == "tmdb_director": movie_ids, show_ids = self.get_credits(tmdb_id, director=True)
elif method == "tmdb_producer": movie_ids, show_ids = self.get_credits(tmdb_id, producer=True)
elif method == "tmdb_writer": movie_ids, show_ids = self.get_credits(tmdb_id, writer=True)
elif method == "tmdb_crew": movie_ids, show_ids = self.get_credits(tmdb_id, crew=True)
else: raise Failed(f"TMDb Error: Method {method} not supported")
if status_message and len(movie_ids) > 0:
logger.info("Processing {}: ({}) {} ({} Movie{})".format(pretty, tmdb_id, tmdb_name, len(movie_ids), "" if len(movie_ids) == 1 else "s"))
if status_message and len(show_ids) > 0:
logger.info("Processing {}: ({}) {} ({} Show{})".format(pretty, tmdb_id, tmdb_name, len(show_ids), "" if len(show_ids) == 1 else "s"))
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})")
if status_message and not is_movie and len(show_ids) > 0:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})")
if status_message:
logger.debug("TMDb IDs Found: {}".format(movie_ids))
logger.debug("TVDb IDs Found: {}".format(show_ids))
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids
def validate_tmdb_list(self, tmdb_list, tmdb_type):
@ -211,7 +250,7 @@ class TMDbAPI:
for tmdb_id in tmdb_list:
try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_type))
except Failed as e: logger.error(e)
if len(tmdb_values) == 0: raise Failed("TMDb Error: No valid TMDb IDs in {}".format(tmdb_list))
if len(tmdb_values) == 0: raise Failed(f"TMDb Error: No valid TMDb IDs in {tmdb_list}")
return tmdb_values
def validate_tmdb(self, tmdb_id, tmdb_type):

@ -30,7 +30,7 @@ class TraktAPI:
def get_authorization(self):
url = Trakt["oauth"].authorize_url(self.redirect_uri)
logger.info("Navigate to: {}".format(url))
logger.info(f"Navigate to: {url}")
logger.info("If you get an OAuth error your client_id or client_secret is invalid")
webbrowser.open(url, new=2)
try: pin = util.logger_input("Trakt pin (case insensitive)", timeout=300).strip()
@ -70,7 +70,7 @@ class TraktAPI:
"scope": authorization["scope"],
"created_at": authorization["created_at"]
}
logger.info("Saving authorization information to {}".format(self.config_path))
logger.info(f"Saving authorization information to {self.config_path}")
yaml.round_trip_dump(config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
self.authorization = authorization
Trakt.configuration.defaults.oauth.from_response(self.authorization)
@ -91,7 +91,7 @@ class TraktAPI:
lookup = lookup[0] if isinstance(lookup, list) else lookup
if lookup.get_key(to_source):
return lookup.get_key(to_source) if to_source == "imdb" else int(lookup.get_key(to_source))
raise Failed("No {} ID found for {} ID {}".format(to_source.upper().replace("B", "b"), from_source.upper().replace("B", "b"), external_id))
raise Failed(f"No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID {external_id}")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def trending(self, amount, is_movie):
@ -99,7 +99,7 @@ class TraktAPI:
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def watchlist(self, data, is_movie):
items = Trakt["users/{}/watchlist".format(data)].movies() if is_movie else Trakt["users/{}/watchlist".format(data)].shows()
items = Trakt[f"users/{data}/watchlist"].movies() if is_movie else Trakt[f"users/{data}/watchlist"].shows()
if items is None: raise Failed("Trakt Error: No List found")
else: return [i for i in items]
@ -119,7 +119,7 @@ class TraktAPI:
except Failed as e:
logger.error(e)
if len(trakt_values) == 0:
raise Failed("Trakt Error: No valid Trakt Lists in {}".format(value))
raise Failed(f"Trakt Error: No valid Trakt Lists in {values}")
return trakt_values
def validate_trakt_watchlist(self, values, is_movie):
@ -131,23 +131,23 @@ class TraktAPI:
except Failed as e:
logger.error(e)
if len(trakt_values) == 0:
raise Failed("Trakt Error: No valid Trakt Watchlists in {}".format(value))
raise Failed(f"Trakt Error: No valid Trakt Watchlists in {values}")
return trakt_values
def get_items(self, method, data, is_movie, status_message=True):
if status_message:
logger.debug("Data: {}".format(data))
logger.debug(f"Data: {data}")
pretty = self.aliases[method] if method in self.aliases else method
media_type = "Movie" if is_movie else "Show"
if method == "trakt_trending":
trakt_items = self.trending(int(data), is_movie)
if status_message:
logger.info("Processing {}: {} {}{}".format(pretty, data, media_type, "" if data == 1 else "s"))
logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
else:
if method == "trakt_watchlist": trakt_items = self.watchlist(data, is_movie)
elif method == "trakt_list": trakt_items = self.standard_list(data)
else: raise Failed("Trakt Error: Method {} not supported".format(method))
if status_message: logger.info("Processing {}: {}".format(pretty, data))
else: raise Failed(f"Trakt Error: Method {method} not supported")
if status_message: logger.info(f"Processing {pretty}: {data}")
show_ids = []
movie_ids = []
for trakt_item in trakt_items:
@ -155,7 +155,7 @@ class TraktAPI:
elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids: show_ids.append(int(trakt_item.pk[1]))
elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids: show_ids.append(int(trakt_item.show.pk[1]))
if status_message:
logger.debug("Trakt {} Found: {}".format(media_type, trakt_items))
logger.debug("TMDb IDs Found: {}".format(movie_ids))
logger.debug("TVDb IDs Found: {}".format(show_ids))
logger.debug(f"Trakt {media_type} Found: {trakt_items}")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids
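# Illustrative stand-ins (not trakt.py's real models) for the ID collection
# loop above: a Show contributes its own pk[1] as a TVDb ID, while a Season or
# Episode contributes its parent show's pk[1], so episode-level list entries
# still resolve to whole shows.
class FakeShow:
    def __init__(self, tvdb_id):
        self.pk = ("tvdb", str(tvdb_id))

class FakeEpisode:
    def __init__(self, show):
        self.show = show

def collect_show_ids(items):
    show_ids = []
    for item in items:
        pk = item.pk if isinstance(item, FakeShow) else item.show.pk
        if int(pk[1]) not in show_ids:
            show_ids.append(int(pk[1]))
    return show_ids

# collect_show_ids([FakeShow(81189), FakeEpisode(FakeShow(81189))]) -> [81189]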

@ -1,4 +1,4 @@
import logging, math, re, requests, time
import logging, requests
from lxml import html
from modules import util
from modules.util import Failed
@ -14,20 +14,24 @@ class TVDbObj:
elif is_movie and tvdb_url.startswith((TVDb.movies_url, TVDb.alt_movies_url, TVDb.movie_id_url)):
self.media_type = "Movie"
else:
raise Failed("TVDb Error: {} must begin with {}".format(tvdb_url, TVDb.movies_url if is_movie else TVDb.series_url))
raise Failed(f"TVDb Error: {tvdb_url} must begin with {TVDb.movies_url if is_movie else TVDb.series_url}")
response = TVDb.send_request(tvdb_url, language)
results = response.xpath("//*[text()='TheTVDB.com {} ID']/parent::node()/span/text()".format(self.media_type))
results = response.xpath(f"//*[text()='TheTVDB.com {self.media_type} ID']/parent::node()/span/text()")
if len(results) > 0:
self.id = int(results[0])
elif tvdb_url.startswith(TVDb.movie_id_url):
raise Failed(f"TVDb Error: Could not find a TVDb Movie using TVDb Movie ID: {tvdb_url[len(TVDb.movie_id_url):]}")
elif tvdb_url.startswith(TVDb.series_id_url):
raise Failed(f"TVDb Error: Could not find a TVDb Series using TVDb Series ID: {tvdb_url[len(TVDb.series_id_url):]}")
else:
raise Failed("TVDb Error: Could not find a TVDb {} ID at the URL {}".format(self.media_type, tvdb_url))
raise Failed(f"TVDb Error: Could not find a TVDb {self.media_type} ID at the URL {tvdb_url}")
results = response.xpath("//div[@class='change_translation_text' and @data-language='eng']/@data-title")
if len(results) > 0 and len(results[0]) > 0:
self.title = results[0]
else:
raise Failed("TVDb Error: Name not found from TVDb URL: {}".format(tvdb_url))
raise Failed(f"TVDb Error: Name not found from TVDb URL: {tvdb_url}")
results = response.xpath("//div[@class='row hidden-xs hidden-sm']/div/img/@src")
self.poster_path = results[0] if len(results) > 0 and len(results[0]) > 0 else None
@ -41,7 +45,7 @@ class TVDbObj:
if not tmdb_id:
results = response.xpath("//*[text()='IMDB']/@href")
if len(results) > 0:
try: tmdb_id = TVDb.convert_from_imdb(util.get_id_from_imdb_url(results[0]), language)
try: tmdb_id = TVDb.convert_from_imdb(util.get_id_from_imdb_url(results[0]))
except Failed as e: logger.error(e)
self.tmdb_id = tmdb_id
self.tvdb_url = tvdb_url
@ -56,27 +60,27 @@ class TVDbAPI:
self.Trakt = Trakt
self.site_url = "https://www.thetvdb.com"
self.alt_site_url = "https://thetvdb.com"
self.list_url = "{}/lists/".format(self.site_url)
self.alt_list_url = "{}/lists/".format(self.alt_site_url)
self.series_url = "{}/series/".format(self.site_url)
self.alt_series_url = "{}/series/".format(self.alt_site_url)
self.movies_url = "{}/movies/".format(self.site_url)
self.alt_movies_url = "{}/movies/".format(self.alt_site_url)
self.series_id_url = "{}/dereferrer/series/".format(self.site_url)
self.movie_id_url = "{}/dereferrer/movie/".format(self.site_url)
self.list_url = f"{self.site_url}/lists/"
self.alt_list_url = f"{self.alt_site_url}/lists/"
self.series_url = f"{self.site_url}/series/"
self.alt_series_url = f"{self.alt_site_url}/series/"
self.movies_url = f"{self.site_url}/movies/"
self.alt_movies_url = f"{self.alt_site_url}/movies/"
self.series_id_url = f"{self.site_url}/dereferrer/series/"
self.movie_id_url = f"{self.site_url}/dereferrer/movie/"
def get_series(self, language, tvdb_url=None, tvdb_id=None):
if not tvdb_url and not tvdb_id:
raise Failed("TVDB Error: getget_seriesmove requires either tvdb_url or tvdb_id")
raise Failed("TVDB Error: get_series requires either tvdb_url or tvdb_id")
elif not tvdb_url and tvdb_id:
tvdb_url = "{}{}".format(self.series_id_url, tvdb_id)
tvdb_url = f"{self.series_id_url}{tvdb_id}"
return TVDbObj(tvdb_url, language, False, self)
def get_movie(self, language, tvdb_url=None, tvdb_id=None):
if not tvdb_url and not tvdb_id:
raise Failed("TVDB Error: get_movie requires either tvdb_url or tvdb_id")
elif not tvdb_url and tvdb_id:
tvdb_url = "{}{}".format(self.movie_id_url, tvdb_id)
tvdb_url = f"{self.movie_id_url}{tvdb_id}"
return TVDbObj(tvdb_url, language, True, self)
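# A simplified sketch of the url-or-id pattern used by get_series/get_movie
# above: callers may pass either a full TVDb URL or a bare numeric ID, and a
# bare ID is turned into a dereferrer URL before lookup. resolve_tvdb_url is a
# hypothetical helper name; the URL constants mirror the ones defined above.
SITE_URL = "https://www.thetvdb.com"
SERIES_ID_URL = f"{SITE_URL}/dereferrer/series/"
MOVIE_ID_URL = f"{SITE_URL}/dereferrer/movie/"

def resolve_tvdb_url(tvdb_url=None, tvdb_id=None, is_movie=False):
    if not tvdb_url and not tvdb_id:
        raise ValueError("either tvdb_url or tvdb_id is required")
    if tvdb_url:
        return tvdb_url
    return f"{MOVIE_ID_URL if is_movie else SERIES_ID_URL}{tvdb_id}"

# resolve_tvdb_url(tvdb_id=121361) -> "https://www.thetvdb.com/dereferrer/series/121361"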
def get_tvdb_ids_from_url(self, tvdb_url, language):
@ -90,25 +94,25 @@ class TVDbAPI:
title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0]
item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0]
if item_url.startswith("/series/"):
try: show_ids.append(self.get_series(language, tvdb_url="{}{}".format(self.site_url, item_url)).id)
except Failed as e: logger.error("{} for series {}".format(e, title))
try: show_ids.append(self.get_series(language, tvdb_url=f"{self.site_url}{item_url}").id)
except Failed as e: logger.error(f"{e} for series {title}")
elif item_url.startswith("/movies/"):
try:
tmdb_id = self.get_movie(language, tvdb_url="{}{}".format(self.site_url, item_url)).tmdb_id
tmdb_id = self.get_movie(language, tvdb_url=f"{self.site_url}{item_url}").tmdb_id
if tmdb_id: movie_ids.append(tmdb_id)
else: raise Failed("TVDb Error: TMDb ID not found from TVDb URL: {}".format(tvdb_url))
else: raise Failed(f"TVDb Error: TMDb ID not found from TVDb URL: {tvdb_url}")
except Failed as e:
logger.error("{} for series {}".format(e, title))
logger.error(f"{e} for series {title}")
else:
logger.error("TVDb Error: Skipping Movie: {}".format(title))
logger.error(f"TVDb Error: Skipping Movie: {title}")
if len(show_ids) > 0 or len(movie_ids) > 0:
return movie_ids, show_ids
raise Failed("TVDb Error: No TVDb IDs found at {}".format(tvdb_url))
except requests.exceptions.MissingSchema as e:
raise Failed(f"TVDb Error: No TVDb IDs found at {tvdb_url}")
except requests.exceptions.MissingSchema:
util.print_stacktrace()
raise Failed("TVDb Error: URL Lookup Failed for {}".format(tvdb_url))
raise Failed(f"TVDb Error: URL Lookup Failed for {tvdb_url}")
else:
raise Failed("TVDb Error: {} must begin with {}".format(tvdb_url, self.list_url))
raise Failed(f"TVDb Error: {tvdb_url} must begin with {self.list_url}")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url, language):
@ -119,7 +123,7 @@ class TVDbAPI:
show_ids = []
movie_ids = []
if status_message:
logger.info("Processing {}: {}".format(pretty, data))
logger.info(f"Processing {pretty}: {data}")
if method == "tvdb_show":
try: show_ids.append(self.get_series(language, tvdb_id=int(data)).id)
except ValueError: show_ids.append(self.get_series(language, tvdb_url=data).id)
@ -131,16 +135,16 @@ class TVDbAPI:
movie_ids.extend(tmdb_ids)
show_ids.extend(tvdb_ids)
else:
raise Failed("TVDb Error: Method {} not supported".format(method))
raise Failed(f"TVDb Error: Method {method} not supported")
if status_message:
logger.debug("TMDb IDs Found: {}".format(movie_ids))
logger.debug("TVDb IDs Found: {}".format(show_ids))
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids
def convert_from_imdb(self, imdb_id, language):
def convert_from_imdb(self, imdb_id):
update = False
if self.Cache:
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb_id)
update = False
if not tmdb_id:
tmdb_id, update = self.Cache.get_tmdb_from_imdb(imdb_id)
if update:
@ -158,7 +162,7 @@ class TVDbAPI:
try:
if tmdb_id and not from_cache: self.TMDb.get_movie(tmdb_id)
except Failed: tmdb_id = None
if not tmdb_id: raise Failed("TVDb Error: No TMDb ID found for IMDb: {}".format(imdb_id))
if not tmdb_id: raise Failed(f"TVDb Error: No TMDb ID found for IMDb: {imdb_id}")
if self.Cache and tmdb_id and update is not False:
self.Cache.update_imdb("movie", update, imdb_id, tmdb_id)
return tmdb_id
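# Generic sketch of the cache-first flow in convert_from_imdb above; cache is a
# plain dict standing in for the project's Cache object, and lookup_remote /
# verify are placeholder callables, not real methods: read from the cache, fall
# back to a remote lookup, confirm the result, and write back on a cache miss.
def cached_convert(imdb_id, cache, lookup_remote, verify):
    tmdb_id = cache.get(imdb_id)
    from_cache = tmdb_id is not None
    if not from_cache:
        tmdb_id = lookup_remote(imdb_id)
        if tmdb_id is not None and not verify(tmdb_id):
            tmdb_id = None
    if not tmdb_id:
        raise ValueError(f"No TMDb ID found for IMDb: {imdb_id}")
    if not from_cache:
        cache[imdb_id] = tmdb_id  # write back only when the cache missed
    return tmdb_id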

@ -1,4 +1,5 @@
import datetime, logging, re, signal, sys, time, traceback
import logging, re, signal, sys, time, traceback
from datetime import datetime
try:
import msvcrt
@ -18,7 +19,7 @@ class Failed(Exception):
def retry_if_not_failed(exception):
return not isinstance(exception, Failed)
seperating_character = "="
separating_character = "="
screen_width = 100
method_alias = {
@ -47,6 +48,7 @@ filter_alias = {
"rating": "rating",
"studio": "studio",
"subtitle_language": "subtitle_language",
"tmdb_vote_count": "vote_count",
"writer": "writers",
"video_resolution": "video_resolution",
"year": "year"
@ -113,9 +115,15 @@ pretty_names = {
"plex_search": "Plex Search",
"tautulli_popular": "Tautulli Popular",
"tautulli_watched": "Tautulli Watched",
"tmdb_actor": "TMDb Actor",
"tmdb_actor_details": "TMDb Actor",
"tmdb_collection": "TMDb Collection",
"tmdb_collection_details": "TMDb Collection",
"tmdb_company": "TMDb Company",
"tmdb_crew": "TMDb Crew",
"tmdb_crew_details": "TMDb Crew",
"tmdb_director": "TMDb Director",
"tmdb_director_details": "TMDb Director",
"tmdb_discover": "TMDb Discover",
"tmdb_keyword": "TMDb Keyword",
"tmdb_list": "TMDb List",
@ -126,11 +134,15 @@ pretty_names = {
"tmdb_now_playing": "TMDb Now Playing",
"tmdb_person": "TMDb Person",
"tmdb_popular": "TMDb Popular",
"tmdb_producer": "TMDb Producer",
"tmdb_producer_details": "TMDb Producer",
"tmdb_show": "TMDb Show",
"tmdb_show_details": "TMDb Show",
"tmdb_top_rated": "TMDb Top Rated",
"tmdb_trending_daily": "TMDb Trending Daily",
"tmdb_trending_weekly": "TMDb Trending Weekly",
"tmdb_writer": "TMDb Writer",
"tmdb_writer_details": "TMDb Writer",
"trakt_list": "Trakt List",
"trakt_trending": "Trakt Trending",
"trakt_watchlist": "Trakt Watchlist",
@ -219,9 +231,15 @@ all_lists = [
"plex_search",
"tautulli_popular",
"tautulli_watched",
"tmdb_actor",
"tmdb_actor_details",
"tmdb_collection",
"tmdb_collection_details",
"tmdb_company",
"tmdb_crew",
"tmdb_crew_details",
"tmdb_director",
"tmdb_director_details",
"tmdb_discover",
"tmdb_keyword",
"tmdb_list",
@ -231,11 +249,15 @@ all_lists = [
"tmdb_network",
"tmdb_now_playing",
"tmdb_popular",
"tmdb_producer",
"tmdb_producer_details",
"tmdb_show",
"tmdb_show_details",
"tmdb_top_rated",
"tmdb_trending_daily",
"tmdb_trending_weekly",
"tmdb_writer",
"tmdb_writer_details",
"trakt_list",
"trakt_trending",
"trakt_watchlist",
@ -252,6 +274,7 @@ collectionless_lists = [
"name_mapping", "label", "label_sync_mode"
]
other_attributes = [
"run_again",
"schedule",
"sync_mode",
"template",
@ -327,9 +350,15 @@ count_lists = [
"trakt_trending"
]
tmdb_lists = [
"tmdb_actor",
"tmdb_actor_details",
"tmdb_collection",
"tmdb_collection_details",
"tmdb_company",
"tmdb_crew",
"tmdb_crew_details",
"tmdb_director",
"tmdb_director_details",
"tmdb_discover",
"tmdb_keyword",
"tmdb_list",
@ -339,16 +368,26 @@ tmdb_lists = [
"tmdb_network",
"tmdb_now_playing",
"tmdb_popular",
"tmdb_producer",
"tmdb_producer_details",
"tmdb_show",
"tmdb_show_details",
"tmdb_top_rated",
"tmdb_trending_daily",
"tmdb_trending_weekly"
"tmdb_trending_weekly",
"tmdb_writer",
"tmdb_writer_details"
]
tmdb_type = {
"tmdb_actor": "Person",
"tmdb_actor_details": "Person",
"tmdb_collection": "Collection",
"tmdb_collection_details": "Collection",
"tmdb_company": "Company",
"tmdb_crew": "Person",
"tmdb_crew_details": "Person",
"tmdb_director": "Person",
"tmdb_director_details": "Person",
"tmdb_keyword": "Keyword",
"tmdb_list": "List",
"tmdb_list_details": "List",
@ -356,8 +395,12 @@ tmdb_type = {
"tmdb_movie_details": "Movie",
"tmdb_network": "Network",
"tmdb_person": "Person",
"tmdb_producer": "Person",
"tmdb_producer_details": "Person",
"tmdb_show": "Show",
"tmdb_show_details": "Show"
"tmdb_show_details": "Show",
"tmdb_writer": "Person",
"tmdb_writer_details": "Person"
}
all_filters = [
"actor", "actor.not",
@ -369,6 +412,8 @@ all_filters = [
"genre", "genre.not",
"max_age",
"originally_available.gte", "originally_available.lte",
"tmdb_vote_count.gte", "tmdb_vote_count.lte",
"duration.gte", "duration.lte",
"original_language", "original_language.not",
"rating.gte", "rating.lte",
"studio", "studio.not",
@ -381,6 +426,7 @@ movie_only_filters = [
"audio_language", "audio_language.not",
"country", "country.not",
"director", "director.not",
"duration.gte", "duration.lte",
"original_language", "original_language.not",
"subtitle_language", "subtitle_language.not",
"video_resolution", "video_resolution.not",
@ -443,6 +489,9 @@ discover_tv_sort = [
"popularity.desc", "popularity.asc"
]
def tab_new_lines(data):
return str(data).replace("\n", "\n|\t ") if "\n" in str(data) else str(data)
def adjust_space(old_length, display_title):
display_title = str(display_title)
space_length = old_length - len(display_title)
@ -461,31 +510,32 @@ def choose_from_list(datalist, description, data=None, list_type="title", exact=
if len(datalist) > 0:
if len(datalist) == 1 and (description != "collection" or datalist[0].title == data):
return datalist[0]
message = "Multiple {}s Found\n0) {}".format(description, "Create New Collection: {}".format(data) if description == "collection" else "Do Nothing")
zero_option = f"Create New Collection: {data}" if description == "collection" else "Do Nothing"
message = f"Multiple {description}s Found\n0) {zero_option}"
for i, d in enumerate(datalist, 1):
if list_type == "title":
if d.title == data:
return d
message += "\n{}) {}".format(i, d.title)
message += f"\n{i}) {d.title}"
else:
message += "\n{}) [{}] {}".format(i, d[0], d[1])
message += f"\n{i}) [{d[0]}] {d[1]}"
if exact:
return None
print_multiline(message, info=True)
while True:
try:
selection = int(logger_input("Choose {} number".format(description))) - 1
selection = int(logger_input(f"Choose {description} number")) - 1
if selection >= 0: return datalist[selection]
elif selection == -1: return None
else: logger.info("Invalid {} number".format(description))
except IndexError: logger.info("Invalid {} number".format(description))
else: logger.info(f"Invalid {description} number")
except IndexError: logger.info(f"Invalid {description} number")
except TimeoutExpired:
if list_type == "title":
logger.warning("Input Timeout: using {}".format(data))
logger.warning(f"Input Timeout: using {data}")
return None
else:
logger.warning("Input Timeout: using {}".format(datalist[0][1]))
return datalist[0][1]
logger.warning(f"Input Timeout: using {datalist[0][1]}")
return datalist[0]
else:
return None
@ -507,33 +557,49 @@ def get_int_list(data, id_type):
def get_year_list(data, method):
values = get_list(data)
final_years = []
current_year = datetime.datetime.now().year
current_year = datetime.now().year
for value in values:
try:
if "-" in value:
year_range = re.search("(\\d{4})-(\\d{4}|NOW)", str(value))
start = year_range.group(1)
end = year_range.group(2)
if end == "NOW":
end = current_year
if int(start) < 1800 or int(start) > current_year: logger.error("Collection Error: Skipping {} starting year {} must be between 1800 and {}".format(method, start, current_year))
elif int(end) < 1800 or int(end) > current_year: logger.error("Collection Error: Skipping {} ending year {} must be between 1800 and {}".format(method, end, current_year))
elif int(start) > int(end): logger.error("Collection Error: Skipping {} starting year {} cannot be greater then ending year {}".format(method, start, end))
start = check_year(year_range.group(1), current_year, method)
end = current_year if year_range.group(2) == "NOW" else check_year(year_range.group(2), current_year, method)
if int(start) > int(end):
raise Failed(f"Collection Error: {method} starting year: {start} cannot be greater then ending year {end}")
else:
for i in range(int(start), int(end) + 1):
final_years.append(i)
else:
year = re.search("(\\d+)", str(value)).group(1)
if int(start) < 1800 or int(start) > current_year:
logger.error("Collection Error: Skipping {} year {} must be between 1800 and {}".format(method, year, current_year))
final_years.append(int(i))
else:
if len(str(year)) != len(str(value)):
logger.warning("Collection Warning: {} can be replaced with {}".format(value, year))
final_years.append(year)
final_years.append(check_year(value, current_year, method))
except AttributeError:
logger.error("Collection Error: Skipping {} failed to parse year from {}".format(method, value))
raise Failed(f"Collection Error: {method} failed to parse year from {value}")
return final_years
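# A condensed standalone sketch of the year handling above (the real
# implementation routes errors through Failed and the check_year helper):
# "1990-1993" expands to [1990, 1991, 1992, 1993], an end of "NOW" means the
# current year, and a bare value must fall between 1800 and the current year.
import re
from datetime import datetime

def expand_years(values):
    current_year = datetime.now().year
    years = []
    for value in values:
        if "-" in str(value):
            match = re.search(r"(\d{4})-(\d{4}|NOW)", str(value))
            start = int(match.group(1))
            end = current_year if match.group(2) == "NOW" else int(match.group(2))
            if start > end:
                raise ValueError(f"starting year {start} cannot be greater than ending year {end}")
            years.extend(range(start, end + 1))
        else:
            year = int(value)
            if year < 1800 or year > current_year:
                raise ValueError(f"year {year} must be between 1800 and {current_year}")
            years.append(year)
    return years

# expand_years(["1999", "2018-NOW"]) -> [1999, 2018, ..., current year]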
def check_year(year, current_year, method):
return check_number(year, method, minimum=1800, maximum=current_year)
def check_number(value, method, number_type="int", minimum=None, maximum=None):
if number_type == "int":
try: num_value = int(str(value))
except ValueError: raise Failed(f"Collection Error: {method}: {value} must be an integer")
elif number_type == "float":
try: num_value = float(str(value))
except ValueError: raise Failed(f"Collection Error: {method}: {value} must be a number")
else: raise Failed(f"Number Type: {number_type} invalid")
if minimum is not None and maximum is not None and (num_value < minimum or num_value > maximum):
raise Failed(f"Collection Error: {method}: {num_value} must be between {minimum} and {maximum}")
elif minimum is not None and num_value < minimum:
raise Failed(f"Collection Error: {method}: {num_value} is less then {minimum}")
elif maximum is not None and num_value > maximum:
raise Failed(f"Collection Error: {method}: {num_value} is greater then {maximum}")
else:
return num_value
def check_date(date_text, method, return_string=False):
try: date_obj = datetime.strptime(str(date_text), "%m/%d/%Y")
except ValueError: raise Failed(f"Collection Error: {method}: {date_text} must match pattern MM/DD/YYYY e.g. 12/25/2020")
return str(date_text) if return_string else date_obj
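# Brief usage sketch for the validators added above, assuming they are exposed
# from modules.util as the imports elsewhere in this diff suggest; the values
# and method names are illustrative only.
from modules.util import Failed, check_number, check_date

try:
    votes = check_number("500", "tmdb_vote_count.gte", minimum=1)       # -> 500
    rating = check_number("7.5", "rating.gte", number_type="float")     # -> 7.5
    release = check_date("12/25/2020", "originally_available.gte")      # -> datetime(2020, 12, 25, 0, 0)
except Failed as e:
    print(e)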
def logger_input(prompt, timeout=60):
if windows: return windows_input(prompt, timeout)
elif hasattr(signal, "SIGALRM"): return unix_input(prompt, timeout)
@ -543,14 +609,14 @@ def alarm_handler(signum, frame):
raise TimeoutExpired
def unix_input(prompt, timeout=60):
prompt = "| {}: ".format(prompt)
prompt = f"| {prompt}: "
signal.signal(signal.SIGALRM, alarm_handler)
signal.alarm(timeout)
try: return input(prompt)
finally: signal.alarm(0)
def old_windows_input(prompt, timeout=60, timer=time.monotonic):
prompt = "| {}: ".format(prompt)
prompt = f"| {prompt}: "
sys.stdout.write(prompt)
sys.stdout.flush()
endtime = timer() + timeout
@ -560,26 +626,26 @@ def old_windows_input(prompt, timeout=60, timer=time.monotonic):
result.append(msvcrt.getwche())
if result[-1] == "\n":
out = "".join(result[:-1])
logger.debug("{}{}".format(prompt[2:], out))
logger.debug(f"{prompt[2:]}{out}")
return out
time.sleep(0.04)
raise TimeoutExpired
def windows_input(prompt, timeout=5):
sys.stdout.write("| {}: ".format(prompt))
sys.stdout.write(f"| {prompt}: ")
sys.stdout.flush()
result = []
start_time = time.time()
while True:
if msvcrt.kbhit():
chr = msvcrt.getwche()
if ord(chr) == 13: # enter_key
char = msvcrt.getwche()
if ord(char) == 13: # enter_key
out = "".join(result)
print("")
logger.debug("{}: {}".format(prompt, out))
logger.debug(f"{prompt}: {out}")
return out
elif ord(chr) >= 32: #space_char
result.append(chr)
elif ord(char) >= 32: #space_char
result.append(char)
if (time.time() - start_time) > timeout:
print("")
raise TimeoutExpired
@ -606,17 +672,17 @@ def my_except_hook(exctype, value, tb):
def get_id_from_imdb_url(imdb_url):
match = re.search("(tt\\d+)", str(imdb_url))
if match: return match.group(1)
else: raise Failed("Regex Error: Failed to parse IMDb ID from IMDb URL: {}".format(imdb_url))
else: raise Failed(f"Regex Error: Failed to parse IMDb ID from IMDb URL: {imdb_url}")
def regex_first_int(data, id_type, default=None):
match = re.search("(\\d+)", str(data))
if match:
return int(match.group(1))
elif default:
logger.warning("Regex Warning: Failed to parse {} from {} using {} as default".format(id_type, data, default))
logger.warning(f"Regex Warning: Failed to parse {id_type} from {data} using {default} as default")
return int(default)
else:
raise Failed("Regex Error: Failed to parse {} from {}".format(id_type, data))
raise Failed(f"Regex Error: Failed to parse {id_type} from {data}")
def remove_not(method):
return method[:-4] if method.endswith(".not") else method
@ -629,20 +695,22 @@ def get_centered_text(text):
text += " "
space -= 1
side = int(space / 2)
return "{}{}{}".format(" " * side, text, " " * side)
return f"{' ' * side}{text}{' ' * side}"
def seperator(text=None):
logger.handlers[0].setFormatter(logging.Formatter("%(message)-{}s".format(screen_width - 2)))
logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s %(message)-{}s".format(screen_width - 2)))
logger.info("|{}|".format(seperating_character * screen_width))
def separator(text=None):
logger.handlers[0].setFormatter(logging.Formatter(f"%(message)-{screen_width - 2}s"))
logger.handlers[1].setFormatter(logging.Formatter(f"[%(asctime)s] %(filename)-27s %(levelname)-10s %(message)-{screen_width - 2}s"))
logger.info(f"|{separating_character * screen_width}|")
if text:
logger.info("| {} |".format(get_centered_text(text)))
logger.info("|{}|".format(seperating_character * screen_width))
logger.handlers[0].setFormatter(logging.Formatter("| %(message)-{}s |".format(screen_width - 2)))
logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-{}s |".format(screen_width - 2)))
text_list = text.split("\n")
for t in text_list:
logger.info(f"| {get_centered_text(t)} |")
logger.info(f"|{separating_character * screen_width}|")
logger.handlers[0].setFormatter(logging.Formatter(f"| %(message)-{screen_width - 2}s |"))
logger.handlers[1].setFormatter(logging.Formatter(f"[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-{screen_width - 2}s |"))
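# Standalone sketch of the multi-line behavior added to separator() above: each
# "\n"-separated line of the text is centered on its own row between full-width
# divider rows (the real function also swaps the logging formatters and writes
# through the logger rather than print).
def separator_sketch(text=None, width=100, char="="):
    print(f"|{char * width}|")
    if text:
        for line in text.split("\n"):
            print(f"| {line.center(width - 2)} |")
        print(f"|{char * width}|")

# separator_sketch("Finished Run\nRun Time: 0:42:10")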
def print_return(length, text):
print(adjust_space(length, "| {}".format(text)), end="\r")
print(adjust_space(length, f"| {text}"), end="\r")
return len(text) + 2
def print_end(length, text=None):

@ -1,9 +1,10 @@
import argparse, logging, os, re, schedule, sys, time, traceback, datetime
import argparse, logging, os, re, schedule, sys, time
from datetime import datetime
from modules import tests, util
from modules.config import Config
parser = argparse.ArgumentParser()
parser.add_argument("--mytests", dest="tests", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("--my-tests", dest="tests", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
@ -15,18 +16,18 @@ parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default:
args = parser.parse_args()
if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", args.time):
raise util.Failed("Argument Error: time argument invalid: {} must be in the HH:MM format".format(args.time))
raise util.Failed(f"Argument Error: time argument invalid: {args.time} must be in the HH:MM format")
util.seperating_character = args.divider[0]
util.separating_character = args.divider[0]
if 90 <= args.width <= 300:
util.screen_width = args.width
else:
raise util.Failed("Argument Error: width argument invalid: {} must be an integer between 90 and 300".format(args.width))
raise util.Failed(f"Argument Error: width argument invalid: {args.width} must be an integer between 90 and 300")
default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
if args.config and os.path.exists(args.config): default_dir = os.path.join(os.path.dirname(os.path.abspath(args.config)))
elif args.config and not os.path.exists(args.config): raise util.Failed("Config Error: config not found at {}".format(os.path.abspath(args.config)))
elif not os.path.exists(os.path.join(default_dir, "config.yml")): raise util.Failed("Config Error: config not found at {}".format(os.path.abspath(default_dir)))
elif args.config and not os.path.exists(args.config): raise util.Failed(f"Config Error: config not found at {os.path.abspath(args.config)}")
elif not os.path.exists(os.path.join(default_dir, "config.yml")): raise util.Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
os.makedirs(os.path.join(default_dir, "logs"), exist_ok=True)
@ -34,8 +35,8 @@ logger = logging.getLogger("Plex Meta Manager")
logger.setLevel(logging.DEBUG)
def fmt_filter(record):
record.levelname = "[{}]".format(record.levelname)
record.filename = "[{}:{}]".format(record.filename, record.lineno)
record.levelname = f"[{record.levelname}]"
record.filename = f"[{record.filename}:{record.lineno}]"
return True
file_handler = logging.handlers.TimedRotatingFileHandler(os.path.join(default_dir, "logs", "meta.log"), when="midnight", backupCount=10, encoding="utf-8")
@ -51,27 +52,28 @@ logger.addHandler(file_handler)
sys.excepthook = util.my_except_hook
util.seperator()
util.separator()
logger.info(util.get_centered_text(" "))
logger.info(util.get_centered_text(" ____ _ __ __ _ __ __ "))
logger.info(util.get_centered_text("| _ \| | _____ __ | \/ | ___| |_ __ _ | \/ | __ _ _ __ __ _ __ _ ___ _ __ "))
logger.info(util.get_centered_text("| |_) | |/ _ \ \/ / | |\/| |/ _ \ __/ _` | | |\/| |/ _` | '_ \ / _` |/ _` |/ _ \ '__|"))
logger.info(util.get_centered_text("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ "))
logger.info(util.get_centered_text("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|"))
logger.info(util.get_centered_text("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
logger.info(util.get_centered_text("|_| |_|\___/_/\_\ |_| |_|\___|\__\__,_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|_| "))
logger.info(util.get_centered_text("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
logger.info(util.get_centered_text(" |___/ "))
logger.info(util.get_centered_text(" Version: 1.2.2 "))
util.seperator()
logger.info(util.get_centered_text(" Version: 1.3.0 "))
util.separator()
if args.tests:
tests.run_tests(default_dir)
sys.exit(0)
def start(config_path, test, daily, collections):
if daily: type = "Daily "
elif test: type = "Test "
elif collections: type = "Collections "
else: type = ""
util.seperator("Starting {}Run".format(type))
if daily: start_type = "Daily "
elif test: start_type = "Test "
elif collections: start_type = "Collections "
else: start_type = ""
start_time = datetime.now()
util.separator(f"Starting {start_type}Run")
try:
config = Config(default_dir, config_path)
config.update_libraries(test, collections)
@ -79,7 +81,7 @@ def start(config_path, test, daily, collections):
util.print_stacktrace()
logger.critical(e)
logger.info("")
util.seperator("Finished {}Run".format(type))
util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
try:
if args.run or args.test or args.collections:
@ -89,16 +91,16 @@ try:
schedule.every().day.at(args.time).do(start, args.config, False, True, None)
while True:
schedule.run_pending()
current = datetime.datetime.now().strftime("%H:%M")
seconds = (datetime.datetime.strptime(args.time, "%H:%M") - datetime.datetime.strptime(current, "%H:%M")).total_seconds()
current = datetime.now().strftime("%H:%M")
seconds = (datetime.strptime(args.time, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
hours = int(seconds // 3600)
if hours < 0:
hours += 24
minutes = int((seconds % 3600) // 60)
time_str = "{} Hour{} and ".format(hours, "s" if hours > 1 else "") if hours > 0 else ""
time_str += "{} Minute{}".format(minutes, "s" if minutes > 1 else "")
time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
length = util.print_return(length, "Current Time: {} | {} until the daily run at {}".format(current, time_str, args.time))
length = util.print_return(length, f"Current Time: {current} | {time_str} until the daily run at {args.time}")
time.sleep(1)
except KeyboardInterrupt:
util.seperator("Exiting Plex Meta Manager")
util.separator("Exiting Plex Meta Manager")

@ -1,7 +1,7 @@
# Remove
# Less common, pinned
PlexAPI==4.3.1
tmdbv3api==1.7.3
PlexAPI==4.4.0
tmdbv3api==1.7.5
trakt.py==4.2.0
# More common, flexible
lxml
