Merge pull request #277 from meisnate12/develop

v1.9.3
meisnate12 4 years ago committed by GitHub
commit d782ff2beb

@ -1,5 +1,5 @@
# Plex Meta Manager
#### Version 1.9.2
#### Version 1.9.3
The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be run continuously, using YAML configuration files to update the metadata of the movies, shows, and collections in your libraries on a schedule and to automatically build collections using the various methods detailed in the wiki. Examples of collections the script can build and update daily include Plex-based searches (such as actor, genre, or studio collections) and collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists, among other services.
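A rough sketch of the kind of collection definition the script consumes (the collection name, list URL, and values are illustrative only; the exact attribute names are documented in the wiki). Per the scheduling code added further down in this diff, the schedule attribute accepts forms such as daily, hourly(0-23), weekly(monday), monthly(1-31), and yearly(MM/DD):

collections:
  IMDb Top 250:
    imdb_list: https://www.imdb.com/chart/top   # any supported list source works here
    schedule: weekly(sunday)
    sync_mode: sync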

@ -33,6 +33,9 @@ plex: # Can be individually specified
url: http://192.168.1.12:32400
token: ####################
timeout: 60
clean_bundles: false
empty_trash: false
optimize: false
tmdb:
apikey: ################################
language: en
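Since the plex block is marked as individually specifiable, and the library-loading code later in this diff reads empty_trash and optimize (and presumably clean_bundles as well) from each library's own plex section, a per-library override would look roughly like this (the library name and values are illustrative):

libraries:
  Movies:
    plex:
      empty_trash: true   # overrides the global plex.empty_trash for this library only
      optimize: false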

@ -49,7 +49,6 @@ class AniDBAPI:
def get_items(self, method, data, language):
pretty = util.pretty_names[method] if method in util.pretty_names else method
logger.debug(f"Data: {data}")
anidb_ids = []
if method == "anidb_popular":
logger.info(f"Processing {pretty}: {data} Anime")
@ -60,6 +59,7 @@ class AniDBAPI:
elif method == "anidb_relation": anidb_ids.extend(self._relations(data, language))
else: raise Failed(f"AniDB Error: Method {method} not supported")
movie_ids, show_ids = self.config.Convert.anidb_to_ids(anidb_ids)
logger.debug("")
logger.debug(f"AniDB IDs Found: {anidb_ids}")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")

@ -218,7 +218,6 @@ class AniListAPI:
raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}")
def get_items(self, method, data):
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
if method == "anilist_id":
anilist_id, name = self._validate(data)
@ -243,6 +242,7 @@ class AniListAPI:
else:
raise Failed(f"AniList Error: Method {method} not supported")
movie_ids, show_ids = self.config.Convert.anilist_to_ids(anilist_ids)
logger.debug("")
logger.debug(f"AniList IDs Found: {anilist_ids}")
logger.debug(f"Shows Found: {show_ids}")
logger.debug(f"Movies Found: {movie_ids}")

@ -28,7 +28,7 @@ method_alias = {
"writers": "writer",
"years": "year"
}
filter_alias = {
filter_translation = {
"actor": "actors",
"audience_rating": "audienceRating",
"collection": "collections",
@ -220,11 +220,14 @@ class CollectionBuilder:
methods = {m.lower(): m for m in self.data}
if "template" in methods:
logger.info("")
logger.info("Validating Method: template")
if not self.metadata.templates:
raise Failed("Collection Error: No templates found")
elif not self.data[methods["template"]]:
raise Failed("Collection Error: template attribute is blank")
else:
logger.debug(f"Value: {self.data[methods['template']]}")
for variables in util.get_list(self.data[methods["template"]], split=False):
if not isinstance(variables, dict):
raise Failed("Collection Error: template attribute is not a dictionary")
@ -329,32 +332,45 @@ class CollectionBuilder:
except Failed:
continue
skip_collection = True
if "schedule" not in methods:
skip_collection = False
elif not self.data[methods["schedule"]]:
logger.error("Collection Error: schedule attribute is blank. Running daily")
skip_collection = False
if "schedule" in methods:
logger.info("")
logger.info("Validating Method: schedule")
if not self.data[methods["schedule"]]:
raise Failed("Collection Error: schedule attribute is blank")
else:
logger.debug(f"Value: {self.data[methods['schedule']]}")
skip_collection = True
schedule_list = util.get_list(self.data[methods["schedule"]])
next_month = current_time.replace(day=28) + timedelta(days=4)
last_day = next_month - timedelta(days=next_month.day)
for schedule in schedule_list:
run_time = str(schedule).lower()
if run_time.startswith("day") or run_time.startswith("daily"):
if run_time.startswith(("day", "daily")):
skip_collection = False
elif run_time.startswith("week") or run_time.startswith("month") or run_time.startswith("year"):
elif run_time.startswith(("hour", "week", "month", "year")):
match = re.search("\\(([^)]+)\\)", run_time)
if match:
if not match:
logger.error(f"Collection Error: failed to parse schedule: {schedule}")
continue
param = match.group(1)
if run_time.startswith("week"):
if param.lower() in util.days_alias:
if run_time.startswith("hour"):
try:
if 0 <= int(param) <= 23:
self.schedule += f"\nScheduled to run only on the {util.make_ordinal(param)} hour"
if config.run_hour == int(param):
skip_collection = False
else:
raise ValueError
except ValueError:
logger.error(f"Collection Error: hourly schedule attribute {schedule} invalid must be an integer between 0 and 23")
elif run_time.startswith("week"):
if param.lower() not in util.days_alias:
logger.error(f"Collection Error: weekly schedule attribute {schedule} invalid must be a day of the week i.e. weekly(Monday)")
continue
weekday = util.days_alias[param.lower()]
self.schedule += f"\nScheduled weekly on {util.pretty_days[weekday]}"
if weekday == current_time.weekday():
skip_collection = False
else:
logger.error(f"Collection Error: weekly schedule attribute {schedule} invalid must be a day of the week i.e. weekly(Monday)")
elif run_time.startswith("month"):
try:
if 1 <= int(param) <= 31:
@ -362,21 +378,19 @@ class CollectionBuilder:
if current_time.day == int(param) or (current_time.day == last_day.day and int(param) > last_day.day):
skip_collection = False
else:
logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be between 1 and 31")
raise ValueError
except ValueError:
logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be an integer")
logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be an integer between 1 and 31")
elif run_time.startswith("year"):
match = re.match("^(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])$", param)
if match:
if not match:
logger.error(f"Collection Error: yearly schedule attribute {schedule} invalid must be in the MM/DD format i.e. yearly(11/22)")
continue
month = int(match.group(1))
day = int(match.group(2))
self.schedule += f"\nScheduled yearly on {util.pretty_months[month]} {util.make_ordinal(day)}"
if current_time.month == month and (current_time.day == day or (current_time.day == last_day.day and day > last_day.day)):
skip_collection = False
else:
logger.error(f"Collection Error: yearly schedule attribute {schedule} invalid must be in the MM/DD format i.e. yearly(11/22)")
else:
logger.error(f"Collection Error: failed to parse schedule: {schedule}")
else:
logger.error(f"Collection Error: schedule attribute {schedule} invalid")
if len(self.schedule) == 0:
@ -384,13 +398,49 @@ class CollectionBuilder:
if skip_collection:
raise Failed(f"{self.schedule}\n\nCollection {self.name} not scheduled to run")
logger.info(f"Scanning {self.name} Collection")
self.run_again = "run_again" in methods
self.collectionless = "plex_collectionless" in methods
self.run_again = False
if "run_again" in methods:
logger.info("")
logger.info("Validating Method: run_again")
if not self.data[methods["run_again"]]:
logger.warning(f"Collection Warning: run_again attribute is blank defaulting to false")
else:
logger.debug(f"Value: {self.data[methods['run_again']]}")
self.run_again = util.get_bool("run_again", self.data[methods["run_again"]])
self.sync = self.library.sync_mode == "sync"
if "sync_mode" in methods:
logger.info("")
logger.info("Validating Method: sync_mode")
if not self.data[methods["sync_mode"]]:
logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {self.library.sync_mode}")
else:
logger.debug(f"Value: {self.data[methods['sync_mode']]}")
if self.data[methods["sync_mode"]].lower() not in ["append", "sync"]:
logger.warning(f"Collection Warning: {self.data[methods['sync_mode']]} sync_mode invalid using general: {self.library.sync_mode}")
else:
self.sync = self.data[methods["sync_mode"]].lower() == "sync"
self.build_collection = True
if "build_collection" in methods:
logger.info("")
logger.info("Validating Method: build_collection")
if not self.data[methods["build_collection"]]:
logger.warning(f"Collection Warning: build_collection attribute is blank defaulting to true")
else:
logger.debug(f"Value: {self.data[methods['build_collection']]}")
self.build_collection = util.get_bool("build_collection", self.data[methods["build_collection"]])
if "tmdb_person" in methods:
if self.data[methods["tmdb_person"]]:
logger.info("")
logger.info("Validating Method: build_collection")
if not self.data[methods["tmdb_person"]]:
raise Failed("Collection Error: tmdb_person attribute is blank")
else:
logger.debug(f"Value: {self.data[methods['tmdb_person']]}")
valid_names = []
for tmdb_id in util.get_int_list(self.data[methods["tmdb_person"]], "TMDb Person ID"):
person = config.TMDb.get_person(tmdb_id)
@ -403,42 +453,48 @@ class CollectionBuilder:
self.details["tmdb_person"] = valid_names
else:
raise Failed(f"Collection Error: No valid TMDb Person IDs in {self.data[methods['tmdb_person']]}")
else:
raise Failed("Collection Error: tmdb_person attribute is blank")
self.smart_sort = "random"
self.smart_label_collection = False
if "smart_label" in methods:
logger.info("")
logger.info("Validating Method: smart_label")
self.smart_label_collection = True
if self.data[methods["smart_label"]]:
if not self.data[methods["smart_label"]]:
logger.warning("Collection Error: smart_label attribute is blank defaulting to random")
else:
logger.debug(f"Value: {self.data[methods['smart_label']]}")
if (self.library.is_movie and str(self.data[methods["smart_label"]]).lower() in plex.movie_smart_sorts) \
or (self.library.is_show and str(self.data[methods["smart_label"]]).lower() in plex.show_smart_sorts):
self.smart_sort = str(self.data[methods["smart_label"]]).lower()
else:
logger.info("")
logger.warning(f"Collection Error: smart_label attribute: {self.data[methods['smart_label']]} is invalid defaulting to random")
else:
logger.info("")
logger.warning("Collection Error: smart_label attribute is blank defaulting to random")
self.smart_url = None
self.smart_type_key = None
if "smart_url" in methods:
if self.data[methods["smart_url"]]:
logger.info("")
logger.info("Validating Method: smart_url")
if not self.data[methods["smart_url"]]:
raise Failed("Collection Error: smart_url attribute is blank")
else:
logger.debug(f"Value: {self.data[methods['smart_url']]}")
try:
self.smart_url, self.smart_type_key = library.get_smart_filter_from_uri(self.data[methods["smart_url"]])
except ValueError:
raise Failed("Collection Error: smart_url is incorrectly formatted")
else:
raise Failed("Collection Error: smart_url attribute is blank")
self.smart_filter_details = ""
if "smart_filter" in methods:
logger.info("")
logger.info("Validating Method: smart_filter")
filter_details = "\n"
smart_filter = self.data[methods["smart_filter"]]
if smart_filter is None:
raise Failed(f"Collection Error: smart_filter attribute is blank")
if not isinstance(smart_filter, dict):
raise Failed(f"Collection Error: smart_filter must be a dictionary: {smart_filter}")
logger.debug(f"Value: {self.data[methods['smart_filter']]}")
smart_methods = {m.lower(): m for m in smart_filter}
if "any" in smart_methods and "all" in smart_methods:
raise Failed(f"Collection Error: Cannot have more then one base")
@ -453,7 +509,7 @@ class CollectionBuilder:
smart_type = "shows"
else:
smart_type = "movies"
logger.info(f"Smart {smart_type.capitalize()[:-1]} Filter")
filter_details += f"Smart {smart_type.capitalize()[:-1]} Filter\n"
self.smart_type_key, smart_sorts = plex.smart_types[smart_type]
smart_sort = "random"
@ -463,7 +519,7 @@ class CollectionBuilder:
if smart_filter[smart_methods["sort_by"]] not in smart_sorts:
raise Failed(f"Collection Error: sort_by: {smart_filter[smart_methods['sort_by']]} is invalid")
smart_sort = smart_filter[smart_methods["sort_by"]]
logger.info(f"Sort By: {smart_sort}")
filter_details += f"Sort By: {smart_sort}\n"
limit = None
if "limit" in smart_methods:
@ -472,7 +528,7 @@ class CollectionBuilder:
if not isinstance(smart_filter[smart_methods["limit"]], int) or smart_filter[smart_methods["limit"]] < 1:
raise Failed("Collection Error: limit attribute must be an integer greater then 0")
limit = smart_filter[smart_methods["limit"]]
logger.info(f"Limit: {limit}")
filter_details += f"Limit: {limit}\n"
validate = True
if "validate" in smart_methods:
@ -481,7 +537,7 @@ class CollectionBuilder:
if not isinstance(smart_filter[smart_methods["validate"]], bool):
raise Failed("Collection Error: validate attribute must be either true or false")
validate = smart_filter[smart_methods["validate"]]
logger.info(f"Validate: {validate}")
filter_details += f"Validate: {validate}\n"
def _filter(filter_dict, fail, is_all=True, level=1):
output = ""
@ -590,7 +646,7 @@ class CollectionBuilder:
if not isinstance(smart_filter[smart_methods[base]], dict):
raise Failed(f"Collection Error: {base} must be a dictionary: {smart_filter[smart_methods[base]]}")
built_filter, filter_text = _filter(smart_filter[smart_methods[base]], validate, is_all=base_all)
util.print_multiline(f"Filter:{filter_text}")
self.smart_filter_details = f"{filter_details}Filter:{filter_text}"
final_filter = built_filter[:-1] if base_all else f"push=1&{built_filter}pop=1"
self.smart_url = f"?type={self.smart_type_key}&{f'limit={limit}&' if limit else ''}sort={smart_sorts[smart_sort]}&{final_filter}"
@ -612,6 +668,10 @@ class CollectionBuilder:
self.smart = self.smart_url or self.smart_label_collection
for method_key, method_data in self.data.items():
if method_key.lower() in ignored_details:
continue
logger.info("")
logger.info(f"Validating Method: {method_key}")
if "trakt" in method_key.lower() and not config.Trakt: raise Failed(f"Collection Error: {method_key} requires Trakt todo be configured")
elif "imdb" in method_key.lower() and not config.IMDb: raise Failed(f"Collection Error: {method_key} requires TMDb or Trakt to be configured")
elif "radarr" in method_key.lower() and not self.library.Radarr: raise Failed(f"Collection Error: {method_key} requires Radarr to be configured")
@ -619,8 +679,6 @@ class CollectionBuilder:
elif "tautulli" in method_key.lower() and not self.library.Tautulli: raise Failed(f"Collection Error: {method_key} requires Tautulli to be configured")
elif "mal" in method_key.lower() and not config.MyAnimeList: raise Failed(f"Collection Error: {method_key} requires MyAnimeList to be configured")
elif method_data is not None:
logger.debug("")
logger.debug(f"Validating Method: {method_key}")
logger.debug(f"Value: {method_data}")
if method_key.lower() in method_alias:
method_name = method_alias[method_key.lower()]
@ -1227,15 +1285,6 @@ class CollectionBuilder:
else:
logger.warning(f"Collection Warning: {method_key} attribute is blank")
self.sync = self.library.sync_mode == "sync"
if "sync_mode" in methods:
if not self.data[methods["sync_mode"]]:
logger.warning(f"Collection Warning: sync_mode attribute is blank using general: {self.library.sync_mode}")
elif self.data[methods["sync_mode"]].lower() not in ["append", "sync"]:
logger.warning(f"Collection Warning: {self.data[methods['sync_mode']]} sync_mode invalid using general: {self.library.sync_mode}")
else:
self.sync = self.data[methods["sync_mode"]].lower() == "sync"
if self.add_to_radarr is None:
self.add_to_radarr = self.library.Radarr.add if self.library.Radarr else False
if self.add_to_sonarr is None:
@ -1251,13 +1300,6 @@ class CollectionBuilder:
self.details["collection_mode"] = "hide"
self.sync = True
self.build_collection = True
if "build_collection" in methods:
if not self.data[methods["build_collection"]]:
logger.warning(f"Collection Warning: build_collection attribute is blank defaulting to true")
else:
self.build_collection = util.get_bool("build_collection", self.data[methods["build_collection"]])
if self.build_collection:
try:
self.obj = library.get_collection(self.name)
@ -1277,6 +1319,8 @@ class CollectionBuilder:
else:
self.sync = False
self.run_again = False
logger.info("")
logger.info("Validation Successful")
def collect_rating_keys(self, movie_map, show_map):
def add_rating_keys(keys):
@ -1315,7 +1359,7 @@ class CollectionBuilder:
elif "anilist" in method: check_map(self.config.AniList.get_items(method, value))
elif "mal" in method: check_map(self.config.MyAnimeList.get_items(method, value))
elif "tvdb" in method: check_map(self.config.TVDb.get_items(method, value, self.library.Plex.language))
elif "imdb" in method: check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language))
elif "imdb" in method: check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language, self.library.is_movie))
elif "letterboxd" in method: check_map(self.config.Letterboxd.get_items(method, value, self.library.Plex.language))
elif "tmdb" in method: check_map(self.config.TMDb.get_items(method, value, self.library.is_movie))
elif "trakt" in method: check_map(self.config.Trakt.get_items(method, value, self.library.is_movie))
@ -1340,7 +1384,7 @@ class CollectionBuilder:
for filter_method, filter_data in self.filters:
modifier = filter_method[-4:]
method = filter_method[:-4] if modifier in [".not", ".lte", ".gte"] else filter_method
method_name = filter_alias[method] if method in filter_alias else method
method_name = filter_translation[method] if method in filter_translation else method
if method_name == "max_age":
threshold_date = datetime.now() - timedelta(days=filter_data)
if current.originallyAvailableAt is None or current.originallyAvailableAt < threshold_date:
@ -1358,8 +1402,8 @@ class CollectionBuilder:
if movie is None:
logger.warning(f"Filter Error: No TMDb ID found for {current.title}")
continue
if (modifier == ".not" and movie.original_language in filter_data) or (
modifier != ".not" and movie.original_language not in filter_data):
if (modifier == ".not" and movie.original_language in filter_data) \
or (modifier != ".not" and movie.original_language not in filter_data):
match = False
break
elif method_name == "audio_track_title":
@ -1432,7 +1476,7 @@ class CollectionBuilder:
break
length = util.print_return(length, f"Filtering {(' ' * (max_length - len(str(i)))) + str(i)}/{total} {current.title}")
if match:
util.print_end(length, f"{name} Collection | {'=' if current in collection_items else '+'} | {current.title}")
logger.info(util.adjust_space(length, f"{name} Collection | {'=' if current in collection_items else '+'} | {current.title}"))
if current in collection_items:
self.plex_map[current.ratingKey] = None
elif self.smart_label_collection:
@ -1442,10 +1486,11 @@ class CollectionBuilder:
elif self.details["show_filtered"] is True:
logger.info(f"{name} Collection | X | {current.title}")
media_type = f"{'Movie' if self.library.is_movie else 'Show'}{'s' if total > 1 else ''}"
util.print_end(length, f"{total} {media_type} Processed")
util.print_end(length)
logger.info("")
logger.info(f"{total} {media_type} Processed")
def run_missing(self):
logger.info("")
arr_filters = []
for filter_method, filter_data in self.filters:
if (filter_method.startswith("original_language") and self.library.is_movie) or filter_method.startswith("tmdb_vote_count"):
@ -1472,6 +1517,7 @@ class CollectionBuilder:
logger.info(f"{self.name} Collection | ? | {movie.title} (TMDb: {missing_id})")
elif self.details["show_filtered"] is True:
logger.info(f"{self.name} Collection | X | {movie.title} (TMDb: {missing_id})")
logger.info("")
logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing")
if self.details["save_missing"] is True:
self.library.add_missing(self.name, missing_movies_with_names, True)
@ -1506,6 +1552,7 @@ class CollectionBuilder:
logger.info(f"{self.name} Collection | ? | {title} (TVDB: {missing_id})")
elif self.details["show_filtered"] is True:
logger.info(f"{self.name} Collection | X | {title} (TVDb: {missing_id})")
logger.info("")
logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing")
if self.details["save_missing"] is True:
self.library.add_missing(self.name, missing_shows_with_names, False)
@ -1520,16 +1567,21 @@ class CollectionBuilder:
self.run_again_shows.extend(missing_tvdb_ids)
def sync_collection(self):
logger.info("")
count_removed = 0
for ratingKey, item in self.plex_map.items():
if item is not None:
if count_removed == 0:
logger.info("")
util.separator(f"Removed from {self.name} Collection", space=False, border=False)
logger.info("")
logger.info(f"{self.name} Collection | - | {item.title}")
if self.smart_label_collection:
self.library.query_data(item.removeLabel, self.name)
else:
self.library.query_data(item.removeCollection, self.name)
count_removed += 1
if count_removed > 0:
logger.info("")
logger.info(f"{count_removed} {'Movie' if self.library.is_movie else 'Show'}{'s' if count_removed == 1 else ''} Removed")
def update_details(self):
@ -1601,43 +1653,17 @@ class CollectionBuilder:
self.library.collection_order_query(self.obj, self.details["collection_order"])
logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}")
if "label" in self.details or "label.remove" in self.details or "label.sync" in self.details:
item_labels = [label.tag for label in self.obj.labels]
labels = self.details["label" if "label" in self.details else "label.sync"]
if "label.sync" in self.details:
for label in (la for la in item_labels if la not in labels):
self.library.query_data(self.obj.removeLabel, label)
logger.info(f"Detail: Label {label} removed")
if "label" in self.details or "label.sync" in self.details:
for label in (la for la in labels if la not in item_labels):
self.library.query_data(self.obj.addLabel, label)
logger.info(f"Detail: Label {label} added")
if "label.remove" in self.details:
for label in self.details["label.remove"]:
if label in item_labels:
self.library.query_data(self.obj.removeLabel, label)
logger.info(f"Detail: Label {label} removed")
add_tags = self.details["label"] if "label" in self.details else None
remove_tags = self.details["label.remove"] if "label.remove" in self.details else None
sync_tags = self.details["label.sync"] if "label.sync" in self.details else None
self.library.edit_tags("label", self.obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
if len(self.item_details) > 0:
labels = None
if "item_label" in self.item_details or "item_label.remove" in self.item_details or "item_label.sync" in self.item_details:
labels = self.item_details["item_label" if "item_label" in self.item_details else "item_label.sync"]
add_tags = self.item_details["item_label"] if "item_label" in self.item_details else None
remove_tags = self.item_details["item_label.remove"] if "item_label.remove" in self.item_details else None
sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None
for item in self.library.get_collection_items(self.obj, self.smart_label_collection):
if labels is not None:
item_labels = [label.tag for label in item.labels]
if "item_label.sync" in self.item_details:
for label in (la for la in item_labels if la not in labels):
self.library.query_data(item.removeLabel, label)
logger.info(f"Detail: Label {label} removed from {item.title}")
if "item_label" in self.item_details or "item_label.sync" in self.item_details:
for label in (la for la in labels if la not in item_labels):
self.library.query_data(item.addLabel, label)
logger.info(f"Detail: Label {label} added to {item.title}")
if "item_label.remove" in self.item_details:
for label in self.item_details["item_label.remove"]:
if label in item_labels:
self.library.query_data(self.obj.removeLabel, label)
logger.info(f"Detail: Label {label} removed from {item.title}")
self.library.edit_tags("label", item, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
advance_edits = {}
for method_name, method_data in self.item_details.items():
if method_name in plex.item_advance_keys:
@ -1670,9 +1696,6 @@ class CollectionBuilder:
except BadRequest:
logger.error(f"Detail: {image_method} failed to update {message}")
if len(self.posters) > 0:
logger.info("")
if len(self.posters) > 1:
logger.info(f"{len(self.posters)} posters found:")
for p in self.posters:
@ -1697,9 +1720,6 @@ class CollectionBuilder:
elif "tmdb_show_details" in self.posters: set_image("tmdb_show_details", self.posters)
else: logger.info("No poster to update")
if len(self.backgrounds) > 0:
logger.info("")
if len(self.backgrounds) > 1:
logger.info(f"{len(self.backgrounds)} backgrounds found:")
for b in self.backgrounds:

@ -1,4 +1,5 @@
import logging, os
from datetime import datetime
from modules import util
from modules.anidb import AniDBAPI
from modules.anilist import AniListAPI
@ -48,7 +49,7 @@ mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata t
library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"}
class Config:
def __init__(self, default_dir, config_path=None, libraries_to_run=None):
def __init__(self, default_dir, config_path=None, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
logger.info("Locating config...")
if config_path and os.path.exists(config_path): self.config_path = os.path.abspath(config_path)
elif config_path and not os.path.exists(config_path): raise Failed(f"Config Error: config not found at {os.path.abspath(config_path)}")
@ -56,6 +57,13 @@ class Config:
else: raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
logger.info(f"Using {self.config_path} as config")
self.test_mode = is_test
self.run_start_time = time_scheduled
self.run_hour = datetime.strptime(time_scheduled, "%H:%M").hour
self.requested_collections = util.get_list(requested_collections)
self.requested_libraries = util.get_list(requested_libraries)
self.resume_from = resume_from
yaml.YAML().allow_duplicate_keys = True
try:
new_config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
@ -312,20 +320,23 @@ class Config:
self.libraries = []
try: libs = check_for_attribute(self.data, "libraries", throw=True)
except Failed as e: raise Failed(e)
requested_libraries = util.get_list(libraries_to_run) if libraries_to_run else None
for library_name, lib in libs.items():
if requested_libraries and library_name not in requested_libraries:
if self.requested_libraries and library_name not in self.requested_libraries:
continue
util.separator()
params = {}
logger.info("")
params["mapping_name"] = str(library_name)
if lib and "library_name" in lib and lib["library_name"]:
params["name"] = str(lib["library_name"])
logger.info(f"Connecting to {params['name']} ({library_name}) Library...")
display_name = f"{params['name']} ({params['mapping_name']})"
else:
params["name"] = str(library_name)
logger.info(f"Connecting to {params['name']} Library...")
params["mapping_name"] = str(library_name)
params["name"] = params["mapping_name"]
display_name = params["mapping_name"]
util.separator(f"{display_name} Configuration")
logger.info("")
logger.info(f"Connecting to {display_name} Library...")
params["asset_directory"] = check_for_attribute(lib, "asset_directory", parent="settings", var_type="list_path", default=self.general["asset_directory"], default_is_none=True, save=False)
if params["asset_directory"] is None:
@ -436,15 +447,19 @@ class Config:
params["plex"]["empty_trash"] = check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False)
params["plex"]["optimize"] = check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
library = PlexAPI(params, self.TMDb, self.TVDb)
logger.info(f"{params['name']} Library Connection Successful")
logger.info("")
logger.info(f"{display_name} Library Connection Successful")
except Failed as e:
util.print_multiline(e, error=True)
logger.info(f"{params['name']} Library Connection Failed")
logger.info(f"{display_name} Library Connection Failed")
continue
if self.general["radarr"]["url"] or (lib and "radarr" in lib):
logger.info("")
logger.info(f"Connecting to {params['name']} library's Radarr...")
util.separator("Radarr Configuration", space=False, border=False)
logger.info("")
logger.info(f"Connecting to {display_name} library's Radarr...")
logger.info("")
radarr_params = {}
try:
radarr_params["url"] = check_for_attribute(lib, "url", parent="radarr", default=self.general["radarr"]["url"], req_default=True, save=False)
@ -460,11 +475,15 @@ class Config:
library.Radarr = RadarrAPI(radarr_params)
except Failed as e:
util.print_multiline(e, error=True)
logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
logger.info("")
logger.info(f"{display_name} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
if self.general["sonarr"]["url"] or (lib and "sonarr" in lib):
logger.info("")
logger.info(f"Connecting to {params['name']} library's Sonarr...")
util.separator("Sonarr Configuration", space=False, border=False)
logger.info("")
logger.info(f"Connecting to {display_name} library's Sonarr...")
logger.info("")
sonarr_params = {}
try:
sonarr_params["url"] = check_for_attribute(lib, "url", parent="sonarr", default=self.general["sonarr"]["url"], req_default=True, save=False)
@ -486,11 +505,15 @@ class Config:
library.Sonarr = SonarrAPI(sonarr_params, library.Plex.language)
except Failed as e:
util.print_multiline(e, error=True)
logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
logger.info("")
logger.info(f"{display_name} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
if self.general["tautulli"]["url"] or (lib and "tautulli" in lib):
logger.info("")
logger.info(f"Connecting to {params['name']} library's Tautulli...")
util.separator("Tautulli Configuration", space=False, border=False)
logger.info("")
logger.info(f"Connecting to {display_name} library's Tautulli...")
logger.info("")
tautulli_params = {}
try:
tautulli_params["url"] = check_for_attribute(lib, "url", parent="tautulli", default=self.general["tautulli"]["url"], req_default=True, save=False)
@ -498,7 +521,8 @@ class Config:
library.Tautulli = TautulliAPI(tautulli_params)
except Failed as e:
util.print_multiline(e, error=True)
logger.info(f"{params['name']} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
logger.info("")
logger.info(f"{display_name} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
logger.info("")
self.libraries.append(library)

@ -214,7 +214,7 @@ class Convert:
return cache_id
imdb_id = None
try:
imdb_id = self.tmdb_to_imdb(self.tvdb_to_tmdb(tvdb_id), False)
imdb_id = self.tmdb_to_imdb(self.tvdb_to_tmdb(tvdb_id, fail=True), is_movie=False, fail=True)
except Failed:
if self.config.Trakt:
try:
@ -235,7 +235,7 @@ class Convert:
return cache_id
tvdb_id = None
try:
tvdb_id = self.tmdb_to_tvdb(self.imdb_to_tmdb(imdb_id, False))
tvdb_id = self.tmdb_to_tvdb(self.imdb_to_tmdb(imdb_id, is_movie=False, fail=True), fail=True)
except Failed:
if self.config.Trakt:
try:
@ -275,6 +275,7 @@ class Convert:
elif url_parsed.scheme == "imdb": imdb_id.append(url_parsed.netloc)
elif url_parsed.scheme == "tmdb": tmdb_id.append(int(url_parsed.netloc))
except requests.exceptions.ConnectionError:
library.query(item.refresh)
util.print_stacktrace()
raise Failed("No External GUIDs found")
if not tvdb_id and not imdb_id and not tmdb_id:
@ -343,7 +344,7 @@ class Convert:
def update_cache(cache_ids, id_type, guid_type):
if self.config.Cache:
cache_ids = util.compile_list(cache_ids)
util.print_end(length, f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}")
logger.info(util.adjust_space(length, f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}"))
self.config.Cache.update_guid_map(guid_type, item.guid, cache_ids, expired)
if tmdb_id and library.is_movie:
@ -358,8 +359,8 @@ class Convert:
else:
raise Failed(f"No ID to convert")
except Failed as e:
util.print_end(length, f"Mapping Error | {item.guid:<46} | {e} for {item.title}")
logger.info(util.adjust_space(length, f"Mapping Error | {item.guid:<46} | {e} for {item.title}"))
except BadRequest:
util.print_stacktrace()
util.print_end(length, f"Mapping Error: | {item.guid} for {item.title} not found")
logger.info(util.adjust_space(length, f"Mapping Error | {item.guid:<46} | Bad Request for {item.title}"))
return None, None

@ -91,36 +91,34 @@ class IMDbAPI:
def _request(self, url, header):
return html.fromstring(requests.get(url, headers=header).content)
def get_items(self, method, data, language):
def get_items(self, method, data, language, is_movie):
pretty = util.pretty_names[method] if method in util.pretty_names else method
logger.debug(f"Data: {data}")
show_ids = []
movie_ids = []
if method == "imdb_id":
logger.info(f"Processing {pretty}: {data}")
tmdb_id = self.config.Convert.imdb_to_tmdb(data)
tvdb_id = self.config.Convert.imdb_to_tvdb(data)
def run_convert(imdb_id):
tvdb_id = self.config.Convert.imdb_to_tvdb(imdb_id) if not is_movie else None
tmdb_id = self.config.Convert.imdb_to_tmdb(imdb_id) if tvdb_id is None else None
if not tmdb_id and not tvdb_id:
logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {data}")
logger.error(f"Convert Error: No {'' if is_movie else 'TVDb ID or '}TMDb ID found for IMDb: {imdb_id}")
if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id)
if method == "imdb_id":
logger.info(f"Processing {pretty}: {data}")
run_convert(data)
elif method == "imdb_list":
status = f"{data['limit']} Items at " if data['limit'] > 0 else ''
logger.info(f"Processing {pretty}: {status}{data['url']}")
imdb_ids = self._ids_from_url(data["url"], language, data["limit"])
total_ids = len(imdb_ids)
length = 0
for i, imdb_id in enumerate(imdb_ids, 1):
for i, imdb in enumerate(imdb_ids, 1):
length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}")
tmdb_id = self.config.Convert.imdb_to_tmdb(imdb_id)
tvdb_id = self.config.Convert.imdb_to_tvdb(imdb_id)
if not tmdb_id and not tvdb_id:
logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id)
util.print_end(length, f"Processed {total_ids} IMDb IDs")
run_convert(imdb)
logger.info(util.adjust_space(length, f"Processed {total_ids} IMDb IDs"))
else:
raise Failed(f"IMDb Error: Method {method} not supported")
logger.debug("")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids

@ -66,8 +66,9 @@ class LetterboxdAPI:
if self.config.Cache:
self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id)
movie_ids.append(tmdb_id)
util.print_end(length, f"Processed {total_items} TMDb IDs")
logger.info(util.adjust_space(length, f"Processed {total_items} TMDb IDs"))
else:
logger.error(f"Letterboxd Error: No List Items found in {data}")
logger.debug("")
logger.debug(f"TMDb IDs Found: {movie_ids}")
return movie_ids, []

@ -194,7 +194,6 @@ class MyAnimeListAPI:
return self._parse_request(url)
def get_items(self, method, data):
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
if method == "mal_id":
mal_ids = [data]
@ -214,6 +213,7 @@ class MyAnimeListAPI:
else:
raise Failed(f"MyAnimeList Error: Method {method} not supported")
movie_ids, show_ids = self.config.Convert.myanimelist_to_ids(mal_ids)
logger.debug("")
logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
logger.debug(f"Shows Found: {show_ids}")
logger.debug(f"Movies Found: {movie_ids}")

@ -66,11 +66,11 @@ class Metadata:
return self.collections
def update_metadata(self, TMDb, test):
if not self.metadata:
return None
logger.info("")
util.separator(f"Running Metadata")
util.separator("Running Metadata")
logger.info("")
if not self.metadata:
raise Failed("No metadata to edit")
for mapping_name, meta in self.metadata.items():
methods = {mm.lower(): mm for mm in meta}
if test and ("test" not in methods or meta[methods["test"]] is not True):
@ -119,9 +119,7 @@ class Metadata:
logger.error(f"Metadata Error: {attr} attribute is blank")
def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False):
if key is None:
key = f"{attr}s"
if movie_library and not self.library.is_movie:
if movie_library and not self.library.is_movie and (attr in alias or f"{attr}.sync" in alias or f"{attr}.remove" in alias):
logger.error(f"Metadata Error: {attr} attribute only works for movie libraries")
elif attr in alias and f"{attr}.sync" in alias:
logger.error(f"Metadata Error: Cannot use {attr} and {attr}.sync together")
@ -134,33 +132,13 @@ class Metadata:
elif f"{attr}.sync" in alias and group[alias[f"{attr}.sync"]] is None:
logger.error(f"Metadata Error: {attr}.sync attribute is blank")
elif attr in alias or f"{attr}.remove" in alias or f"{attr}.sync" in alias:
attr_key = attr if attr in alias else f"{attr}.sync"
item_tags = [item_tag.tag for item_tag in getattr(obj, key)]
input_tags = []
if group[alias[attr_key]]:
input_tags.extend(util.get_list(group[alias[attr_key]]))
add_tags = util.get_list(group[alias[attr]]) if attr in alias else []
if extra:
input_tags.extend(extra)
if f"{attr}.sync" in alias:
remove_method = getattr(obj, f"remove{attr.capitalize()}")
for tag in (t for t in item_tags if t not in input_tags):
updated = True
self.library.query_data(remove_method, tag)
logger.info(f"Detail: {attr.capitalize()} {tag} removed")
if attr in alias or f"{attr}.sync" in alias:
add_method = getattr(obj, f"add{attr.capitalize()}")
for tag in (t for t in input_tags if t not in item_tags):
updated = True
self.library.query_data(add_method, tag)
logger.info(f"Detail: {attr.capitalize()} {tag} added")
if f"{attr}.remove" in alias:
remove_method = getattr(obj, f"remove{attr.capitalize()}")
for tag in util.get_list(group[alias[f"{attr}.remove"]]):
if tag in item_tags:
self.library.query_data(remove_method, tag)
logger.info(f"Detail: {attr.capitalize()} {tag} removed")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
add_tags.extend(extra)
remove_tags = util.get_list(group[alias[f"{attr}.remove"]]) if f"{attr}.remove" in alias else None
sync_tags = util.get_list(group[alias[f"{attr}.sync"]]) if f"{attr}.sync" in alias else None
return self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags, key=key)
return False
def set_image(attr, obj, group, alias, poster=True, url=True):
if group[alias[attr]]:
@ -262,8 +240,7 @@ class Metadata:
edits = {}
add_edit("title", item.title, meta, methods, value=title)
add_edit("sort_title", item.titleSort, meta, methods, key="titleSort")
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods,
key="originallyAvailableAt", value=originally_available, var_type="date")
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date")
add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float")
add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float")
add_edit("content_rating", item.contentRating, meta, methods, key="contentRating")
@ -271,7 +248,8 @@ class Metadata:
add_edit("studio", item.studio, meta, methods, value=studio)
add_edit("tagline", item.tagline, meta, methods, value=tagline)
add_edit("summary", item.summary, meta, methods, value=summary)
self.library.edit_item(item, mapping_name, item_type, edits)
if self.library.edit_item(item, mapping_name, item_type, edits):
updated = True
advance_edits = {}
add_advanced_edit("episode_sorting", item, meta, methods, show_library=True)
@ -281,15 +259,23 @@ class Metadata:
add_advanced_edit("episode_ordering", item, meta, methods, show_library=True)
add_advanced_edit("metadata_language", item, meta, methods, new_agent=True)
add_advanced_edit("use_original_title", item, meta, methods, new_agent=True)
self.library.edit_item(item, mapping_name, item_type, advance_edits, advanced=True)
if self.library.edit_item(item, mapping_name, item_type, advance_edits, advanced=True):
updated = True
edit_tags("genre", item, meta, methods, extra=genres)
edit_tags("label", item, meta, methods)
edit_tags("collection", item, meta, methods)
edit_tags("country", item, meta, methods, key="countries", movie_library=True)
edit_tags("director", item, meta, methods, movie_library=True)
edit_tags("producer", item, meta, methods, movie_library=True)
edit_tags("writer", item, meta, methods, movie_library=True)
if edit_tags("genre", item, meta, methods, extra=genres):
updated = True
if edit_tags("label", item, meta, methods):
updated = True
if edit_tags("collection", item, meta, methods):
updated = True
if edit_tags("country", item, meta, methods, key="countries", movie_library=True):
updated = True
if edit_tags("director", item, meta, methods, movie_library=True):
updated = True
if edit_tags("producer", item, meta, methods, movie_library=True):
updated = True
if edit_tags("writer", item, meta, methods, movie_library=True):
updated = True
logger.info(f"{item_type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
@ -330,7 +316,8 @@ class Metadata:
edits = {}
add_edit("title", season.title, season_dict, season_methods, value=title)
add_edit("summary", season.summary, season_dict, season_methods)
self.library.edit_item(season, season_id, "Season", edits)
if self.library.edit_item(season, season_id, "Season", edits):
updated = True
set_images(season, season_dict, season_methods)
else:
logger.error(f"Metadata Error: Season: {season_id} invalid, it must be an integer")
@ -380,9 +367,12 @@ class Metadata:
add_edit("originally_available", str(episode.originallyAvailableAt)[:-9],
episode_dict, episode_methods, key="originallyAvailableAt")
add_edit("summary", episode.summary, episode_dict, episode_methods)
self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits)
edit_tags("director", episode, episode_dict, episode_methods)
edit_tags("writer", episode, episode_dict, episode_methods)
if self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits):
updated = True
if edit_tags("director", episode, episode_dict, episode_methods):
updated = True
if edit_tags("writer", episode, episode_dict, episode_methods):
updated = True
set_images(episode, episode_dict, episode_methods)
logger.info(f"Episode S{episode_id}E{season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
else:

@ -324,7 +324,9 @@ class PlexAPI:
self.Sonarr = None
self.Tautulli = None
self.name = params["name"]
self.mapping_name = util.validate_filename(params["mapping_name"])
self.mapping_name, output = util.validate_filename(params["mapping_name"])
if output:
logger.info(output)
self.missing_path = os.path.join(params["default_dir"], f"{self.name}_missing.yml")
self.metadata_path = params["metadata_path"]
self.asset_directory = params["asset_directory"]
@ -401,7 +403,7 @@ class PlexAPI:
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
def get_guids(self, item):
item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False,
includeChildren=False, includeConcerts=False, includeExternalMedia=False, inclueExtras=False,
includeChildren=False, includeConcerts=False, includeExternalMedia=False, includeExtras=False,
includeFields='', includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False,
includeOnDeck=False, includePopularLeaves=False, includePreferences=False, includeRelated=False,
includeRelatedCount=0, includeReviews=False, includeStations=False)
@ -456,8 +458,14 @@ class PlexAPI:
sort_type = movie_smart_sorts[sort] if self.is_movie else show_smart_sorts[sort]
return smart_type, f"?type={smart_type}&sort={sort_type}&label={labels[title]}"
def test_smart_filter(self, uri_args):
logger.debug(f"Smart Collection Test: {uri_args}")
test_items = self.get_filter_items(uri_args)
if len(test_items) < 1:
raise Failed(f"Plex Error: No items for smart filter: {uri_args}")
def create_smart_collection(self, title, smart_type, uri_args):
logger.debug(f"Smart Collection Created: {uri_args}")
self.test_smart_filter(uri_args)
args = {
"type": smart_type,
"title": title,
@ -476,6 +484,7 @@ class PlexAPI:
return f"server://{self.PlexServer.machineIdentifier}/com.plexapp.plugins.library/library/sections/{self.Plex.key}/all{uri_args}"
def update_smart_collection(self, collection, uri_args):
self.test_smart_filter(uri_args)
self._query(f"/library/collections/{collection.ratingKey}/items{utils.joinArgs({'uri': self.build_smart_filter(uri_args)})}", put=True)
def smart(self, collection):
@ -521,7 +530,6 @@ class PlexAPI:
return valid_collections
def get_items(self, method, data):
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
media_type = "Movie" if self.is_movie else "Show"
items = []
@ -615,7 +623,7 @@ class PlexAPI:
break
if add_item:
items.append(item)
util.print_end(length, f"Processed {len(all_items)} {'Movies' if self.is_movie else 'Shows'}")
logger.info(util.adjust_space(length, f"Processed {len(all_items)} {'Movies' if self.is_movie else 'Shows'}"))
else:
raise Failed(f"Plex Error: Method {method} not supported")
if len(items) > 0:
@ -643,13 +651,16 @@ class PlexAPI:
return self.get_labeled_items(collection.title if isinstance(collection, Collections) else str(collection))
elif isinstance(collection, Collections):
if self.smart(collection):
key = f"/library/sections/{self.Plex.key}/all{self.smart_filter(collection)}"
return self.Plex._search(key, None, 0, plexapi.X_PLEX_CONTAINER_SIZE)
return self.get_filter_items(self.smart_filter(collection))
else:
return self.query(collection.items)
else:
return []
def get_filter_items(self, uri_args):
key = f"/library/sections/{self.Plex.key}/all{uri_args}"
return self.Plex._search(key, None, 0, plexapi.X_PLEX_CONTAINER_SIZE)
def get_collection_name_and_items(self, collection, smart_label_collection):
name = collection.title if isinstance(collection, Collections) else str(collection)
return name, self.get_collection_items(collection, smart_label_collection)
@ -668,9 +679,38 @@ class PlexAPI:
if advanced and "languageOverride" in edits:
self.query(item.refresh)
logger.info(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Successful")
return True
except BadRequest:
util.print_stacktrace()
logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed")
return False
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None, key=None):
updated = False
if key is None:
key = f"{attr}s"
if add_tags or remove_tags or sync_tags:
item_tags = [item_tag.tag for item_tag in getattr(obj, key)]
input_tags = []
if add_tags:
input_tags.extend(add_tags)
if sync_tags:
input_tags.extend(sync_tags)
if sync_tags or remove_tags:
remove_method = getattr(obj, f"remove{attr.capitalize()}")
for tag in item_tags:
if (sync_tags and tag not in sync_tags) or (remove_tags and tag in remove_tags):
updated = True
self.query_data(remove_method, tag)
logger.info(f"Detail: {attr.capitalize()} {tag} removed")
if input_tags:
add_method = getattr(obj, f"add{attr.capitalize()}")
for tag in input_tags:
if tag not in item_tags:
updated = True
self.query_data(add_method, tag)
logger.info(f"Detail: {attr.capitalize()} {tag} added")
return updated
def update_item_from_assets(self, item, collection_mode=False, upload=True, dirs=None, name=None):
if dirs is None:

@ -66,6 +66,8 @@ class RadarrAPI:
raise Failed(f"Sonarr Error: TMDb ID: {tmdb_id} not found")
def add_tmdb(self, tmdb_ids, **options):
logger.info("")
util.separator(f"Adding to Radarr", space=False, border=False)
logger.info("")
logger.debug(f"TMDb IDs: {tmdb_ids}")
tag_nums = []

@ -86,6 +86,8 @@ class SonarrAPI:
raise Failed(f"Sonarr Error: TVDb ID: {tvdb_id} not found")
def add_tvdb(self, tvdb_ids, **options):
logger.info("")
util.separator(f"Adding to Sonarr", space=False, border=False)
logger.info("")
logger.debug(f"TVDb IDs: {tvdb_ids}")
tag_nums = []

@ -292,7 +292,6 @@ class TMDbAPI:
return tmdb_id
def get_items(self, method, data, is_movie):
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
media_type = "Movie" if is_movie else "Show"
movie_ids = []
@ -362,6 +361,7 @@ class TMDbAPI:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})")
if not is_movie and len(show_ids) > 0:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})")
logger.debug("")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids

@ -157,7 +157,6 @@ class TraktAPI:
return trakt_values
def get_items(self, method, data, is_movie):
logger.debug(f"Data: {data}")
pretty = self.aliases[method] if method in self.aliases else method
media_type = "Movie" if is_movie else "Show"
if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:
@ -181,6 +180,7 @@ class TraktAPI:
elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids:
show_ids.append(int(trakt_item.show.pk[1]))
logger.debug(f"Trakt {media_type} Found: {trakt_items}")
logger.debug("")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids

@ -163,6 +163,7 @@ class TVDbAPI:
show_ids.extend(tvdb_ids)
else:
raise Failed(f"TVDb Error: Method {method} not supported")
logger.debug("")
logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids

@ -222,7 +222,8 @@ def compile_list(data):
return data
def get_list(data, lower=False, split=True, int_list=False):
if isinstance(data, list): return data
if data is None: return None
elif isinstance(data, list): return data
elif isinstance(data, dict): return [data]
elif split is False: return [str(data)]
elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
@ -352,28 +353,35 @@ def regex_first_int(data, id_type, default=None):
else:
raise Failed(f"Regex Error: Failed to parse {id_type} from {data}")
def centered(text, do_print=True):
def centered(text, sep=" "):
if len(text) > screen_width - 2:
raise Failed("text must be shorter then screen_width")
space = screen_width - len(text) - 2
text = f" {text} "
if space % 2 == 1:
text += " "
text += sep
space -= 1
side = int(space / 2)
final_text = f"{' ' * side}{text}{' ' * side}"
if do_print:
logger.info(final_text)
side = int(space / 2) - 1
final_text = f"{sep * side}{text}{sep * side}"
return final_text
def separator(text=None):
def separator(text=None, space=True, border=True, debug=False):
sep = " " if space else separating_character
for handler in logger.handlers:
apply_formatter(handler, border=False)
logger.info(f"|{separating_character * screen_width}|")
border_text = f"|{separating_character * screen_width}|"
if border and debug:
logger.debug(border_text)
elif border:
logger.info(border_text)
if text:
text_list = text.split("\n")
for t in text_list:
logger.info(f"| {centered(t, do_print=False)} |")
logger.info(f"|{separating_character * screen_width}|")
logger.info(f"|{sep}{centered(t, sep=sep)}{sep}|")
if border and debug:
logger.debug(border_text)
elif border:
logger.info(border_text)
for handler in logger.handlers:
apply_formatter(handler)
@ -387,14 +395,12 @@ def print_return(length, text):
print(adjust_space(length, f"| {text}"), end="\r")
return len(text) + 2
def print_end(length, text=None):
if text: logger.info(adjust_space(length, text))
else: print(adjust_space(length, " "), end="\r")
def print_end(length):
print(adjust_space(length, " "), end="\r")
def validate_filename(filename):
if is_valid_filename(filename):
return filename
return filename, None
else:
mapping_name = sanitize_filename(filename)
logger.info(f"Folder Name: {filename} is invalid using {mapping_name}")
return mapping_name
return mapping_name, f"Log Folder Name: {filename} is invalid using {mapping_name}"

@ -7,13 +7,17 @@ try:
from modules.config import Config
from modules.util import Failed
except ModuleNotFoundError:
print("Error: Requirements are not installed")
print("Requirements Error: Requirements are not installed")
sys.exit(0)
if sys.version_info[0] != 3 or sys.version_info[1] < 6:
print("Version Error: Version: %s.%s.%s incompatible please use Python 3.6+" % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
sys.exit(0)
parser = argparse.ArgumentParser()
parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
parser.add_argument("-t", "--time", dest="time", help="Times to update each day use format HH:MM (Default: 03:00) (comma-separated list)", default="03:00", type=str)
parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str)
parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False)
@ -21,6 +25,7 @@ parser.add_argument("-co", "--collection-only", "--collections-only", dest="coll
parser.add_argument("-lo", "--library-only", "--libraries-only", dest="library_only", help="Run only library operations", action="store_true", default=False)
parser.add_argument("-rc", "-cl", "--collection", "--collections", "--run-collection", "--run-collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-rl", "-l", "--library", "--libraries", "--run-library", "--run-libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-nc", "--no-countdown", dest="no_countdown", help="Run without displaying the countdown", action="store_true", default=False)
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
args = parser.parse_args()
@ -40,13 +45,15 @@ def check_bool(env_str, default):
test = check_bool("PMM_TEST", args.test)
debug = check_bool("PMM_DEBUG", args.debug)
run = check_bool("PMM_RUN", args.run)
no_countdown = check_bool("PMM_NO_COUNTDOWN", args.no_countdown)
library_only = check_bool("PMM_LIBRARIES_ONLY", args.library_only)
collection_only = check_bool("PMM_COLLECTIONS_ONLY", args.collection_only)
collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections
libraries = os.environ.get("PMM_LIBRARIES") if os.environ.get("PMM_LIBRARIES") else args.libraries
resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume
time_to_run = os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time
times_to_run = util.get_list(os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time)
for time_to_run in times_to_run:
if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", time_to_run):
raise util.Failed(f"Argument Error: time argument invalid: {time_to_run} must be in the HH:MM format")
@ -81,7 +88,7 @@ logger.addHandler(cmd_handler)
sys.excepthook = util.my_except_hook
def start(config_path, is_test, daily, requested_collections, requested_libraries, resume_from):
def start(config_path, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
file_logger = os.path.join(default_dir, "logs", "meta.log")
should_roll_over = os.path.isfile(file_logger)
file_handler = logging.handlers.RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
@ -91,33 +98,36 @@ def start(config_path, is_test, daily, requested_collections, requested_librarie
file_handler.doRollover()
logger.addHandler(file_handler)
util.separator()
util.centered(" ")
util.centered(" ____ _ __ __ _ __ __ ")
util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ ")
util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|")
util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ")
util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ")
util.centered(" |___/ ")
util.centered(" Version: 1.9.2 ")
util.separator()
if daily: start_type = "Daily "
logger.info(util.centered(" "))
logger.info(util.centered(" ____ _ __ __ _ __ __ "))
logger.info(util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ "))
logger.info(util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|"))
logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
logger.info(util.centered(" |___/ "))
logger.info(util.centered(" Version: 1.9.3 "))
if time_scheduled: start_type = f"{time_scheduled} "
elif is_test: start_type = "Test "
elif requested_collections: start_type = "Collections "
elif requested_libraries: start_type = "Libraries "
else: start_type = ""
start_time = datetime.now()
if time_scheduled is None:
time_scheduled = start_time.strftime("%H:%M")
util.separator(f"Starting {start_type}Run")
try:
config = Config(default_dir, config_path, requested_libraries)
update_libraries(config, is_test, requested_collections, resume_from)
config = Config(default_dir, config_path=config_path, is_test=is_test,
time_scheduled=time_scheduled, requested_collections=requested_collections,
requested_libraries=requested_libraries, resume_from=resume_from)
update_libraries(config)
except Exception as e:
util.print_stacktrace()
logger.critical(e)
logger.info("")
util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
logger.addHandler(file_handler)
logger.removeHandler(file_handler)
def update_libraries(config, is_test, requested_collections, resume_from):
def update_libraries(config):
for library in config.libraries:
os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True)
col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log")
@ -132,31 +142,34 @@ def update_libraries(config, is_test, requested_collections, resume_from):
logger.info("")
util.separator(f"{library.name} Library")
logger.info("")
util.separator(f"Mapping {library.name} Library")
util.separator(f"Mapping {library.name} Library", space=False, border=False)
logger.info("")
movie_map, show_map = map_guids(config, library)
if not is_test and not resume_from and not collection_only and library.mass_update:
if not config.test_mode and not config.resume_from and not collection_only and library.mass_update:
mass_metadata(config, library, movie_map, show_map)
for metadata in library.metadata_files:
logger.info("")
util.separator(f"Running Metadata File\n{metadata.path}")
if not is_test and not resume_from and not collection_only:
if not config.test_mode and not config.resume_from and not collection_only:
try:
metadata.update_metadata(config.TMDb, is_test)
metadata.update_metadata(config.TMDb, config.test_mode)
except Failed as e:
logger.error(e)
collections_to_run = metadata.get_collections(config.requested_collections)
if config.resume_from and config.resume_from not in collections_to_run:
logger.info("")
util.separator(f"{'Test ' if is_test else ''}Collections")
collections_to_run = metadata.get_collections(requested_collections)
if resume_from and resume_from not in collections_to_run:
logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}")
logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
continue
if collections_to_run and not library_only:
logger.info("")
util.separator(f"{'Test ' if config.test_mode else ''}Collections")
logger.removeHandler(library_handler)
resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map)
run_collection(config, library, metadata, collections_to_run, movie_map, show_map)
logger.addHandler(library_handler)
if not is_test and not requested_collections:
if not config.test_mode and not config.requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
logger.info("")
util.separator(f"Other {library.name} Library Operations")
unmanaged_collections = []
for col in library.get_all_collections():
if col.title not in library.collections:
@ -164,15 +177,16 @@ def update_libraries(config, is_test, requested_collections, resume_from):
if library.show_unmanaged and not library_only:
logger.info("")
util.separator(f"Unmanaged Collections in {library.name} Library")
util.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False)
logger.info("")
for col in unmanaged_collections:
logger.info(col.title)
logger.info("")
logger.info(f"{len(unmanaged_collections)} Unmanaged Collections")
if library.assets_for_all and not collection_only:
logger.info("")
util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library", space=False, border=False)
logger.info("")
for col in unmanaged_collections:
library.update_item_from_assets(col, collection_mode=True)
@ -235,8 +249,11 @@ def map_guids(config, library):
movie_map = {}
show_map = {}
length = 0
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info(f"Loading {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
items = library.Plex.all()
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
id_type, main_id = config.Convert.get_id(item, library, length)
@ -251,7 +268,8 @@ def map_guids(config, library):
for m in main_id:
if m in show_map: show_map[m].append(item.ratingKey)
else: show_map[m] = [item.ratingKey]
util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}")
logger.info("")
logger.info(util.adjust_space(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}"))
return movie_map, show_map
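Editor's note: map_guids returns two dictionaries keyed by external IDs, where each value is a list of Plex ratingKeys, because several library items can resolve to the same ID. A small illustrative sketch of that accumulation follows; the IDs and ratingKeys are hypothetical sample values, not taken from the diff.

    # Hypothetical (main_id, ratingKey) pairs as map_guids would produce them.
    pairs = [(603, 101), (81189, 202), (603, 103)]

    movie_map = {}
    for main_id, rating_key in pairs:
        # Items sharing an external ID accumulate their ratingKeys in one list.
        if main_id in movie_map:
            movie_map[main_id].append(rating_key)
        else:
            movie_map[main_id] = [rating_key]

    # movie_map -> {603: [101, 103], 81189: [202]}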
def mass_metadata(config, library, movie_map, show_map):
@ -298,9 +316,9 @@ def mass_metadata(config, library, movie_map, show_map):
try:
tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
except Failed as e:
util.print_end(length, str(e))
logger.info(util.adjust_space(length, str(e)))
else:
util.print_end(length, f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}")
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}"))
omdb_item = None
if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]:
@ -313,9 +331,9 @@ def mass_metadata(config, library, movie_map, show_map):
try:
omdb_item = config.OMDb.get_omdb(imdb_id)
except Failed as e:
util.print_end(length, str(e))
logger.info(util.adjust_space(length, str(e)))
else:
util.print_end(length, f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}"))
if not tmdb_item and not omdb_item:
continue
@ -337,7 +355,7 @@ def mass_metadata(config, library, movie_map, show_map):
library.query_data(item.addGenre, genre)
display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
if len(display_str) > 0:
util.print_end(length, f"{item.title[:25]:<25} | Genres | {display_str}")
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | Genres | {display_str}"))
except Failed:
pass
if library.mass_audience_rating_update or library.mass_critic_rating_update:
@ -349,14 +367,14 @@ def mass_metadata(config, library, movie_map, show_map):
else:
raise Failed
if new_rating is None:
util.print_end(length, f"{item.title[:25]:<25} | No Rating Found")
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | No Rating Found"))
else:
if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating):
library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
util.print_end(length, f"{item.title[:25]:<25} | Audience Rating | {new_rating}")
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | Audience Rating | {new_rating}"))
if library.mass_critic_rating_update and str(item.rating) != str(new_rating):
library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
util.print_end(length, f"{item.title[:25]:<25} | Critic Rating | {new_rating}")
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))
except Failed:
pass
@ -372,11 +390,11 @@ def mass_metadata(config, library, movie_map, show_map):
except Failed as e:
logger.error(e)
def run_collection(config, library, metadata, requested_collections, is_test, resume_from, movie_map, show_map):
def run_collection(config, library, metadata, requested_collections, movie_map, show_map):
logger.info("")
for mapping_name, collection_attrs in requested_collections.items():
if is_test and ("test" not in collection_attrs or collection_attrs["test"] is not True):
collection_start = datetime.now()
if config.test_mode and ("test" not in collection_attrs or collection_attrs["test"] is not True):
no_template_test = True
if "template" in collection_attrs and collection_attrs["template"]:
for data_template in util.get_list(collection_attrs["template"], split=False):
@ -391,17 +409,17 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
if no_template_test:
continue
if resume_from and resume_from != mapping_name:
if config.resume_from and config.resume_from != mapping_name:
continue
elif resume_from == mapping_name:
resume_from = None
elif config.resume_from == mapping_name:
config.resume_from = None
logger.info("")
util.separator(f"Resuming Collections")
if "name_mapping" in collection_attrs and collection_attrs["name_mapping"]:
collection_log_name = util.validate_filename(collection_attrs["name_mapping"])
collection_log_name, output_str = util.validate_filename(collection_attrs["name_mapping"])
else:
collection_log_name = util.validate_filename(mapping_name)
collection_log_name, output_str = util.validate_filename(mapping_name)
collection_log_folder = os.path.join(default_dir, "logs", library.mapping_name, "collections", collection_log_name)
os.makedirs(collection_log_folder, exist_ok=True)
col_file_logger = os.path.join(collection_log_folder, f"collection.log")
@ -415,12 +433,23 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
try:
util.separator(f"{mapping_name} Collection")
logger.info("")
if output_str:
logger.info(output_str)
logger.info("")
util.separator(f"Validating {mapping_name} Attributes", space=False, border=False)
builder = CollectionBuilder(config, library, metadata, mapping_name, collection_attrs)
logger.info("")
util.separator(f"Building {mapping_name} Collection", space=False, border=False)
if len(builder.schedule) > 0:
util.print_multiline(builder.schedule, info=True)
if len(builder.smart_filter_details) > 0:
util.print_multiline(builder.smart_filter_details, info=True)
if not builder.smart_url:
logger.info("")
logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
@ -431,16 +460,24 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
logger.info(f"Collection Filter {filter_key}: {filter_value}")
builder.collect_rating_keys(movie_map, show_map)
logger.info("")
if len(builder.rating_keys) > 0 and builder.build_collection:
logger.info("")
util.separator(f"Adding to {mapping_name} Collection", space=False, border=False)
logger.info("")
builder.add_to_collection(movie_map)
if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0:
logger.info("")
util.separator(f"Missing from Library", space=False, border=False)
logger.info("")
builder.run_missing()
if builder.sync and len(builder.rating_keys) > 0 and builder.build_collection:
builder.sync_collection()
logger.info("")
if builder.build_collection:
logger.info("")
util.separator(f"Updating Details of {mapping_name} Collection", space=False, border=False)
logger.info("")
builder.update_details()
if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):
@ -453,27 +490,34 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
util.print_stacktrace()
logger.error(f"Unknown Error: {e}")
logger.info("")
util.separator(f"Finished {mapping_name} Collection\nCollection Run Time: {str(datetime.now() - collection_start).split('.')[0]}")
logger.removeHandler(collection_handler)
return resume_from
try:
if run or test or collections or libraries or resume:
start(config_file, test, False, collections, libraries, resume)
start(config_file, is_test=test, requested_collections=collections, requested_libraries=libraries, resume_from=resume)
else:
time_length = 0
schedule.every().day.at(time_to_run).do(start, config_file, False, True, None, None, None)
for time_to_run in times_to_run:
schedule.every().day.at(time_to_run).do(start, config_file, time_scheduled=time_to_run)
while True:
schedule.run_pending()
if not no_countdown:
current = datetime.now().strftime("%H:%M")
seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
seconds = None
og_time_str = ""
for time_to_run in times_to_run:
new_seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
if new_seconds < 0:
new_seconds += 86400
if (seconds is None or new_seconds < seconds) and new_seconds > 0:
seconds = new_seconds
og_time_str = time_to_run
hours = int(seconds // 3600)
if hours < 0:
hours += 24
minutes = int((seconds % 3600) // 60)
time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the daily run at {time_to_run}")
time.sleep(1)
time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the next run at {og_time_str} {times_to_run}")
time.sleep(60)
except KeyboardInterrupt:
util.separator("Exiting Plex Meta Manager")