[20] move operations

pull/847/head
meisnate12 3 years ago
parent be22731d02
commit 8591483eb0

@ -1 +1 @@
1.16.3-develop19
1.16.3-develop20

@ -207,11 +207,10 @@ music_attributes = [
] + details + summary_details + poster_details + background_details
class CollectionBuilder:
def __init__(self, config, metadata, name, no_missing, data, library=None):
def __init__(self, config, metadata, name, data, library=None, overlay=None):
self.config = config
self.metadata = metadata
self.mapping_name = name
self.no_missing = no_missing
self.data = data
self.library = library
self.libraries = []
@ -720,9 +719,9 @@ class CollectionBuilder:
self.details["collection_mode"] = "hide"
self.sync = True
self.do_missing = not self.no_missing and (self.details["show_missing"] or self.details["save_missing"]
or (self.library.Radarr and self.radarr_details["add_missing"])
or (self.library.Sonarr and self.sonarr_details["add_missing"]))
self.do_missing = not self.config.no_missing and (self.details["show_missing"] or self.details["save_missing"]
or (self.library.Radarr and self.radarr_details["add_missing"])
or (self.library.Sonarr and self.sonarr_details["add_missing"]))
if self.build_collection:
try:

@ -67,6 +67,7 @@ class ConfigFile:
self.read_only = attrs["read_only"] if "read_only" in attrs else False
self.version = attrs["version"] if "version" in attrs else None
self.latest_version = attrs["latest_version"] if "latest_version" in attrs else None
self.no_missing = attrs["no_missing"] if "no_missing" in attrs else None
self.test_mode = attrs["test"] if "test" in attrs else False
self.trace_mode = attrs["trace"] if "trace" in attrs else False
self.delete_collections = attrs["delete"] if "delete" in attrs else False
@ -368,8 +369,7 @@ class ConfigFile:
logger.stacktrace()
logger.error(f"Webhooks Error: {e}")
self.errors = []
logger.save_errors = True
logger.separator()
try:
@ -398,7 +398,6 @@ class ConfigFile:
"expiration": check_for_attribute(self.data, "cache_expiration", parent="omdb", var_type="int", default=60)
})
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"OMDb Connection {'Failed' if self.OMDb is None else 'Successful'}")
else:
@ -416,7 +415,6 @@ class ConfigFile:
)
logger.info("Mdblist Connection Successful")
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info("Mdblist Connection Failed")
else:
@ -436,7 +434,6 @@ class ConfigFile:
"authorization": self.data["trakt"]["authorization"] if "authorization" in self.data["trakt"] else None
})
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"Trakt Connection {'Failed' if self.Trakt is None else 'Successful'}")
else:
@ -455,7 +452,6 @@ class ConfigFile:
"authorization": self.data["mal"]["authorization"] if "authorization" in self.data["mal"] else None
})
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}")
else:
@ -471,7 +467,6 @@ class ConfigFile:
check_for_attribute(self.data, "password", parent="anidb", throw=True)
)
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"AniDB Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}")
@ -487,7 +482,10 @@ class ConfigFile:
default_playlist_file = os.path.abspath(os.path.join(self.default_dir, "playlists.yml"))
logger.warning(f"Config Warning: playlist_files attribute is blank using default: {default_playlist_file}")
paths_to_check = [default_playlist_file]
for file_type, playlist_file, temp_vars in util.load_yaml_files(paths_to_check):
files = util.load_yaml_files(paths_to_check)
if not files:
raise Failed("Config Error: No Paths Found for playlist_files")
for file_type, playlist_file, temp_vars in files:
try:
playlist_obj = PlaylistFile(self, file_type, playlist_file, temp_vars)
self.playlist_names.extend([p for p in playlist_obj.playlists])
@ -566,14 +564,12 @@ class ConfigFile:
params = {
"mapping_name": str(library_name),
"name": str(lib["library_name"]) if lib and "library_name" in lib and lib["library_name"] else str(library_name),
"tmdb_collections": None,
"genre_mapper": None,
"content_rating_mapper": None,
"radarr_remove_by_tag": None,
"sonarr_remove_by_tag": None,
"mass_collection_mode": None,
"metadata_backup": None,
"genre_collections": None,
"update_blank_track_titles": None,
"mass_content_rating_update": None,
"mass_originally_available_update": None,
@ -689,28 +685,6 @@ class ConfigFile:
params["metadata_backup"]["exclude"] = check_for_attribute(lib["operations"]["metadata_backup"], "exclude", var_type="comma_list", default_is_none=True, save=False)
params["metadata_backup"]["sync_tags"] = check_for_attribute(lib["operations"]["metadata_backup"], "sync_tags", var_type="bool", default=False, save=False)
params["metadata_backup"]["add_blank_entries"] = check_for_attribute(lib["operations"]["metadata_backup"], "add_blank_entries", var_type="bool", default=True, save=False)
if "tmdb_collections" in lib["operations"]:
params["tmdb_collections"] = {
"exclude_ids": [],
"remove_suffix": [],
"dictionary_variables": {},
"template": {"tmdb_collection_details": "<<collection_id>>"}
}
if lib["operations"]["tmdb_collections"] and isinstance(lib["operations"]["tmdb_collections"], dict):
params["tmdb_collections"]["exclude_ids"] = check_for_attribute(lib["operations"]["tmdb_collections"], "exclude_ids", var_type="int_list", default_is_none=True, save=False)
params["tmdb_collections"]["remove_suffix"] = check_for_attribute(lib["operations"]["tmdb_collections"], "remove_suffix", var_type="comma_list", default_is_none=True, save=False)
if "dictionary_variables" in lib["operations"]["tmdb_collections"] and lib["operations"]["tmdb_collections"]["dictionary_variables"] and isinstance(lib["operations"]["tmdb_collections"]["dictionary_variables"], dict):
for key, value in lib["operations"]["tmdb_collections"]["dictionary_variables"].items():
if isinstance(value, dict):
params["tmdb_collections"]["dictionary_variables"][key] = value
else:
logger.warning(f"Config Warning: tmdb_collections dictionary_variables {key} must be a dictionary")
if "template" in lib["operations"]["tmdb_collections"] and lib["operations"]["tmdb_collections"]["template"] and isinstance(lib["operations"]["tmdb_collections"]["template"], dict):
params["tmdb_collections"]["template"] = lib["operations"]["tmdb_collections"]["template"]
else:
logger.warning("Config Warning: Using default template for tmdb_collections")
else:
logger.error("Config Error: tmdb_collections blank using default settings")
if "genre_mapper" in lib["operations"]:
if lib["operations"]["genre_mapper"] and isinstance(lib["operations"]["genre_mapper"], dict):
params["genre_mapper"] = lib["operations"]["genre_mapper"]
@ -731,40 +705,15 @@ class ConfigFile:
params["content_rating_mapper"][old_content] = new_content if new_content else None
else:
logger.error("Config Error: content_rating_mapper is blank")
if "genre_collections" in lib["operations"]:
params["genre_collections"] = {
"exclude_genres": [],
"dictionary_variables": {},
"title_format": "Top <<genre>> <<library_type>>s",
"template": {"smart_filter": {"limit": 50, "sort_by": "critic_rating.desc", "all": {"genre": "<<genre>>"}}}
}
if lib["operations"]["genre_collections"] and isinstance(lib["operations"]["genre_collections"], dict):
params["genre_collections"]["exclude_genres"] = check_for_attribute(lib["operations"]["genre_collections"], "exclude_genres", var_type="comma_list", default_is_none=True, save=False)
title_format = check_for_attribute(lib["operations"]["genre_collections"], "title_format", default=params["genre_collections"]["title_format"], save=False)
if "<<genre>>" in title_format:
params["genre_collections"]["title_format"] = title_format
else:
logger.error(f"Config Error: using default title_format. <<genre>> not in title_format attribute: {title_format} ")
if "dictionary_variables" in lib["operations"]["genre_collections"] and lib["operations"]["genre_collections"]["dictionary_variables"] and isinstance(lib["operations"]["genre_collections"]["dictionary_variables"], dict):
for key, value in lib["operations"]["genre_collections"]["dictionary_variables"].items():
if isinstance(value, dict):
params["genre_collections"]["dictionary_variables"][key] = value
else:
logger.warning(f"Config Warning: genre_collections dictionary_variables {key} must be a dictionary")
if "template" in lib["operations"]["genre_collections"] and lib["operations"]["genre_collections"]["template"] and isinstance(lib["operations"]["genre_collections"]["template"], dict):
params["genre_collections"]["template"] = lib["operations"]["genre_collections"]["template"]
else:
logger.warning("Config Warning: Using default template for genre_collections")
else:
logger.error("Config Error: genre_collections blank using default settings")
for atr in ["tmdb_collections", "genre_collections"]:
if atr in lib["operations"]:
logger.error(f"Deprecated Error: {atr} has been replaced with dynamic collections")
else:
logger.error("Config Error: operations must be a dictionary")
def error_check(attr, service):
err = f"Config Error: {attr} cannot be {params[attr]} without a successful {service} Connection"
params[attr] = None
self.errors.append(err)
logger.error(err)
logger.error(f"Config Error: {attr} cannot be {params[attr]} without a successful {service} Connection")
for mass_key in ["mass_genre_update", "mass_audience_rating_update", "mass_critic_rating_update", "mass_content_rating_update", "mass_originally_available_update"]:
if params[mass_key] == "omdb" and self.OMDb is None:
@ -779,7 +728,10 @@ class ConfigFile:
if lib and "metadata_path" in lib:
if not lib["metadata_path"]:
raise Failed("Config Error: metadata_path attribute is blank")
params["metadata_path"] = util.load_yaml_files(lib["metadata_path"])
files = util.load_yaml_files(lib["metadata_path"])
if not files:
raise Failed("Config Error: No Paths Found for metadata_path")
params["metadata_path"] = files
else:
params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"), {})]
params["default_dir"] = default_dir
@ -808,22 +760,18 @@ class ConfigFile:
library = Plex(self, params)
logger.info(f"{display_name} Library Connection Successful")
except Failed as e:
self.errors.append(e)
logger.stacktrace()
logger.error(e)
logger.info("")
logger.info(f"{display_name} Library Connection Failed")
continue
try:
logger.info("")
logger.separator("Scanning Metadata Files", space=False, border=False)
library.scan_metadata_files()
except Failed as e:
self.errors.append(e)
logger.stacktrace()
logger.error(e)
logger.info("")
logger.separator("Scanning Metadata Files", space=False, border=False)
library.scan_files()
if not library.metadata_files and not library.library_operation and not self.playlist_files:
logger.info("")
logger.info(f"{display_name} Metadata Failed to Load")
logger.error("Config Error: No valid metadata files, playlist files, or library operations found")
continue
if self.general["radarr"]["url"] or (lib and "radarr" in lib):
@ -848,7 +796,6 @@ class ConfigFile:
"plex_path": check_for_attribute(lib, "plex_path", parent="radarr", default=self.general["radarr"]["plex_path"], default_is_none=True, save=False)
})
except Failed as e:
self.errors.append(e)
logger.stacktrace()
logger.error(e)
logger.info("")
@ -879,7 +826,6 @@ class ConfigFile:
"plex_path": check_for_attribute(lib, "plex_path", parent="sonarr", default=self.general["sonarr"]["plex_path"], default_is_none=True, save=False)
})
except Failed as e:
self.errors.append(e)
logger.stacktrace()
logger.error(e)
logger.info("")
@ -897,7 +843,6 @@ class ConfigFile:
"apikey": check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
})
except Failed as e:
self.errors.append(e)
logger.stacktrace()
logger.error(e)
logger.info("")
@ -919,11 +864,13 @@ class ConfigFile:
logger.separator()
if self.errors:
self.notify(self.errors)
if logger.saved_errors:
self.notify(logger.saved_errors)
except Exception as e:
logger.stacktrace()
self.notify(e)
self.notify(logger.saved_errors + [e])
logger.save_errors = False
logger.clear_errors()
raise
def notify(self, text, server=None, library=None, collection=None, playlist=None, critical=True):

@ -15,6 +15,7 @@ class Library(ABC):
self.Sonarr = None
self.Tautulli = None
self.Webhooks = None
self.Operations = None
self.Notifiarr = None
self.collections = []
self.metadatas = []
@ -82,8 +83,6 @@ class Library(ABC):
self.remove_title_parentheses = params["remove_title_parentheses"]
self.mass_collection_mode = params["mass_collection_mode"]
self.metadata_backup = params["metadata_backup"]
self.tmdb_collections = params["tmdb_collections"]
self.genre_collections = params["genre_collections"]
self.genre_mapper = params["genre_mapper"]
self.content_rating_mapper = params["content_rating_mapper"]
self.error_webhooks = params["error_webhooks"]
@ -97,9 +96,9 @@ class Library(ABC):
self.items_library_operation = True if self.assets_for_all or self.mass_genre_update or self.mass_audience_rating_update or self.remove_title_parentheses \
or self.mass_critic_rating_update or self.mass_content_rating_update or self.mass_originally_available_update or self.mass_imdb_parental_labels or self.mass_trakt_rating_update \
or self.genre_mapper or self.content_rating_mapper or self.tmdb_collections or self.radarr_add_all_existing or self.sonarr_add_all_existing else False
or self.genre_mapper or self.content_rating_mapper or self.radarr_add_all_existing or self.sonarr_add_all_existing else False
self.library_operation = True if self.items_library_operation or self.delete_unmanaged_collections or self.delete_collections_with_less \
or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.mass_collection_mode or self.genre_collections \
or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.mass_collection_mode \
or self.show_unmanaged or self.metadata_backup or self.update_blank_track_titles else False
self.meta_operations = [self.mass_genre_update, self.mass_audience_rating_update, self.mass_critic_rating_update, self.mass_content_rating_update, self.mass_originally_available_update]
@ -112,21 +111,8 @@ class Library(ABC):
logger.info("")
logger.info(output)
def scan_metadata_files(self):
metadata = []
def scan_files(self):
for file_type, metadata_file, temp_vars in self.metadata_path:
if file_type == "Folder":
if os.path.isdir(metadata_file):
yml_files = util.glob_filter(os.path.join(metadata_file, "*.yml"))
if yml_files:
metadata.extend([("File", yml, temp_vars) for yml in yml_files])
else:
logger.error(f"Config Error: No YAML (.yml) files found in {metadata_file}")
else:
logger.error(f"Config Error: Folder not found: {metadata_file}")
else:
metadata.append((file_type, metadata_file, temp_vars))
for file_type, metadata_file, temp_vars in metadata:
try:
meta_obj = MetadataFile(self.config, self, file_type, metadata_file, temp_vars)
if meta_obj.collections:
@ -137,10 +123,6 @@ class Library(ABC):
except Failed as e:
logger.error(e)
if len(self.metadata_files) == 0 and not self.library_operation and not self.config.playlist_files:
logger.info("")
raise Failed("Config Error: No valid metadata files, playlist files, or library operations found")
def upload_images(self, item, poster=None, background=None, overlay=None):
image = None
image_compare = None

@ -39,6 +39,8 @@ class MyLogger:
self.playlists_dir = os.path.join(self.log_dir, PLAYLIST_DIR)
self.main_log = os.path.join(self.log_dir, MAIN_LOG)
self.main_handler = None
self.save_errors = False
self.saved_errors = []
self.library_handlers = {}
self.collection_handlers = {}
self.playlist_handlers = {}
@ -55,6 +57,9 @@ class MyLogger:
self._logger.addHandler(cmd_handler)
def clear_errors(self):
    # Drop every message accumulated in saved_errors while save_errors was
    # enabled (error()/critical() append to it); called after errors have
    # been reported so the next run starts with a clean slate.
    self.saved_errors = []
def _get_handler(self, log_file, count=3):
_handler = RotatingFileHandler(log_file, delay=True, mode="w", backupCount=count, encoding="utf-8")
self._formatter(_handler)
@ -170,10 +175,14 @@ class MyLogger:
self._log(WARNING, str(msg), args, **kwargs)
def error(self, msg, *args, **kwargs):
    # When error-saving is switched on, retain the raw message so callers
    # can notify on all collected errors later (see saved_errors).
    if self.save_errors:
        self.saved_errors.append(msg)
    # Standard logging path: only format and emit if ERROR is enabled.
    if self._logger.isEnabledFor(ERROR):
        self._log(ERROR, str(msg), args, **kwargs)
def critical(self, msg, *args, **kwargs):
    # Mirror of error(): capture the message for later notification when
    # error-saving is enabled, then defer to the underlying logger.
    if self.save_errors:
        self.saved_errors.append(msg)
    if self._logger.isEnabledFor(CRITICAL):
        self._log(CRITICAL, str(msg), args, **kwargs)

@ -31,7 +31,7 @@ default_templates = {
"trakt_people_list": {"tmdb_person": f"<<value>>", "plex_search": {"all": {"actor": "tmdb"}}}
}
def get_dict(attribute, attr_data, check_list=None):
def get_dict(attribute, attr_data, check_list=None, lower=False):
if check_list is None:
check_list = []
if attr_data and attribute in attr_data:
@ -39,8 +39,8 @@ def get_dict(attribute, attr_data, check_list=None):
if isinstance(attr_data[attribute], dict):
new_dict = {}
for _name, _data in attr_data[attribute].items():
if _name in check_list:
logger.warning(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name}")
if lower and str(_name).lower() in check_list or not lower and _name in check_list:
logger.warning(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {str(_name).lower() if lower else _name}")
elif _data is None:
logger.warning(f"Config Warning: {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name} has no data")
elif not isinstance(_data, dict):
@ -237,6 +237,9 @@ class DataFile:
def external_templates(self, data):
if "external_templates" in data and data["external_templates"]:
files = util.load_yaml_files(data["external_templates"])
if not files:
logger.error("Config Error: No Paths Found for external_templates")
for file_type, template_file, temp_vars in util.load_yaml_files(data["external_templates"]):
temp_data = self.load_file(file_type, template_file)
if temp_data and isinstance(temp_data, dict) and "templates" in temp_data and temp_data["templates"] and isinstance(temp_data["templates"], dict):
@ -261,7 +264,7 @@ class MetadataFile(DataFile):
logger.info(f"Loading Metadata {file_type}: {path}")
logger.info("")
data = self.load_file(self.type, self.path)
self.metadata = get_dict("metadata", data, library.metadata_files)
self.metadata = get_dict("metadata", data, library.metadatas)
self.templates = get_dict("templates", data)
self.external_templates(data)
self.collections = get_dict("collections", data, library.collections)
@ -1066,7 +1069,6 @@ class PlaylistFile(DataFile):
def __init__(self, config, file_type, path, temp_vars):
super().__init__(config, file_type, path, temp_vars)
self.data_type = "Playlist"
self.playlists = {}
logger.info("")
logger.info(f"Loading Playlist File {file_type}: {path}")
data = self.load_file(self.type, self.path)

@ -0,0 +1,437 @@
import os, re
from modules import util
from modules.util import Failed
from ruamel import yaml
logger = util.logger
class Operations:
    """Executes the configured library-level operations for one library.

    All operation switches (mass_*_update, delete_* settings, metadata_backup,
    ...) live on the Library object; this class performs them in a single
    pass, pulling metadata from the external services held by the config
    (TMDb, OMDb, TVDb, AniDB, Mdblist, Trakt) and from Radarr/Sonarr.
    """

    def __init__(self, config, library):
        """
        Args:
            config: global ConfigFile holding the external service connections.
            library: the Library whose operation flags drive run_operations().
        """
        self.config = config
        self.library = library

    def run_operations(self):
        """Run every enabled operation for self.library in one sweep."""
        logger.info("")
        logger.separator(f"{self.library.name} Library Operations")
        logger.info("")
        # Dump the full operation configuration up front for troubleshooting.
        logger.debug(f"Assets For All: {self.library.assets_for_all}")
        logger.debug(f"Delete Collections With Less: {self.library.delete_collections_with_less}")
        logger.debug(f"Delete Unmanaged Collections: {self.library.delete_unmanaged_collections}")
        logger.debug(f"Mass Genre Update: {self.library.mass_genre_update}")
        logger.debug(f"Mass Audience Rating Update: {self.library.mass_audience_rating_update}")
        logger.debug(f"Mass Critic Rating Update: {self.library.mass_critic_rating_update}")
        logger.debug(f"Mass Content Rating Update: {self.library.mass_content_rating_update}")
        logger.debug(f"Mass Originally Available Update: {self.library.mass_originally_available_update}")
        logger.debug(f"Mass IMDb Parental Labels: {self.library.mass_imdb_parental_labels}")
        logger.debug(f"Mass Trakt Rating Update: {self.library.mass_trakt_rating_update}")
        logger.debug(f"Mass Collection Mode Update: {self.library.mass_collection_mode}")
        logger.debug(f"Split Duplicates: {self.library.split_duplicates}")
        logger.debug(f"Radarr Add All Existing: {self.library.radarr_add_all_existing}")
        logger.debug(f"Radarr Remove by Tag: {self.library.radarr_remove_by_tag}")
        logger.debug(f"Sonarr Add All Existing: {self.library.sonarr_add_all_existing}")
        logger.debug(f"Sonarr Remove by Tag: {self.library.sonarr_remove_by_tag}")
        logger.debug(f"Update Blank Track Titles: {self.library.update_blank_track_titles}")
        logger.debug(f"Update Remove Title Parentheses: {self.library.remove_title_parentheses}")
        logger.debug(f"Genre Mapper: {self.library.genre_mapper}")
        logger.debug(f"Content Rating Mapper: {self.library.content_rating_mapper}")
        logger.debug(f"Metadata Backup: {self.library.metadata_backup}")
        logger.debug(f"Item Operation: {self.library.items_library_operation}")
        logger.debug("")

        if self.library.split_duplicates:
            # Split every item Plex reports as a duplicate into separate items.
            items = self.library.search(**{"duplicate": True})
            for item in items:
                item.split()
                logger.info(f"{item.title[:25]:<25} | Splitting")

        if self.library.update_blank_track_titles:
            # Music libraries: fill blank track titles from their sort title.
            tracks = self.library.get_all(collection_level="track")
            num_edited = 0
            for i, track in enumerate(tracks, 1):
                logger.ghost(f"Processing Track: {i}/{len(tracks)} {track.title}")
                if not track.title and track.titleSort:
                    track.editTitle(track.titleSort)
                    num_edited += 1
                    logger.info(f"Track: {track.titleSort} was updated with sort title")
            logger.info(f"{len(tracks)} Tracks Processed; {num_edited} Blank Track Titles Updated")

        if self.library.items_library_operation:
            items = self.library.get_all(load=True)
            radarr_adds = []
            sonarr_adds = []
            # Trakt ratings are fetched once for the whole library, keyed by
            # TMDb id (movies) or TVDb id (shows).
            trakt_ratings = self.config.Trakt.user_ratings(self.library.is_movie) if self.library.mass_trakt_rating_update else []

            # Invert the library's AniDB map (Plex rating key -> AniDB id)
            # so per-item lookups below are O(1).
            reverse_anidb = {}
            if self.library.mass_genre_update == "anidb":
                for k, v in self.library.anidb_map.items():
                    reverse_anidb[v] = k

            for i, item in enumerate(items, 1):
                try:
                    self.library.reload(item)
                except Failed as e:
                    logger.error(e)
                    continue
                logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
                if self.library.assets_for_all:
                    self.library.find_assets(item)
                tmdb_id, tvdb_id, imdb_id = self.library.get_ids(item)

                # All edits below are queued on the item and committed once
                # via saveEdits() at the end of the loop body.
                item.batchEdits()
                # NOTE(review): batch_display is accumulated but never logged
                # in this method — presumably emitted elsewhere; confirm.
                batch_display = "Batch Edits"

                if self.library.remove_title_parentheses:
                    # Only touch titles that are not field-locked and end in a
                    # parenthesized word, e.g. "Title (2020)".
                    if not any([f.name == "title" and f.locked for f in item.fields]) and item.title.endswith(")"):
                        new_title = re.sub(" \\(\\w+\\)$", "", item.title)
                        item.editTitle(new_title)
                        batch_display += f"\n{item.title[:25]:<25} | Title | {new_title}"

                if self.library.mass_trakt_rating_update:
                    try:
                        if self.library.is_movie and tmdb_id in trakt_ratings:
                            new_rating = trakt_ratings[tmdb_id]
                        elif self.library.is_show and tvdb_id in trakt_ratings:
                            new_rating = trakt_ratings[tvdb_id]
                        else:
                            raise Failed
                        if str(item.userRating) != str(new_rating):
                            self.library.query_data(item.rate, new_rating)
                            logger.info(f"{item.title[:25]:<25} | User Rating | {new_rating}")
                    except Failed:
                        pass

                if self.library.mass_imdb_parental_labels:
                    try:
                        parental_guide = self.config.IMDb.parental_guide(imdb_id)
                        # "with_none" keeps categories rated "None"; otherwise they are skipped.
                        labels = [f"{k.capitalize()}:{v}" for k, v in parental_guide.items() if self.library.mass_imdb_parental_labels == "with_none" or v != "None"]
                        batch_display += f"\n{self.library.edit_tags('label', item, add_tags=labels)}"
                    except Failed:
                        pass

                # Movies use the file's directory; shows use the location itself.
                path = os.path.dirname(str(item.locations[0])) if self.library.is_movie else str(item.locations[0])
                if self.library.Radarr and self.library.radarr_add_all_existing and tmdb_id:
                    # Translate the Plex-visible path into Radarr's root path.
                    path = path.replace(self.library.Radarr.plex_path, self.library.Radarr.radarr_path)
                    path = path[:-1] if path.endswith(('/', '\\')) else path
                    radarr_adds.append((tmdb_id, path))
                if self.library.Sonarr and self.library.sonarr_add_all_existing and tvdb_id:
                    path = path.replace(self.library.Sonarr.plex_path, self.library.Sonarr.sonarr_path)
                    path = path[:-1] if path.endswith(("/", "\\")) else path
                    sonarr_adds.append((tvdb_id, path))

                # Fetch each external metadata record only if some enabled
                # operation actually sources data from that service.
                tmdb_item = None
                if any([o == "tmdb" for o in self.library.meta_operations]):
                    tmdb_item = self.config.TMDb.get_item(item, tmdb_id, tvdb_id, imdb_id, is_movie=self.library.is_movie)

                omdb_item = None
                if any([o == "omdb" for o in self.library.meta_operations]):
                    # limit is False means the OMDb API quota has not been hit.
                    if self.config.OMDb.limit is False:
                        if tmdb_id and not imdb_id:
                            imdb_id = self.config.Convert.tmdb_to_imdb(tmdb_id)
                        elif tvdb_id and not imdb_id:
                            imdb_id = self.config.Convert.tvdb_to_imdb(tvdb_id)
                        if imdb_id:
                            try:
                                omdb_item = self.config.OMDb.get_omdb(imdb_id)
                            except Failed as e:
                                logger.error(str(e))
                            except Exception:
                                # Log the offending id before re-raising unexpected errors.
                                logger.error(f"IMDb ID: {imdb_id}")
                                raise
                        else:
                            logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")

                tvdb_item = None
                if any([o == "tvdb" for o in self.library.meta_operations]):
                    if tvdb_id:
                        try:
                            tvdb_item = self.config.TVDb.get_item(tvdb_id, self.library.is_movie)
                        except Failed as e:
                            logger.error(str(e))
                    else:
                        logger.info(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}")

                anidb_item = None
                if any([o == "anidb" for o in self.library.meta_operations]):
                    # Resolve an AniDB id from, in order: the library's own
                    # map, the TVDb conversion cache, the IMDb conversion cache.
                    if item.ratingKey in reverse_anidb:
                        anidb_id = reverse_anidb[item.ratingKey]
                    elif tvdb_id in self.config.Convert._tvdb_to_anidb:
                        anidb_id = self.config.Convert._tvdb_to_anidb[tvdb_id]
                    elif imdb_id in self.config.Convert._imdb_to_anidb:
                        anidb_id = self.config.Convert._imdb_to_anidb[imdb_id]
                    else:
                        anidb_id = None
                        logger.info(f"{item.title[:25]:<25} | No AniDB ID for Guid: {item.guid}")
                    if anidb_id:
                        try:
                            anidb_item = self.config.AniDB.get_anime(anidb_id)
                        except Failed as e:
                            logger.error(str(e))

                mdb_item = None
                if any([o and o.startswith("mdb") for o in self.library.meta_operations]):
                    if self.config.Mdblist.limit is False:
                        if tmdb_id and not imdb_id:
                            imdb_id = self.config.Convert.tmdb_to_imdb(tmdb_id)
                        elif tvdb_id and not imdb_id:
                            imdb_id = self.config.Convert.tvdb_to_imdb(tvdb_id)
                        if imdb_id:
                            try:
                                mdb_item = self.config.Mdblist.get_imdb(imdb_id)
                            except Failed as e:
                                logger.error(str(e))
                            except Exception:
                                logger.error(f"IMDb ID: {imdb_id}")
                                raise
                        else:
                            logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")

                def get_rating(attribute):
                    """Return the rating for this item from the named source.

                    Raises Failed when the source is unavailable for this item
                    so callers can skip the update. Sources on other scales are
                    normalized to Plex's 0-10 range (e.g. mdb score /10,
                    letterboxd *2).
                    """
                    if tmdb_item and attribute == "tmdb":
                        return tmdb_item.vote_average
                    elif omdb_item and attribute == "omdb":
                        return omdb_item.imdb_rating
                    elif mdb_item and attribute == "mdb":
                        return mdb_item.score / 10 if mdb_item.score else None
                    elif mdb_item and attribute == "mdb_imdb":
                        return mdb_item.imdb_rating if mdb_item.imdb_rating else None
                    elif mdb_item and attribute == "mdb_metacritic":
                        return mdb_item.metacritic_rating / 10 if mdb_item.metacritic_rating else None
                    elif mdb_item and attribute == "mdb_metacriticuser":
                        return mdb_item.metacriticuser_rating if mdb_item.metacriticuser_rating else None
                    elif mdb_item and attribute == "mdb_trakt":
                        return mdb_item.trakt_rating / 10 if mdb_item.trakt_rating else None
                    elif mdb_item and attribute == "mdb_tomatoes":
                        return mdb_item.tomatoes_rating / 10 if mdb_item.tomatoes_rating else None
                    elif mdb_item and attribute == "mdb_tomatoesaudience":
                        return mdb_item.tomatoesaudience_rating / 10 if mdb_item.tomatoesaudience_rating else None
                    elif mdb_item and attribute == "mdb_tmdb":
                        return mdb_item.tmdb_rating / 10 if mdb_item.tmdb_rating else None
                    elif mdb_item and attribute == "mdb_letterboxd":
                        return mdb_item.letterboxd_rating * 2 if mdb_item.letterboxd_rating else None
                    elif anidb_item and attribute == "anidb_rating":
                        return anidb_item.rating
                    elif anidb_item and attribute == "anidb_average":
                        return anidb_item.average
                    else:
                        raise Failed

                if self.library.mass_genre_update or self.library.genre_mapper:
                    try:
                        new_genres = []
                        if self.library.mass_genre_update:
                            if tmdb_item and self.library.mass_genre_update == "tmdb":
                                new_genres = tmdb_item.genres
                            elif omdb_item and self.library.mass_genre_update == "omdb":
                                new_genres = omdb_item.genres
                            elif tvdb_item and self.library.mass_genre_update == "tvdb":
                                new_genres = tvdb_item.genres
                            elif anidb_item and self.library.mass_genre_update == "anidb":
                                new_genres = anidb_item.tags
                            else:
                                raise Failed
                            if not new_genres:
                                logger.info(f"{item.title[:25]:<25} | No Genres Found")
                        if self.library.genre_mapper:
                            # Without a mass update source, map the item's
                            # current genres instead.
                            if not new_genres:
                                new_genres = [g.tag for g in item.genres]
                            mapped_genres = []
                            for genre in new_genres:
                                if genre in self.library.genre_mapper:
                                    # A falsy mapping value removes the genre.
                                    if self.library.genre_mapper[genre]:
                                        mapped_genres.append(self.library.genre_mapper[genre])
                                else:
                                    mapped_genres.append(genre)
                            new_genres = mapped_genres
                        batch_display += f"\n{self.library.edit_tags('genre', item, sync_tags=new_genres)}"
                    except Failed:
                        pass

                if self.library.mass_audience_rating_update:
                    try:
                        new_rating = get_rating(self.library.mass_audience_rating_update)
                        if new_rating is None:
                            logger.info(f"{item.title[:25]:<25} | No Rating Found")
                        elif str(item.audienceRating) != str(new_rating):
                            item.editField("audienceRating", new_rating)
                            batch_display += f"\n{item.title[:25]:<25} | Audience Rating | {new_rating}"
                    except Failed:
                        pass

                if self.library.mass_critic_rating_update:
                    try:
                        new_rating = get_rating(self.library.mass_critic_rating_update)
                        if new_rating is None:
                            logger.info(f"{item.title[:25]:<25} | No Rating Found")
                        elif str(item.rating) != str(new_rating):
                            item.editField("rating", new_rating)
                            # Fix: leading "\n" was missing here, jamming this
                            # row onto the previous batch_display line.
                            batch_display += f"\n{item.title[:25]:<25} | Critic Rating | {new_rating}"
                    except Failed:
                        pass

                if self.library.mass_content_rating_update or self.library.content_rating_mapper:
                    try:
                        new_rating = None
                        if self.library.mass_content_rating_update:
                            if omdb_item and self.library.mass_content_rating_update == "omdb":
                                new_rating = omdb_item.content_rating
                            elif mdb_item and self.library.mass_content_rating_update == "mdb":
                                new_rating = mdb_item.content_rating if mdb_item.content_rating else None
                            elif mdb_item and self.library.mass_content_rating_update == "mdb_commonsense":
                                new_rating = mdb_item.commonsense if mdb_item.commonsense else None
                            elif tmdb_item and self.library.mass_content_rating_update == "tmdb":
                                new_rating = tmdb_item.content_rating if tmdb_item.content_rating else None
                            else:
                                raise Failed
                            if new_rating is None:
                                logger.info(f"{item.title[:25]:<25} | No Content Rating Found")
                        if self.library.content_rating_mapper:
                            if new_rating is None:
                                new_rating = item.contentRating
                            if new_rating in self.library.content_rating_mapper:
                                new_rating = self.library.content_rating_mapper[new_rating]
                        if str(item.contentRating) != str(new_rating):
                            item.editContentRating(new_rating)
                            batch_display += f"\n{item.title[:25]:<25} | Content Rating | {new_rating}"
                    except Failed:
                        pass

                if self.library.mass_originally_available_update:
                    try:
                        if omdb_item and self.library.mass_originally_available_update == "omdb":
                            new_date = omdb_item.released
                        elif mdb_item and self.library.mass_originally_available_update == "mdb":
                            new_date = mdb_item.released
                        elif tvdb_item and self.library.mass_originally_available_update == "tvdb":
                            new_date = tvdb_item.released
                        elif tmdb_item and self.library.mass_originally_available_update == "tmdb":
                            new_date = tmdb_item.release_date if self.library.is_movie else tmdb_item.first_air_date
                        elif anidb_item and self.library.mass_originally_available_update == "anidb":
                            new_date = anidb_item.released
                        else:
                            raise Failed
                        if new_date is None:
                            logger.info(f"{item.title[:25]:<25} | No Originally Available Date Found")
                        elif str(item.originallyAvailableAt) != str(new_date):
                            item.editOriginallyAvailable(new_date)
                            batch_display += f"\n{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}"
                    except Failed:
                        pass

                # Commit all queued edits for this item in one request.
                item.saveEdits()

            if self.library.Radarr and self.library.radarr_add_all_existing:
                try:
                    self.library.Radarr.add_tmdb(radarr_adds)
                except Failed as e:
                    logger.error(e)
            if self.library.Sonarr and self.library.sonarr_add_all_existing:
                try:
                    self.library.Sonarr.add_tvdb(sonarr_adds)
                except Failed as e:
                    logger.error(e)

        if self.library.radarr_remove_by_tag:
            self.library.Radarr.remove_all_with_tags(self.library.radarr_remove_by_tag)
        if self.library.sonarr_remove_by_tag:
            self.library.Sonarr.remove_all_with_tags(self.library.sonarr_remove_by_tag)

        if self.library.delete_collections_with_less is not None or self.library.delete_unmanaged_collections:
            # Build a header describing exactly which deletions are enabled.
            logger.info("")
            print_suffix = ""
            unmanaged = ""
            if self.library.delete_collections_with_less is not None and self.library.delete_collections_with_less > 0:
                print_suffix = f" with less then {self.library.delete_collections_with_less} item{'s' if self.library.delete_collections_with_less > 1 else ''}"
            if self.library.delete_unmanaged_collections:
                if self.library.delete_collections_with_less is None:
                    unmanaged = "Unmanaged Collections "
                elif self.library.delete_collections_with_less > 0:
                    unmanaged = "Unmanaged Collections and "
            logger.separator(f"Deleting All {unmanaged}Collections{print_suffix}", space=False, border=False)
            logger.info("")

        # Always walk the collections: deletions happen here when enabled, and
        # the surviving unmanaged collections feed show_unmanaged /
        # assets_for_all below.
        unmanaged_collections = []
        for col in self.library.get_all_collections():
            if (self.library.delete_collections_with_less and col.childCount < self.library.delete_collections_with_less) \
                    or (self.library.delete_unmanaged_collections and col.title not in self.library.collections):
                self.library.query(col.delete)
                logger.info(f"{col.title} Deleted")
            elif col.title not in self.library.collections:
                unmanaged_collections.append(col)

        if self.library.mass_collection_mode:
            logger.info("")
            logger.separator(f"Mass Collection Mode for {self.library.name} Library", space=False, border=False)
            logger.info("")
            for col in self.library.get_all_collections():
                self.library.collection_mode_query(col, self.library.mass_collection_mode)

        if self.library.show_unmanaged and len(unmanaged_collections) > 0:
            logger.info("")
            logger.separator(f"Unmanaged Collections in {self.library.name} Library", space=False, border=False)
            logger.info("")
            for col in unmanaged_collections:
                logger.info(col.title)
            logger.info("")
            logger.info(f"{len(unmanaged_collections)} Unmanaged Collection{'s' if len(unmanaged_collections) > 1 else ''}")
        elif self.library.show_unmanaged:
            logger.info("")
            logger.separator(f"No Unmanaged Collections in {self.library.name} Library", space=False, border=False)
            logger.info("")

        if self.library.assets_for_all and len(unmanaged_collections) > 0:
            logger.info("")
            logger.separator(f"Unmanaged Collection Assets Check for {self.library.name} Library", space=False, border=False)
            logger.info("")
            for col in unmanaged_collections:
                self.library.find_assets(col)

        if self.library.metadata_backup:
            logger.info("")
            logger.separator(f"Metadata Backup for {self.library.name} Library", space=False, border=False)
            logger.info("")
            logger.info(f"Metadata Backup Path: {self.library.metadata_backup['path']}")
            logger.info("")
            meta = None
            if os.path.exists(self.library.metadata_backup["path"]):
                try:
                    meta, _, _ = yaml.util.load_yaml_guess_indent(open(self.library.metadata_backup["path"], encoding="utf-8"))
                except yaml.scanner.ScannerError as e:
                    logger.error(f"YAML Error: {util.tab_new_lines(e)}")
                    # The existing backup is unparseable: move it aside to a
                    # numbered sibling (backup1.yml, backup2.yml, ...) and
                    # start fresh. Fix: these f-strings used the literal text
                    # "(unknown)" instead of the computed {filename} prefix.
                    filename, file_extension = os.path.splitext(self.library.metadata_backup["path"])
                    i = 1
                    while os.path.exists(f"{filename}{i}{file_extension}"):
                        i += 1
                    os.rename(self.library.metadata_backup["path"], f"{filename}{i}{file_extension}")
                    logger.error(f"Backup failed to load saving copy to {filename}{i}{file_extension}")
            if not meta:
                meta = {}
            if "metadata" not in meta:
                meta["metadata"] = {}
            # Map display titles (and "Title (Year)") back to their existing
            # mapping keys so re-runs update entries instead of duplicating.
            special_names = {}
            for mk, mv in meta["metadata"].items():
                if "title" in mv:
                    special_names[mv["title"]] = mk
                    if "year" in mv:
                        special_names[f"{mv['title']} ({mv['year']})"] = mk
            items = self.library.get_all(load=True)
            titles = [i.title for i in items]
            for i, item in enumerate(items, 1):
                logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
                map_key, attrs = self.library.get_locked_attributes(item, titles)
                if map_key in special_names:
                    map_key = special_names[map_key]
                og_dict = meta["metadata"][map_key] if map_key in meta["metadata"] and meta["metadata"][map_key] else {}
                if attrs or (self.library.metadata_backup["add_blank_entries"] and not og_dict):

                    def get_dict(attrs_dict):
                        # Deep-copy the locked attributes into plain dicts.
                        return {ak: get_dict(av) if isinstance(av, dict) else av for ak, av in attrs_dict.items()}

                    def loop_dict(looping, dest_dict):
                        # Recursively merge the new attributes over the
                        # existing backup entry, preserving unrelated keys.
                        if not looping:
                            return None
                        for lk, lv in looping.items():
                            dest_dict[lk] = loop_dict(lv, dest_dict[lk] if lk in dest_dict and dest_dict[lk] else {}) if isinstance(lv, dict) else lv
                        return dest_dict

                    meta["metadata"][map_key] = loop_dict(get_dict(attrs), og_dict)
            logger.exorcise()
            try:
                yaml.round_trip_dump(meta, open(self.library.metadata_backup["path"], "w", encoding="utf-8"), block_seq_indent=2)
                logger.info(f"{len(meta['metadata'])} {self.library.type.capitalize()}{'s' if len(meta['metadata']) > 1 else ''} Backed Up")
            except yaml.scanner.ScannerError as e:
                logger.error(f"YAML Error: {util.tab_new_lines(e)}")

@ -435,10 +435,6 @@ class Plex(Library):
self.update_blank_track_titles = False
logger.error(f"update_blank_track_titles library operation only works with music libraries")
if self.tmdb_collections and self.is_show:
self.tmdb_collections = None
logger.error("Config Error: tmdb_collections only work with Movie Libraries.")
def notify(self, text, collection=None, critical=True):
    """Forward *text* to the global notification system, tagged with this
    Plex server's friendly name and this library's name."""
    server_name = self.PlexServer.friendlyName
    self.config.notify(text, server=server_name, library=self.name,
                       collection=collection, critical=critical)

@ -77,6 +77,11 @@ tags_to_edit = {
"Artist": ["genre", "style", "mood", "country", "collection", "similar_artist"]
}
mdb_types = ["mdb", "mdb_imdb", "mdb_metacritic", "mdb_metacriticuser", "mdb_trakt", "mdb_tomatoes", "mdb_tomatoesaudience", "mdb_tmdb", "mdb_letterboxd"]
# Maps every accepted collection_mode spelling (compared lowercase) to the
# camelCase value the Plex API expects; both snake_case and squashed
# spellings are accepted for the two-word modes.
collection_mode_options = {
    "default": "default", "hide": "hide",
    "hide_items": "hideItems", "hideitems": "hideItems",
    "show_items": "showItems", "showitems": "showItems"
}
def tab_new_lines(data):
    """Return ``str(data)`` with an indent inserted after every embedded
    newline so multi-line values stay aligned in log output.

    Accepts any object (e.g. exceptions) and stringifies it first.
    """
    # str.replace is a no-op when "\n" is absent, so the original
    # `if "\n" in str(data)` guard (and its second str() conversion)
    # was redundant; convert once and replace unconditionally.
    return str(data).replace("\n", "\n ")
@ -291,7 +296,7 @@ def load_yaml_files(yaml_files):
if os.path.exists(yaml_file):
files.append(("File", yaml_file, {}))
else:
logger.warning(f"Config Warning: Path not found: {path}")
logger.error(f"Config Error: Path not found: {yaml_file}")
return files
def check_num(num, is_int=True):
@ -300,6 +305,12 @@ def check_num(num, is_int=True):
except (ValueError, TypeError):
return None
def check_collection_mode(collection_mode):
    """Normalize a user-supplied collection_mode value to the spelling the
    Plex API expects, raising Failed with the list of valid options when the
    value is falsy or unrecognized."""
    key = str(collection_mode).lower() if collection_mode else None
    if key in collection_mode_options:
        return collection_mode_options[key]
    raise Failed(f"Config Error: {collection_mode} collection_mode invalid\n\tdefault (Library default)\n\thide (Hide Collection)\n\thide_items (Hide Items in this Collection)\n\tshow_items (Show this Collection and its Items)")
def glob_filter(filter_in):
    """Run a glob search for *filter_in*, treating square brackets in the
    pattern as literal characters rather than glob character classes."""
    if "[" in filter_in:
        # Wrap each bracket in a single-character class ("[[]" / "[]]") so
        # glob matches it literally instead of opening a character class.
        escapes = {"[": "[[]", "]": "[]]"}
        filter_in = "".join(escapes.get(ch, ch) for ch in filter_in)
    return glob.glob(filter_in)
@ -343,7 +354,7 @@ def is_string_filter(values, modifier, data):
or (modifier in [".is", ".isnot"] and value.lower() == check_value.lower()) \
or (modifier == ".begins" and value.lower().startswith(check_value.lower())) \
or (modifier == ".ends" and value.lower().endswith(check_value.lower())) \
or (modifier == ".regex" and re.compile(check_value).match(value)):
or (modifier == ".regex" and re.compile(check_value).search(value)):
jailbreak = True
break
if jailbreak: break

@ -1,4 +1,4 @@
import argparse, os, re, sys, time, traceback
import argparse, os, sys, time, traceback
from datetime import datetime
try:
@ -99,7 +99,6 @@ from modules import util
util.logger = logger
from modules.builder import CollectionBuilder
from modules.config import ConfigFile
from modules.meta import MetadataFile
from modules.util import Failed, NotScheduled
def my_except_hook(exctype, value, tb):
@ -146,6 +145,7 @@ def start(attrs):
attrs["read_only"] = read_only_config
attrs["version"] = version
attrs["latest_version"] = latest_version
attrs["no_missing"] = no_missing
logger.separator(debug=True)
logger.debug(f"--config (PMM_CONFIG): {config_file}")
logger.debug(f"--time (PMM_TIME): {times}")
@ -212,7 +212,7 @@ def update_libraries(config):
logger.separator(f"{library.name} Library")
if config.library_first and library.library_operation and not config.test_mode and not collection_only:
library_operations(config, library)
library.Operations.run_operations()
logger.debug("")
logger.debug(f"Mapping Name: {library.original_mapping_name}")
@ -279,7 +279,7 @@ def update_libraries(config):
logger.re_add_library_handler(library.mapping_name)
if not config.library_first and library.library_operation and not config.test_mode and not collection_only:
library_operations(config, library)
library.Operations.run_operations()
logger.remove_library_handler(library.mapping_name)
except Exception as e:
@ -400,477 +400,6 @@ def update_libraries(config):
stats["names"].extend([{"name": n, "library": "PLAYLIST"} for n in playlist_stats["names"]])
return stats
def library_operations(config, library):
    """Run every enabled library-level operation for *library*.

    Covers, in order: duplicate splitting, blank track titles, the big
    per-item metadata pass (ratings, genres, content ratings, dates, labels,
    Radarr/Sonarr add-all-existing), automated TMDb/genre collections,
    Radarr/Sonarr remove-by-tag, collection deletion/mode/asset checks, and
    the metadata backup YAML.

    Args:
        config: global config object holding the external service wrappers
            (TMDb, OMDb, TVDb, AniDB, Mdblist, Trakt, IMDb, Convert).
        library: library object whose operation flags select what runs.
    """
    logger.info("")
    logger.separator(f"{library.name} Library Operations")
    logger.info("")
    # Dump every operation flag so the log shows exactly what was configured.
    logger.debug(f"Assets For All: {library.assets_for_all}")
    logger.debug(f"Delete Collections With Less: {library.delete_collections_with_less}")
    logger.debug(f"Delete Unmanaged Collections: {library.delete_unmanaged_collections}")
    logger.debug(f"Mass Genre Update: {library.mass_genre_update}")
    logger.debug(f"Mass Audience Rating Update: {library.mass_audience_rating_update}")
    logger.debug(f"Mass Critic Rating Update: {library.mass_critic_rating_update}")
    logger.debug(f"Mass Content Rating Update: {library.mass_content_rating_update}")
    logger.debug(f"Mass Originally Available Update: {library.mass_originally_available_update}")
    logger.debug(f"Mass IMDb Parental Labels: {library.mass_imdb_parental_labels}")
    logger.debug(f"Mass Trakt Rating Update: {library.mass_trakt_rating_update}")
    logger.debug(f"Mass Collection Mode Update: {library.mass_collection_mode}")
    logger.debug(f"Split Duplicates: {library.split_duplicates}")
    logger.debug(f"Radarr Add All Existing: {library.radarr_add_all_existing}")
    logger.debug(f"Radarr Remove by Tag: {library.radarr_remove_by_tag}")
    logger.debug(f"Sonarr Add All Existing: {library.sonarr_add_all_existing}")
    logger.debug(f"Sonarr Remove by Tag: {library.sonarr_remove_by_tag}")
    logger.debug(f"Update Blank Track Titles: {library.update_blank_track_titles}")
    logger.debug(f"Update Remove Title Parentheses: {library.remove_title_parentheses}")
    logger.debug(f"TMDb Collections: {library.tmdb_collections}")
    logger.debug(f"Genre Collections: {library.genre_collections}")
    logger.debug(f"Genre Mapper: {library.genre_mapper}")
    logger.debug(f"Content Rating Mapper: {library.content_rating_mapper}")
    logger.debug(f"Metadata Backup: {library.metadata_backup}")
    logger.debug(f"Item Operation: {library.items_library_operation}")
    logger.debug("")

    # Split every item Plex reports as a duplicate into separate entries.
    if library.split_duplicates:
        items = library.search(**{"duplicate": True})
        for item in items:
            item.split()
            logger.info(f"{item.title[:25]:<25} | Splitting")

    # Fill in blank music track titles from their sort titles.
    if library.update_blank_track_titles:
        tracks = library.get_all(collection_level="track")
        num_edited = 0
        for i, track in enumerate(tracks, 1):
            logger.ghost(f"Processing Track: {i}/{len(tracks)} {track.title}")
            # Only fill titles that are blank AND have a usable sort title.
            if not track.title and track.titleSort:
                track.editTitle(track.titleSort)
                num_edited += 1
                logger.info(f"Track: {track.titleSort} was updated with sort title")
        logger.info(f"{len(tracks)} Tracks Processed; {num_edited} Blank Track Titles Updated")

    # Collected during the item pass; consumed by the automated-collections
    # section below: {tmdb_collection_id: collection_name}.
    tmdb_collections = {}
    if library.items_library_operation:
        items = library.get_all(load=True)
        radarr_adds = []
        sonarr_adds = []
        trakt_ratings = config.Trakt.user_ratings(library.is_movie) if library.mass_trakt_rating_update else []

        # Invert the library's AniDB map so plex ratingKey -> anidb_id.
        reverse_anidb = {}
        if library.mass_genre_update == "anidb":
            for k, v in library.anidb_map.items():
                reverse_anidb[v] = k

        for i, item in enumerate(items, 1):
            try:
                library.reload(item)
            except Failed as e:
                logger.error(e)
                continue
            logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
            if library.assets_for_all:
                library.find_assets(item)
            tmdb_id, tvdb_id, imdb_id = library.get_ids(item)

            # Queue edits on the plexapi item; committed by saveEdits() below.
            item.batchEdits()
            batch_display = "Batch Edits"
            # NOTE(review): batch_display is accumulated throughout this loop
            # but never logged in the visible code — confirm intended.

            # Strip a trailing " (...)" from the title unless title is locked.
            if library.remove_title_parentheses:
                if not any([f.name == "title" and f.locked for f in item.fields]) and item.title.endswith(")"):
                    new_title = re.sub(" \(\w+\)$", "", item.title)
                    item.editTitle(new_title)
                    batch_display += f"\n{item.title[:25]:<25} | Title | {new_title}"

            # Copy the user's Trakt rating (keyed by tmdb/tvdb id) onto the item.
            if library.mass_trakt_rating_update:
                try:
                    if library.is_movie and tmdb_id in trakt_ratings:
                        new_rating = trakt_ratings[tmdb_id]
                    elif library.is_show and tvdb_id in trakt_ratings:
                        new_rating = trakt_ratings[tvdb_id]
                    else:
                        raise Failed
                    if str(item.userRating) != str(new_rating):
                        library.query_data(item.rate, new_rating)
                        logger.info(f"{item.title[:25]:<25} | User Rating | {new_rating}")
                except Failed:
                    pass

            # Add "Category:Value" labels from IMDb's parental guide;
            # "with_none" also keeps categories whose value is "None".
            if library.mass_imdb_parental_labels:
                try:
                    parental_guide = config.IMDb.parental_guide(imdb_id)
                    labels = [f"{k.capitalize()}:{v}" for k, v in parental_guide.items() if library.mass_imdb_parental_labels == "with_none" or v != "None"]
                    batch_display += f"\n{library.edit_tags('label', item, add_tags=labels)}"
                except Failed:
                    pass

            # Queue this item for Radarr/Sonarr add-all-existing, translating
            # the Plex path prefix to the *arr path prefix.
            path = os.path.dirname(str(item.locations[0])) if library.is_movie else str(item.locations[0])
            if library.Radarr and library.radarr_add_all_existing and tmdb_id:
                path = path.replace(library.Radarr.plex_path, library.Radarr.radarr_path)
                path = path[:-1] if path.endswith(('/', '\\')) else path
                radarr_adds.append((tmdb_id, path))
            if library.Sonarr and library.sonarr_add_all_existing and tvdb_id:
                path = path.replace(library.Sonarr.plex_path, library.Sonarr.sonarr_path)
                path = path[:-1] if path.endswith(("/", "\\")) else path
                sonarr_adds.append((tvdb_id, path))

            # Fetch whichever external metadata records the configured
            # operations require; each stays None if not needed/found.
            tmdb_item = None
            if library.tmdb_collections or any([o == "tmdb" for o in library.meta_operations]):
                tmdb_item = config.TMDb.get_item(item, tmdb_id, tvdb_id, imdb_id, is_movie=library.is_movie)

            omdb_item = None
            if any([o == "omdb" for o in library.meta_operations]):
                if config.OMDb.limit is False:
                    if tmdb_id and not imdb_id:
                        imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
                    elif tvdb_id and not imdb_id:
                        imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
                    if imdb_id:
                        try:
                            omdb_item = config.OMDb.get_omdb(imdb_id)
                        except Failed as e:
                            logger.error(str(e))
                        except Exception:
                            # Log the id for diagnosis, then let it propagate.
                            logger.error(f"IMDb ID: {imdb_id}")
                            raise
                    else:
                        logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")

            tvdb_item = None
            if any([o == "tvdb" for o in library.meta_operations]):
                if tvdb_id:
                    try:
                        tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie)
                    except Failed as e:
                        logger.error(str(e))
                else:
                    logger.info(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}")

            anidb_item = None
            if any([o == "anidb" for o in library.meta_operations]):
                # Resolve an AniDB id via library map, then tvdb, then imdb.
                if item.ratingKey in reverse_anidb:
                    anidb_id = reverse_anidb[item.ratingKey]
                elif tvdb_id in config.Convert._tvdb_to_anidb:
                    anidb_id = config.Convert._tvdb_to_anidb[tvdb_id]
                elif imdb_id in config.Convert._imdb_to_anidb:
                    anidb_id = config.Convert._imdb_to_anidb[imdb_id]
                else:
                    anidb_id = None
                    logger.info(f"{item.title[:25]:<25} | No AniDB ID for Guid: {item.guid}")
                if anidb_id:
                    try:
                        anidb_item = config.AniDB.get_anime(anidb_id)
                    except Failed as e:
                        logger.error(str(e))

            mdb_item = None
            if any([o and o.startswith("mdb") for o in library.meta_operations]):
                if config.Mdblist.limit is False:
                    if tmdb_id and not imdb_id:
                        imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
                    elif tvdb_id and not imdb_id:
                        imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
                    if imdb_id:
                        try:
                            mdb_item = config.Mdblist.get_imdb(imdb_id)
                        except Failed as e:
                            logger.error(str(e))
                        except Exception:
                            logger.error(f"IMDb ID: {imdb_id}")
                            raise
                    else:
                        logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")

            # Remember this item's TMDb collection for the automated pass below.
            if library.tmdb_collections and tmdb_item and tmdb_item.collection_id:
                tmdb_collections[tmdb_item.collection_id] = tmdb_item.collection_name

            def get_rating(attribute):
                # Resolve a rating-source keyword to a rating value from
                # whichever external record was fetched (the /10 and *2
                # scalings presumably normalize to Plex's scale — confirm).
                # Raises Failed when the source is unavailable.
                if tmdb_item and attribute == "tmdb":
                    return tmdb_item.vote_average
                elif omdb_item and attribute == "omdb":
                    return omdb_item.imdb_rating
                elif mdb_item and attribute == "mdb":
                    return mdb_item.score / 10 if mdb_item.score else None
                elif mdb_item and attribute == "mdb_imdb":
                    return mdb_item.imdb_rating if mdb_item.imdb_rating else None
                elif mdb_item and attribute == "mdb_metacritic":
                    return mdb_item.metacritic_rating / 10 if mdb_item.metacritic_rating else None
                elif mdb_item and attribute == "mdb_metacriticuser":
                    return mdb_item.metacriticuser_rating if mdb_item.metacriticuser_rating else None
                elif mdb_item and attribute == "mdb_trakt":
                    return mdb_item.trakt_rating / 10 if mdb_item.trakt_rating else None
                elif mdb_item and attribute == "mdb_tomatoes":
                    return mdb_item.tomatoes_rating / 10 if mdb_item.tomatoes_rating else None
                elif mdb_item and attribute == "mdb_tomatoesaudience":
                    return mdb_item.tomatoesaudience_rating / 10 if mdb_item.tomatoesaudience_rating else None
                elif mdb_item and attribute == "mdb_tmdb":
                    return mdb_item.tmdb_rating / 10 if mdb_item.tmdb_rating else None
                elif mdb_item and attribute == "mdb_letterboxd":
                    return mdb_item.letterboxd_rating * 2 if mdb_item.letterboxd_rating else None
                elif anidb_item and attribute == "anidb_rating":
                    return anidb_item.rating
                elif anidb_item and attribute == "anidb_average":
                    return anidb_item.average
                else:
                    raise Failed

            # Replace genres from the configured source and/or remap them.
            if library.mass_genre_update or library.genre_mapper:
                try:
                    new_genres = []
                    if library.mass_genre_update:
                        if tmdb_item and library.mass_genre_update == "tmdb":
                            new_genres = tmdb_item.genres
                        elif omdb_item and library.mass_genre_update == "omdb":
                            new_genres = omdb_item.genres
                        elif tvdb_item and library.mass_genre_update == "tvdb":
                            new_genres = tvdb_item.genres
                        elif anidb_item and library.mass_genre_update == "anidb":
                            new_genres = anidb_item.tags
                        else:
                            raise Failed
                        if not new_genres:
                            logger.info(f"{item.title[:25]:<25} | No Genres Found")
                    if library.genre_mapper:
                        # No source genres -> start from the item's current ones.
                        if not new_genres:
                            new_genres = [g.tag for g in item.genres]
                        mapped_genres = []
                        for genre in new_genres:
                            if genre in library.genre_mapper:
                                # A mapping of None/"" drops the genre entirely.
                                if library.genre_mapper[genre]:
                                    mapped_genres.append(library.genre_mapper[genre])
                            else:
                                mapped_genres.append(genre)
                        new_genres = mapped_genres
                    batch_display += f"\n{library.edit_tags('genre', item, sync_tags=new_genres)}"
                except Failed:
                    pass

            if library.mass_audience_rating_update:
                try:
                    new_rating = get_rating(library.mass_audience_rating_update)
                    if new_rating is None:
                        logger.info(f"{item.title[:25]:<25} | No Rating Found")
                    elif str(item.audienceRating) != str(new_rating):
                        item.editField("audienceRating", new_rating)
                        batch_display += f"\n{item.title[:25]:<25} | Audience Rating | {new_rating}"
                except Failed:
                    pass

            if library.mass_critic_rating_update:
                try:
                    new_rating = get_rating(library.mass_critic_rating_update)
                    if new_rating is None:
                        logger.info(f"{item.title[:25]:<25} | No Rating Found")
                    elif str(item.rating) != str(new_rating):
                        item.editField("rating", new_rating)
                        # NOTE(review): unlike the other batch_display edits this
                        # line omits the leading "\n" — likely a formatting bug.
                        batch_display += f"{item.title[:25]:<25} | Critic Rating | {new_rating}"
                except Failed:
                    pass

            # Replace the content rating from the configured source and/or
            # remap it via content_rating_mapper.
            if library.mass_content_rating_update or library.content_rating_mapper:
                try:
                    new_rating = None
                    if library.mass_content_rating_update:
                        if omdb_item and library.mass_content_rating_update == "omdb":
                            new_rating = omdb_item.content_rating
                        elif mdb_item and library.mass_content_rating_update == "mdb":
                            new_rating = mdb_item.content_rating if mdb_item.content_rating else None
                        elif mdb_item and library.mass_content_rating_update == "mdb_commonsense":
                            new_rating = mdb_item.commonsense if mdb_item.commonsense else None
                        elif tmdb_item and library.mass_content_rating_update == "tmdb":
                            new_rating = tmdb_item.content_rating if tmdb_item.content_rating else None
                        else:
                            raise Failed
                        if new_rating is None:
                            logger.info(f"{item.title[:25]:<25} | No Content Rating Found")
                    if library.content_rating_mapper:
                        if new_rating is None:
                            new_rating = item.contentRating
                        if new_rating in library.content_rating_mapper:
                            new_rating = library.content_rating_mapper[new_rating]
                    if str(item.contentRating) != str(new_rating):
                        item.editContentRating(new_rating)
                        batch_display += f"\n{item.title[:25]:<25} | Content Rating | {new_rating}"
                except Failed:
                    pass

            if library.mass_originally_available_update:
                try:
                    if omdb_item and library.mass_originally_available_update == "omdb":
                        new_date = omdb_item.released
                    elif mdb_item and library.mass_originally_available_update == "mdb":
                        new_date = mdb_item.released
                    elif tvdb_item and library.mass_originally_available_update == "tvdb":
                        new_date = tvdb_item.released
                    elif tmdb_item and library.mass_originally_available_update == "tmdb":
                        new_date = tmdb_item.release_date if library.is_movie else tmdb_item.first_air_date
                    elif anidb_item and library.mass_originally_available_update == "anidb":
                        new_date = anidb_item.released
                    else:
                        raise Failed
                    if new_date is None:
                        logger.info(f"{item.title[:25]:<25} | No Originally Available Date Found")
                    elif str(item.originallyAvailableAt) != str(new_date):
                        item.editOriginallyAvailable(new_date)
                        batch_display += f"\n{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}"
                except Failed:
                    pass

            # Commit all queued edits for this item in one request.
            item.saveEdits()

        # Push the queued add-all-existing batches to Radarr/Sonarr.
        if library.Radarr and library.radarr_add_all_existing:
            try:
                library.Radarr.add_tmdb(radarr_adds)
            except Failed as e:
                logger.error(e)
        if library.Sonarr and library.sonarr_add_all_existing:
            try:
                library.Sonarr.add_tvdb(sonarr_adds)
            except Failed as e:
                logger.error(e)

    # Build and run automated TMDb-collection / genre collections using the
    # templates configured on the library.
    if tmdb_collections or library.genre_collections:
        logger.info("")
        logger.separator(f"Starting Automated Collections")
        logger.info("")
        new_collections = {}
        templates = {}
        if tmdb_collections:
            templates["TMDb Collection"] = library.tmdb_collections["template"]
            for _i, _n in tmdb_collections.items():
                if int(_i) not in library.tmdb_collections["exclude_ids"]:
                    template = {"name": "TMDb Collection", "collection_id": _i}
                    for k, v in library.tmdb_collections["dictionary_variables"].items():
                        if int(_i) in v:
                            template[k] = v[int(_i)]
                    # Trim configured suffixes (e.g. " Collection") off the name.
                    for suffix in library.tmdb_collections["remove_suffix"]:
                        if _n.endswith(suffix):
                            _n = _n[:-len(suffix)]
                    new_collections[_n.strip()] = {"template": template}
        if library.genre_collections:
            templates["Genre Collection"] = library.genre_collections["template"]
            for genre in library.get_tags("genre"):
                if genre.title not in library.genre_collections["exclude_genres"]:
                    template = {"name": "Genre Collection", "genre": genre.title}
                    for k, v in library.genre_collections["dictionary_variables"].items():
                        if genre.title in v:
                            template[k] = v[genre.title]
                    title = library.genre_collections["title_format"]
                    title = title.replace("<<genre>>", genre.title)
                    if "<<library_type>>" in title:
                        title = title.replace("<<library_type>>", library.type)
                    new_collections[title] = {"template": template}
        # Feed the generated definitions through the normal metadata pipeline.
        metadata = MetadataFile(config, library, "Data", {"collections": new_collections, "templates": templates})
        if metadata.collections:
            library.collections.extend([c for c in metadata.collections])
            run_collection(config, library, metadata, metadata.get_collections(None))

    if library.radarr_remove_by_tag:
        library.Radarr.remove_all_with_tags(library.radarr_remove_by_tag)
    if library.sonarr_remove_by_tag:
        library.Sonarr.remove_all_with_tags(library.sonarr_remove_by_tag)

    # Announce which collection-deletion pass is about to run.
    if library.delete_collections_with_less is not None or library.delete_unmanaged_collections:
        logger.info("")
        print_suffix = ""
        unmanaged = ""
        if library.delete_collections_with_less is not None and library.delete_collections_with_less > 0:
            print_suffix = f" with less then {library.delete_collections_with_less} item{'s' if library.delete_collections_with_less > 1 else ''}"
        if library.delete_unmanaged_collections:
            if library.delete_collections_with_less is None:
                unmanaged = "Unmanaged Collections "
            elif library.delete_collections_with_less > 0:
                unmanaged = "Unmanaged Collections and "
        logger.separator(f"Deleting All {unmanaged}Collections{print_suffix}", space=False, border=False)
        logger.info("")

    # Delete matching collections; any survivor not managed by PMM is
    # remembered for the unmanaged report / asset check below.
    unmanaged_collections = []
    for col in library.get_all_collections():
        if (library.delete_collections_with_less and col.childCount < library.delete_collections_with_less) \
                or (library.delete_unmanaged_collections and col.title not in library.collections):
            library.query(col.delete)
            logger.info(f"{col.title} Deleted")
        elif col.title not in library.collections:
            unmanaged_collections.append(col)

    # Force every remaining collection to the configured collection mode.
    if library.mass_collection_mode:
        logger.info("")
        logger.separator(f"Mass Collection Mode for {library.name} Library", space=False, border=False)
        logger.info("")
        for col in library.get_all_collections():
            library.collection_mode_query(col, library.mass_collection_mode)

    if library.show_unmanaged and len(unmanaged_collections) > 0:
        logger.info("")
        logger.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False)
        logger.info("")
        for col in unmanaged_collections:
            logger.info(col.title)
        logger.info("")
        logger.info(f"{len(unmanaged_collections)} Unmanaged Collection{'s' if len(unmanaged_collections) > 1 else ''}")
    elif library.show_unmanaged:
        logger.info("")
        logger.separator(f"No Unmanaged Collections in {library.name} Library", space=False, border=False)
        logger.info("")

    if library.assets_for_all and len(unmanaged_collections) > 0:
        logger.info("")
        logger.separator(f"Unmanaged Collection Assets Check for {library.name} Library", space=False, border=False)
        logger.info("")
        for col in unmanaged_collections:
            library.find_assets(col)

    # Merge each item's locked attributes into the metadata backup YAML.
    if library.metadata_backup:
        logger.info("")
        logger.separator(f"Metadata Backup for {library.name} Library", space=False, border=False)
        logger.info("")
        logger.info(f"Metadata Backup Path: {library.metadata_backup['path']}")
        logger.info("")
        meta = None
        if os.path.exists(library.metadata_backup["path"]):
            try:
                meta, _, _ = yaml.util.load_yaml_guess_indent(open(library.metadata_backup["path"], encoding="utf-8"))
            except yaml.scanner.ScannerError as e:
                # Corrupt backup: move it aside under a numbered name and start fresh.
                logger.error(f"YAML Error: {util.tab_new_lines(e)}")
                filename, file_extension = os.path.splitext(library.metadata_backup["path"])
                i = 1
                # NOTE(review): `filename` is computed but unused; the rename
                # target hard-codes "(unknown)" — looks like it should use
                # f"{filename}{i}{file_extension}". Confirm against upstream.
                while os.path.exists(f"(unknown){i}{file_extension}"):
                    i += 1
                os.rename(library.metadata_backup["path"], f"(unknown){i}{file_extension}")
                logger.error(f"Backup failed to load saving copy to (unknown){i}{file_extension}")
        if not meta:
            meta = {}
        if "metadata" not in meta:
            meta["metadata"] = {}
        # Map display titles (and "Title (Year)") back to their existing map keys
        # so re-runs update the same entries instead of duplicating them.
        special_names = {}
        for mk, mv in meta["metadata"].items():
            if "title" in mv:
                special_names[mv["title"]] = mk
                if "year" in mv:
                    special_names[f"{mv['title']} ({mv['year']})"] = mk
        items = library.get_all(load=True)
        titles = [i.title for i in items]
        for i, item in enumerate(items, 1):
            logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
            map_key, attrs = library.get_locked_attributes(item, titles)
            if map_key in special_names:
                map_key = special_names[map_key]
            og_dict = meta["metadata"][map_key] if map_key in meta["metadata"] and meta["metadata"][map_key] else {}
            if attrs or (library.metadata_backup["add_blank_entries"] and not og_dict):
                def get_dict(attrs_dict):
                    # Deep-copy the locked attributes into plain dicts.
                    return {ak: get_dict(av) if isinstance(av, dict) else av for ak, av in attrs_dict.items()}
                def loop_dict(looping, dest_dict):
                    # Recursively merge `looping` into `dest_dict` in place.
                    if not looping:
                        return None
                    for lk, lv in looping.items():
                        dest_dict[lk] = loop_dict(lv, dest_dict[lk] if lk in dest_dict and dest_dict[lk] else {}) if isinstance(lv, dict) else lv
                    return dest_dict
                meta["metadata"][map_key] = loop_dict(get_dict(attrs), og_dict)
        logger.exorcise()
        try:
            yaml.round_trip_dump(meta, open(library.metadata_backup["path"], "w", encoding="utf-8"), block_seq_indent=2)
            logger.info(f"{len(meta['metadata'])} {library.type.capitalize()}{'s' if len(meta['metadata']) > 1 else ''} Backed Up")
        except yaml.scanner.ScannerError as e:
            logger.error(f"YAML Error: {util.tab_new_lines(e)}")
def run_collection(config, library, metadata, requested_collections):
logger.info("")
for mapping_name, collection_attrs in requested_collections.items():
@ -913,7 +442,7 @@ def run_collection(config, library, metadata, requested_collections):
logger.separator(f"Validating {mapping_name} Attributes", space=False, border=False)
builder = CollectionBuilder(config, metadata, mapping_name, no_missing, collection_attrs, library=library)
builder = CollectionBuilder(config, metadata, mapping_name, collection_attrs, library=library)
library.stats["names"].append(builder.name)
logger.info("")
@ -1089,7 +618,7 @@ def run_playlists(config):
logger.separator(f"Validating {mapping_name} Attributes", space=False, border=False)
builder = CollectionBuilder(config, playlist_file, mapping_name, no_missing, playlist_attrs)
builder = CollectionBuilder(config, playlist_file, mapping_name, playlist_attrs)
stats["names"].append(builder.name)
logger.info("")

Loading…
Cancel
Save