Merge pull request #282 from meisnate12/develop

v1.10.0
meisnate12 4 years ago committed by GitHub
commit e92b7fced2

@ -1,5 +1,5 @@
# Plex Meta Manager
#### Version 1.9.3
#### Version 1.10.0
The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but it has been rewritten from the ground up to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be run continuously, using YAML configuration files to update the metadata of the movies, shows, and collections in your libraries on a schedule, and to automatically build collections using the various methods detailed in the wiki. Examples of collections the script can build and update daily include Plex-based searches (such as actor, genre, or studio collections) and collections based on lists from TMDb, IMDb, Trakt, TVDb, AniDB, MyAnimeList, and various other services.

File diff suppressed because it is too large.

@ -16,6 +16,8 @@ class Cache:
else:
logger.info(f"Using cache database at {cache}")
cursor.execute("DROP TABLE IF EXISTS guids")
cursor.execute("DROP TABLE IF EXISTS imdb_to_tvdb_map")
cursor.execute("DROP TABLE IF EXISTS tmdb_to_tvdb_map")
cursor.execute("DROP TABLE IF EXISTS imdb_map")
cursor.execute(
"""CREATE TABLE IF NOT EXISTS guid_map (
@ -34,17 +36,17 @@ class Cache:
expiration_date TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS imdb_to_tvdb_map (
"""CREATE TABLE IF NOT EXISTS imdb_to_tvdb_map2 (
key INTEGER PRIMARY KEY,
imdb_id TEXT UNIQUE,
tvdb_id TEXT UNIQUE,
tvdb_id TEXT,
expiration_date TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS tmdb_to_tvdb_map (
"""CREATE TABLE IF NOT EXISTS tmdb_to_tvdb_map2 (
key INTEGER PRIMARY KEY,
tmdb_id TEXT UNIQUE,
tvdb_id TEXT UNIQUE,
tvdb_id TEXT,
expiration_date TEXT)"""
)
cursor.execute(
@ -110,18 +112,18 @@ class Cache:
def query_imdb_to_tvdb_map(self, _id, imdb=True):
from_id = "imdb_id" if imdb else "tvdb_id"
to_id = "tvdb_id" if imdb else "imdb_id"
return self._query_map("imdb_to_tvdb_map", _id, from_id, to_id)
return self._query_map("imdb_to_tvdb_map2", _id, from_id, to_id)
def update_imdb_to_tvdb_map(self, expired, imdb_id, tvdb_id):
self._update_map("imdb_to_tvdb_map", "imdb_id", imdb_id, "tvdb_id", tvdb_id, expired)
self._update_map("imdb_to_tvdb_map2", "imdb_id", imdb_id, "tvdb_id", tvdb_id, expired)
def query_tmdb_to_tvdb_map(self, _id, tmdb=True):
from_id = "tmdb_id" if tmdb else "tvdb_id"
to_id = "tvdb_id" if tmdb else "tmdb_id"
return self._query_map("tmdb_to_tvdb_map", _id, from_id, to_id)
return self._query_map("tmdb_to_tvdb_map2", _id, from_id, to_id)
def update_tmdb_to_tvdb_map(self, expired, tmdb_id, tvdb_id):
self._update_map("tmdb_to_tvdb_map", "tmdb_id", tmdb_id, "tvdb_id", tvdb_id, expired)
self._update_map("tmdb_to_tvdb_map2", "tmdb_id", tmdb_id, "tvdb_id", tvdb_id, expired)
def query_letterboxd_map(self, letterboxd_id):
return self._query_map("letterboxd_map", letterboxd_id, "letterboxd_id", "tmdb_id")
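The replacement tables drop the UNIQUE constraint on `tvdb_id`, presumably so that several IMDb or TMDb IDs can be cached against the same TVDb ID. A minimal sketch, assuming an in-memory SQLite database and the `imdb_to_tvdb_map2` schema shown above, of the insert the old constraint would have rejected:

```python
import sqlite3

# Illustration only, not the project's Cache class: with tvdb_id no longer UNIQUE,
# two IMDb IDs can point at the same TVDb ID without raising IntegrityError.
connection = sqlite3.connect(":memory:")
cursor = connection.cursor()
cursor.execute(
    """CREATE TABLE IF NOT EXISTS imdb_to_tvdb_map2 (
    key INTEGER PRIMARY KEY,
    imdb_id TEXT UNIQUE,
    tvdb_id TEXT,
    expiration_date TEXT)"""
)
cursor.execute("INSERT INTO imdb_to_tvdb_map2(imdb_id, tvdb_id) VALUES (?, ?)", ("tt0000001", "100"))
cursor.execute("INSERT INTO imdb_to_tvdb_map2(imdb_id, tvdb_id) VALUES (?, ?)", ("tt0000002", "100"))
connection.commit()
print(cursor.execute("SELECT imdb_id, tvdb_id FROM imdb_to_tvdb_map2").fetchall())
# -> [('tt0000001', '100'), ('tt0000002', '100')]
```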

@ -138,6 +138,9 @@ class Config:
elif data[attribute] is None:
if default_is_none is True: return None
else: message = f"{text} is blank"
elif var_type == "url":
if data[attribute].endswith(("\\", "/")): return data[attribute][:-1]
else: return data[attribute]
elif var_type == "bool":
if isinstance(data[attribute], bool): return data[attribute]
else: message = f"{text} must be either true or false"
@ -279,7 +282,7 @@ class Config:
logger.info("Connecting to Plex Libraries...")
self.general["plex"] = {}
self.general["plex"]["url"] = check_for_attribute(self.data, "url", parent="plex", default_is_none=True)
self.general["plex"]["url"] = check_for_attribute(self.data, "url", parent="plex", var_type="url", default_is_none=True)
self.general["plex"]["token"] = check_for_attribute(self.data, "token", parent="plex", default_is_none=True)
self.general["plex"]["timeout"] = check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60)
self.general["plex"]["clean_bundles"] = check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False)
@ -287,7 +290,7 @@ class Config:
self.general["plex"]["optimize"] = check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False)
self.general["radarr"] = {}
self.general["radarr"]["url"] = check_for_attribute(self.data, "url", parent="radarr", default_is_none=True)
self.general["radarr"]["url"] = check_for_attribute(self.data, "url", parent="radarr", var_type="url", default_is_none=True)
self.general["radarr"]["token"] = check_for_attribute(self.data, "token", parent="radarr", default_is_none=True)
self.general["radarr"]["version"] = check_for_attribute(self.data, "version", parent="radarr", test_list=radarr_versions, default="v3")
self.general["radarr"]["add"] = check_for_attribute(self.data, "add", parent="radarr", var_type="bool", default=False)
@ -299,7 +302,7 @@ class Config:
self.general["radarr"]["search"] = check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
self.general["sonarr"] = {}
self.general["sonarr"]["url"] = check_for_attribute(self.data, "url", parent="sonarr", default_is_none=True)
self.general["sonarr"]["url"] = check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True)
self.general["sonarr"]["token"] = check_for_attribute(self.data, "token", parent="sonarr", default_is_none=True)
self.general["sonarr"]["version"] = check_for_attribute(self.data, "version", parent="sonarr", test_list=sonarr_versions, default="v3")
self.general["sonarr"]["add"] = check_for_attribute(self.data, "add", parent="sonarr", var_type="bool", default=False)
@ -314,7 +317,7 @@ class Config:
self.general["sonarr"]["cutoff_search"] = check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False)
self.general["tautulli"] = {}
self.general["tautulli"]["url"] = check_for_attribute(self.data, "url", parent="tautulli", default_is_none=True)
self.general["tautulli"]["url"] = check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True)
self.general["tautulli"]["apikey"] = check_for_attribute(self.data, "apikey", parent="tautulli", default_is_none=True)
self.libraries = []
@ -440,13 +443,13 @@ class Config:
params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"))]
params["default_dir"] = default_dir
params["plex"] = {}
params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", default=self.general["plex"]["url"], req_default=True, save=False)
params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", var_type="url", default=self.general["plex"]["url"], req_default=True, save=False)
params["plex"]["token"] = check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False)
params["plex"]["timeout"] = check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False)
params["plex"]["clean_bundles"] = check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False)
params["plex"]["empty_trash"] = check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False)
params["plex"]["optimize"] = check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
library = PlexAPI(params, self.TMDb, self.TVDb)
library = PlexAPI(params)
logger.info("")
logger.info(f"{display_name} Library Connection Successful")
except Failed as e:
@ -462,7 +465,7 @@ class Config:
logger.info("")
radarr_params = {}
try:
radarr_params["url"] = check_for_attribute(lib, "url", parent="radarr", default=self.general["radarr"]["url"], req_default=True, save=False)
radarr_params["url"] = check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False)
radarr_params["token"] = check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False)
radarr_params["version"] = check_for_attribute(lib, "version", parent="radarr", test_list=radarr_versions, default=self.general["radarr"]["version"], save=False)
radarr_params["add"] = check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False)
@ -486,7 +489,7 @@ class Config:
logger.info("")
sonarr_params = {}
try:
sonarr_params["url"] = check_for_attribute(lib, "url", parent="sonarr", default=self.general["sonarr"]["url"], req_default=True, save=False)
sonarr_params["url"] = check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False)
sonarr_params["token"] = check_for_attribute(lib, "token", parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False)
sonarr_params["version"] = check_for_attribute(lib, "version", parent="sonarr", test_list=sonarr_versions, default=self.general["sonarr"]["version"], save=False)
sonarr_params["add"] = check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False)
@ -516,7 +519,7 @@ class Config:
logger.info("")
tautulli_params = {}
try:
tautulli_params["url"] = check_for_attribute(lib, "url", parent="tautulli", default=self.general["tautulli"]["url"], req_default=True, save=False)
tautulli_params["url"] = check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False)
tautulli_params["apikey"] = check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
library.Tautulli = TautulliAPI(tautulli_params)
except Failed as e:

@ -248,7 +248,7 @@ class Convert:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return tvdb_id
def get_id(self, item, library, length):
def get_id(self, item, library):
expired = None
if self.config.Cache:
cache_id, media_type, expired = self.config.Cache.query_guid_map(item.guid)
@ -344,7 +344,7 @@ class Convert:
def update_cache(cache_ids, id_type, guid_type):
if self.config.Cache:
cache_ids = util.compile_list(cache_ids)
logger.info(util.adjust_space(length, f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}"))
logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}"))
self.config.Cache.update_guid_map(guid_type, item.guid, cache_ids, expired)
if tmdb_id and library.is_movie:
@ -359,8 +359,8 @@ class Convert:
else:
raise Failed(f"No ID to convert")
except Failed as e:
logger.info(util.adjust_space(length, f"Mapping Error | {item.guid:<46} | {e} for {item.title}"))
logger.info(util.adjust_space(f"Mapping Error | {item.guid:<46} | {e} for {item.title}"))
except BadRequest:
util.print_stacktrace()
logger.info(util.adjust_space(length, f"Mapping Error | {item.guid:<46} | Bad Request for {item.title}"))
logger.info(util.adjust_space(f"Mapping Error | {item.guid:<46} | Bad Request for {item.title}"))
return None, None

@ -62,7 +62,6 @@ class IMDbAPI:
current_url = self._fix_url(imdb_url)
total, item_count = self._total(current_url, language)
header = {"Accept-Language": language}
length = 0
imdb_ids = []
if "&start=" in current_url: current_url = re.sub("&start=\\d+", "", current_url)
if "&count=" in current_url: current_url = re.sub("&count=\\d+", "", current_url)
@ -74,7 +73,7 @@ class IMDbAPI:
num_of_pages = math.ceil(int(limit) / item_count)
for i in range(1, num_of_pages + 1):
start_num = (i - 1) * item_count + 1
length = util.print_return(length, f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}")
util.print_return(f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}")
if imdb_url.startswith(self.urls["keyword"]):
response = self._request(f"{current_url}&page={i}", header)
else:
@ -83,7 +82,7 @@ class IMDbAPI:
imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst")[:remainder])
else:
imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst"))
util.print_end(length)
util.print_end()
if imdb_ids: return imdb_ids
else: raise Failed(f"IMDb Error: No IMDb IDs Found at {imdb_url}")
@ -111,11 +110,10 @@ class IMDbAPI:
logger.info(f"Processing {pretty}: {status}{data['url']}")
imdb_ids = self._ids_from_url(data["url"], language, data["limit"])
total_ids = len(imdb_ids)
length = 0
for i, imdb in enumerate(imdb_ids, 1):
length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}")
util.print_return(f"Converting IMDb ID {i}/{total_ids}")
run_convert(imdb)
logger.info(util.adjust_space(length, f"Processed {total_ids} IMDb IDs"))
logger.info(util.adjust_space(f"Processed {total_ids} IMDb IDs"))
else:
raise Failed(f"IMDb Error: Method {method} not supported")
logger.debug("")

@ -49,10 +49,9 @@ class LetterboxdAPI:
items = self._parse_list(data, language)
total_items = len(items)
if total_items > 0:
length = 0
for i, item in enumerate(items, 1):
letterboxd_id, slug = item
length = util.print_return(length, f"Finding TMDb ID {i}/{total_items}")
util.print_return(f"Finding TMDb ID {i}/{total_items}")
tmdb_id = None
expired = None
if self.config.Cache:
@ -66,7 +65,7 @@ class LetterboxdAPI:
if self.config.Cache:
self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id)
movie_ids.append(tmdb_id)
logger.info(util.adjust_space(length, f"Processed {total_items} TMDb IDs"))
logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs"))
else:
logger.error(f"Letterboxd Error: No List Items found in {data}")
logger.debug("")

@ -118,7 +118,7 @@ class Metadata:
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False):
def edit_tags(attr, obj, group, alias, extra=None, movie_library=False):
if movie_library and not self.library.is_movie and (attr in alias or f"{attr}.sync" in alias or f"{attr}.remove" in alias):
logger.error(f"Metadata Error: {attr} attribute only works for movie libraries")
elif attr in alias and f"{attr}.sync" in alias:
@ -137,7 +137,7 @@ class Metadata:
add_tags.extend(extra)
remove_tags = util.get_list(group[alias[f"{attr}.remove"]]) if f"{attr}.remove" in alias else None
sync_tags = util.get_list(group[alias[f"{attr}.sync"]]) if f"{attr}.sync" in alias else None
return self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags, key=key)
return self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
return False
def set_image(attr, obj, group, alias, poster=True, url=True):
@ -252,30 +252,18 @@ class Metadata:
updated = True
advance_edits = {}
add_advanced_edit("episode_sorting", item, meta, methods, show_library=True)
add_advanced_edit("keep_episodes", item, meta, methods, show_library=True)
add_advanced_edit("delete_episodes", item, meta, methods, show_library=True)
add_advanced_edit("season_display", item, meta, methods, show_library=True)
add_advanced_edit("episode_ordering", item, meta, methods, show_library=True)
add_advanced_edit("metadata_language", item, meta, methods, new_agent=True)
add_advanced_edit("use_original_title", item, meta, methods, new_agent=True)
for advance_edit in ["episode_sorting", "keep_episodes", "delete_episodes", "season_display", "episode_ordering", "metadata_language", "use_original_title"]:
is_show = advance_edit in ["episode_sorting", "keep_episodes", "delete_episodes", "season_display", "episode_ordering"]
is_new_agent = advance_edit in ["metadata_language", "use_original_title"]
add_advanced_edit(advance_edit, item, meta, methods, show_library=is_show, new_agent=is_new_agent)
if self.library.edit_item(item, mapping_name, item_type, advance_edits, advanced=True):
updated = True
if edit_tags("genre", item, meta, methods, extra=genres):
updated = True
if edit_tags("label", item, meta, methods):
updated = True
if edit_tags("collection", item, meta, methods):
updated = True
if edit_tags("country", item, meta, methods, key="countries", movie_library=True):
updated = True
if edit_tags("director", item, meta, methods, movie_library=True):
updated = True
if edit_tags("producer", item, meta, methods, movie_library=True):
updated = True
if edit_tags("writer", item, meta, methods, movie_library=True):
updated = True
for tag_edit in ["genre", "label", "collection", "country", "director", "producer", "writer"]:
is_movie = tag_edit in ["country", "director", "producer", "writer"]
has_extra = genres if tag_edit == "genre" else None
if edit_tags(tag_edit, item, meta, methods, movie_library=is_movie, extra=has_extra):
updated = True
logger.info(f"{item_type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@ -1,5 +1,5 @@
import glob, logging, os, requests
from modules import util
from modules import builder, util
from modules.meta import Metadata
from modules.util import Failed
import plexapi
@ -19,14 +19,14 @@ search_translation = {
"content_rating": "contentRating",
"subtitle_language": "subtitleLanguage",
"added": "addedAt",
"originally_available": "originallyAvailableAt",
"release": "originallyAvailableAt",
"audience_rating": "audienceRating",
"critic_rating": "rating",
"user_rating": "userRating",
"plays": "viewCount",
"episode_title": "episode.title",
"episode_added": "episode.addedAt",
"episode_originally_available": "episode.originallyAvailableAt",
"episode_air_date": "episode.originallyAvailableAt",
"episode_year": "episode.year",
"episode_user_rating": "episode.userRating",
"episode_plays": "episode.viewCount"
@ -77,58 +77,94 @@ item_advance_keys = {
}
new_plex_agents = ["tv.plex.agents.movie", "tv.plex.agents.series"]
searches = [
"title", "title.and", "title.not", "title.begins", "title.ends",
"studio", "studio.and", "studio.not", "studio.begins", "studio.ends",
"actor", "actor.and", "actor.not",
"audio_language", "audio_language.and", "audio_language.not",
"collection", "collection.and", "collection.not",
"content_rating", "content_rating.and", "content_rating.not",
"country", "country.and", "country.not",
"director", "director.and", "director.not",
"genre", "genre.and", "genre.not",
"label", "label.and", "label.not",
"network", "network.and", "network.not",
"producer", "producer.and", "producer.not",
"subtitle_language", "subtitle_language.and", "subtitle_language.not",
"writer", "writer.and", "writer.not",
"decade", "resolution",
"title", "title.not", "title.begins", "title.ends",
"studio", "studio.not", "studio.begins", "studio.ends",
"actor", "actor.not",
"audio_language", "audio_language.not",
"collection", "collection.not",
"content_rating", "content_rating.not",
"country", "country.not",
"director", "director.not",
"genre", "genre.not",
"label", "label.not",
"network", "network.not",
"producer", "producer.not",
"subtitle_language", "subtitle_language.not",
"writer", "writer.not",
"decade", "resolution", "hdr", "unmatched", "duplicate", "unplayed", "progress", "trash",
"last_played", "last_played.not", "last_played.before", "last_played.after",
"added", "added.not", "added.before", "added.after",
"originally_available", "originally_available.not",
"originally_available.before", "originally_available.after",
"release", "release.not", "release.before", "release.after",
"duration.gt", "duration.gte", "duration.lt", "duration.lte",
"plays.gt", "plays.gte", "plays.lt", "plays.lte",
"user_rating.gt", "user_rating.gte", "user_rating.lt", "user_rating.lte",
"critic_rating.gt", "critic_rating.gte", "critic_rating.lt", "critic_rating.lte",
"audience_rating.gt", "audience_rating.gte", "audience_rating.lt", "audience_rating.lte",
"year", "year.not", "year.gt", "year.gte", "year.lt", "year.lte"
"year", "year.not", "year.gt", "year.gte", "year.lt", "year.lte",
"unplayed_episodes", "episode_unplayed", "episode_duplicate", "episode_progress", "episode_unmatched",
"episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends",
"episode_added", "episode_added.not", "episode_added.before", "episode_added.after",
"episode_air_date", "episode_air_date.not", "episode_air_date.before", "episode_air_date.after",
"episode_last_played", "episode_last_played.not", "episode_last_played.before", "episode_last_played.after",
"episode_plays.gt", "episode_plays.gte", "episode_plays.lt", "episode_plays.lte",
"episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt", "episode_user_rating.lte",
"episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt", "episode_year.lte"
]
and_searches = [
"title.and", "studio.and", "actor.and", "audio_language.and", "collection.and",
"content_rating.and", "country.and", "director.and", "genre.and", "label.and",
"network.and", "producer.and", "subtitle_language.and", "writer.and"
]
or_searches = [
"title", "studio", "actor", "audio_language", "collection", "content_rating",
"country", "director", "genre", "label", "network", "producer", "subtitle_language",
"writer", "decade", "resolution", "year", "episode_title", "episode_year"
]
movie_only_searches = [
"audio_language", "audio_language.and", "audio_language.not",
"country", "country.and", "country.not",
"subtitle_language", "subtitle_language.and", "subtitle_language.not",
"decade", "resolution",
"originally_available.before", "originally_available.after",
"country", "country.not",
"director", "director.not",
"producer", "producer.not",
"writer", "writer.not",
"decade", "duplicate", "unplayed", "progress", "trash",
"plays.gt", "plays.gte", "plays.lt", "plays.lte",
"duration.gt", "duration.gte", "duration.lt", "duration.lte"
]
show_only_searches = [
"network", "network.and", "network.not",
"network", "network.not",
"episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends",
"episode_added", "episode_added.not", "episode_added.before", "episode_added.after",
"episode_air_date", "episode_air_date.not",
"episode_air_date.before", "episode_air_date.after",
"episode_plays.gt", "episode_plays.gte", "episode_plays.lt", "episode_plays.lte",
"episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt", "episode_user_rating.lte",
"episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt", "episode_year.lte"
]
tmdb_searches = [
"actor", "actor.and", "actor.not",
"director", "director.and", "director.not",
"producer", "producer.and", "producer.not",
"writer", "writer.and", "writer.not"
number_attributes = ["plays", "episode_plays", "added", "episode_added", "release", "episode_air_date", "duration", "tmdb_vote_count"]
float_attributes = ["user_rating", "episode_user_rating", "critic_rating", "audience_rating"]
boolean_attributes = [
"hdr", "unmatched", "duplicate", "unplayed", "progress", "trash",
"unplayed_episodes", "episode_unplayed", "episode_duplicate", "episode_progress", "episode_unmatched",
]
tmdb_attributes = ["actor", "director", "producer", "writer"]
date_attributes = ["added", "episode_added", "release", "episode_air_date", "last_played", "episode_last_played"]
search_display = {
"added": "Date Added",
"release": "Release Date",
"hdr": "HDR",
"progress": "In Progress",
"episode_progress": "Episode In Progress"
}
sorts = {
None: None,
"title.asc": "titleSort:asc", "title.desc": "titleSort:desc",
"originally_available.asc": "originallyAvailableAt:asc", "originally_available.desc": "originallyAvailableAt:desc",
"release.asc": "originallyAvailableAt:asc", "release.desc": "originallyAvailableAt:desc",
"critic_rating.asc": "rating:asc", "critic_rating.desc": "rating:desc",
"audience_rating.asc": "audienceRating:asc", "audience_rating.desc": "audienceRating:desc",
"duration.asc": "duration:asc", "duration.desc": "duration:desc",
"added.asc": "addedAt:asc", "added.desc": "addedAt:desc"
}
modifiers = {
".and": "&",
".not": "!",
".begins": "<",
".ends": ">",
@ -167,64 +203,11 @@ tags = [
"subtitle_language",
"writer"
]
smart_searches = [
"all", "any",
"title", "title.not", "title.begins", "title.ends",
"studio", "studio.not", "studio.begins", "studio.ends",
"actor", "actor.not",
"audio_language", "audio_language.not",
"collection", "collection.not",
"content_rating", "content_rating.not",
"country", "country.not",
"director", "director.not",
"genre", "genre.not",
"label", "label.not",
"network", "network.not",
"producer", "producer.not",
"subtitle_language", "subtitle_language.not",
"writer", "writer.not",
"decade", "resolution", "hdr",
"added", "added.not", "added.before", "added.after",
"originally_available", "originally_available.not",
"originally_available.before", "originally_available.after",
"plays.gt", "plays.gte", "plays.lt", "plays.lte",
"duration.gt", "duration.gte", "duration.lt", "duration.lte",
"user_rating.gt", "user_rating.gte", "user_rating.lt", "user_rating.lte",
"audience_rating.gt", "audience_rating.gte", "audience_rating.lt","audience_rating.lte",
"critic_rating.gt", "critic_rating.gte", "critic_rating.lt","critic_rating.lte",
"year", "year.not", "year.gt", "year.gte", "year.lt","year.lte",
"episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends",
"episode_added", "episode_added.not", "episode_added.before", "episode_added.after",
"episode_originally_available", "episode_originally_available.not",
"episode_originally_available.before", "episode_originally_available.after",
"episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt","episode_year.lte",
"episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt","episode_user_rating.lte",
"episode_plays.gt", "episode_plays.gte", "episode_plays.lt", "episode_plays.lte"
]
movie_only_smart_searches = [
"country", "country.not",
"director", "director.not",
"producer", "producer.not",
"writer", "writer.not",
"decade",
"originally_available", "originally_available.not",
"originally_available.before", "originally_available.after",
"plays.gt", "plays.gte", "plays.lt", "plays.lte",
"duration.gt", "duration.gte", "duration.lt", "duration.lte"
]
show_only_smart_searches = [
"episode_title", "episode_title.not", "episode_title.begins", "episode_title.ends",
"episode_added", "episode_added.not", "episode_added.before", "episode_added.after",
"episode_originally_available", "episode_originally_available.not",
"episode_originally_available.before", "episode_originally_available.after",
"episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt","episode_year.lte",
"episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt","episode_user_rating.lte",
"episode_plays.gt", "episode_plays.gte", "episode_plays.lt", "episode_plays.lte"
]
movie_smart_sorts = {
movie_sorts = {
"title.asc": "titleSort", "title.desc": "titleSort%3Adesc",
"year.asc": "year", "year.desc": "year%3Adesc",
"originally_available.asc": "originallyAvailableAt", "originally_available.desc": "originallyAvailableAt%3Adesc",
"release.asc": "originallyAvailableAt", "release.desc": "originallyAvailableAt%3Adesc",
"critic_rating.asc": "rating", "critic_rating.desc": "rating%3Adesc",
"audience_rating.asc": "audienceRating", "audience_rating.desc": "audienceRating%3Adesc",
"user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc",
@ -234,10 +217,11 @@ movie_smart_sorts = {
"added.asc": "addedAt", "added.desc": "addedAt%3Adesc",
"random": "random"
}
show_smart_sorts = {
show_sorts = {
"title.asc": "titleSort", "title.desc": "titleSort%3Adesc",
"year.asc": "year", "year.desc": "year%3Adesc",
"originally_available.asc": "originallyAvailableAt", "originally_available.desc": "originallyAvailableAt%3Adesc",
"release.asc": "originallyAvailableAt", "release.desc": "originallyAvailableAt%3Adesc",
"critic_rating.asc": "rating", "critic_rating.desc": "rating%3Adesc",
"audience_rating.asc": "audienceRating", "audience_rating.desc": "audienceRating%3Adesc",
"user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc",
@ -246,19 +230,20 @@ show_smart_sorts = {
"episode_added.asc": "episode.addedAt", "episode_added.desc": "episode.addedAt%3Adesc",
"random": "random"
}
season_smart_sorts = {
season_sorts = {
"season.asc": "season.index%2Cseason.titleSort", "season.desc": "season.index%3Adesc%2Cseason.titleSort",
"show.asc": "show.titleSort%2Cindex", "show.desc": "show.titleSort%3Adesc%2Cindex",
"user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc",
"added.asc": "addedAt", "added.desc": "addedAt%3Adesc",
"random": "random"
}
episode_smart_sorts = {
episode_sorts = {
"title.asc": "titleSort", "title.desc": "titleSort%3Adesc",
"show.asc": "show.titleSort%2Cseason.index%3AnullsLast%2Cepisode.index%3AnullsLast%2Cepisode.originallyAvailableAt%3AnullsLast%2Cepisode.titleSort%2Cepisode.id",
"show.desc": "show.titleSort%3Adesc%2Cseason.index%3AnullsLast%2Cepisode.index%3AnullsLast%2Cepisode.originallyAvailableAt%3AnullsLast%2Cepisode.titleSort%2Cepisode.id",
"year.asc": "year", "year.desc": "year%3Adesc",
"originally_available.asc": "originallyAvailableAt", "originally_available.desc": "originallyAvailableAt%3Adesc",
"release.asc": "originallyAvailableAt", "release.desc": "originallyAvailableAt%3Adesc",
"critic_rating.asc": "rating", "critic_rating.desc": "rating%3Adesc",
"audience_rating.asc": "audienceRating", "audience_rating.desc": "audienceRating%3Adesc",
"user_rating.asc": "userRating", "user_rating.desc": "userRating%3Adesc",
@ -267,15 +252,15 @@ episode_smart_sorts = {
"added.asc": "addedAt", "added.desc": "addedAt%3Adesc",
"random": "random"
}
smart_types = {
"movies": (1, movie_smart_sorts),
"shows": (2, show_smart_sorts),
"seasons": (3, season_smart_sorts),
"episodes": (4, episode_smart_sorts),
sort_types = {
"movies": (1, movie_sorts),
"shows": (2, show_sorts),
"seasons": (3, season_sorts),
"episodes": (4, episode_sorts),
}
class PlexAPI:
def __init__(self, params, TMDb, TVDb):
def __init__(self, params):
try:
self.PlexServer = PlexServer(params["plex"]["url"], params["plex"]["token"], timeout=params["plex"]["timeout"])
except Unauthorized:
@ -318,8 +303,6 @@ class PlexAPI:
for ad in params["asset_directory"]:
logger.info(f"Using Asset Directory: {ad}")
self.TMDb = TMDb
self.TVDb = TVDb
self.Radarr = None
self.Sonarr = None
self.Tautulli = None
@ -351,6 +334,10 @@ class PlexAPI:
self.empty_trash = params["plex"]["empty_trash"]
self.optimize = params["plex"]["optimize"]
self.missing = {}
self.movie_map = {}
self.show_map = {}
self.movie_rating_key_map = {}
self.show_rating_key_map = {}
self.run_again = []
def get_all_collections(self):
@ -455,7 +442,7 @@ class PlexAPI:
if title not in labels:
raise Failed(f"Plex Error: Label: {title} does not exist")
smart_type = 1 if self.is_movie else 2
sort_type = movie_smart_sorts[sort] if self.is_movie else show_smart_sorts[sort]
sort_type = movie_sorts[sort] if self.is_movie else show_sorts[sort]
return smart_type, f"?type={smart_type}&sort={sort_type}&label={labels[title]}"
def test_smart_filter(self, uri_args):
@ -537,56 +524,8 @@ class PlexAPI:
logger.info(f"Processing {pretty} {media_type}s")
items = self.get_all()
elif method == "plex_search":
search_terms = {}
has_processed = False
search_limit = None
search_sort = None
for search_method, search_data in data.items():
if search_method == "limit":
search_limit = search_data
elif search_method == "sort_by":
search_sort = search_data
else:
search, modifier = os.path.splitext(str(search_method).lower())
final_search = search_translation[search] if search in search_translation else search
if search in ["added", "originally_available"] and modifier == "":
final_mod = ">>"
elif search in ["added", "originally_available"] and modifier == ".not":
final_mod = "<<"
elif search in ["critic_rating", "audience_rating"] and modifier == ".gt":
final_mod = "__gt"
elif search in ["critic_rating", "audience_rating"] and modifier == ".lt":
final_mod = "__lt"
else:
final_mod = modifiers[modifier] if modifier in modifiers else ""
final_method = f"{final_search}{final_mod}"
if search == "duration":
search_terms[final_method] = search_data * 60000
elif search in ["added", "originally_available"] and modifier in ["", ".not"]:
search_terms[final_method] = f"{search_data}d"
else:
search_terms[final_method] = search_data
if search in ["added", "originally_available"] or modifier in [".gt", ".gte", ".lt", ".lte", ".before", ".after"]:
ors = f"{search_method}({search_data}"
else:
ors = ""
conjunction = " AND " if final_mod == "&" else " OR "
for o, param in enumerate(search_data):
or_des = conjunction if o > 0 else f"{search_method}("
ors += f"{or_des}{param}"
if has_processed:
logger.info(f" AND {ors})")
else:
logger.info(f"Processing {pretty}: {ors})")
has_processed = True
if search_sort:
logger.info(f" SORT BY {search_sort}")
if search_limit:
logger.info(f" LIMIT {search_limit}")
logger.debug(f"Search: {search_terms}")
items = self.search(sort=sorts[search_sort], maxresults=search_limit, **search_terms)
util.print_multiline(data[1], info=True)
items = self.get_filter_items(data[2])
elif method == "plex_collectionless":
good_collections = []
logger.info("Collections Excluded")
@ -612,9 +551,8 @@ class PlexAPI:
logger.info(col.title)
collection_indexes = [c.index for c in good_collections]
all_items = self.get_all()
length = 0
for i, item in enumerate(all_items, 1):
length = util.print_return(length, f"Processing: {i}/{len(all_items)} {item.title}")
util.print_return(f"Processing: {i}/{len(all_items)} {item.title}")
add_item = True
self.query(item.reload)
for collection in item.collections:
@ -623,7 +561,7 @@ class PlexAPI:
break
if add_item:
items.append(item)
logger.info(util.adjust_space(length, f"Processed {len(all_items)} {'Movies' if self.is_movie else 'Shows'}"))
logger.info(util.adjust_space(f"Processed {len(all_items)} {'Movies' if self.is_movie else 'Shows'}"))
else:
raise Failed(f"Plex Error: Method {method} not supported")
if len(items) > 0:
@ -665,6 +603,42 @@ class PlexAPI:
name = collection.title if isinstance(collection, Collections) else str(collection)
return name, self.get_collection_items(collection, smart_label_collection)
def map_guids(self, config):
logger.info(f"Loading {'Movie' if self.is_movie else 'Show'} Library: {self.name}")
logger.info("")
items = self.Plex.all()
logger.info(f"Mapping {'Movie' if self.is_movie else 'Show'} Library: {self.name}")
logger.info("")
for i, item in enumerate(items, 1):
util.print_return(f"Processing: {i}/{len(items)} {item.title}")
if item.ratingKey not in self.movie_rating_key_map and item.ratingKey not in self.show_rating_key_map:
id_type, main_id = config.Convert.get_id(item, self)
if main_id:
if not isinstance(main_id, list):
main_id = [main_id]
if id_type == "movie":
self.movie_rating_key_map[item.ratingKey] = main_id[0]
for m in main_id:
if m in self.movie_map:
self.movie_map[m].append(item.ratingKey)
else:
self.movie_map[m] = [item.ratingKey]
elif id_type == "show":
self.show_rating_key_map[item.ratingKey] = main_id[0]
for m in main_id:
if m in self.show_map:
self.show_map[m].append(item.ratingKey)
else:
self.show_map[m] = [item.ratingKey]
logger.info("")
logger.info(util.adjust_space(f"Processed {len(items)} {'Movies' if self.is_movie else 'Shows'}"))
def get_tmdb_from_map(self, item):
return self.movie_rating_key_map[item.ratingKey] if item.ratingKey in self.movie_rating_key_map else None
def get_tvdb_from_map(self, item):
return self.show_rating_key_map[item.ratingKey] if item.ratingKey in self.show_rating_key_map else None
def search_item(self, data, year=None):
kwargs = {}
if year is not None:
@ -685,10 +659,9 @@ class PlexAPI:
logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed")
return False
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None, key=None):
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
updated = False
if key is None:
key = f"{attr}s"
key = builder.filter_translation[attr] if attr in builder.filter_translation else attr
if add_tags or remove_tags or sync_tags:
item_tags = [item_tag.tag for item_tag in getattr(obj, key)]
input_tags = []
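edit_tags no longer takes an explicit `key`; the plexapi attribute holding the tags is now looked up in `builder.filter_translation`, falling back to the attribute name itself. A minimal sketch of that lookup, assuming the table contains mappings such as `"country": "countries"` (matching the removed `key="countries"` call in meta.py above):

```python
# Hypothetical excerpt; the full table lives in modules/builder.py (diff suppressed above).
filter_translation = {"country": "countries"}

def tag_key(attr):
    # Same expression as the new edit_tags line above; anything not in the
    # table falls back to the attribute name unchanged.
    return filter_translation[attr] if attr in filter_translation else attr

print(tag_key("country"))  # countries -> item.countries holds the Country tag objects
```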

@ -2,7 +2,6 @@ import logging, requests
from modules import util
from modules.util import Failed
from plexapi.exceptions import BadRequest, NotFound
from plexapi.video import Movie, Show
from retrying import retry
logger = logging.getLogger("Plex Meta Manager")
@ -11,15 +10,15 @@ builders = ["tautulli_popular", "tautulli_watched"]
class TautulliAPI:
def __init__(self, params):
self.url = params["url"]
self.apikey = params["apikey"]
try:
response = requests.get(f"{params['url']}/api/v2?apikey={params['apikey']}&cmd=get_library_names").json()
response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names")
except Exception:
util.print_stacktrace()
raise Failed("Tautulli Error: Invalid url")
if response["response"]["result"] != "success":
raise Failed(f"Tautulli Error: {response['response']['message']}")
self.url = params["url"]
self.apikey = params["apikey"]
def get_items(self, library, params):
query_size = int(params["list_size"]) + int(params["list_buffer"])
@ -65,5 +64,5 @@ class TautulliAPI:
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, url):
logger.debug(f"Tautulli URL: {url.replace(self.apikey, '################################')}")
logger.debug(f"Tautulli URL: {url.replace(self.apikey, '###############')}")
return requests.get(url).json()
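Assigning `self.url` and `self.apikey` before the connection test lets the retried `_request` helper, and its API-key masking, cover that first call as well. A standalone illustration of the masking line above, with a made-up host and key:

```python
# Not the TautulliAPI class itself, just the log-masking expression with fake values.
apikey = "0123456789abcdef0123456789abcdef"
url = f"http://tautulli:8181/api/v2?apikey={apikey}&cmd=get_library_names"
print(url.replace(apikey, "#" * 15))
# http://tautulli:8181/api/v2?apikey=###############&cmd=get_library_names
```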

@ -26,6 +26,7 @@ def retry_if_not_plex(exception):
separating_character = "="
screen_width = 100
spacing = 0
days_alias = {
"monday": 0, "mon": 0, "m": 0,
@ -155,13 +156,6 @@ pretty_ids = {
def tab_new_lines(data):
return str(data).replace("\n", "\n|\t ") if "\n" in str(data) else str(data)
def adjust_space(old_length, display_title):
display_title = str(display_title)
space_length = old_length - len(display_title)
if space_length > 0:
display_title += " " * space_length
return display_title
def make_ordinal(n):
n = int(n)
suffix = ["th", "st", "nd", "rd", "th"][min(n % 10, 4)]
@ -391,12 +385,22 @@ def apply_formatter(handler, border=True):
text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
handler.setFormatter(logging.Formatter(text))
def print_return(length, text):
print(adjust_space(length, f"| {text}"), end="\r")
return len(text) + 2
def adjust_space(display_title):
display_title = str(display_title)
space_length = spacing - len(display_title)
if space_length > 0:
display_title += " " * space_length
return display_title
def print_return(text):
print(adjust_space(f"| {text}"), end="\r")
global spacing
spacing = len(text) + 2
def print_end(length):
print(adjust_space(length, " "), end="\r")
def print_end():
print(adjust_space(" "), end="\r")
global spacing
spacing = 0
def validate_filename(filename):
if is_valid_filename(filename):
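print_return and adjust_space now share the width of the last status line through the module-level `spacing` variable instead of threading a `length` value through every caller, which is why the `length` parameter disappears throughout this commit. A self-contained sketch of the same pattern and how callers use it:

```python
import time

spacing = 0  # width of the last transient line written by print_return

def adjust_space(display_title):
    # Pad with spaces so a shorter message fully overwrites the previous status line.
    display_title = str(display_title)
    if spacing > len(display_title):
        display_title += " " * (spacing - len(display_title))
    return display_title

def print_return(text):
    global spacing
    print(adjust_space(f"| {text}"), end="\r")
    spacing = len(text) + 2

for i in range(1, 4):
    print_return(f"Processing: {i}/3 Some Item Title")
    time.sleep(0.2)
# The final line pads past the last status line, just as logger.info(util.adjust_space(...)) does above.
print(adjust_space("Processed 3 Items"))
```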

@ -105,7 +105,7 @@ def start(config_path, is_test=False, time_scheduled=None, requested_collections
logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
logger.info(util.centered(" |___/ "))
logger.info(util.centered(" Version: 1.9.3 "))
logger.info(util.centered(" Version: 1.10.0 "))
if time_scheduled: start_type = f"{time_scheduled} "
elif is_test: start_type = "Test "
elif requested_collections: start_type = "Collections "
@ -144,9 +144,9 @@ def update_libraries(config):
logger.info("")
util.separator(f"Mapping {library.name} Library", space=False, border=False)
logger.info("")
movie_map, show_map = map_guids(config, library)
library.map_guids(config)
if not config.test_mode and not config.resume_from and not collection_only and library.mass_update:
mass_metadata(config, library, movie_map, show_map)
mass_metadata(config, library)
for metadata in library.metadata_files:
logger.info("")
util.separator(f"Running Metadata File\n{metadata.path}")
@ -164,7 +164,7 @@ def update_libraries(config):
logger.info("")
util.separator(f"{'Test ' if config.test_mode else ''}Collections")
logger.removeHandler(library_handler)
run_collection(config, library, metadata, collections_to_run, movie_map, show_map)
run_collection(config, library, metadata, collections_to_run)
logger.addHandler(library_handler)
if not config.test_mode and not config.requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
@ -205,12 +205,11 @@ def update_libraries(config):
logger.info("")
util.separator("Run Again")
logger.info("")
length = 0
for x in range(1, config.general["run_again_delay"] + 1):
length = util.print_return(length, f"Waiting to run again in {config.general['run_again_delay'] - x + 1} minutes")
util.print_return(f"Waiting to run again in {config.general['run_again_delay'] - x + 1} minutes")
for y in range(60):
time.sleep(1)
util.print_end(length)
util.print_end()
for library in config.libraries:
if library.run_again:
col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log")
@ -222,13 +221,13 @@ def update_libraries(config):
logger.info("")
util.separator(f"{library.name} Library Run Again")
logger.info("")
movie_map, show_map = map_guids(config, library)
library.map_guids(config)
for builder in library.run_again:
logger.info("")
util.separator(f"{builder.name} Collection")
logger.info("")
try:
builder.run_collections_again(movie_map, show_map)
builder.run_collections_again()
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
@ -245,35 +244,7 @@ def update_libraries(config):
if library.optimize:
library.query(library.PlexServer.library.optimize)
def map_guids(config, library):
movie_map = {}
show_map = {}
length = 0
logger.info(f"Loading {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
items = library.Plex.all()
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
id_type, main_id = config.Convert.get_id(item, library, length)
if main_id:
if not isinstance(main_id, list):
main_id = [main_id]
if id_type == "movie":
for m in main_id:
if m in movie_map: movie_map[m].append(item.ratingKey)
else: movie_map[m] = [item.ratingKey]
elif id_type == "show":
for m in main_id:
if m in show_map: show_map[m].append(item.ratingKey)
else: show_map[m] = [item.ratingKey]
logger.info("")
logger.info(util.adjust_space(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}"))
return movie_map, show_map
def mass_metadata(config, library, movie_map, show_map):
length = 0
def mass_metadata(config, library):
logger.info("")
util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
@ -281,27 +252,21 @@ def mass_metadata(config, library, movie_map, show_map):
sonarr_adds = []
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
util.print_return(f"Processing: {i}/{len(items)} {item.title}")
tmdb_id = None
tvdb_id = None
imdb_id = None
if config.Cache:
t_id, guid_media_type, _ = config.Cache.config.Cache.query_guid_map(item.guid)
t_id, guid_media_type, _ = config.Cache.query_guid_map(item.guid)
if t_id:
if "movie" in guid_media_type:
tmdb_id = t_id
else:
tvdb_id = t_id
if not tmdb_id and not tvdb_id:
for tmdb, rating_keys in movie_map.items():
if item.ratingKey in rating_keys:
tmdb_id = tmdb
break
tmdb_id = library.get_tmdb_from_map(item)
if not tmdb_id and not tvdb_id and library.is_show:
for tvdb, rating_keys in show_map.items():
if item.ratingKey in rating_keys:
tvdb_id = tvdb
break
tvdb_id = library.get_tvdb_from_map(item)
if library.Radarr and library.radarr_add_all and tmdb_id:
radarr_adds.append(tmdb_id)
@ -316,9 +281,9 @@ def mass_metadata(config, library, movie_map, show_map):
try:
tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
except Failed as e:
logger.info(util.adjust_space(length, str(e)))
logger.info(util.adjust_space(str(e)))
else:
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}"))
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}"))
omdb_item = None
if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]:
@ -331,9 +296,9 @@ def mass_metadata(config, library, movie_map, show_map):
try:
omdb_item = config.OMDb.get_omdb(imdb_id)
except Failed as e:
logger.info(util.adjust_space(length, str(e)))
logger.info(util.adjust_space(str(e)))
else:
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}"))
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}"))
if not tmdb_item and not omdb_item:
continue
@ -355,7 +320,7 @@ def mass_metadata(config, library, movie_map, show_map):
library.query_data(item.addGenre, genre)
display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
if len(display_str) > 0:
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | Genres | {display_str}"))
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Genres | {display_str}"))
except Failed:
pass
if library.mass_audience_rating_update or library.mass_critic_rating_update:
@ -367,14 +332,14 @@ def mass_metadata(config, library, movie_map, show_map):
else:
raise Failed
if new_rating is None:
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | No Rating Found"))
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found"))
else:
if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating):
library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | Audience Rating | {new_rating}"))
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Audience Rating | {new_rating}"))
if library.mass_critic_rating_update and str(item.rating) != str(new_rating):
library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
logger.info(util.adjust_space(length, f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))
except Failed:
pass
@ -390,7 +355,7 @@ def mass_metadata(config, library, movie_map, show_map):
except Failed as e:
logger.error(e)
def run_collection(config, library, metadata, requested_collections, movie_map, show_map):
def run_collection(config, library, metadata, requested_collections):
logger.info("")
for mapping_name, collection_attrs in requested_collections.items():
collection_start = datetime.now()
@ -448,6 +413,7 @@ def run_collection(config, library, metadata, requested_collections, movie_map,
util.print_multiline(builder.schedule, info=True)
if len(builder.smart_filter_details) > 0:
logger.info("")
util.print_multiline(builder.smart_filter_details, info=True)
if not builder.smart_url:
@ -459,13 +425,13 @@ def run_collection(config, library, metadata, requested_collections, movie_map,
for filter_key, filter_value in builder.filters:
logger.info(f"Collection Filter {filter_key}: {filter_value}")
builder.collect_rating_keys(movie_map, show_map)
builder.collect_rating_keys()
if len(builder.rating_keys) > 0 and builder.build_collection:
logger.info("")
util.separator(f"Adding to {mapping_name} Collection", space=False, border=False)
logger.info("")
builder.add_to_collection(movie_map)
builder.add_to_collection()
if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0:
logger.info("")
util.separator(f"Missing from Library", space=False, border=False)
@ -480,6 +446,12 @@ def run_collection(config, library, metadata, requested_collections, movie_map,
logger.info("")
builder.update_details()
if len(builder.item_details) > 0:
logger.info("")
util.separator(f"Updating Details of the Items in {mapping_name} Collection", space=False, border=False)
logger.info("")
builder.update_item_details()
if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):
library.run_again.append(builder)
@ -497,7 +469,6 @@ try:
if run or test or collections or libraries or resume:
start(config_file, is_test=test, requested_collections=collections, requested_libraries=libraries, resume_from=resume)
else:
time_length = 0
for time_to_run in times_to_run:
schedule.every().day.at(time_to_run).do(start, config_file, time_scheduled=time_to_run)
while True:
@ -517,7 +488,7 @@ try:
minutes = int((seconds % 3600) // 60)
time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the next run at {og_time_str} {times_to_run}")
util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} {times_to_run}")
time.sleep(60)
except KeyboardInterrupt:
util.separator("Exiting Plex Meta Manager")
