Merge pull request #211 from meisnate12/develop

v1.8.0
meisnate12 committed 4 years ago via GitHub
commit 3550d17622

README.md
@@ -1,5 +1,5 @@
 # Plex Meta Manager
-#### Version 1.7.2
+#### Version 1.8.0
 
 The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to be able to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be continuously run using YAML configuration files to update on a schedule the metadata of the movies, shows, and collections in your libraries as well as automatically build collections based on various methods all detailed in the wiki. Some collection examples that the script can automatically build and update daily include Plex Based Searches like actor, genre, or studio collections or Collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists and various other services.
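
For reference while reading the collection builder changes below, a hedged sketch of the kind of YAML collection definition the script consumes; the collection name and list URL are illustrative, and the attributes shown (imdb_list with url/limit, sync_mode) are the ones handled in this diff:

    collections:
      IMDb Top 250:                       # illustrative collection name
        imdb_list:
          url: https://www.imdb.com/chart/top
          limit: 0                        # 0 = take the whole list
        sync_mode: sync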

config/config.yml.template
@@ -1,16 +1,15 @@
 ## This file is a template remove the .template to use the file
-libraries:
+libraries:                                    # Library mappings must have a colon (:) placed after them
   Movies:
-    library_type: movie
   TV Shows:
-    library_type: show
   Anime:
-    library_type: show
 settings:                                     # Can be individually specified per library as well
   cache: true
   cache_expiration: 60
   asset_directory: config/assets
+  asset_folders: true
+  assets_for_all: false
   sync_mode: append
   show_unmanaged: true
   show_filtered: false

modules/anidb.py
@@ -16,21 +16,9 @@ class AniDBAPI:
             "popular": "https://anidb.net/latest/anime/popular/?h=1",
             "relation": "/relation/graph"
         }
-        self.id_list = html.fromstring(requests.get("https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml").content)
 
-    def convert_anidb_to_tvdb(self, anidb_id): return self.convert_anidb(anidb_id, "anidbid", "tvdbid")
-    def convert_anidb_to_imdb(self, anidb_id): return self.convert_anidb(anidb_id, "anidbid", "imdbid")
-    def convert_tvdb_to_anidb(self, tvdb_id): return self.convert_anidb(tvdb_id, "tvdbid", "anidbid")
-    def convert_imdb_to_anidb(self, imdb_id): return self.convert_anidb(imdb_id, "imdbid", "anidbid")
-    def convert_anidb(self, input_id, from_id, to_id):
-        ids = self.id_list.xpath(f"//anime[contains(@{from_id}, '{input_id}')]/@{to_id}")
-        if len(ids) > 0:
-            if from_id == "tvdbid": return [int(i) for i in ids]
-            if len(ids[0]) > 0:
-                try: return ids[0].split(",") if to_id == "imdbid" else int(ids[0])
-                except ValueError: raise Failed(f"AniDB Error: No {util.pretty_ids[to_id]} ID found for {util.pretty_ids[from_id]} ID: {input_id}")
-            else: raise Failed(f"AniDB Error: No {util.pretty_ids[to_id]} ID found for {util.pretty_ids[from_id]} ID: {input_id}")
-        else: raise Failed(f"AniDB Error: {util.pretty_ids[from_id]} ID: {input_id} not found")
+    def get_AniDB_IDs(self):
+        return html.fromstring(requests.get("https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml").content)
 
     @retry(stop_max_attempt_number=6, wait_fixed=10000)
     def send_request(self, url, language):
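
The conversion helpers removed here were backed by the community anime-lists mapping file; the new ArmsAPI module further down keeps using the same file for its AniDB lookups. A minimal standalone sketch of that lookup, reusing the XML URL and the contains()-based XPath from the diff; the AniDB ID is illustrative:

    import requests
    from lxml import html

    # Mapping file consumed by the old convert_anidb() and the new ArmsAPI._anidb()
    ANIME_LIST_URL = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"
    id_list = html.fromstring(requests.get(ANIME_LIST_URL).content)

    anidb_id = 1  # illustrative AniDB ID
    # <anime> elements carry anidbid/tvdbid/imdbid attributes when a mapping exists
    tvdb_ids = id_list.xpath(f"//anime[contains(@anidbid, '{anidb_id}')]/@tvdbid")
    imdb_ids = id_list.xpath(f"//anime[contains(@anidbid, '{anidb_id}')]/@imdbid")
    print(tvdb_ids[:1], imdb_ids[:1])
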
@@ -66,29 +54,19 @@ class AniDBAPI:
         pretty = util.pretty_names[method] if method in util.pretty_names else method
         if status_message:
             logger.debug(f"Data: {data}")
-        anime_ids = []
+        anidb_ids = []
         if method == "anidb_popular":
             if status_message:
                 logger.info(f"Processing {pretty}: {data} Anime")
-            anime_ids.extend(self.get_popular(language)[:data])
+            anidb_ids.extend(self.get_popular(language)[:data])
         else:
             if status_message: logger.info(f"Processing {pretty}: {data}")
-            if method == "anidb_id": anime_ids.append(data)
-            elif method == "anidb_relation": anime_ids.extend(self.get_anidb_relations(data, language))
+            if method == "anidb_id": anidb_ids.append(data)
+            elif method == "anidb_relation": anidb_ids.extend(self.get_anidb_relations(data, language))
             else: raise Failed(f"AniDB Error: Method {method} not supported")
-        show_ids = []
-        movie_ids = []
-        for anidb_id in anime_ids:
-            try:
-                for imdb_id in self.convert_anidb_to_imdb(anidb_id):
-                    tmdb_id, _ = self.config.convert_from_imdb(imdb_id, language)
-                    if tmdb_id: movie_ids.append(tmdb_id)
-                    else: raise Failed
-            except Failed:
-                try: show_ids.append(self.convert_anidb_to_tvdb(anidb_id))
-                except Failed: logger.error(f"AniDB Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
+        movie_ids, show_ids = self.config.Arms.anidb_to_ids(anidb_ids, language)
         if status_message:
-            logger.debug(f"AniDB IDs Found: {anime_ids}")
+            logger.debug(f"AniDB IDs Found: {anidb_ids}")
             logger.debug(f"TMDb IDs Found: {movie_ids}")
             logger.debug(f"TVDb IDs Found: {show_ids}")
         return movie_ids, show_ids

modules/anilist.py
@@ -52,14 +52,14 @@ class AniListAPI:
         return json_obj
 
     def anilist_id(self, anilist_id):
-        query = "query ($id: Int) {Media(id: $id) {idMal title{romaji english}}}"
+        query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}"
         media = self.send_request(query, {"id": anilist_id})["data"]["Media"]
-        if media["idMal"]:
-            return media["idMal"], media["title"]["english" if media["title"]["english"] else "romaji"]
-        raise Failed(f"AniList Error: No MyAnimeList ID found for {anilist_id}")
+        if media["id"]:
+            return media["id"], media["title"]["english" if media["title"]["english"] else "romaji"]
+        raise Failed(f"AniList Error: No AniList ID found for {anilist_id}")
 
     def get_pagenation(self, query, limit=0, variables=None):
-        mal_ids = []
+        anilist_ids = []
         count = 0
         page_num = 0
         if variables is None:
@@ -71,21 +71,21 @@ class AniListAPI:
             json_obj = self.send_request(query, variables)
             next_page = json_obj["data"]["Page"]["pageInfo"]["hasNextPage"]
             for media in json_obj["data"]["Page"]["media"]:
-                if media["idMal"]:
-                    mal_ids.append(media["idMal"])
+                if media["id"]:
+                    anilist_ids.append(media["id"])
                     count += 1
                     if 0 < limit == count:
                         break
             if 0 < limit == count:
                 break
-        return mal_ids
+        return anilist_ids
 
     def top_rated(self, limit):
         query = """
             query ($page: Int) {
               Page(page: $page) {
                 pageInfo {hasNextPage}
-                media(averageScore_greater: 3, sort: SCORE_DESC, type: ANIME) {idMal}
+                media(averageScore_greater: 3, sort: SCORE_DESC, type: ANIME) {id}
               }
             }
         """
@@ -96,7 +96,7 @@ class AniListAPI:
             query ($page: Int) {
               Page(page: $page) {
                 pageInfo {hasNextPage}
-                media(popularity_greater: 1000, sort: POPULARITY_DESC, type: ANIME) {idMal}
+                media(popularity_greater: 1000, sort: POPULARITY_DESC, type: ANIME) {id}
               }
             }
         """
@@ -107,7 +107,7 @@ class AniListAPI:
             query ($page: Int, $season: MediaSeason, $year: Int, $sort: [MediaSort]) {
               Page(page: $page){
                 pageInfo {hasNextPage}
-                media(season: $season, seasonYear: $year, type: ANIME, sort: $sort){idMal}
+                media(season: $season, seasonYear: $year, type: ANIME, sort: $sort){id}
               }
             }
         """
@@ -119,7 +119,7 @@ class AniListAPI:
             query ($page: Int, $genre: String, $sort: [MediaSort]) {
               Page(page: $page){
                 pageInfo {hasNextPage}
-                media(genre: $genre, sort: $sort){idMal}
+                media(genre: $genre, sort: $sort){id}
               }
             }
         """
@@ -131,7 +131,7 @@ class AniListAPI:
             query ($page: Int, $tag: String, $sort: [MediaSort]) {
               Page(page: $page){
                 pageInfo {hasNextPage}
-                media(tag: $tag, sort: $sort){idMal}
+                media(tag: $tag, sort: $sort){id}
               }
             }
         """
@@ -144,13 +144,13 @@ class AniListAPI:
               Studio(id: $id) {
                 name
                 media(page: $page) {
-                  nodes {idMal type}
+                  nodes {id type}
                   pageInfo {hasNextPage}
                 }
               }
             }
         """
-        mal_ids = []
+        anilist_ids = []
         page_num = 0
         next_page = True
         name = None
@@ -161,43 +161,43 @@ class AniListAPI:
                 name = json_obj["data"]["Studio"]["name"]
             next_page = json_obj["data"]["Studio"]["media"]["pageInfo"]["hasNextPage"]
             for media in json_obj["data"]["Studio"]["media"]["nodes"]:
-                if media["idMal"] and media["type"] == "ANIME":
-                    mal_ids.append(media["idMal"])
-        return mal_ids, name
+                if media["id"] and media["type"] == "ANIME":
+                    anilist_ids.append(media["id"])
+        return anilist_ids, name
 
     def relations(self, anilist_id, ignore_ids=None):
         query = """
             query ($id: Int) {
               Media(id: $id) {
-                idMal
+                id
                 relations {
-                  edges {node{id idMal type} relationType}
-                  nodes {id idMal type}
+                  edges {node{id type} relationType}
+                  nodes {id type}
                 }
               }
             }
         """
+        new_anilist_ids = []
         anilist_ids = []
-        mal_ids = []
         name = ""
         if not ignore_ids:
             ignore_ids = [anilist_id]
-            mal_id, name = self.anilist_id(anilist_id)
-            mal_ids.append(mal_id)
+            anilist_id, name = self.anilist_id(anilist_id)
+            anilist_ids.append(anilist_id)
         json_obj = self.send_request(query, {"id": anilist_id})
         edges = [media["node"]["id"] for media in json_obj["data"]["Media"]["relations"]["edges"]
                  if media["relationType"] not in ["CHARACTER", "OTHER"] and media["node"]["type"] == "ANIME"]
         for media in json_obj["data"]["Media"]["relations"]["nodes"]:
-            if media["idMal"] and media["id"] not in ignore_ids and media["id"] in edges and media["type"] == "ANIME":
-                anilist_ids.append(media["id"])
+            if media["id"] and media["id"] not in ignore_ids and media["id"] in edges and media["type"] == "ANIME":
+                new_anilist_ids.append(media["id"])
                 ignore_ids.append(media["id"])
-                mal_ids.append(media["idMal"])
-        for next_id in anilist_ids:
-            new_mal_ids, ignore_ids, _ = self.relations(next_id, ignore_ids=ignore_ids)
-            mal_ids.extend(new_mal_ids)
-        return mal_ids, ignore_ids, name
+                anilist_ids.append(media["id"])
+        for next_id in new_anilist_ids:
+            new_relation_ids, ignore_ids, _ = self.relations(next_id, ignore_ids=ignore_ids)
+            anilist_ids.extend(new_relation_ids)
+        return anilist_ids, ignore_ids, name
 
     def validate_genre(self, genre):
         if genre.lower() in self.genres:
@@ -213,7 +213,7 @@ class AniListAPI:
         anilist_values = []
         for anilist_id in anilist_ids:
             if studio: query = "query ($id: Int) {Studio(id: $id) {name}}"
-            else: query = "query ($id: Int) {Media(id: $id) {idMal}}"
+            else: query = "query ($id: Int) {Media(id: $id) {id}}"
             try:
                 self.send_request(query, {"id": anilist_id})
                 anilist_values.append(anilist_id)
@@ -222,51 +222,41 @@ class AniListAPI:
             return anilist_values
         raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}")
 
-    def get_items(self, method, data, status_message=True):
+    def get_items(self, method, data, language, status_message=True):
         if status_message:
             logger.debug(f"Data: {data}")
         pretty = util.pretty_names[method] if method in util.pretty_names else method
         if method == "anilist_id":
-            mal_id, name = self.anilist_id(data)
-            mal_ids = [mal_id]
+            anilist_id, name = self.anilist_id(data)
+            anilist_ids = [anilist_id]
             if status_message:
                 logger.info(f"Processing {pretty}: ({data}) {name}")
         elif method in ["anilist_popular", "anilist_top_rated"]:
-            mal_ids = self.popular(data) if method == "anilist_popular" else self.top_rated(data)
+            anilist_ids = self.popular(data) if method == "anilist_popular" else self.top_rated(data)
             if status_message:
                 logger.info(f"Processing {pretty}: {data} Anime")
         elif method == "anilist_season":
-            mal_ids = self.season(data["season"], data["year"], data["sort_by"], data["limit"])
+            anilist_ids = self.season(data["season"], data["year"], data["sort_by"], data["limit"])
             if status_message:
                 logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
         elif method == "anilist_genre":
-            mal_ids = self.genre(data["genre"], data["sort_by"], data["limit"])
+            anilist_ids = self.genre(data["genre"], data["sort_by"], data["limit"])
             if status_message:
                 logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Genre: {data['genre']} sorted by {pretty_names[data['sort_by']]}")
         elif method == "anilist_tag":
-            mal_ids = self.tag(data["tag"], data["sort_by"], data["limit"])
+            anilist_ids = self.tag(data["tag"], data["sort_by"], data["limit"])
             if status_message:
                 logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag: {data['tag']} sorted by {pretty_names[data['sort_by']]}")
         elif method in ["anilist_studio", "anilist_relations"]:
-            if method == "anilist_studio": mal_ids, name = self.studio(data)
-            else: mal_ids, _, name = self.relations(data)
+            if method == "anilist_studio": anilist_ids, name = self.studio(data)
+            else: anilist_ids, _, name = self.relations(data)
             if status_message:
-                logger.info(f"Processing {pretty}: ({data}) {name} ({len(mal_ids)} Anime)")
+                logger.info(f"Processing {pretty}: ({data}) {name} ({len(anilist_ids)} Anime)")
         else:
             raise Failed(f"AniList Error: Method {method} not supported")
-        show_ids = []
-        movie_ids = []
-        for mal_id in mal_ids:
-            try:
-                ids = self.config.MyAnimeListIDList.find_mal_ids(mal_id)
-                if "thetvdb_id" in ids and int(ids["thetvdb_id"]) > 0: show_ids.append(int(ids["thetvdb_id"]))
-                elif "themoviedb_id" in ids and int(ids["themoviedb_id"]) > 0: movie_ids.append(int(ids["themoviedb_id"]))
-                else: raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} has no other IDs associated with it")
-            except Failed as e:
-                if status_message:
-                    logger.error(e)
+        movie_ids, show_ids = self.config.Arms.anilist_to_ids(anilist_ids, language)
         if status_message:
-            logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
+            logger.debug(f"AniList IDs Found: {anilist_ids}")
             logger.debug(f"Shows Found: {show_ids}")
             logger.debug(f"Movies Found: {movie_ids}")
         return movie_ids, show_ids
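
Every AniList query above now selects AniList's native id instead of idMal, so the class works purely in AniList IDs and hands cross-service conversion to config.Arms. A standalone sketch of the anilist_id query as written in the diff, sent to AniList's public GraphQL endpoint; the media ID is illustrative:

    import requests

    query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}"
    response = requests.post("https://graphql.anilist.co",
                             json={"query": query, "variables": {"id": 21}})  # 21 is an illustrative AniList ID
    media = response.json()["data"]["Media"]
    title = media["title"]["english"] if media["title"]["english"] else media["title"]["romaji"]
    print(media["id"], title)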

modules/arms.py
@@ -0,0 +1,161 @@
+import logging, requests
+from lxml import html
+from modules import util
+from modules.util import Failed
+from retrying import retry
+
+logger = logging.getLogger("Plex Meta Manager")
+
+class ArmsAPI:
+    def __init__(self, config):
+        self.config = config
+        self.arms_url = "https://relations.yuna.moe/api/ids"
+        self.anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"
+        self.AniDBIDs = self._get_anidb()
+
+    @retry(stop_max_attempt_number=6, wait_fixed=10000)
+    def _get_anidb(self):
+        return html.fromstring(requests.get(self.anidb_url).content)
+
+    def anidb_to_tvdb(self, anidb_id): return self._anidb(anidb_id, "tvdbid")
+
+    def anidb_to_imdb(self, anidb_id): return self._anidb(anidb_id, "imdbid")
+
+    def _anidb(self, input_id, to_id):
+        ids = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{input_id}')]/@{to_id}")
+        if len(ids) > 0:
+            try:
+                if len(ids[0]) > 0:
+                    return ids[0].split(",") if to_id == "imdbid" else int(ids[0])
+                raise ValueError
+            except ValueError:
+                raise Failed(f"Arms Error: No {util.pretty_ids[to_id]} ID found for AniDB ID: {input_id}")
+        else:
+            raise Failed(f"Arms Error: AniDB ID: {input_id} not found")
+
+    @retry(stop_max_attempt_number=6, wait_fixed=10000)
+    def _request(self, ids):
+        return requests.post(self.arms_url, json=ids).json()
+
+    def mal_to_anidb(self, mal_id):
+        anime_ids = self._arms_ids(mal_ids=mal_id)
+        if anime_ids[0] is None:
+            raise Failed(f"Arms Error: MyAnimeList ID: {mal_id} does not exist")
+        if anime_ids[0]["anidb"] is None:
+            raise Failed(f"Arms Error: No AniDB ID for MyAnimeList ID: {mal_id}")
+        return anime_ids[0]["anidb"]
+
+    def anidb_to_ids(self, anidb_list, language):
+        show_ids = []
+        movie_ids = []
+        for anidb_id in anidb_list:
+            try:
+                for imdb_id in self.anidb_to_imdb(anidb_id):
+                    tmdb_id, _ = self.imdb_to_ids(imdb_id, language)
+                    if tmdb_id:
+                        movie_ids.append(tmdb_id)
+                        break
+                    else:
+                        raise Failed
+            except Failed:
+                try:
+                    tvdb_id = self.anidb_to_tvdb(anidb_id)
+                    if tvdb_id:
+                        show_ids.append(tvdb_id)
+                except Failed:
+                    logger.error(f"Arms Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
+        return movie_ids, show_ids
+
+    def anilist_to_ids(self, anilist_ids, language):
+        anidb_ids = []
+        for id_set in self._arms_ids(anilist_ids=anilist_ids):
+            if id_set["anidb"] is not None:
+                anidb_ids.append(id_set["anidb"])
+            else:
+                logger.error(f"Arms Error: AniDB ID not found for AniList ID: {id_set['anilist']}")
+        return self.anidb_to_ids(anidb_ids, language)
+
+    def myanimelist_to_ids(self, mal_ids, language):
+        anidb_ids = []
+        for id_set in self._arms_ids(mal_ids=mal_ids):
+            if id_set["anidb"] is not None:
+                anidb_ids.append(id_set["anidb"])
+            else:
+                logger.error(f"Arms Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}")
+        return self.anidb_to_ids(anidb_ids, language)
+
+    def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None):
+        all_ids = []
+        def collect_ids(ids, id_name):
+            if ids:
+                if isinstance(ids, list):
+                    all_ids.extend([{id_name: a_id} for a_id in ids])
+                else:
+                    all_ids.append({id_name: ids})
+        collect_ids(anilist_ids, "anilist")
+        collect_ids(anidb_ids, "anidb")
+        collect_ids(mal_ids, "myanimelist")
+        converted_ids = []
+        if self.config.Cache:
+            unconverted_ids = []
+            for anime_dict in all_ids:
+                for id_type, anime_id in anime_dict.items():
+                    query_ids, update = self.config.Cache.query_anime_map(anime_id, id_type)
+                    if not update and query_ids:
+                        converted_ids.append(query_ids)
+                    else:
+                        unconverted_ids.append({id_type: anime_id})
+        else:
+            unconverted_ids = all_ids
+        for anime_ids in self._request(unconverted_ids):
+            if anime_ids:
+                if self.config.Cache:
+                    self.config.Cache.update_anime(False, anime_ids)
+                converted_ids.append(anime_ids)
+        return converted_ids
+
+    def imdb_to_ids(self, imdb_id, language):
+        update_tmdb = False
+        update_tvdb = False
+        if self.config.Cache:
+            tmdb_id, tvdb_id = self.config.Cache.get_ids_from_imdb(imdb_id)
+            update_tmdb = False
+            if not tmdb_id:
+                tmdb_id, update_tmdb = self.config.Cache.get_tmdb_from_imdb(imdb_id)
+                if update_tmdb:
+                    tmdb_id = None
+            update_tvdb = False
+            if not tvdb_id:
+                tvdb_id, update_tvdb = self.config.Cache.get_tvdb_from_imdb(imdb_id)
+                if update_tvdb:
+                    tvdb_id = None
+        else:
+            tmdb_id = None
+            tvdb_id = None
+        from_cache = tmdb_id is not None or tvdb_id is not None
+        if not tmdb_id and not tvdb_id and self.config.TMDb:
+            try: tmdb_id = self.config.TMDb.convert_imdb_to_tmdb(imdb_id)
+            except Failed: pass
+        if not tmdb_id and not tvdb_id and self.config.TMDb:
+            try: tvdb_id = self.config.TMDb.convert_imdb_to_tvdb(imdb_id)
+            except Failed: pass
+        if not tmdb_id and not tvdb_id and self.config.Trakt:
+            try: tmdb_id = self.config.Trakt.convert_imdb_to_tmdb(imdb_id)
+            except Failed: pass
+        if not tmdb_id and not tvdb_id and self.config.Trakt:
+            try: tvdb_id = self.config.Trakt.convert_imdb_to_tvdb(imdb_id)
+            except Failed: pass
+        if tmdb_id and not from_cache:
+            try: self.config.TMDb.get_movie(tmdb_id)
+            except Failed: tmdb_id = None
+        if tvdb_id and not from_cache:
+            try: self.config.TVDb.get_series(language, tvdb_id)
+            except Failed: tvdb_id = None
+        if not tmdb_id and not tvdb_id:
+            raise Failed(f"Arms Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
+        if self.config.Cache:
+            if tmdb_id and update_tmdb is not False:
+                self.config.Cache.update_imdb("movie", update_tmdb, imdb_id, tmdb_id)
+            if tvdb_id and update_tvdb is not False:
+                self.config.Cache.update_imdb("show", update_tvdb, imdb_id, tvdb_id)
+        return tmdb_id, tvdb_id
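
ArmsAPI resolves AniList, MyAnimeList, Kitsu, and AniDB IDs against the relations service at https://relations.yuna.moe/api/ids, then maps AniDB IDs on to IMDb/TVDb via the anime-lists file and the existing TMDb/Trakt converters, caching results when a cache is configured. A minimal standalone sketch of the POST that _request() wraps; the IDs are illustrative and the fields read back (anilist, anidb, myanimelist, kitsu) are the ones the module uses:

    import requests

    # Same payload shape as ArmsAPI._request(): a JSON list of {"source": id} dicts
    ids = [{"anilist": 21}, {"myanimelist": 1}]  # illustrative IDs
    id_sets = requests.post("https://relations.yuna.moe/api/ids", json=ids).json()
    for id_set in id_sets:
        if id_set:  # the service returns null for IDs it does not know
            print(id_set.get("anilist"), id_set.get("anidb"), id_set.get("myanimelist"), id_set.get("kitsu"))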

modules/builder.py
@@ -8,6 +8,8 @@ from plexapi.exceptions import BadRequest, NotFound
 logger = logging.getLogger("Plex Meta Manager")
 
 image_file_details = ["file_poster", "file_background", "asset_directory"]
+advance_new_agent = ["item_metadata_language", "item_use_original_title"]
+advance_show = ["item_episode_sorting", "item_keep_episodes", "item_delete_episodes", "item_season_display", "item_episode_sorting"]
 method_alias = {
     "actors": "actor", "role": "actor", "roles": "actor",
     "content_ratings": "content_rating", "contentRating": "content_rating", "contentRatings": "content_rating",
@@ -123,6 +125,7 @@ all_filters = [
     "tmdb_vote_count.gte", "tmdb_vote_count.lte",
     "duration.gte", "duration.lte",
     "original_language", "original_language.not",
+    "user_rating.gte", "user_rating.lte",
     "audience_rating.gte", "audience_rating.lte",
     "critic_rating.gte", "critic_rating.lte",
     "studio", "studio.not",
@@ -154,6 +157,7 @@ class CollectionBuilder:
             "show_missing": library.show_missing,
             "save_missing": library.save_missing
         }
+        self.item_details = {}
         self.radarr_options = {}
         self.sonarr_options = {}
         self.missing_movies = []
@@ -443,30 +447,40 @@ class CollectionBuilder:
             elif method_name == "file_background":
                 if os.path.exists(method_data): self.backgrounds[method_name] = os.path.abspath(method_data)
                 else: raise Failed(f"Collection Error: Background Path Does Not Exist: {os.path.abspath(method_data)}")
-            elif method_name == "label_sync_mode":
-                if str(method_data).lower() in ["append", "sync"]: self.details[method_name] = method_data.lower()
-                else: raise Failed("Collection Error: label_sync_mode attribute must be either 'append' or 'sync'")
             elif method_name == "sync_mode":
                 if str(method_data).lower() in ["append", "sync"]: self.details[method_name] = method_data.lower()
                 else: raise Failed("Collection Error: sync_mode attribute must be either 'append' or 'sync'")
-            elif method_name == "label":
-                self.details[method_name] = util.get_list(method_data)
+            elif method_name in ["label", "label.sync"]:
+                if "label" in self.data and "label.sync" in self.data:
+                    raise Failed(f"Collection Error: Cannot use label and label.sync together")
+                if method_name == "label" and "label_sync_mode" in self.data and self.data["label_sync_mode"] == "sync":
+                    self.details["label.sync"] = util.get_list(method_data)
+                else:
+                    self.details[method_name] = util.get_list(method_data)
+            elif method_name in ["item_label", "item_label.sync"]:
+                if "item_label" in self.data and "item_label.sync" in self.data:
+                    raise Failed(f"Collection Error: Cannot use item_label and item_label.sync together")
+                self.item_details[method_name] = util.get_list(method_data)
+            elif method_name in plex.item_advance_keys:
+                key, options = plex.item_advance_keys[method_name]
+                if method_name in advance_new_agent and self.library.agent not in plex.new_plex_agents:
+                    logger.error(f"Metadata Error: {method_name} attribute only works for with the New Plex Movie Agent and New Plex TV Agent")
+                elif method_name in advance_show and not self.library.is_show:
+                    logger.error(f"Metadata Error: {method_name} attribute only works for show libraries")
+                elif str(method_data).lower() not in options:
+                    logger.error(f"Metadata Error: {method_data} {method_name} attribute invalid")
+                else:
+                    self.item_details[method_name] = str(method_data).lower()
             elif method_name in boolean_details:
-                if isinstance(method_data, bool): self.details[method_name] = method_data
-                elif str(method_data).lower() in ["t", "true"]: self.details[method_name] = True
-                elif str(method_data).lower() in ["f", "false"]: self.details[method_name] = False
-                else: raise Failed(f"Collection Error: {method_name} attribute must be either true or false")
+                self.details[method_name] = util.get_bool(method_name, method_data)
             elif method_name in all_details:
                 self.details[method_name] = method_data
             elif method_name == "radarr_add":
-                self.add_to_radarr = True
+                self.add_to_radarr = util.get_bool(method_name, method_data)
             elif method_name == "radarr_folder":
                 self.radarr_options["folder"] = method_data
             elif method_name in ["radarr_monitor", "radarr_search"]:
-                if isinstance(method_data, bool): self.radarr_options[method_name[7:]] = method_data
-                elif str(method_data).lower() in ["t", "true"]: self.radarr_options[method_name[7:]] = True
-                elif str(method_data).lower() in ["f", "false"]: self.radarr_options[method_name[7:]] = False
-                else: raise Failed(f"Collection Error: {method_name} attribute must be either true or false")
+                self.radarr_options[method_name[7:]] = util.get_bool(method_name, method_data)
             elif method_name == "radarr_availability":
                 if str(method_data).lower() in radarr.availability_translation:
                     self.radarr_options["availability"] = str(method_data).lower()
@@ -478,7 +492,7 @@ class CollectionBuilder:
             elif method_name == "radarr_tag":
                 self.radarr_options["tag"] = util.get_list(method_data)
             elif method_name == "sonarr_add":
-                self.add_to_sonarr = True
+                self.add_to_sonarr = util.get_bool(method_name, method_data)
             elif method_name == "sonarr_folder":
                 self.sonarr_options["folder"] = method_data
             elif method_name == "sonarr_monitor":
@@ -498,10 +512,7 @@ class CollectionBuilder:
                 else:
                     raise Failed(f"Collection Error: {method_name} attribute must be either standard, daily, or anime")
             elif method_name in ["sonarr_season", "sonarr_search", "sonarr_cutoff_search"]:
-                if isinstance(method_data, bool): self.sonarr_options[method_name[7:]] = method_data
-                elif str(method_data).lower() in ["t", "true"]: self.sonarr_options[method_name[7:]] = True
-                elif str(method_data).lower() in ["f", "false"]: self.sonarr_options[method_name[7:]] = False
-                else: raise Failed(f"Collection Error: {method_name} attribute must be either true or false")
+                self.sonarr_options[method_name[7:]] = util.get_bool(method_name, method_data)
             elif method_name == "sonarr_tag":
                 self.sonarr_options["tag"] = util.get_list(method_data)
             elif method_name in ["title", "title.and", "title.not", "title.begins", "title.ends"]:
@@ -512,7 +523,7 @@ class CollectionBuilder:
                 self.methods.append(("plex_search", [{method_name: util.check_date(method_data, method_name, return_string=True, plex_date=True)}]))
             elif method_name in ["added", "added.not", "originally_available", "originally_available.not", "duration.greater", "duration.less"]:
                 self.methods.append(("plex_search", [{method_name: util.check_number(method_data, method_name, minimum=1)}]))
-            elif method_name in ["critic_rating.greater", "critic_rating.less", "audience_rating.greater", "audience_rating.less"]:
+            elif method_name in ["user_rating.greater", "user_rating.less", "critic_rating.greater", "critic_rating.less", "audience_rating.greater", "audience_rating.less"]:
                 self.methods.append(("plex_search", [{method_name: util.check_number(method_data, method_name, number_type="float", minimum=0, maximum=10)}]))
             elif method_name in ["decade", "year", "year.not"]:
                 self.methods.append(("plex_search", [{method_name: util.get_year_list(method_data, current_year, method_name)}]))
@@ -529,7 +540,7 @@ class CollectionBuilder:
                 final_values = method_data
                 search = os.path.splitext(method_name)[0]
                 valid_values = self.library.validate_search_list(final_values, search)
-                if valid_values:
+                if len(valid_values) > 0:
                     self.methods.append(("plex_search", [{method_name: valid_values}]))
                 else:
                     logger.warning(f"Collection Warning: No valid {search} values found in {final_values}")
@@ -566,7 +577,7 @@ class CollectionBuilder:
                 if isinstance(imdb_list, dict):
                     dict_methods = {dm.lower(): dm for dm in imdb_list}
                     if "url" in dict_methods and imdb_list[dict_methods["url"]]:
-                        imdb_url = config.IMDb.validate_imdb_url(imdb_list[dict_methods["url"]])
+                        imdb_url = config.IMDb.validate_imdb_url(imdb_list[dict_methods["url"]], self.library.Plex.language)
                    else:
                        raise Failed("Collection Error: imdb_list attribute url is required")
                    if "limit" in dict_methods and imdb_list[dict_methods["limit"]]:
@@ -574,7 +585,7 @@ class CollectionBuilder:
                    else:
                        list_count = 0
                else:
-                    imdb_url = config.IMDb.validate_imdb_url(str(imdb_list))
+                    imdb_url = config.IMDb.validate_imdb_url(str(imdb_list), self.library.Plex.language)
                    list_count = 0
                new_list.append({"url": imdb_url, "limit": list_count})
            self.methods.append((method_name, new_list))
@@ -619,7 +630,7 @@ class CollectionBuilder:
                     valid_data = util.check_number(filter_data, f"{filter_method} filter", minimum=1)
                 elif filter_method in ["year.gte", "year.lte"]:
                     valid_data = util.check_year(filter_data, current_year, f"{filter_method} filter")
-                elif filter_method in ["audience_rating.gte", "audience_rating.lte", "critic_rating.gte", "critic_rating.lte"]:
+                elif filter_method in ["user_rating.gte", "user_rating.lte", "audience_rating.gte", "audience_rating.lte", "critic_rating.gte", "critic_rating.lte"]:
                     valid_data = util.check_number(filter_data, f"{filter_method} filter", number_type="float", minimum=0.1, maximum=10)
                 elif filter_method in ["originally_available.gte", "originally_available.lte"]:
                     valid_data = util.check_date(filter_data, f"{filter_method} filter")
@@ -704,7 +715,7 @@ class CollectionBuilder:
                     searches[search_final] = util.check_date(search_data, search_final, return_string=True, plex_date=True)
                 elif (search in ["added", "originally_available"] and modifier in ["", ".not"]) or (search in ["duration"] and modifier in [".greater", ".less"]):
                     searches[search_final] = util.check_number(search_data, search_final, minimum=1)
-                elif search in ["critic_rating", "audience_rating"] and modifier in [".greater", ".less"]:
+                elif search in ["user_rating", "critic_rating", "audience_rating"] and modifier in [".greater", ".less"]:
                     searches[search_final] = util.check_number(search_data, search_final, number_type="float", minimum=0, maximum=10)
                 elif (search == "decade" and modifier in [""]) or (search == "year" and modifier in ["", ".not"]):
                     searches[search_final] = util.get_year_list(search_data, current_year, search_final)
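
user_rating is now accepted alongside critic_rating and audience_rating both as a filter (user_rating.gte / user_rating.lte) and as a Plex search attribute (user_rating.greater / user_rating.less). A hedged YAML sketch using only attribute names from this diff; the collection name and values are illustrative:

    collections:
      Highly Rated Sci-Fi:              # illustrative collection name
        plex_search:
          genre: Science Fiction
          user_rating.greater: 8
        filters:
          user_rating.gte: 8
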
@@ -712,12 +723,15 @@ class CollectionBuilder:
                         or (search in ["actor", "audio_language", "collection", "content_rating", "country", "director", "genre", "label", "network", "producer", "subtitle_language", "writer"] and modifier not in ["", ".and", ".not"]) \
                         or (search in ["resolution", "decade"] and modifier not in [""]) \
                         or (search in ["added", "originally_available"] and modifier not in ["", ".not", ".before", ".after"]) \
-                        or (search in ["duration", "critic_rating", "audience_rating"] and modifier not in [".greater", ".less"]) \
+                        or (search in ["duration", "user_rating", "critic_rating", "audience_rating"] and modifier not in [".greater", ".less"]) \
                         or (search in ["year"] and modifier not in ["", ".not", ".greater", ".less"]):
                     raise Failed(f"Collection Error: modifier: {modifier} not supported with the {search} plex search attribute")
                 else:
                     raise Failed(f"Collection Error: {search_final} plex search attribute not supported")
-            self.methods.append((method_name, [searches]))
+            if len(searches) > 0:
+                self.methods.append((method_name, [searches]))
+            else:
+                raise Failed("Collection Error: no valid plex search attributes")
         elif method_name == "tmdb_discover":
             new_dictionary = {"limit": 100}
             for discover_name, discover_data in method_data.items():
@@ -995,14 +1009,16 @@ class CollectionBuilder:
                 items_found_inside += len(movie_ids)
                 for movie_id in movie_ids:
                     if movie_id in movie_map:
-                        items.append(movie_map[movie_id])
+                        items.extend(movie_map[movie_id])
                     else:
                         missing_movies.append(movie_id)
             if len(show_ids) > 0:
                 items_found_inside += len(show_ids)
                 for show_id in show_ids:
-                    if show_id in show_map: items.append(show_map[show_id])
-                    else: missing_shows.append(show_id)
+                    if show_id in show_map:
+                        items.extend(show_map[show_id])
+                    else:
+                        missing_shows.append(show_id)
             return items_found_inside
         logger.info("")
         logger.debug(f"Value: {value}")
@@ -1013,8 +1029,8 @@ class CollectionBuilder:
             items = self.library.Tautulli.get_items(self.library, time_range=value["list_days"], stats_count=value["list_size"], list_type=value["list_type"], stats_count_buffer=value["list_buffer"])
             items_found += len(items)
         elif "anidb" in method: items_found += check_map(self.config.AniDB.get_items(method, value, self.library.Plex.language))
-        elif "anilist" in method: items_found += check_map(self.config.AniList.get_items(method, value))
-        elif "mal" in method: items_found += check_map(self.config.MyAnimeList.get_items(method, value))
+        elif "anilist" in method: items_found += check_map(self.config.AniList.get_items(method, value, self.library.Plex.language))
+        elif "mal" in method: items_found += check_map(self.config.MyAnimeList.get_items(method, value, self.library.Plex.language))
         elif "tvdb" in method: items_found += check_map(self.config.TVDb.get_items(method, value, self.library.Plex.language))
         elif "imdb" in method: items_found += check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language))
         elif "letterboxd" in method: items_found += check_map(self.config.Letterboxd.get_items(method, value, self.library.Plex.language))
@@ -1154,10 +1170,10 @@ class CollectionBuilder:
             collection.sortUpdate(sort=self.details["collection_order"])
             logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}")
-        if "label" in self.details:
+        if "label" in self.details or "label.sync" in self.details:
             item_labels = [label.tag for label in collection.labels]
-            labels = util.get_list(self.details["label"])
-            if "label_sync_mode" in self.details and str(self.details["label_sync_mode"]).lower() == "sync":
+            labels = util.get_list(self.details["label" if "label" in self.details else "label.sync"])
+            if "label.sync" in self.details:
                 for label in (la for la in item_labels if la not in labels):
                     collection.removeLabel(label)
                     logger.info(f"Detail: Label {label} removed")
@@ -1165,6 +1181,28 @@ class CollectionBuilder:
                 collection.addLabel(label)
                 logger.info(f"Detail: Label {label} added")
 
+        if len(self.item_details) > 0:
+            labels = None
+            if "item_label" in self.item_details or "item_label.sync" in self.item_details:
+                labels = util.get_list(self.item_details["item_label" if "item_label" in self.item_details else "item_label.sync"])
+            for item in collection.items():
+                if labels is not None:
+                    item_labels = [label.tag for label in item.labels]
+                    if "item_label.sync" in self.item_details:
+                        for label in (la for la in item_labels if la not in labels):
+                            item.removeLabel(label)
+                            logger.info(f"Detail: Label {label} removed from {item.title}")
+                    for label in (la for la in labels if la not in item_labels):
+                        item.addLabel(label)
+                        logger.info(f"Detail: Label {label} added to {item.title}")
+                advance_edits = {}
+                for method_name, method_data in self.item_details.items():
+                    if method_name in plex.item_advance_keys:
+                        key, options = plex.item_advance_keys[method_name]
+                        if getattr(item, key) != options[method_data]:
+                            advance_edits[key] = options[method_data]
+                self.library.edit_item(item, item.title, "Movie" if self.library.is_movie else "Show", advance_edits, advanced=True)
+
         if len(edits) > 0:
             logger.debug(edits)
             collection.edit(**edits)
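
label.sync replaces the old label + label_sync_mode pair, and the new item_label / item_label.sync and item_* advanced settings collected in item_details are applied to every item in the collection after it is built (valid values for the item_* attributes come from plex.item_advance_keys, which is not part of this diff). A hedged YAML sketch using attribute names from this diff; the collection name and label values are illustrative:

    collections:
      Anime Movies:                # illustrative collection name
        anilist_top_rated: 30
        label.sync: Anime          # collection labels are synced to exactly this list
        item_label: Anime          # label added to every item inside the collection
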
@@ -1180,40 +1218,49 @@ class CollectionBuilder:
             path = os.path.join(ad, f"{name_mapping}")
             if not os.path.isdir(path):
                 continue
-            matches = glob.glob(os.path.join(ad, f"{name_mapping}", "poster.*"))
-            if len(matches) > 0:
-                for match in matches:
-                    self.posters["asset_directory"] = os.path.abspath(match)
-            matches = glob.glob(os.path.join(ad, f"{name_mapping}", "background.*"))
-            if len(matches) > 0:
-                for match in matches:
-                    self.backgrounds["asset_directory"] = os.path.abspath(match)
-            dirs = [folder for folder in os.listdir(path) if os.path.isdir(os.path.join(path, folder))]
-            if len(dirs) > 0:
-                for item in collection.items():
-                    folder = os.path.basename(os.path.dirname(item.locations[0]) if self.library.is_movie else item.locations[0])
-                    if folder in dirs:
-                        matches = glob.glob(os.path.join(path, folder, "poster.*"))
-                        poster_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
-                        matches = glob.glob(os.path.join(path, folder, "background.*"))
-                        background_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
-                        if poster_path:
-                            item.uploadPoster(filepath=poster_path)
-                            logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {poster_path}")
-                        if background_path:
-                            item.uploadArt(filepath=background_path)
-                            logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {background_path}")
-                        if poster_path is None and background_path is None:
-                            logger.warning(f"No Files Found: {os.path.join(path, folder)}")
-                        if self.library.is_show:
-                            for season in item.seasons():
-                                matches = glob.glob(os.path.join(path, folder, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*"))
-                                if len(matches) > 0:
-                                    season_path = os.path.abspath(matches[0])
-                                    season.uploadPoster(filepath=season_path)
-                                    logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}")
-                    else:
-                        logger.warning(f"No Folder: {os.path.join(path, folder)}")
+            if self.library.asset_folders:
+                poster_path = os.path.join(ad, f"{name_mapping}", "poster.*")
+            else:
+                poster_path = os.path.join(ad, f"{name_mapping}.*")
+            matches = glob.glob(poster_path)
+            if len(matches) > 0:
+                self.posters["asset_directory"] = os.path.abspath(matches[0])
+            if self.library.asset_folders:
+                matches = glob.glob(os.path.join(ad, f"{name_mapping}", "background.*"))
+                if len(matches) > 0:
+                    self.backgrounds["asset_directory"] = os.path.abspath(matches[0])
+                dirs = [folder for folder in os.listdir(path) if os.path.isdir(os.path.join(path, folder))]
+                if len(dirs) > 0:
+                    for item in collection.items():
+                        folder = os.path.basename(os.path.dirname(item.locations[0]) if self.library.is_movie else item.locations[0])
+                        if folder in dirs:
+                            matches = glob.glob(os.path.join(path, folder, "poster.*"))
+                            poster_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
+                            matches = glob.glob(os.path.join(path, folder, "background.*"))
+                            background_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
+                            if poster_path:
+                                item.uploadPoster(filepath=poster_path)
+                                logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {poster_path}")
+                            if background_path:
+                                item.uploadArt(filepath=background_path)
+                                logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {background_path}")
+                            if poster_path is None and background_path is None:
+                                logger.warning(f"No Files Found: {os.path.join(path, folder)}")
+                            if self.library.is_show:
+                                for season in item.seasons():
+                                    matches = glob.glob(os.path.join(path, folder, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*"))
+                                    if len(matches) > 0:
+                                        season_path = os.path.abspath(matches[0])
+                                        season.uploadPoster(filepath=season_path)
+                                        logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}")
+                                    for episode in season.episodes():
+                                        matches = glob.glob(os.path.join(path, folder, f"{episode.seasonEpisode.upper()}.*"))
+                                        if len(matches) > 0:
+                                            episode_path = os.path.abspath(matches[0])
+                                            episode.uploadPoster(filepath=episode_path)
+                                            logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}")
+                        else:
+                            logger.warning(f"No Folder: {os.path.join(path, folder)}")
 
     def set_image(image_method, images, is_background=False):
         message = f"{'background' if is_background else 'poster'} to [{'File' if image_method in image_file_details else 'URL'}] {images[image_method]}"
@@ -1280,10 +1327,14 @@ class CollectionBuilder:
     def run_collections_again(self, collection_obj, movie_map, show_map):
         collection_items = collection_obj.items() if isinstance(collection_obj, Collections) else []
         name = collection_obj.title if isinstance(collection_obj, Collections) else collection_obj
-        rating_keys = [movie_map[mm] for mm in self.missing_movies if mm in movie_map]
+        rating_keys = []
+        for mm in self.missing_movies:
+            if mm in movie_map:
+                rating_keys.extend(movie_map[mm])
         if self.library.is_show:
-            rating_keys.extend([show_map[sm] for sm in self.missing_shows if sm in show_map])
+            for sm in self.missing_shows:
+                if sm in show_map:
+                    rating_keys.extend(show_map[sm])
         if len(rating_keys) > 0:
             for rating_key in rating_keys:
                 try:

modules/cache.py
@@ -24,7 +24,6 @@ class Cache:
                     imdb_id TEXT,
                     tvdb_id TEXT,
                     anidb_id TEXT,
-                    mal_id TEXT,
                     expiration_date TEXT,
                     media_type TEXT)"""
                 )
@@ -57,6 +56,15 @@ class Cache:
                     type TEXT,
                     expiration_date TEXT)"""
                 )
+                cursor.execute(
+                    """CREATE TABLE IF NOT EXISTS anime_map (
+                    INTEGER PRIMARY KEY,
+                    anidb TEXT UNIQUE,
+                    anilist TEXT,
+                    myanimelist TEXT,
+                    kitsu TEXT,
+                    expiration_date TEXT)"""
+                )
         self.expiration = expiration
         self.cache_path = cache
@@ -65,28 +73,24 @@ class Cache:
             tvdb_id, tvdb_expired = self.get_tvdb_id("show", imdb_id=imdb_id)
         return tmdb_id, tvdb_id
 
-    def get_tmdb_id(self, media_type, plex_guid=None, imdb_id=None, tvdb_id=None, anidb_id=None, mal_id=None):
-        return self.get_id_from(media_type, "tmdb_id", plex_guid=plex_guid, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id, mal_id=mal_id)
+    def get_tmdb_id(self, media_type, plex_guid=None, imdb_id=None, tvdb_id=None, anidb_id=None):
+        return self.get_id_from(media_type, "tmdb_id", plex_guid=plex_guid, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id)
 
-    def get_imdb_id(self, media_type, plex_guid=None, tmdb_id=None, tvdb_id=None, anidb_id=None, mal_id=None):
-        return self.get_id_from(media_type, "imdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id, mal_id=mal_id)
+    def get_imdb_id(self, media_type, plex_guid=None, tmdb_id=None, tvdb_id=None, anidb_id=None):
+        return self.get_id_from(media_type, "imdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id)
 
-    def get_tvdb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, anidb_id=None, mal_id=None):
-        return self.get_id_from(media_type, "tvdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, anidb_id=anidb_id, mal_id=mal_id)
+    def get_tvdb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, anidb_id=None):
+        return self.get_id_from(media_type, "tvdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, anidb_id=anidb_id)
 
-    def get_anidb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, mal_id=None):
-        return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id, mal_id=mal_id)
-
-    def get_mal_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None):
-        return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id)
+    def get_anidb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None):
+        return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id)
 
-    def get_id_from(self, media_type, id_from, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None, mal_id=None):
+    def get_id_from(self, media_type, id_from, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None):
         if plex_guid: return self.get_id(media_type, "plex_guid", id_from, plex_guid)
         elif tmdb_id: return self.get_id(media_type, "tmdb_id", id_from, tmdb_id)
         elif imdb_id: return self.get_id(media_type, "imdb_id", id_from, imdb_id)
         elif tvdb_id: return self.get_id(media_type, "tvdb_id", id_from, tvdb_id)
         elif anidb_id: return self.get_id(media_type, "anidb_id", id_from, anidb_id)
-        elif mal_id: return self.get_id(media_type, "mal_id", id_from, mal_id)
         else: return None, None
 
     def get_id(self, media_type, from_id, to_id, key):
@@ -132,13 +136,12 @@ class Cache:
                     if row["imdb_id"]: ids_to_return["imdb"] = row["imdb_id"]
                     if row["tvdb_id"]: ids_to_return["tvdb"] = int(row["tvdb_id"])
                     if row["anidb_id"]: ids_to_return["anidb"] = int(row["anidb_id"])
-                    if row["mal_id"]: ids_to_return["mal"] = int(row["mal_id"])
                     datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
                     time_between_insertion = datetime.now() - datetime_object
                     expired = time_between_insertion.days > self.expiration
         return ids_to_return, expired
 
-    def update_guid(self, media_type, plex_guid, tmdb_id, imdb_id, tvdb_id, anidb_id, mal_id, expired):
+    def update_guid(self, media_type, plex_guid, tmdb_id, imdb_id, tvdb_id, anidb_id, expired):
         expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
@@ -150,10 +153,9 @@ class Cache:
                     imdb_id = ?,
                     tvdb_id = ?,
                     anidb_id = ?,
-                    mal_id = ?,
                     expiration_date = ?,
                     media_type = ?
-                    WHERE plex_guid = ?""", (tmdb_id, imdb_id, tvdb_id, anidb_id, mal_id, expiration_date.strftime("%Y-%m-%d"), media_type, plex_guid))
+                    WHERE plex_guid = ?""", (tmdb_id, imdb_id, tvdb_id, anidb_id, expiration_date.strftime("%Y-%m-%d"), media_type, plex_guid))
                 if imdb_id and (tmdb_id or tvdb_id):
                     cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,))
                     cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? WHERE imdb_id = ?", (tmdb_id if media_type == "movie" else tvdb_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id))
@ -237,3 +239,31 @@ class Cache:
cursor.execute("INSERT OR IGNORE INTO omdb_data(imdb_id) VALUES(?)", (omdb.imdb_id,)) cursor.execute("INSERT OR IGNORE INTO omdb_data(imdb_id) VALUES(?)", (omdb.imdb_id,))
update_sql = "UPDATE omdb_data SET title = ?, year = ?, content_rating = ?, genres = ?, imdb_rating = ?, imdb_votes = ?, metacritic_rating = ?, type = ?, expiration_date = ? WHERE imdb_id = ?" update_sql = "UPDATE omdb_data SET title = ?, year = ?, content_rating = ?, genres = ?, imdb_rating = ?, imdb_votes = ?, metacritic_rating = ?, type = ?, expiration_date = ? WHERE imdb_id = ?"
cursor.execute(update_sql, (omdb.title, omdb.year, omdb.content_rating, omdb.genres_str, omdb.imdb_rating, omdb.imdb_votes, omdb.metacritic_rating, omdb.type, expiration_date.strftime("%Y-%m-%d"), omdb.imdb_id)) cursor.execute(update_sql, (omdb.title, omdb.year, omdb.content_rating, omdb.genres_str, omdb.imdb_rating, omdb.imdb_votes, omdb.metacritic_rating, omdb.type, expiration_date.strftime("%Y-%m-%d"), omdb.imdb_id))
def query_anime_map(self, anime_id, id_type):
ids = None
expired = None
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute(f"SELECT * FROM anime_map WHERE {id_type} = ?", (anime_id, ))
row = cursor.fetchone()
if row and row["anidb"]:
datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
time_between_insertion = datetime.now() - datetime_object
ids = {
"anilist": int(row["anilist"]) if row["anilist"] else None,
"anidb": int(row["anidb"]) if row["anidb"] else None,
"myanimelist": int(row["myanimelist"]) if row["myanimelist"] else None,
"kitsu": int(row["kitsu"]) if row["kitsu"] else None
}
expired = time_between_insertion.days > self.expiration
return ids, expired
def update_anime(self, expired, anime_ids):
expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute("INSERT OR IGNORE INTO anime_map(anidb) VALUES(?)", (anime_ids["anidb"],))
cursor.execute("UPDATE anime_map SET anilist = ?, myanimelist = ?, kitsu = ?, expiration_date = ? WHERE anidb = ?", (anime_ids["anilist"], anime_ids["myanimelist"], anime_ids["kitsu"], expiration_date.strftime("%Y-%m-%d"), anime_ids["anidb"]))
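The two methods above read and write an anime_map table whose CREATE statement is not part of this hunk. A minimal sketch of the shape the referenced columns imply; the column types and the cache.db filename are assumptions, not taken from the commit.

import sqlite3
from contextlib import closing

# Assumed shape of the anime_map table used by query_anime_map/update_anime;
# expiration_date is stored as a "%Y-%m-%d" string to match the code above.
with sqlite3.connect("cache.db") as connection:
    with closing(connection.cursor()) as cursor:
        cursor.execute("""CREATE TABLE IF NOT EXISTS anime_map (
            anidb INTEGER UNIQUE,
            anilist INTEGER,
            myanimelist INTEGER,
            kitsu INTEGER,
            expiration_date TEXT)""")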
@ -1,13 +1,13 @@
import logging, os, re, requests, time import glob, logging, os, re, requests, time
from modules import util from modules import util
from modules.anidb import AniDBAPI from modules.anidb import AniDBAPI
from modules.anilist import AniListAPI from modules.anilist import AniListAPI
from modules.arms import ArmsAPI
from modules.builder import CollectionBuilder from modules.builder import CollectionBuilder
from modules.cache import Cache from modules.cache import Cache
from modules.imdb import IMDbAPI from modules.imdb import IMDbAPI
from modules.letterboxd import LetterboxdAPI from modules.letterboxd import LetterboxdAPI
from modules.mal import MyAnimeListAPI from modules.mal import MyAnimeListAPI
from modules.mal import MyAnimeListIDList
from modules.omdb import OMDbAPI from modules.omdb import OMDbAPI
from modules.plex import PlexAPI from modules.plex import PlexAPI
from modules.radarr import RadarrAPI from modules.radarr import RadarrAPI
@ -18,6 +18,7 @@ from modules.trakttv import TraktAPI
from modules.tvdb import TVDbAPI from modules.tvdb import TVDbAPI
from modules.util import Failed from modules.util import Failed
from plexapi.exceptions import BadRequest from plexapi.exceptions import BadRequest
from retrying import retry
from ruamel import yaml from ruamel import yaml
logger = logging.getLogger("Plex Meta Manager") logger = logging.getLogger("Plex Meta Manager")
@ -50,7 +51,7 @@ mass_genre_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Meta
library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"} library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"}
class Config: class Config:
def __init__(self, default_dir, config_path=None): def __init__(self, default_dir, config_path=None, libraries_to_run=None):
logger.info("Locating config...") logger.info("Locating config...")
if config_path and os.path.exists(config_path): self.config_path = os.path.abspath(config_path) if config_path and os.path.exists(config_path): self.config_path = os.path.abspath(config_path)
elif config_path and not os.path.exists(config_path): raise Failed(f"Config Error: config not found at {os.path.abspath(config_path)}") elif config_path and not os.path.exists(config_path): raise Failed(f"Config Error: config not found at {os.path.abspath(config_path)}")
@ -85,7 +86,7 @@ class Config:
replace_attr(new_config, "save_missing", "plex") replace_attr(new_config, "save_missing", "plex")
if new_config["libraries"]: if new_config["libraries"]:
for library in new_config["libraries"]: for library in new_config["libraries"]:
if "plex" in new_config["libraries"][library]: if new_config["libraries"][library] and "plex" in new_config["libraries"][library]:
replace_attr(new_config["libraries"][library], "asset_directory", "plex") replace_attr(new_config["libraries"][library], "asset_directory", "plex")
replace_attr(new_config["libraries"][library], "sync_mode", "plex") replace_attr(new_config["libraries"][library], "sync_mode", "plex")
replace_attr(new_config["libraries"][library], "show_unmanaged", "plex") replace_attr(new_config["libraries"][library], "show_unmanaged", "plex")
@ -113,7 +114,7 @@ class Config:
def check_for_attribute(data, attribute, parent=None, test_list=None, default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", throw=False, save=True): def check_for_attribute(data, attribute, parent=None, test_list=None, default=None, do_print=True, default_is_none=False, req_default=False, var_type="str", throw=False, save=True):
endline = "" endline = ""
if parent is not None: if parent is not None:
if parent in data: if data and parent in data:
data = data[parent] data = data[parent]
else: else:
data = None data = None
@ -187,6 +188,8 @@ class Config:
else: else:
self.Cache = None self.Cache = None
self.general["asset_directory"] = check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")]) self.general["asset_directory"] = check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")])
self.general["asset_folders"] = check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True)
self.general["assets_for_all"] = check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False)
self.general["sync_mode"] = check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes) self.general["sync_mode"] = check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes)
self.general["run_again_delay"] = check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0) self.general["run_again_delay"] = check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0)
self.general["show_unmanaged"] = check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True) self.general["show_unmanaged"] = check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True)
@ -244,7 +247,6 @@ class Config:
util.separator() util.separator()
self.MyAnimeList = None self.MyAnimeList = None
self.MyAnimeListIDList = MyAnimeListIDList()
if "mal" in self.data: if "mal" in self.data:
logger.info("Connecting to My Anime List...") logger.info("Connecting to My Anime List...")
self.mal = {} self.mal = {}
@ -253,7 +255,7 @@ class Config:
self.mal["client_secret"] = check_for_attribute(self.data, "client_secret", parent="mal", throw=True) self.mal["client_secret"] = check_for_attribute(self.data, "client_secret", parent="mal", throw=True)
self.mal["config_path"] = self.config_path self.mal["config_path"] = self.config_path
authorization = self.data["mal"]["authorization"] if "authorization" in self.data["mal"] and self.data["mal"]["authorization"] else None authorization = self.data["mal"]["authorization"] if "authorization" in self.data["mal"] and self.data["mal"]["authorization"] else None
self.MyAnimeList = MyAnimeListAPI(self.mal, self.MyAnimeListIDList, authorization) self.MyAnimeList = MyAnimeListAPI(self.mal, self, authorization)
except Failed as e: except Failed as e:
logger.error(e) logger.error(e)
logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}") logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}")
@ -263,6 +265,8 @@ class Config:
self.TVDb = TVDbAPI(self) self.TVDb = TVDbAPI(self)
self.IMDb = IMDbAPI(self) self.IMDb = IMDbAPI(self)
self.AniDB = AniDBAPI(self) self.AniDB = AniDBAPI(self)
self.Arms = ArmsAPI(self)
self.AniDBIDs = self.AniDB.get_AniDB_IDs()
self.AniList = AniListAPI(self) self.AniList = AniListAPI(self)
self.Letterboxd = LetterboxdAPI(self) self.Letterboxd = LetterboxdAPI(self)
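ArmsAPI is a new module that this page does not show; judging from the call sites later in the commit (mal_to_anidb, anidb_to_tvdb, anidb_to_imdb, imdb_to_ids, myanimelist_to_ids) it centralises anime ID conversion. A sketch of the interface those calls imply; the method bodies are placeholders, not the project's implementation.

class ArmsAPI:
    """Interface implied by the call sites in this commit (sketch only)."""
    def __init__(self, config):
        self.config = config                        # access to Cache, TMDb, Trakt, TVDb

    def mal_to_anidb(self, mal_id):                 # used in Config.get_id
        raise NotImplementedError

    def anidb_to_tvdb(self, anidb_id):
        raise NotImplementedError

    def anidb_to_imdb(self, anidb_id):
        raise NotImplementedError

    def imdb_to_ids(self, imdb_id, language):       # replaces Config.convert_from_imdb
        raise NotImplementedError                   # expected to return (tmdb_id, tvdb_id)

    def myanimelist_to_ids(self, mal_ids, language):
        raise NotImplementedError                   # expected to return (movie_ids, show_ids)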
@ -309,10 +313,14 @@ class Config:
self.libraries = [] self.libraries = []
try: libs = check_for_attribute(self.data, "libraries", throw=True) try: libs = check_for_attribute(self.data, "libraries", throw=True)
except Failed as e: raise Failed(e) except Failed as e: raise Failed(e)
requested_libraries = util.get_list(libraries_to_run) if libraries_to_run else None
for library_name, lib in libs.items(): for library_name, lib in libs.items():
if requested_libraries and library_name not in requested_libraries:
continue
util.separator() util.separator()
params = {} params = {}
if "library_name" in lib and lib["library_name"]: logger.info("")
if lib and "library_name" in lib and lib["library_name"]:
params["name"] = str(lib["library_name"]) params["name"] = str(lib["library_name"])
logger.info(f"Connecting to {params['name']} ({library_name}) Library...") logger.info(f"Connecting to {params['name']} ({library_name}) Library...")
else: else:
@ -323,32 +331,42 @@ class Config:
if params["asset_directory"] is None: if params["asset_directory"] is None:
logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library") logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library")
if "settings" in lib and lib["settings"] and "sync_mode" in lib["settings"]: if lib and "settings" in lib and lib["settings"] and "asset_folders" in lib["settings"]:
params["asset_folders"] = check_for_attribute(lib, "asset_folders", parent="settings", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
else:
params["asset_folders"] = check_for_attribute(lib, "asset_folders", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
if lib and "settings" in lib and lib["settings"] and "assets_for_all" in lib["settings"]:
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", parent="settings", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
else:
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
if lib and "settings" in lib and lib["settings"] and "sync_mode" in lib["settings"]:
params["sync_mode"] = check_for_attribute(lib, "sync_mode", parent="settings", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False) params["sync_mode"] = check_for_attribute(lib, "sync_mode", parent="settings", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)
else: else:
params["sync_mode"] = check_for_attribute(lib, "sync_mode", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False) params["sync_mode"] = check_for_attribute(lib, "sync_mode", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)
if "settings" in lib and lib["settings"] and "show_unmanaged" in lib["settings"]: if lib and "settings" in lib and lib["settings"] and "show_unmanaged" in lib["settings"]:
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False) params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
else: else:
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False) params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
if "settings" in lib and lib["settings"] and "show_filtered" in lib["settings"]: if lib and "settings" in lib and lib["settings"] and "show_filtered" in lib["settings"]:
params["show_filtered"] = check_for_attribute(lib, "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False) params["show_filtered"] = check_for_attribute(lib, "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
else: else:
params["show_filtered"] = check_for_attribute(lib, "show_filtered", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False) params["show_filtered"] = check_for_attribute(lib, "show_filtered", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
if "settings" in lib and lib["settings"] and "show_missing" in lib["settings"]: if lib and "settings" in lib and lib["settings"] and "show_missing" in lib["settings"]:
params["show_missing"] = check_for_attribute(lib, "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False) params["show_missing"] = check_for_attribute(lib, "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
else: else:
params["show_missing"] = check_for_attribute(lib, "show_missing", var_type="bool", default=self.general["show_missing"], do_print=False, save=False) params["show_missing"] = check_for_attribute(lib, "show_missing", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
if "settings" in lib and lib["settings"] and "save_missing" in lib["settings"]: if lib and "settings" in lib and lib["settings"] and "save_missing" in lib["settings"]:
params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False) params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
else: else:
params["save_missing"] = check_for_attribute(lib, "save_missing", var_type="bool", default=self.general["save_missing"], do_print=False, save=False) params["save_missing"] = check_for_attribute(lib, "save_missing", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
if "mass_genre_update" in lib and lib["mass_genre_update"]: if lib and "mass_genre_update" in lib and lib["mass_genre_update"]:
params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_genre_update_options, default_is_none=True, save=False) params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_genre_update_options, default_is_none=True, save=False)
else: else:
params["mass_genre_update"] = None params["mass_genre_update"] = None
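Every override block above repeats the same lookup precedence: the library's settings block, then the library root, then the global default. The precedence itself can be shown with a standalone sketch on plain dicts; it deliberately omits the type and list validation that check_for_attribute performs, and resolve_setting is a hypothetical name, not a helper this commit adds.

def resolve_setting(lib, general, attribute):
    # Library "settings" block wins, then the library root, then the global default.
    if lib and isinstance(lib.get("settings"), dict) and attribute in lib["settings"]:
        return lib["settings"][attribute]
    if lib and attribute in lib:
        return lib[attribute]
    return general[attribute]

# Plain dicts standing in for the parsed YAML:
general = {"asset_folders": True, "sync_mode": "append"}
lib = {"settings": {"sync_mode": "sync"}}
print(resolve_setting(lib, general, "sync_mode"))      # sync
print(resolve_setting(lib, general, "asset_folders"))  # True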
@ -359,7 +377,6 @@ class Config:
try: try:
params["metadata_path"] = check_for_attribute(lib, "metadata_path", var_type="path", default=os.path.join(default_dir, f"{library_name}.yml"), throw=True) params["metadata_path"] = check_for_attribute(lib, "metadata_path", var_type="path", default=os.path.join(default_dir, f"{library_name}.yml"), throw=True)
params["library_type"] = check_for_attribute(lib, "library_type", test_list=library_types, throw=True)
params["plex"] = {} params["plex"] = {}
params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", default=self.general["plex"]["url"], req_default=True, save=False) params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", default=self.general["plex"]["url"], req_default=True, save=False)
params["plex"]["token"] = check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False) params["plex"]["token"] = check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False)
@ -372,6 +389,7 @@ class Config:
continue continue
if self.general["radarr"]["url"] or "radarr" in lib: if self.general["radarr"]["url"] or "radarr" in lib:
logger.info("")
logger.info(f"Connecting to {params['name']} library's Radarr...") logger.info(f"Connecting to {params['name']} library's Radarr...")
radarr_params = {} radarr_params = {}
try: try:
@ -391,6 +409,7 @@ class Config:
logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}") logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
if self.general["sonarr"]["url"] or "sonarr" in lib: if self.general["sonarr"]["url"] or "sonarr" in lib:
logger.info("")
logger.info(f"Connecting to {params['name']} library's Sonarr...") logger.info(f"Connecting to {params['name']} library's Sonarr...")
sonarr_params = {} sonarr_params = {}
try: try:
@ -416,6 +435,7 @@ class Config:
logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}") logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
if self.general["tautulli"]["url"] or "tautulli" in lib: if self.general["tautulli"]["url"] or "tautulli" in lib:
logger.info("")
logger.info(f"Connecting to {params['name']} library's Tautulli...") logger.info(f"Connecting to {params['name']} library's Tautulli...")
tautulli_params = {} tautulli_params = {}
try: try:
@ -426,6 +446,7 @@ class Config:
util.print_multiline(e, error=True) util.print_multiline(e, error=True)
logger.info(f"{params['name']} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}") logger.info(f"{params['name']} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
logger.info("")
self.libraries.append(library) self.libraries.append(library)
util.separator() util.separator()
@ -537,6 +558,40 @@ class Config:
util.print_stacktrace() util.print_stacktrace()
logger.error(f"Unknown Error: {e}") logger.error(f"Unknown Error: {e}")
if library.assets_for_all is True and not test and not requested_collections:
logger.info("")
util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
logger.info("")
for item in library.get_all():
folder = os.path.basename(os.path.dirname(item.locations[0]) if library.is_movie else item.locations[0])
for ad in library.asset_directory:
if library.asset_folders:
poster_path = os.path.join(ad, folder, "poster.*")
else:
poster_path = os.path.join(ad, f"{folder}.*")
matches = glob.glob(poster_path)
if len(matches) > 0:
item.uploadPoster(filepath=os.path.abspath(matches[0]))
logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {os.path.abspath(matches[0])}")
if library.asset_folders:
matches = glob.glob(os.path.join(ad, folder, "background.*"))
if len(matches) > 0:
item.uploadArt(filepath=os.path.abspath(matches[0]))
logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {os.path.abspath(matches[0])}")
if library.is_show:
for season in item.seasons():
matches = glob.glob(os.path.join(ad, folder, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*"))
if len(matches) > 0:
season_path = os.path.abspath(matches[0])
season.uploadPoster(filepath=season_path)
logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}")
for episode in season.episodes():
matches = glob.glob(os.path.join(ad, folder, f"{episode.seasonEpisode.upper()}.*"))
if len(matches) > 0:
episode_path = os.path.abspath(matches[0])
episode.uploadPoster(filepath=episode_path)
logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}")
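The assets_for_all pass above globs next to each item's media folder name. Under the defaults it expects a layout roughly like the sketch below; the directory and title are examples, not values from the commit.

import glob, os

asset_dir = "config/assets"
folder = "Cowboy Bebop (1998)"   # basename of the item's media folder (example)

# asset_folders: true  -> config/assets/Cowboy Bebop (1998)/poster.*, background.*,
#                         Season01.*, S01E01.*, ...
# asset_folders: false -> config/assets/Cowboy Bebop (1998).*   (poster only)
poster_glob = os.path.join(asset_dir, folder, "poster.*")
flat_glob = os.path.join(asset_dir, f"{folder}.*")
print(glob.glob(poster_glob), glob.glob(flat_glob))    # [] [] unless the files exist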
if library.show_unmanaged is True and not test and not requested_collections: if library.show_unmanaged is True and not test and not requested_collections:
logger.info("") logger.info("")
util.separator(f"Unmanaged Collections in {library.name} Library") util.separator(f"Unmanaged Collections in {library.name} Library")
@ -590,60 +645,6 @@ class Config:
continue continue
builder.run_collections_again(collection_obj, movie_map, show_map) builder.run_collections_again(collection_obj, movie_map, show_map)
def convert_from_imdb(self, imdb_id, language):
update_tmdb = False
update_tvdb = False
if self.Cache:
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb_id)
update_tmdb = False
if not tmdb_id:
tmdb_id, update_tmdb = self.Cache.get_tmdb_from_imdb(imdb_id)
if update_tmdb:
tmdb_id = None
update_tvdb = False
if not tvdb_id:
tvdb_id, update_tvdb = self.Cache.get_tvdb_from_imdb(imdb_id)
if update_tvdb:
tvdb_id = None
else:
tmdb_id = None
tvdb_id = None
from_cache = tmdb_id is not None or tvdb_id is not None
if not tmdb_id and not tvdb_id and self.TMDb:
try:
tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id)
except Failed:
pass
if not tmdb_id and not tvdb_id and self.TMDb:
try:
tvdb_id = self.TMDb.convert_imdb_to_tvdb(imdb_id)
except Failed:
pass
if not tmdb_id and not tvdb_id and self.Trakt:
try:
tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id)
except Failed:
pass
if not tmdb_id and not tvdb_id and self.Trakt:
try:
tvdb_id = self.Trakt.convert_imdb_to_tvdb(imdb_id)
except Failed:
pass
try:
if tmdb_id and not from_cache: self.TMDb.get_movie(tmdb_id)
except Failed: tmdb_id = None
try:
if tvdb_id and not from_cache: self.TVDb.get_series(language, tvdb_id)
except Failed: tvdb_id = None
if not tmdb_id and not tvdb_id: raise Failed(f"IMDb Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
if self.Cache:
if tmdb_id and update_tmdb is not False:
self.Cache.update_imdb("movie", update_tmdb, imdb_id, tmdb_id)
if tvdb_id and update_tvdb is not False:
self.Cache.update_imdb("show", update_tvdb, imdb_id, tvdb_id)
return tmdb_id, tvdb_id
def mass_metadata(self, library, movie_map, show_map): def mass_metadata(self, library, movie_map, show_map):
length = 0 length = 0
logger.info("") logger.info("")
@ -656,13 +657,13 @@ class Config:
if self.Cache: if self.Cache:
ids, expired = self.Cache.get_ids("movie" if library.is_movie else "show", plex_guid=item.guid) ids, expired = self.Cache.get_ids("movie" if library.is_movie else "show", plex_guid=item.guid)
elif library.is_movie: elif library.is_movie:
for tmdb in movie_map: for tmdb, rating_keys in movie_map.items():
if movie_map[tmdb] == item.ratingKey: if item.ratingKey in rating_keys:
ids["tmdb"] = tmdb ids["tmdb"] = tmdb
break break
else: else:
for tvdb in show_map: for tvdb, rating_keys in show_map.items():
if show_map[tvdb] == item.ratingKey: if item.ratingKey in rating_keys:
ids["tvdb"] = tvdb ids["tvdb"] = tvdb
break break
@ -718,15 +719,35 @@ class Config:
continue continue
if isinstance(main_id, list): if isinstance(main_id, list):
if id_type == "movie": if id_type == "movie":
for m in main_id: movie_map[m] = item.ratingKey for m in main_id:
if m in movie_map:
movie_map[m].append(item.ratingKey)
else:
movie_map[m] = [item.ratingKey]
elif id_type == "show": elif id_type == "show":
for m in main_id: show_map[m] = item.ratingKey for m in main_id:
if m in show_map:
show_map[m].append(item.ratingKey)
else:
show_map[m] = [item.ratingKey]
else: else:
if id_type == "movie": movie_map[main_id] = item.ratingKey if id_type == "movie":
elif id_type == "show": show_map[main_id] = item.ratingKey if main_id in movie_map:
movie_map[main_id].append(item.ratingKey)
else:
movie_map[main_id] = [item.ratingKey]
elif id_type == "show":
if main_id in show_map:
show_map[main_id].append(item.ratingKey)
else:
show_map[main_id] = [item.ratingKey]
util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}") util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}")
return movie_map, show_map return movie_map, show_map
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get_guids(self, item):
return item.guids
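get_guids is wrapped with the retrying package's @retry, the same decorator already used on the Plex wrappers: stop_max_attempt_number=6 and wait_fixed=10000 mean up to six attempts with a fixed ten-second pause between them, retrying on any exception. A standalone sketch of that behaviour; flaky_lookup is invented for illustration.

from retrying import retry

attempts = {"count": 0}

@retry(stop_max_attempt_number=6, wait_fixed=10000)   # 6 tries, 10 seconds apart
def flaky_lookup():
    attempts["count"] += 1
    if attempts["count"] < 3:
        raise ConnectionError("transient failure")     # swallowed and retried
    return "ok"

print(flaky_lookup(), "after", attempts["count"], "attempts")   # ok after 3 attempts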
def get_id(self, item, library, length): def get_id(self, item, library, length):
expired = None expired = None
tmdb_id = None tmdb_id = None
@ -747,16 +768,26 @@ class Config:
check_id = guid.netloc check_id = guid.netloc
if item_type == "plex" and check_id == "movie": if item_type == "plex" and check_id == "movie":
for guid_tag in item.guids: try:
url_parsed = requests.utils.urlparse(guid_tag.id) for guid_tag in self.get_guids(item):
if url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc) url_parsed = requests.utils.urlparse(guid_tag.id)
elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc if url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc)
elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc
except requests.exceptions.ConnectionError:
util.print_stacktrace()
logger.error(f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | No External GUIDs found for {item.title}")
return None, None
elif item_type == "plex" and check_id == "show": elif item_type == "plex" and check_id == "show":
for guid_tag in item.guids: try:
url_parsed = requests.utils.urlparse(guid_tag.id) for guid_tag in self.get_guids(item):
if url_parsed.scheme == "tvdb": tvdb_id = int(url_parsed.netloc) url_parsed = requests.utils.urlparse(guid_tag.id)
elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc if url_parsed.scheme == "tvdb": tvdb_id = int(url_parsed.netloc)
elif url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc) elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc
elif url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc)
except requests.exceptions.ConnectionError:
util.print_stacktrace()
logger.error(f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | No External GUIDs found for {item.title}")
return None, None
elif item_type == "imdb": imdb_id = check_id elif item_type == "imdb": imdb_id = check_id
elif item_type == "thetvdb": tvdb_id = int(check_id) elif item_type == "thetvdb": tvdb_id = int(check_id)
elif item_type == "themoviedb": tmdb_id = int(check_id) elif item_type == "themoviedb": tmdb_id = int(check_id)
@ -769,75 +800,76 @@ class Config:
else: error_message = f"Agent {item_type} not supported" else: error_message = f"Agent {item_type} not supported"
if not error_message: if not error_message:
if mal_id and not anidb_id:
try: anidb_id = self.Arms.mal_to_anidb(mal_id)
except Failed: pass
if anidb_id and not tvdb_id: if anidb_id and not tvdb_id:
try: tvdb_id = self.AniDB.convert_anidb_to_tvdb(anidb_id) try: tvdb_id = self.Arms.anidb_to_tvdb(anidb_id)
except Failed: pass except Failed: pass
if anidb_id and not imdb_id: if anidb_id and not imdb_id:
try: imdb_id = self.AniDB.convert_anidb_to_imdb(anidb_id) try: imdb_id = self.Arms.anidb_to_imdb(anidb_id)
except Failed: pass
if mal_id:
try:
ids = self.MyAnimeListIDList.find_mal_ids(mal_id)
if "thetvdb_id" in ids and int(ids["thetvdb_id"]) > 0: tvdb_id = int(ids["thetvdb_id"])
elif "themoviedb_id" in ids and int(ids["themoviedb_id"]) > 0: tmdb_id = int(ids["themoviedb_id"])
else: raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} has no other IDs associated with it")
except Failed:
pass
if mal_id and not tvdb_id:
try: tvdb_id = self.MyAnimeListIDList.convert_mal_to_tvdb(mal_id)
except Failed: pass
if mal_id and not tmdb_id:
try: tmdb_id = self.MyAnimeListIDList.convert_mal_to_tmdb(mal_id)
except Failed: pass except Failed: pass
if not tmdb_id and imdb_id and isinstance(imdb_id, list) and self.TMDb: if not tmdb_id and imdb_id:
tmdb_id = [] if isinstance(imdb_id, list):
new_imdb_id = [] tmdb_id = []
for imdb in imdb_id: new_imdb_id = []
try: for imdb in imdb_id:
temp_tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb) try:
tmdb_id.append(temp_tmdb_id) tmdb_id.append(self.TMDb.convert_imdb_to_tmdb(imdb))
new_imdb_id.append(imdb) new_imdb_id.append(imdb)
except Failed: except Failed:
continue if self.Trakt:
imdb_id = new_imdb_id try:
if not tmdb_id and imdb_id and self.TMDb: tmdb_id.append(self.Trakt.convert_imdb_to_tmdb(imdb))
try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id) new_imdb_id.append(imdb)
except Failed: pass except Failed:
if not tmdb_id and imdb_id and self.Trakt: continue
try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id) else:
except Failed: pass continue
if not tmdb_id and tvdb_id and self.TMDb: imdb_id = new_imdb_id
else:
try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id)
except Failed: pass
if not tmdb_id and self.Trakt:
try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id)
except Failed: pass
if not tmdb_id and tvdb_id and library.is_show:
try: tmdb_id = self.TMDb.convert_tvdb_to_tmdb(tvdb_id) try: tmdb_id = self.TMDb.convert_tvdb_to_tmdb(tvdb_id)
except Failed: pass except Failed: pass
if not tmdb_id and tvdb_id and self.Trakt: if not tmdb_id and self.Trakt:
try: tmdb_id = self.Trakt.convert_tvdb_to_tmdb(tvdb_id) try: tmdb_id = self.Trakt.convert_tvdb_to_tmdb(tvdb_id)
except Failed: pass except Failed: pass
if not imdb_id and tmdb_id and self.TMDb: if not imdb_id and tmdb_id and library.is_movie:
try: imdb_id = self.TMDb.convert_tmdb_to_imdb(tmdb_id) try: imdb_id = self.TMDb.convert_tmdb_to_imdb(tmdb_id)
except Failed: pass except Failed: pass
if not imdb_id and tmdb_id and self.Trakt: if not imdb_id and self.Trakt:
try: imdb_id = self.Trakt.convert_tmdb_to_imdb(tmdb_id) try: imdb_id = self.Trakt.convert_tmdb_to_imdb(tmdb_id)
except Failed: pass except Failed: pass
if not imdb_id and tvdb_id and self.Trakt: if not imdb_id and tvdb_id and library.is_show:
try: imdb_id = self.Trakt.convert_tmdb_to_imdb(tmdb_id) try: imdb_id = self.TMDb.convert_tvdb_to_imdb(tvdb_id)
except Failed: pass
if not tvdb_id and tmdb_id and self.TMDb and library.is_show:
try: tvdb_id = self.TMDb.convert_tmdb_to_tvdb(tmdb_id)
except Failed: pass except Failed: pass
if not tvdb_id and tmdb_id and self.Trakt and library.is_show: if not imdb_id and self.Trakt:
try: tvdb_id = self.Trakt.convert_tmdb_to_tvdb(tmdb_id) try: imdb_id = self.Trakt.convert_tvdb_to_imdb(tvdb_id)
except Failed: pass except Failed: pass
if not tvdb_id and imdb_id and self.Trakt and library.is_show: if not tvdb_id and library.is_show:
try: tvdb_id = self.Trakt.convert_imdb_to_tvdb(imdb_id) if tmdb_id:
except Failed: pass try: tvdb_id = self.TMDb.convert_tmdb_to_tvdb(tmdb_id)
except Failed: pass
if (not tmdb_id and library.is_movie) or (not tvdb_id and not ((anidb_id or mal_id) and tmdb_id) and library.is_show): if not tvdb_id and self.Trakt:
try: tvdb_id = self.Trakt.convert_tmdb_to_tvdb(tmdb_id)
except Failed: pass
if not tvdb_id and imdb_id:
try: tvdb_id = self.TMDb.convert_imdb_to_tvdb(imdb_id)
except Failed: pass
if not tvdb_id and self.Trakt:
try: tvdb_id = self.Trakt.convert_imdb_to_tvdb(imdb_id)
except Failed: pass
if (not tmdb_id and library.is_movie) or (not tvdb_id and not (anidb_id and tmdb_id) and library.is_show):
service_name = "TMDb ID" if library.is_movie else "TVDb ID" service_name = "TMDb ID" if library.is_movie else "TVDb ID"
if self.TMDb and self.Trakt: api_name = "TMDb or Trakt" if self.Trakt: api_name = "TMDb or Trakt"
elif self.TMDb: api_name = "TMDb" else: api_name = "TMDb"
elif self.Trakt: api_name = "Trakt"
else: api_name = None
if tmdb_id and imdb_id: id_name = f"TMDb ID: {tmdb_id} or IMDb ID: {imdb_id}" if tmdb_id and imdb_id: id_name = f"TMDb ID: {tmdb_id} or IMDb ID: {imdb_id}"
elif imdb_id and tvdb_id: id_name = f"IMDb ID: {imdb_id} or TVDb ID: {tvdb_id}" elif imdb_id and tvdb_id: id_name = f"IMDb ID: {imdb_id} or TVDb ID: {tvdb_id}"
@ -846,23 +878,21 @@ class Config:
elif tvdb_id: id_name = f"TVDb ID: {tvdb_id}" elif tvdb_id: id_name = f"TVDb ID: {tvdb_id}"
else: id_name = None else: id_name = None
if anidb_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert AniDb ID: {anidb_id} to TMDb ID or TVDb ID" if anidb_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert AniDB ID: {anidb_id} to TMDb ID or TVDb ID"
elif mal_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert MyAnimeList ID: {mal_id} to TMDb ID or TVDb ID" elif id_name: error_message = f"Unable to convert {id_name} to {service_name} using {api_name}"
elif id_name and api_name: error_message = f"Unable to convert {id_name} to {service_name} using {api_name}"
elif id_name: error_message = f"Configure TMDb or Trakt to covert {id_name} to {service_name}"
else: error_message = f"No ID to convert to {service_name}" else: error_message = f"No ID to convert to {service_name}"
if self.Cache and ((tmdb_id and library.is_movie) or ((tvdb_id or ((anidb_id or mal_id) and tmdb_id)) and library.is_show)): if self.Cache and ((tmdb_id and library.is_movie) or ((tvdb_id or (anidb_id and tmdb_id)) and library.is_show)):
if not isinstance(tmdb_id, list): tmdb_id = [tmdb_id] if not isinstance(tmdb_id, list): tmdb_id = [tmdb_id]
if not isinstance(imdb_id, list): imdb_id = [imdb_id] if not isinstance(imdb_id, list): imdb_id = [imdb_id]
for i in range(len(tmdb_id)): for i in range(len(tmdb_id)):
try: imdb_value = imdb_id[i] try: imdb_value = imdb_id[i]
except IndexError: imdb_value = None except IndexError: imdb_value = None
util.print_end(length, f"Cache | {'^' if expired is True else '+'} | {item.guid:<46} | {tmdb_id[i] if tmdb_id[i] else 'None':<6} | {imdb_value if imdb_value else 'None':<10} | {tvdb_id if tvdb_id else 'None':<6} | {anidb_id if anidb_id else 'None':<5} | {mal_id if mal_id else 'None':<5} | {item.title}") util.print_end(length, f"Cache | {'^' if expired is True else '+'} | {item.guid:<46} | {tmdb_id[i] if tmdb_id[i] else 'None':<6} | {imdb_value if imdb_value else 'None':<10} | {tvdb_id if tvdb_id else 'None':<6} | {anidb_id if anidb_id else 'None':<5} | {item.title}")
self.Cache.update_guid("movie" if library.is_movie else "show", item.guid, tmdb_id[i], imdb_value, tvdb_id, anidb_id, mal_id, expired) self.Cache.update_guid("movie" if library.is_movie else "show", item.guid, tmdb_id[i], imdb_value, tvdb_id, anidb_id, expired)
if tmdb_id and library.is_movie: return "movie", tmdb_id if tmdb_id and library.is_movie: return "movie", tmdb_id
elif tvdb_id and library.is_show: return "show", tvdb_id elif tvdb_id and library.is_show: return "show", tvdb_id
elif (anidb_id or mal_id) and tmdb_id: return "movie", tmdb_id elif anidb_id and tmdb_id: return "movie", tmdb_id
else: else:
util.print_end(length, f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | {error_message} for {item.title}") util.print_end(length, f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | {error_message} for {item.title}")
return None, None return None, None
@ -17,24 +17,29 @@ class IMDbAPI:
"keyword": "https://www.imdb.com/search/keyword/?" "keyword": "https://www.imdb.com/search/keyword/?"
} }
def validate_imdb_url(self, imdb_url): def validate_imdb_url(self, imdb_url, language):
imdb_url = imdb_url.strip() imdb_url = imdb_url.strip()
if not imdb_url.startswith(self.urls["list"]) and not imdb_url.startswith(self.urls["search"]) and not imdb_url.startswith(self.urls["keyword"]): if not imdb_url.startswith(self.urls["list"]) and not imdb_url.startswith(self.urls["search"]) and not imdb_url.startswith(self.urls["keyword"]):
raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{self.urls['list']} (For Lists)\n{self.urls['search']} (For Searches)\n{self.urls['keyword']} (For Keyword Searches)") raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{self.urls['list']} (For Lists)\n{self.urls['search']} (For Searches)\n{self.urls['keyword']} (For Keyword Searches)")
return imdb_url total, _ = self.get_total(self.fix_url(imdb_url), language)
if total > 0:
return imdb_url
raise Failed(f"IMDb Error: {imdb_url} failed to parse")
def get_imdb_ids_from_url(self, imdb_url, language, limit): def fix_url(self, imdb_url):
if imdb_url.startswith(self.urls["list"]): if imdb_url.startswith(self.urls["list"]):
try: list_id = re.search("(\\d+)", str(imdb_url)).group(1) try: list_id = re.search("(\\d+)", str(imdb_url)).group(1)
except AttributeError: raise Failed(f"IMDb Error: Failed to parse List ID from {imdb_url}") except AttributeError: raise Failed(f"IMDb Error: Failed to parse List ID from {imdb_url}")
current_url = f"{self.urls['search']}lists=ls{list_id}" return f"{self.urls['search']}lists=ls{list_id}"
elif imdb_url.endswith("/"):
return imdb_url[:-1]
else: else:
current_url = imdb_url return imdb_url
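fix_url now normalises every accepted IMDb URL before get_total counts its results: list URLs are rewritten to the equivalent search URL and a trailing slash is dropped, presumably so query parameters can be appended cleanly. The standalone sketch below mirrors that rewrite; the list and search base URLs are plausible stand-ins, since only the keyword URL appears in this hunk.

import re

urls = {
    "list": "https://www.imdb.com/list/",              # stand-in values
    "search": "https://www.imdb.com/search/title/?",
}

def fix_url(imdb_url):
    if imdb_url.startswith(urls["list"]):
        list_id = re.search(r"(\d+)", imdb_url).group(1)
        return f"{urls['search']}lists=ls{list_id}"
    return imdb_url[:-1] if imdb_url.endswith("/") else imdb_url

print(fix_url("https://www.imdb.com/list/ls005526372/"))
# https://www.imdb.com/search/title/?lists=ls005526372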
def get_total(self, imdb_url, language):
header = {"Accept-Language": language} header = {"Accept-Language": language}
length = 0
imdb_ids = []
if imdb_url.startswith(self.urls["keyword"]): if imdb_url.startswith(self.urls["keyword"]):
results = self.send_request(current_url, header).xpath("//div[@class='desc']/text()") results = self.send_request(imdb_url, header).xpath("//div[@class='desc']/text()")
total = None total = None
for result in results: for result in results:
if "title" in result: if "title" in result:
@ -45,13 +50,20 @@ class IMDbAPI:
pass pass
if total is None: if total is None:
raise Failed(f"IMDb Error: No Results at URL: {imdb_url}") raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
item_count = 50 return total, 50
else: else:
try: results = self.send_request(current_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "") try: results = self.send_request(imdb_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "")
except IndexError: raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}") except IndexError: raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}")
try: total = int(re.findall("(\\d+) title", results)[0]) try: total = int(re.findall("(\\d+) title", results)[0])
except IndexError: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}") except IndexError: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
item_count = 250 return total, 250
def get_imdb_ids_from_url(self, imdb_url, language, limit):
current_url = self.fix_url(imdb_url)
total, item_count = self.get_total(current_url, language)
header = {"Accept-Language": language}
length = 0
imdb_ids = []
if "&start=" in current_url: current_url = re.sub("&start=\\d+", "", current_url) if "&start=" in current_url: current_url = re.sub("&start=\\d+", "", current_url)
if "&count=" in current_url: current_url = re.sub("&count=\\d+", "", current_url) if "&count=" in current_url: current_url = re.sub("&count=\\d+", "", current_url)
if "&page=" in current_url: current_url = re.sub("&page=\\d+", "", current_url) if "&page=" in current_url: current_url = re.sub("&page=\\d+", "", current_url)
@ -88,7 +100,7 @@ class IMDbAPI:
if method == "imdb_id": if method == "imdb_id":
if status_message: if status_message:
logger.info(f"Processing {pretty}: {data}") logger.info(f"Processing {pretty}: {data}")
tmdb_id, tvdb_id = self.config.convert_from_imdb(data, language) tmdb_id, tvdb_id = self.config.Arms.imdb_to_ids(data, language)
if tmdb_id: movie_ids.append(tmdb_id) if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id) if tvdb_id: show_ids.append(tvdb_id)
elif method == "imdb_list": elif method == "imdb_list":
@ -101,7 +113,7 @@ class IMDbAPI:
for i, imdb_id in enumerate(imdb_ids, 1): for i, imdb_id in enumerate(imdb_ids, 1):
length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}") length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}")
try: try:
tmdb_id, tvdb_id = self.config.convert_from_imdb(imdb_id, language) tmdb_id, tvdb_id = self.config.Arms.imdb_to_ids(imdb_id, language)
if tmdb_id: movie_ids.append(tmdb_id) if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id) if tvdb_id: show_ids.append(tvdb_id)
except Failed as e: logger.warning(e) except Failed as e: logger.warning(e)
@ -72,29 +72,9 @@ userlist_status = [
"plan_to_watch" "plan_to_watch"
] ]
class MyAnimeListIDList:
def __init__(self):
self.ids = json.loads(requests.get("https://raw.githubusercontent.com/Fribb/anime-lists/master/animeMapping_full.json").content)
def convert_mal_to_tvdb(self, mal_id): return self.convert_mal(mal_id, "mal_id", "thetvdb_id")
def convert_mal_to_tmdb(self, mal_id): return self.convert_mal(mal_id, "mal_id", "themoviedb_id")
def convert_tvdb_to_mal(self, tvdb_id): return self.convert_mal(tvdb_id, "thetvdb_id", "mal_id")
def convert_tmdb_to_mal(self, tmdb_id): return self.convert_mal(tmdb_id, "themoviedb_id", "mal_id")
def convert_mal(self, input_id, from_id, to_id):
for attrs in self.ids:
if from_id in attrs and int(attrs[from_id]) == int(input_id) and to_id in attrs and int(attrs[to_id]) > 0:
return int(attrs[to_id])
raise Failed(f"MyAnimeList Error: {util.pretty_ids[to_id]} ID not found for {util.pretty_ids[from_id]}: {input_id}")
def find_mal_ids(self, mal_id):
for mal in self.ids:
if "mal_id" in mal and int(mal["mal_id"]) == int(mal_id):
return mal
raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} not found")
class MyAnimeListAPI: class MyAnimeListAPI:
def __init__(self, params, MyAnimeListIDList_in, authorization=None): def __init__(self, params, config, authorization=None):
self.config = config
self.urls = { self.urls = {
"oauth_token": "https://myanimelist.net/v1/oauth2/token", "oauth_token": "https://myanimelist.net/v1/oauth2/token",
"oauth_authorize": "https://myanimelist.net/v1/oauth2/authorize", "oauth_authorize": "https://myanimelist.net/v1/oauth2/authorize",
@ -107,7 +87,6 @@ class MyAnimeListAPI:
self.client_secret = params["client_secret"] self.client_secret = params["client_secret"]
self.config_path = params["config_path"] self.config_path = params["config_path"]
self.authorization = authorization self.authorization = authorization
self.MyAnimeListIDList = MyAnimeListIDList_in
if not self.save_authorization(self.authorization): if not self.save_authorization(self.authorization):
if not self.refresh_authorization(): if not self.refresh_authorization():
self.get_authorization() self.get_authorization()
@ -214,7 +193,7 @@ class MyAnimeListAPI:
url = f"{self.urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}" url = f"{self.urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}"
return self.request_and_parse_mal_ids(url) return self.request_and_parse_mal_ids(url)
def get_items(self, method, data, status_message=True): def get_items(self, method, data, language, status_message=True):
if status_message: if status_message:
logger.debug(f"Data: {data}") logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method pretty = util.pretty_names[method] if method in util.pretty_names else method
@ -240,17 +219,7 @@ class MyAnimeListAPI:
logger.info(f"Processing {pretty}: {data['limit']} Anime from {self.get_username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}") logger.info(f"Processing {pretty}: {data['limit']} Anime from {self.get_username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}")
else: else:
raise Failed(f"MyAnimeList Error: Method {method} not supported") raise Failed(f"MyAnimeList Error: Method {method} not supported")
show_ids = [] movie_ids, show_ids = self.config.Arms.myanimelist_to_ids(mal_ids, language)
movie_ids = []
for mal_id in mal_ids:
try:
ids = self.MyAnimeListIDList.find_mal_ids(mal_id)
if "thetvdb_id" in ids and int(ids["thetvdb_id"]) > 0: show_ids.append(int(ids["thetvdb_id"]))
elif "themoviedb_id" in ids and int(ids["themoviedb_id"]) > 0: movie_ids.append(int(ids["themoviedb_id"]))
else: raise Failed(f"MyAnimeList Error: MyAnimeList ID: {mal_id} has no other IDs associated with it")
except Failed as e:
if status_message:
logger.error(e)
if status_message: if status_message:
logger.debug(f"MyAnimeList IDs Found: {mal_ids}") logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
logger.debug(f"Shows Found: {show_ids}") logger.debug(f"Shows Found: {show_ids}")
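get_items no longer walks a MyAnimeList ID list itself; the split of MAL IDs into movie and show IDs has moved into Arms.myanimelist_to_ids, which is not shown on this page. Judging from the old loop it replaces, it presumably behaves like the sketch below; the lookup table and its values are invented for illustration.

def myanimelist_to_ids(mal_ids, lookup):
    # Split MAL IDs into (movie_ids, show_ids); `lookup` stands in for whatever
    # mal_id -> external-ids mapping ArmsAPI actually consults.
    movie_ids, show_ids = [], []
    for mal_id in mal_ids:
        ids = lookup.get(mal_id, {})
        if ids.get("thetvdb_id"):
            show_ids.append(int(ids["thetvdb_id"]))
        elif ids.get("themoviedb_id"):
            movie_ids.append(int(ids["themoviedb_id"]))
    return movie_ids, show_ids

example = {1: {"thetvdb_id": 76885}, 5: {"themoviedb_id": 603}}   # arbitrary values
print(myanimelist_to_ids([1, 5, 99], example))   # ([603], [76885])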
@ -3,7 +3,6 @@ from datetime import datetime, timedelta
from modules import util from modules import util
from modules.util import Failed from modules.util import Failed
from plexapi.exceptions import BadRequest, NotFound, Unauthorized from plexapi.exceptions import BadRequest, NotFound, Unauthorized
from plexapi.library import MovieSection, ShowSection
from plexapi.collection import Collections from plexapi.collection import Collections
from plexapi.server import PlexServer from plexapi.server import PlexServer
from plexapi.video import Movie, Show from plexapi.video import Movie, Show
@ -20,7 +19,8 @@ search_translation = {
"added": "addedAt", "added": "addedAt",
"originally_available": "originallyAvailableAt", "originally_available": "originallyAvailableAt",
"audience_rating": "audienceRating", "audience_rating": "audienceRating",
"critic_rating": "rating" "critic_rating": "rating",
"user_rating": "userRating"
} }
episode_sorting_options = {"default": "-1", "oldest": "0", "newest": "1"} episode_sorting_options = {"default": "-1", "oldest": "0", "newest": "1"}
keep_episodes_options = {"all": 0, "5_latest": 5, "3_latest": 3, "latest": 1, "past_3": -3, "past_7": -7, "past_30": -30} keep_episodes_options = {"all": 0, "5_latest": 5, "3_latest": 3, "latest": 1, "past_3": -3, "past_7": -7, "past_30": -30}
@ -34,6 +34,25 @@ plex_languages = ["default", "ar-SA", "ca-ES", "cs-CZ", "da-DK", "de-DE", "el-GR
metadata_language_options = {lang.lower(): lang for lang in plex_languages} metadata_language_options = {lang.lower(): lang for lang in plex_languages}
metadata_language_options["default"] = None metadata_language_options["default"] = None
use_original_title_options = {"default": -1, "no": 0, "yes": 1} use_original_title_options = {"default": -1, "no": 0, "yes": 1}
advance_keys = {
"episode_sorting": ("episodeSort", episode_sorting_options),
"keep_episodes": ("autoDeletionItemPolicyUnwatchedLibrary", keep_episodes_options),
"delete_episodes": ("autoDeletionItemPolicyWatchedLibrary", delete_episodes_options),
"season_display": ("flattenSeasons", season_display_options),
"episode_ordering": ("showOrdering", episode_ordering_options),
"metadata_language": ("languageOverride", metadata_language_options),
"use_original_title": ("useOriginalTitle", use_original_title_options)
}
item_advance_keys = {
"item_episode_sorting": ("episodeSort", episode_sorting_options),
"item_keep_episodes": ("autoDeletionItemPolicyUnwatchedLibrary", keep_episodes_options),
"item_delete_episodes": ("autoDeletionItemPolicyWatchedLibrary", delete_episodes_options),
"item_season_display": ("flattenSeasons", season_display_options),
"item_episode_ordering": ("showOrdering", episode_ordering_options),
"item_metadata_language": ("languageOverride", metadata_language_options),
"item_use_original_title": ("useOriginalTitle", use_original_title_options)
}
new_plex_agents = ["tv.plex.agents.movie", "tv.plex.agents.series"]
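advance_keys and item_advance_keys pair each advanced-metadata attribute with its Plex preference key and the dictionary that validates and translates the user's value. How the tuples are applied is outside this hunk, but the translation step itself is simple; the "newest" value below is just an example.

episode_sorting_options = {"default": "-1", "oldest": "0", "newest": "1"}
advance_keys = {"episode_sorting": ("episodeSort", episode_sorting_options)}

user_value = "newest"                          # value from the metadata YAML (example)
plex_key, options = advance_keys["episode_sorting"]
if user_value in options:
    prefs = {plex_key: options[user_value]}    # {"episodeSort": "1"} -> sent to Plex
    print(prefs)
else:
    print(f"invalid episode_sorting: {user_value}")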
filter_alias = { filter_alias = {
"actor": "actors", "actor": "actors",
"audience_rating": "audienceRating", "audience_rating": "audienceRating",
@ -45,6 +64,7 @@ filter_alias = {
"genre": "genres", "genre": "genres",
"originally_available": "originallyAvailableAt", "originally_available": "originallyAvailableAt",
"tmdb_vote_count": "vote_count", "tmdb_vote_count": "vote_count",
"user_rating": "userRating",
"writer": "writers" "writer": "writers"
} }
searches = [ searches = [
@ -66,6 +86,7 @@ searches = [
"added.before", "added.after", "added.before", "added.after",
"originally_available.before", "originally_available.after", "originally_available.before", "originally_available.after",
"duration.greater", "duration.less", "duration.greater", "duration.less",
"user_rating.greater", "user_rating.less",
"audience_rating.greater", "audience_rating.less", "audience_rating.greater", "audience_rating.less",
"critic_rating.greater", "critic_rating.less", "critic_rating.greater", "critic_rating.less",
"year", "year.not", "year.greater", "year.less" "year", "year.not", "year.greater", "year.less"
@ -118,11 +139,15 @@ class PlexAPI:
except requests.exceptions.ConnectionError: except requests.exceptions.ConnectionError:
util.print_stacktrace() util.print_stacktrace()
raise Failed("Plex Error: Plex url is invalid") raise Failed("Plex Error: Plex url is invalid")
self.is_movie = params["library_type"] == "movie" self.Plex = next((s for s in self.PlexServer.library.sections() if s.title == params["name"]), None)
self.is_show = params["library_type"] == "show"
self.Plex = next((s for s in self.PlexServer.library.sections() if s.title == params["name"] and ((self.is_movie and isinstance(s, MovieSection)) or (self.is_show and isinstance(s, ShowSection)))), None)
if not self.Plex: if not self.Plex:
raise Failed(f"Plex Error: Plex Library {params['name']} not found") raise Failed(f"Plex Error: Plex Library {params['name']} not found")
if self.Plex.type not in ["movie", "show"]:
raise Failed(f"Plex Error: Plex Library must be a Movies or TV Shows library")
self.agent = self.Plex.agent
self.is_movie = self.Plex.type == "movie"
self.is_show = self.Plex.type == "show"
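The library type and agent now come from the Plex section itself rather than a library_type setting, and new_plex_agents, defined earlier in this file's diff, names the new-style Movie and TV agents. A guess at how self.agent might be checked downstream; the function below is illustrative, not quoted from the commit.

new_plex_agents = ["tv.plex.agents.movie", "tv.plex.agents.series"]

def uses_new_agent(section_agent):
    # True for libraries on Plex's new Movie/TV agents, False for legacy agents.
    return section_agent in new_plex_agents

print(uses_new_agent("tv.plex.agents.series"))        # True
print(uses_new_agent("com.plexapp.agents.thetvdb"))   # False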
logger.info(f"Using Metadata File: {params['metadata_path']}") logger.info(f"Using Metadata File: {params['metadata_path']}")
try: try:
@ -164,6 +189,8 @@ class PlexAPI:
self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), f"{os.path.splitext(os.path.basename(params['metadata_path']))[0]}_missing.yml") self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), f"{os.path.splitext(os.path.basename(params['metadata_path']))[0]}_missing.yml")
self.metadata_path = params["metadata_path"] self.metadata_path = params["metadata_path"]
self.asset_directory = params["asset_directory"] self.asset_directory = params["asset_directory"]
self.asset_folders = params["asset_folders"]
self.assets_for_all = params["assets_for_all"]
self.sync_mode = params["sync_mode"] self.sync_mode = params["sync_mode"]
self.show_unmanaged = params["show_unmanaged"] self.show_unmanaged = params["show_unmanaged"]
self.show_filtered = params["show_filtered"] self.show_filtered = params["show_filtered"]
@ -171,35 +198,53 @@ class PlexAPI:
self.save_missing = params["save_missing"] self.save_missing = params["save_missing"]
self.mass_genre_update = params["mass_genre_update"] self.mass_genre_update = params["mass_genre_update"]
self.plex = params["plex"] self.plex = params["plex"]
self.url = params["plex"]["url"]
self.token = params["plex"]["token"]
self.timeout = params["plex"]["timeout"] self.timeout = params["plex"]["timeout"]
self.missing = {} self.missing = {}
self.run_again = [] self.run_again = []
def get_all_collections(self):
return self.search(libtype="collection")
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def search(self, title, libtype=None, year=None): def search(self, title=None, libtype=None, sort=None, maxresults=None, **kwargs):
if libtype is not None and year is not None: return self.Plex.search(title=title, year=year, libtype=libtype) return self.Plex.search(title=title, sort=sort, maxresults=maxresults, libtype=libtype, **kwargs)
elif libtype is not None: return self.Plex.search(title=title, libtype=libtype)
elif year is not None: return self.Plex.search(title=title, year=year)
else: return self.Plex.search(title=title)
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def fetchItem(self, data): def fetchItem(self, data):
return self.PlexServer.fetchItem(data) return self.PlexServer.fetchItem(data)
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get_all(self):
return self.Plex.all()
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def server_search(self, data): def server_search(self, data):
return self.PlexServer.search(data) return self.PlexServer.search(data)
def get_search_choices(self, search_name, key=False): @retry(stop_max_attempt_number=6, wait_fixed=10000)
def add_collection(self, item, name):
item.addCollection(name)
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_search_choices(self, search_name):
try: try:
if key: return {c.key.lower(): c.key for c in self.Plex.listFilterChoices(search_name)} choices = {}
else: return {c.title.lower(): c.title for c in self.Plex.listFilterChoices(search_name)} for choice in self.Plex.listFilterChoices(search_name):
choices[choice.title.lower()] = choice.title
choices[choice.key.lower()] = choice.title
return choices
except NotFound: except NotFound:
raise Failed(f"Collection Error: plex search attribute: {search_name} only supported with Plex's New TV Agent") raise Failed(f"Collection Error: plex search attribute: {search_name} only supported with Plex's New TV Agent")
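get_search_choices now indexes every filter choice by both its display title and its key, lowercased, so a plex_search can use either form. A standalone sketch of the resulting lookup with stand-in choice objects; the titles and keys are examples, not Plex data.

from collections import namedtuple

Choice = namedtuple("Choice", ["title", "key"])
plex_choices = [Choice("English", "en"), Choice("Japanese", "ja")]   # stand-ins

choices = {}
for choice in plex_choices:
    choices[choice.title.lower()] = choice.title
    choices[choice.key.lower()] = choice.title

for value in ["japanese", "JA", "klingon"]:
    print(value, "->", choices.get(value.lower(), "not found"))
# japanese -> Japanese / ja -> Japanese / klingon -> not found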
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def refresh_item(self, rating_key):
requests.put(f"{self.url}/library/metadata/{rating_key}/refresh?X-Plex-Token={self.token}")
def validate_search_list(self, data, search_name): def validate_search_list(self, data, search_name):
final_search = search_translation[search_name] if search_name in search_translation else search_name final_search = search_translation[search_name] if search_name in search_translation else search_name
search_choices = self.get_search_choices(final_search, key=final_search.endswith("Language")) search_choices = self.get_search_choices(final_search)
valid_list = [] valid_list = []
for value in util.get_list(data): for value in util.get_list(data):
if str(value).lower() in search_choices: if str(value).lower() in search_choices:
@ -208,11 +253,8 @@ class PlexAPI:
logger.error(f"Plex Error: {search_name}: {value} not found") logger.error(f"Plex Error: {search_name}: {value} not found")
return valid_list return valid_list
def get_all_collections(self):
return self.Plex.search(libtype="collection")
def get_collection(self, data): def get_collection(self, data):
collection = util.choose_from_list(self.search(str(data), libtype="collection"), "collection", str(data), exact=True) collection = util.choose_from_list(self.search(title=str(data), libtype="collection"), "collection", str(data), exact=True)
if collection: return collection if collection: return collection
else: raise Failed(f"Plex Error: Collection {data} not found") else: raise Failed(f"Plex Error: Collection {data} not found")
@ -234,7 +276,7 @@ class PlexAPI:
if method == "plex_all": if method == "plex_all":
if status_message: if status_message:
logger.info(f"Processing {pretty} {media_type}s") logger.info(f"Processing {pretty} {media_type}s")
items = self.Plex.all() items = self.get_all()
elif method == "plex_collection": elif method == "plex_collection":
if status_message: if status_message:
logger.info(f"Processing {pretty} {data}") logger.info(f"Processing {pretty} {data}")
@ -252,9 +294,9 @@ class PlexAPI:
else:
search, modifier = os.path.splitext(str(search_method).lower())
final_search = search_translation[search] if search in search_translation else search
if search in ["added", "originally_available"] and modifier == "":
final_mod = ">>"
elif search in ["added", "originally_available"] and modifier == ".not":
final_mod = "<<"
elif search in ["critic_rating", "audience_rating"] and modifier == ".greater":
final_mod = "__gte"
@ -291,7 +333,7 @@ class PlexAPI:
if search_limit:
logger.info(f"\t\t LIMIT {search_limit})")
logger.debug(f"Search: {search_terms}")
return self.search(sort=sorts[search_sort], maxresults=search_limit, **search_terms)
elif method == "plex_collectionless":
good_collections = []
for col in self.get_all_collections():
@ -307,7 +349,7 @@ class PlexAPI:
break
if keep_collection:
good_collections.append(col.index)
all_items = self.get_all()
length = 0
for i, item in enumerate(all_items, 1):
length = util.print_return(length, f"Processing: {i}/{len(all_items)} {item.title}")
@ -371,7 +413,7 @@ class PlexAPI:
elif method_name == "original_language": elif method_name == "original_language":
movie = None movie = None
for key, value in movie_map.items(): for key, value in movie_map.items():
if current.ratingKey == value: if current.ratingKey in value:
try: try:
movie = self.TMDb.get_movie(key) movie = self.TMDb.get_movie(key)
break break
@ -403,7 +445,7 @@ class PlexAPI:
if method_name == "vote_count": if method_name == "vote_count":
tmdb_item = None tmdb_item = None
for key, value in movie_map.items(): for key, value in movie_map.items():
if current.ratingKey == value: if current.ratingKey in value:
try: try:
tmdb_item = self.TMDb.get_movie(key) if self.is_movie else self.TMDb.get_show(key) tmdb_item = self.TMDb.get_movie(key) if self.is_movie else self.TMDb.get_show(key)
break break
@ -443,7 +485,7 @@ class PlexAPI:
if match:
util.print_end(length, f"{name} Collection | {'=' if current in collection_items else '+'} | {current.title}")
if current in collection_items: rating_key_map[current.ratingKey] = None
else: self.add_collection(current, name)
elif show_filtered is True:
logger.info(f"{name} Collection | X | {current.title}")
media_type = f"{'Movie' if self.is_movie else 'Show'}{'s' if total > 1 else ''}"
@ -451,7 +493,26 @@ class PlexAPI:
return rating_key_map
def search_item(self, data, year=None):
kwargs = {}
if year is not None:
kwargs["year"] = year
return util.choose_from_list(self.search(title=str(data), **kwargs), "movie" if self.is_movie else "show", str(data), exact=True)
def edit_item(self, item, name, item_type, edits, advanced=False):
if len(edits) > 0:
logger.debug(f"Details Update: {edits}")
try:
if advanced:
item.editAdvanced(**edits)
else:
item.edit(**edits)
item.reload()
if advanced and "languageOverride" in edits:
self.refresh_item(item.ratingKey)
logger.info(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed")
def update_metadata(self, TMDb, test):
logger.info("")
@ -463,57 +524,10 @@ class PlexAPI:
methods = {mm.lower(): mm for mm in meta}
if test and ("test" not in methods or meta[methods["test"]] is not True):
continue
logger.info("")
util.separator()
logger.info("")
year = None
if "year" in methods:
year = util.check_number(meta[methods["year"]], "year", minimum=1800, maximum=datetime.now().year + 1)
title = mapping_name
if "title" in methods:
if meta[methods["title"]] is None: logger.error("Metadata Error: title attribute is blank")
else: title = meta[methods["title"]]
item = self.search_item(title, year=year)
if item is None:
item = self.search_item(f"{title} (SUB)", year=year)
if item is None and "alt_title" in methods:
if meta[methods["alt_title"]] is None:
logger.error("Metadata Error: alt_title attribute is blank")
else:
alt_title = meta["alt_title"]
item = self.search_item(alt_title, year=year)
if item is None:
logger.error(f"Plex Error: Item {mapping_name} not found")
logger.error(f"Skipping {mapping_name}")
continue
item_type = "Movie" if self.is_movie else "Show"
logger.info(f"Updating {item_type}: {title}...")
tmdb_item = None
try:
if "tmdb_id" in methods:
if meta[methods["tmdb_id"]] is None: logger.error("Metadata Error: tmdb_id attribute is blank")
elif self.is_show: logger.error("Metadata Error: tmdb_id attribute only works with movie libraries")
else: tmdb_item = TMDb.get_show(util.regex_first_int(meta[methods["tmdb_id"]], "Show"))
except Failed as e:
logger.error(e)
originally_available = tmdb_item.first_air_date if tmdb_item else None
rating = tmdb_item.vote_average if tmdb_item else None
original_title = tmdb_item.original_name if tmdb_item and tmdb_item.original_name != tmdb_item.name else None
studio = tmdb_item.networks[0].name if tmdb_item else None
tagline = tmdb_item.tagline if tmdb_item and len(tmdb_item.tagline) > 0 else None
summary = tmdb_item.overview if tmdb_item else None
updated = False
edits = {}
advance_edits = {}
def add_edit(name, current, group, alias, key=None, value=None, var_type="str"):
if value or name in alias:
if value or group[alias[name]]:
@ -534,82 +548,41 @@ class PlexAPI:
logger.error(ee)
else:
logger.error(f"Metadata Error: {name} attribute is blank")
add_edit("title", item.title, meta, methods, value=title)
add_edit("sort_title", item.titleSort, meta, methods, key="titleSort")
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date")
add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float")
add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float")
add_edit("content_rating", item.contentRating, meta, methods, key="contentRating")
add_edit("original_title", item.originalTitle, meta, methods, key="originalTitle", value=original_title)
add_edit("studio", item.studio, meta, methods, value=studio)
add_edit("tagline", item.tagline, meta, methods, value=tagline)
add_edit("summary", item.summary, meta, methods, value=summary)
if len(edits) > 0:
logger.debug(f"Details Update: {edits}")
updated = True
try:
item.edit(**edits)
item.reload()
logger.info(f"{item_type}: {mapping_name} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error(f"{item_type}: {mapping_name} Details Update Failed")
def add_advanced_edit(attr, obj, group, alias, show_library=False, new_agent=False):
key, options = advance_keys[attr]
if attr in alias:
if new_agent and self.agent not in new_plex_agents:
logger.error(f"Metadata Error: {attr} attribute only works with the New Plex Movie Agent and New Plex TV Agent")
elif show_library and not self.is_show:
logger.error(f"Metadata Error: {attr} attribute only works for show libraries")
elif group[alias[attr]]:
method_data = str(group[alias[attr]]).lower()
if method_data not in options:
logger.error(f"Metadata Error: {group[alias[attr]]} {attr} attribute invalid")
elif getattr(obj, key) != options[method_data]:
advance_edits[key] = options[method_data]
logger.info(f"Detail: {attr} updated to {method_data}")
else:
logger.error(f"Metadata Error: {meta[methods[attr]]} {attr} attribute invalid")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
add_advanced_edit("episode_sorting", episode_sorting_options, key="episodeSort", show_library=True) def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False):
add_advanced_edit("keep_episodes", keep_episodes_options, key="autoDeletionItemPolicyUnwatchedLibrary", show_library=True)
add_advanced_edit("delete_episodes", delete_episodes_options, key="autoDeletionItemPolicyWatchedLibrary", show_library=True)
add_advanced_edit("season_display", season_display_options, key="flattenSeasons", show_library=True)
add_advanced_edit("episode_ordering", episode_ordering_options, key="showOrdering", show_library=True)
add_advanced_edit("metadata_language", metadata_language_options, key="languageOverride")
add_advanced_edit("use_original_title", use_original_title_options, key="useOriginalTitle")
if len(advance_edits) > 0:
logger.debug(f"Details Update: {advance_edits}")
updated = True
try:
check_dict = {pref.id: list(pref.enumValues.keys()) for pref in item.preferences()}
logger.info(check_dict)
item.editAdvanced(**advance_edits)
item.reload()
logger.info(f"{item_type}: {mapping_name} Advanced Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error(f"{item_type}: {mapping_name} Advanced Details Update Failed")
def edit_tags(attr, obj, key=None, extra=None, movie_library=False):
if key is None:
key = f"{attr}s"
if attr in alias and f"{attr}.sync" in alias:
logger.error(f"Metadata Error: Cannot use {attr} and {attr}.sync together")
elif attr in alias or f"{attr}.sync" in alias:
attr_key = attr if attr in alias else f"{attr}.sync"
if movie_library and not self.is_movie:
logger.error(f"Metadata Error: {attr_key} attribute only works for movie libraries")
elif group[alias[attr_key]] or extra:
item_tags = [item_tag.tag for item_tag in getattr(obj, key)]
input_tags = []
if group[alias[attr_key]]:
input_tags.extend(util.get_list(group[alias[attr_key]]))
if extra:
input_tags.extend(extra)
if f"{attr}.sync" in alias:
remove_method = getattr(obj, f"remove{attr.capitalize()}")
for tag in (t for t in item_tags if t not in input_tags):
updated = True
@ -623,23 +596,157 @@ class PlexAPI:
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
def set_image(attr, obj, group, alias, is_background=False):
if group[alias[attr]]:
message = f"{'background' if is_background else 'poster'} to [{'File' if attr.startswith('file') else 'URL'}] {group[alias[attr]]}"
if group[alias[attr]] and attr.startswith("url") and is_background:
obj.uploadArt(url=group[alias[attr]])
elif group[alias[attr]] and attr.startswith("url"):
obj.uploadPoster(url=group[alias[attr]])
elif group[alias[attr]] and attr.startswith("file") and is_background:
obj.uploadArt(filepath=group[alias[attr]])
elif group[alias[attr]] and attr.startswith("file"):
obj.uploadPoster(filepath=group[alias[attr]])
logger.info(f"Detail: {attr} updated {message}")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
def set_images(obj, group, alias):
if "url_poster" in alias:
set_image("url_poster", obj, group, alias)
elif "file_poster" in alias:
set_image("file_poster", obj, group, alias)
if "url_background" in alias:
set_image("url_background", obj, group, alias, is_background=True)
elif "file_background" in alias:
set_image("file_background", obj, group, alias, is_background=True)
logger.info("")
util.separator()
logger.info("")
year = None
if "year" in methods:
year = util.check_number(meta[methods["year"]], "year", minimum=1800, maximum=datetime.now().year + 1)
title = mapping_name
if "title" in methods:
if meta[methods["title"]] is None: logger.error("Metadata Error: title attribute is blank")
else: title = meta[methods["title"]]
item = self.search_item(title, year=year)
if item is None:
item = self.search_item(f"{title} (SUB)", year=year)
if item is None and "alt_title" in methods:
if meta[methods["alt_title"]] is None:
logger.error("Metadata Error: alt_title attribute is blank")
else:
alt_title = meta["alt_title"]
item = self.search_item(alt_title, year=year)
if item is None:
logger.error(f"Plex Error: Item {mapping_name} not found")
logger.error(f"Skipping {mapping_name}")
continue
item_type = "Movie" if self.is_movie else "Show"
logger.info(f"Updating {item_type}: {title}...")
tmdb_item = None
tmdb_is_movie = None
if ("tmdb_show" in methods or "tmdb_id" in methods) and "tmdb_movie" in methods:
logger.error("Metadata Error: Cannot use tmdb_movie and tmdb_show when editing the same metadata item")
if "tmdb_show" in methods or "tmdb_id" in methods or "tmdb_movie" in methods:
try:
if "tmdb_show" in methods or "tmdb_id" in methods:
data = meta[methods["tmdb_show" if "tmdb_show" in methods else "tmdb_id"]]
if data is None:
logger.error("Metadata Error: tmdb_show attribute is blank")
else:
tmdb_is_movie = False
tmdb_item = TMDb.get_show(util.regex_first_int(data, "Show"))
elif "tmdb_movie" in methods:
if meta[methods["tmdb_movie"]] is None:
logger.error("Metadata Error: tmdb_movie attribute is blank")
else:
tmdb_is_movie = True
tmdb_item = TMDb.get_movie(util.regex_first_int(meta[methods["tmdb_movie"]], "Movie"))
except Failed as e:
logger.error(e)
originally_available = None
original_title = None
rating = None
studio = None
tagline = None
summary = None
genres = []
if tmdb_item:
originally_available = tmdb_item.release_date if tmdb_is_movie else tmdb_item.first_air_date
if tmdb_item and tmdb_is_movie is True and tmdb_item.original_title != tmdb_item.title:
original_title = tmdb_item.original_title
elif tmdb_item and tmdb_is_movie is False and tmdb_item.original_name != tmdb_item.name:
original_title = tmdb_item.original_name
rating = tmdb_item.vote_average
if tmdb_is_movie is True and tmdb_item.production_companies:
studio = tmdb_item.production_companies[0].name
elif tmdb_is_movie is False and tmdb_item.networks:
studio = tmdb_item.networks[0].name
tagline = tmdb_item.tagline if len(tmdb_item.tagline) > 0 else None
summary = tmdb_item.overview
genres = [genre.name for genre in tmdb_item.genres]
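The block above pulls fallback values (release date, original title, rating, studio, tagline, summary, genres) from a TMDb record when one was referenced via tmdb_movie/tmdb_show. The same fields can be read straight from the TMDb REST API; a hedged sketch with requests, which bypasses the project's own TMDb wrapper (the API key and helper name are placeholders):

import requests

TMDB_API_KEY = "YOUR_TMDB_API_KEY"  # placeholder

def tmdb_defaults(tmdb_id, is_movie=True):
    # GET /3/movie/{id} or /3/tv/{id} returns the fields used above.
    kind = "movie" if is_movie else "tv"
    data = requests.get(
        f"https://api.themoviedb.org/3/{kind}/{tmdb_id}",
        params={"api_key": TMDB_API_KEY}
    ).json()
    return {
        "originally_available": data.get("release_date") if is_movie else data.get("first_air_date"),
        "original_title": data.get("original_title") if is_movie else data.get("original_name"),
        "rating": data.get("vote_average"),
        "tagline": data.get("tagline") or None,
        "summary": data.get("overview"),
        "genres": [g["name"] for g in data.get("genres", [])],
    }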
edits = {}
add_edit("title", item.title, meta, methods, value=title)
add_edit("sort_title", item.titleSort, meta, methods, key="titleSort")
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date")
add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float")
add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float")
add_edit("content_rating", item.contentRating, meta, methods, key="contentRating")
add_edit("original_title", item.originalTitle, meta, methods, key="originalTitle", value=original_title)
add_edit("studio", item.studio, meta, methods, value=studio)
add_edit("tagline", item.tagline, meta, methods, value=tagline)
add_edit("summary", item.summary, meta, methods, value=summary)
self.edit_item(item, mapping_name, item_type, edits)
advance_edits = {}
add_advanced_edit("episode_sorting", item, meta, methods, show_library=True)
add_advanced_edit("keep_episodes", item, meta, methods, show_library=True)
add_advanced_edit("delete_episodes", item, meta, methods, show_library=True)
add_advanced_edit("season_display", item, meta, methods, show_library=True)
add_advanced_edit("episode_ordering", item, meta, methods, show_library=True)
add_advanced_edit("metadata_language", item, meta, methods, new_agent=True)
add_advanced_edit("use_original_title", item, meta, methods, new_agent=True)
self.edit_item(item, mapping_name, item_type, advance_edits, advanced=True)
edit_tags("genre", item, meta, methods, extra=genres)
edit_tags("label", item, meta, methods)
edit_tags("collection", item, meta, methods)
edit_tags("country", item, meta, methods, key="countries", movie_library=True)
edit_tags("director", item, meta, methods, movie_library=True)
edit_tags("producer", item, meta, methods, movie_library=True)
edit_tags("writer", item, meta, methods, movie_library=True)
logger.info(f"{item_type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
set_images(item, meta, methods)
if "seasons" in methods and self.is_show: if "seasons" in methods and self.is_show:
if meta[methods["seasons"]]: if meta[methods["seasons"]]:
for season_id in meta[methods["seasons"]]: for season_id in meta[methods["seasons"]]:
updated = False
logger.info("") logger.info("")
logger.info(f"Updating season {season_id} of {mapping_name}...") logger.info(f"Updating season {season_id} of {mapping_name}...")
if isinstance(season_id, int): if isinstance(season_id, int):
try: season = item.season(season_id) season = None
except NotFound: logger.error(f"Metadata Error: Season: {season_id} not found") for s in item.seasons():
if s.index == season_id:
season = s
break
if season is None:
logger.error(f"Metadata Error: Season: {season_id} not found")
else:
season_dict = meta[methods["seasons"]][season_id]
season_methods = {sm.lower(): sm for sm in season_dict}
@ -660,19 +767,12 @@ class PlexAPI:
edits = {}
add_edit("title", season.title, season_dict, season_methods, value=title)
add_edit("summary", season.summary, season_dict, season_methods)
self.edit_item(season, season_id, "Season", edits)
set_images(season, season_dict, season_methods)
updated = True
try:
season.edit(**edits)
season.reload()
logger.info(f"Season: {season_id} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error(f"Season: {season_id} Details Update Failed")
else:
logger.error(f"Metadata Error: Season: {season_id} invalid, it must be an integer")
logger.info(f"Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
else:
logger.error("Metadata Error: seasons attribute is blank")
elif "seasons" in methods:
@ -681,13 +781,14 @@ class PlexAPI:
if "episodes" in methods and self.is_show: if "episodes" in methods and self.is_show:
if meta[methods["episodes"]]: if meta[methods["episodes"]]:
for episode_str in meta[methods["episodes"]]: for episode_str in meta[methods["episodes"]]:
updated = False
logger.info("") logger.info("")
match = re.search("[Ss]\\d+[Ee]\\d+", episode_str) match = re.search("[Ss]\\d+[Ee]\\d+", episode_str)
if match: if match:
output = match.group(0)[1:].split("E" if "E" in match.group(0) else "e") output = match.group(0)[1:].split("E" if "E" in match.group(0) else "e")
season_id = int(output[0]) season_id = int(output[0])
episode_id = int(output[1]) episode_id = int(output[1])
logger.info(f"Updating episode S{episode_id}E{season_id} of {mapping_name}...") logger.info(f"Updating episode S{season_id}E{episode_id} of {mapping_name}...")
try: episode = item.episode(season=season_id, episode=episode_id) try: episode = item.episode(season=season_id, episode=episode_id)
except NotFound: logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found") except NotFound: logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found")
else: else:
@ -713,26 +814,14 @@ class PlexAPI:
add_edit("rating", episode.rating, episode_dict, episode_methods) add_edit("rating", episode.rating, episode_dict, episode_methods)
add_edit("originally_available", str(episode.originallyAvailableAt)[:-9], episode_dict, episode_methods, key="originallyAvailableAt") add_edit("originally_available", str(episode.originallyAvailableAt)[:-9], episode_dict, episode_methods, key="originallyAvailableAt")
add_edit("summary", episode.summary, episode_dict, episode_methods) add_edit("summary", episode.summary, episode_dict, episode_methods)
if len(edits) > 0: self.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits)
logger.debug(f"Season: {season_id} Episode: {episode_id} Details Update: {edits}") edit_tags("director", episode, episode_dict, episode_methods)
updated = True edit_tags("writer", episode, episode_dict, episode_methods)
try: set_images(episode, episode_dict, episode_methods)
episode.edit(**edits) logger.info(f"Episode S{episode_id}E{season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
episode.reload()
logger.info(
f"Season: {season_id} Episode: {episode_id} Details Update Successful")
except BadRequest:
util.print_stacktrace()
logger.error(f"Season: {season_id} Episode: {episode_id} Details Update Failed")
edit_tags("director", episode)
edit_tags("writer", episode)
else:
logger.error(f"Metadata Error: episode {episode_str} invalid must have S##E## format")
else:
logger.error("Metadata Error: episodes attribute is blank")
elif "episodes" in methods:
logger.error("Metadata Error: episodes attribute only works for show libraries")
if not updated:
logger.info(f"{item_type}: {mapping_name} Details Update Not Needed")

@ -27,34 +27,6 @@ def anidb_tests(config):
if config.AniDB:
util.separator("AniDB Tests")
try:
config.AniDB.convert_anidb_to_tvdb(69)
logger.info("Success | Convert AniDB to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert AniDB to TVDb: {e}")
try:
config.AniDB.convert_anidb_to_imdb(112)
logger.info("Success | Convert AniDB to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert AniDB to IMDb: {e}")
try:
config.AniDB.convert_tvdb_to_anidb(81797)
logger.info("Success | Convert TVDb to AniDB")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TVDb to AniDB: {e}")
try:
config.AniDB.convert_imdb_to_anidb("tt0245429")
logger.info("Success | Convert IMDb to AniDB")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert IMDb to AniDB: {e}")
try:
config.AniDB.get_items("anidb_id", 69, "en", status_message=False)
logger.info("Success | Get AniDB ID")
@ -106,47 +78,6 @@ def imdb_tests(config):
util.separator("IMDb Not Configured") util.separator("IMDb Not Configured")
def mal_tests(config): def mal_tests(config):
if config.MyAnimeListIDList:
util.separator("MyAnimeListXML Tests")
try:
config.MyAnimeListIDList.convert_mal_to_tvdb(21)
logger.info("Success | Convert MyAnimeList to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert MyAnimeList to TVDb: {e}")
try:
config.MyAnimeListIDList.convert_mal_to_tmdb(199)
logger.info("Success | Convert MyAnimeList to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert MyAnimeList to TMDb: {e}")
try:
config.MyAnimeListIDList.convert_tvdb_to_mal(81797)
logger.info("Success | Convert TVDb to MyAnimeList")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TVDb to MyAnimeList: {e}")
try:
config.MyAnimeListIDList.convert_tmdb_to_mal(129)
logger.info("Success | Convert TMDb to MyAnimeList")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TMDb to MyAnimeList: {e}")
try:
config.MyAnimeListIDList.find_mal_ids(21)
logger.info("Success | Find MyAnimeList ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Find MyAnimeList ID: {e}")
else:
util.separator("MyAnimeListXML Not Configured")
if config.MyAnimeList:
util.separator("MyAnimeList Tests")

@ -55,13 +55,17 @@ class TVDbObj:
if is_movie:
results = response.xpath("//*[text()='TheMovieDB.com']/@href")
if len(results) > 0:
try:
tmdb_id = util.regex_first_int(results[0], "TMDb ID")
except Failed as e:
logger.error(e)
if not tmdb_id:
results = response.xpath("//*[text()='IMDB']/@href")
if len(results) > 0:
try:
tmdb_id, _ = TVDb.config.Arms.imdb_to_ids(util.get_id_from_imdb_url(results[0]), language)
except Failed as e:
logger.error(e)
self.tmdb_id = tmdb_id
self.tvdb_url = tvdb_url
self.language = language
@ -114,13 +118,17 @@ class TVDbAPI:
title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0]
item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0]
if item_url.startswith("/series/"):
try:
show_ids.append(self.get_series(language, f"{self.site_url}{item_url}").id)
except Failed as e:
logger.error(f"{e} for series {title}")
elif item_url.startswith("/movies/"): elif item_url.startswith("/movies/"):
try: try:
tmdb_id = self.get_movie(language, f"{self.site_url}{item_url}").tmdb_id tmdb_id = self.get_movie(language, f"{self.site_url}{item_url}").tmdb_id
if tmdb_id: movie_ids.append(tmdb_id) if tmdb_id:
else: raise Failed(f"TVDb Error: TMDb ID not found from TVDb URL: {tvdb_url}") movie_ids.append(tmdb_id)
else:
raise Failed(f"TVDb Error: TMDb ID not found from TVDb URL: {tvdb_url}")
except Failed as e:
logger.error(f"{e} for series {title}")
else:

@ -197,6 +197,16 @@ def choose_from_list(datalist, description, data=None, list_type="title", exact=
else:
return None
def get_bool(method_name, method_data):
if isinstance(method_data, bool):
return method_data
elif str(method_data).lower() in ["t", "true"]:
return True
elif str(method_data).lower() in ["f", "false"]:
return False
else:
raise Failed(f"Collection Error: {method_name} attribute: {method_data} invalid must be either true or false")
def get_list(data, lower=False, split=True):
if isinstance(data, list): return data
elif isinstance(data, dict): return [data]
@ -327,9 +337,6 @@ def regex_first_int(data, id_type, default=None):
else:
raise Failed(f"Regex Error: Failed to parse {id_type} from {data}")
def remove_not(method):
return method[:-4] if method.endswith(".not") else method
def centered(text, do_print=True):
if len(text) > screen_width - 2:
raise Failed("text must be shorter than screen_width")

@ -10,13 +10,14 @@ except ModuleNotFoundError:
parser = argparse.ArgumentParser()
parser.add_argument("--my-tests", dest="tests", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str)
parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False)
parser.add_argument("-cl", "--collection", "--collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-l", "--library", "--libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
args = parser.parse_args()
@ -38,6 +39,7 @@ test = check_bool("PMM_TEST", args.test)
debug = check_bool("PMM_DEBUG", args.debug) debug = check_bool("PMM_DEBUG", args.debug)
run = check_bool("PMM_RUN", args.run) run = check_bool("PMM_RUN", args.run)
collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections
libraries = os.environ.get("PMM_LIBRARIES") if os.environ.get("PMM_LIBRARIES") else args.libraries
resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume
time_to_run = os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time time_to_run = os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time
@ -89,22 +91,23 @@ util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_
util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ") util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ")
util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ") util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ")
util.centered(" |___/ ") util.centered(" |___/ ")
util.centered(" Version: 1.7.2 ") util.centered(" Version: 1.8.0 ")
util.separator() util.separator()
if my_tests: if my_tests:
tests.run_tests(default_dir) tests.run_tests(default_dir)
sys.exit(0) sys.exit(0)
def start(config_path, is_test, daily, collections_to_run, resume_from): def start(config_path, is_test, daily, collections_to_run, libraries_to_run, resume_from):
if daily: start_type = "Daily " if daily: start_type = "Daily "
elif is_test: start_type = "Test " elif is_test: start_type = "Test "
elif collections_to_run: start_type = "Collections " elif collections_to_run: start_type = "Collections "
else: start_type = "" elif libraries_to_run: start_type = "Libraries "
else: start_type = ""
start_time = datetime.now() start_time = datetime.now()
util.separator(f"Starting {start_type}Run") util.separator(f"Starting {start_type}Run")
try: try:
config = Config(default_dir, config_path) config = Config(default_dir, config_path, libraries_to_run)
config.update_libraries(is_test, collections_to_run, resume_from) config.update_libraries(is_test, collections_to_run, resume_from)
except Exception as e:
util.print_stacktrace()
@ -113,11 +116,11 @@ def start(config_path, is_test, daily, collections_to_run, resume_from):
util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}") util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
try: try:
if run or test or collections or resume: if run or test or collections or libraries or resume:
start(config_file, test, False, collections, resume) start(config_file, test, False, collections, libraries, resume)
else: else:
length = 0 length = 0
schedule.every().day.at(time_to_run).do(start, config_file, False, True, None, None) schedule.every().day.at(time_to_run).do(start, config_file, False, True, None, None, None)
while True: while True:
schedule.run_pending() schedule.run_pending()
current = datetime.now().strftime("%H:%M") current = datetime.now().strftime("%H:%M")
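When no one-shot flag is supplied, the script registers a single daily job with the schedule package and then idles in a run_pending loop. A self-contained sketch of that scheduler loop (the job body here is a stand-in for the real start function):

import time
from datetime import datetime

import schedule

def start(config_path, is_test, daily, collections_to_run, libraries_to_run, resume_from):
    print(f"run at {datetime.now():%H:%M}")  # stand-in for the real start()

schedule.every().day.at("03:00").do(start, "config/config.yml", False, True, None, None, None)
while True:
    schedule.run_pending()
    time.sleep(60)  # the real loop also prints a countdown to the next scheduled run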
