Merge pull request #240 from meisnate12/develop

v1.9.0
meisnate12 committed by GitHub
commit be23b4a060

@@ -1,5 +1,5 @@
 # Plex Meta Manager
-#### Version 1.8.0
+#### Version 1.9.0
 The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to be able to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be continuously run using YAML configuration files to update on a schedule the metadata of the movies, shows, and collections in your libraries as well as automatically build collections based on various methods all detailed in the wiki. Some collection examples that the script can automatically build and update daily include Plex Based Searches like actor, genre, or studio collections or Collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists and various other services.

@@ -2,8 +2,21 @@
 libraries:                                      # Library mappings must have a colon (:) placed after them
   Movies:
+    metadata_path:
+      - file: config/Movies.yml                 # You have to create this file; the others are online
+      - git: meisnate12/MovieCharts
+      - git: meisnate12/Studios
+      - git: meisnate12/IMDBGenres
+      - git: meisnate12/People
   TV Shows:
+    metadata_path:
+      - file: config/TV Shows.yml               # You have to create this file; the others are online
+      - git: meisnate12/ShowCharts
+      - git: meisnate12/Networks
   Anime:
+    metadata_path:
+      - file: config/Anime.yml                  # You have to create this file; the others are online
+      - git: meisnate12/AnimeCharts
 settings:                                       # Can be individually specified per library as well
   cache: true
   cache_expiration: 60
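For reference, a minimal, hypothetical sketch of how a `metadata_path` list like the one above can be normalized into (source, value) pairs, mirroring the File/Git/URL handling added to modules/config.py in this release. The config text and the `normalize()` helper are illustrative only, not code from the repository.

```python
from ruamel import yaml  # ruamel is already a project dependency

config_text = """
libraries:
  Movies:
    metadata_path:
      - file: config/Movies.yml
      - git: meisnate12/MovieCharts
"""

def normalize(entries):
    # Accept either a single entry or a list; dict entries may use url/git/file keys.
    normalized = []
    for entry in entries if isinstance(entries, list) else [entries]:
        if isinstance(entry, dict):
            if "url" in entry: normalized.append(("URL", entry["url"]))
            if "git" in entry: normalized.append(("Git", entry["git"]))
            if "file" in entry: normalized.append(("File", entry["file"]))
        else:
            normalized.append(("File", entry))
    return normalized

data = yaml.YAML(typ="safe", pure=True).load(config_text)
print(normalize(data["libraries"]["Movies"]["metadata_path"]))
# [('File', 'config/Movies.yml'), ('Git', 'meisnate12/MovieCharts')]
```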

@@ -17,56 +17,50 @@ class AniDBAPI:
             "relation": "/relation/graph"
         }
 
-    def get_AniDB_IDs(self):
-        return html.fromstring(requests.get("https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml").content)
-
     @retry(stop_max_attempt_number=6, wait_fixed=10000)
-    def send_request(self, url, language):
+    def _request(self, url, language):
         return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content)
 
-    def get_popular(self, language):
-        response = self.send_request(self.urls["popular"], language)
+    def _popular(self, language):
+        response = self._request(self.urls["popular"], language)
         return util.get_int_list(response.xpath("//td[@class='name anime']/a/@href"), "AniDB ID")
 
-    def validate_anidb_id(self, anidb_id, language):
-        response = self.send_request(f"{self.urls['anime']}/{anidb_id}", language)
+    def _relations(self, anidb_id, language):
+        response = self._request(f"{self.urls['anime']}/{anidb_id}{self.urls['relation']}", language)
+        return util.get_int_list(response.xpath("//area/@href"), "AniDB ID")
+
+    def _validate(self, anidb_id, language):
+        response = self._request(f"{self.urls['anime']}/{anidb_id}", language)
         ids = response.xpath(f"//*[text()='a{anidb_id}']/text()")
         if len(ids) > 0:
             return util.regex_first_int(ids[0], "AniDB ID")
         raise Failed(f"AniDB Error: AniDB ID: {anidb_id} not found")
 
-    def get_anidb_relations(self, anidb_id, language):
-        response = self.send_request(f"{self.urls['anime']}/{anidb_id}{self.urls['relation']}", language)
-        return util.get_int_list(response.xpath("//area/@href"), "AniDB ID")
-
     def validate_anidb_list(self, anidb_list, language):
         anidb_values = []
         for anidb_id in anidb_list:
             try:
-                anidb_values.append(self.validate_anidb_id(anidb_id, language))
+                anidb_values.append(self._validate(anidb_id, language))
             except Failed as e:
                 logger.error(e)
         if len(anidb_values) > 0:
             return anidb_values
         raise Failed(f"AniDB Error: No valid AniDB IDs in {anidb_list}")
 
-    def get_items(self, method, data, language, status_message=True):
+    def get_items(self, method, data, language):
         pretty = util.pretty_names[method] if method in util.pretty_names else method
-        if status_message:
-            logger.debug(f"Data: {data}")
+        logger.debug(f"Data: {data}")
         anidb_ids = []
         if method == "anidb_popular":
-            if status_message:
-                logger.info(f"Processing {pretty}: {data} Anime")
-            anidb_ids.extend(self.get_popular(language)[:data])
+            logger.info(f"Processing {pretty}: {data} Anime")
+            anidb_ids.extend(self._popular(language)[:data])
         else:
-            if status_message: logger.info(f"Processing {pretty}: {data}")
+            logger.info(f"Processing {pretty}: {data}")
             if method == "anidb_id": anidb_ids.append(data)
-            elif method == "anidb_relation": anidb_ids.extend(self.get_anidb_relations(data, language))
+            elif method == "anidb_relation": anidb_ids.extend(self._relations(data, language))
            else: raise Failed(f"AniDB Error: Method {method} not supported")
-        movie_ids, show_ids = self.config.Arms.anidb_to_ids(anidb_ids, language)
-        if status_message:
-            logger.debug(f"AniDB IDs Found: {anidb_ids}")
-            logger.debug(f"TMDb IDs Found: {movie_ids}")
-            logger.debug(f"TVDb IDs Found: {show_ids}")
+        movie_ids, show_ids = self.config.Convert.anidb_to_ids(anidb_ids)
+        logger.debug(f"AniDB IDs Found: {anidb_ids}")
+        logger.debug(f"TMDb IDs Found: {movie_ids}")
+        logger.debug(f"TVDb IDs Found: {show_ids}")
         return movie_ids, show_ids
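A hedged usage sketch of the refactored AniDB interface: `get_items()` no longer takes a `status_message` flag (it always logs), and ID conversion now goes through `config.Convert` rather than `config.Arms`. The `config` object and the ID values below are illustrative.

```python
# Assumes a fully built Config object named `config` (so config.AniDB and config.Convert exist).
movie_ids, show_ids = config.AniDB.get_items("anidb_popular", 30, "en")   # 30 most popular anime
movie_ids, show_ids = config.AniDB.get_items("anidb_relation", 69, "en")  # relation graph for one AniDB ID
```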

@@ -19,6 +19,8 @@ pretty_names = {
     "score": "Average Score",
     "popular": "Popularity"
 }
+tag_query = "query{MediaTagCollection {name}}"
+genre_query = "query{GenreCollection}"
 
 class AniListAPI:
     def __init__(self, config):
@@ -26,19 +28,12 @@ class AniListAPI:
         self.url = "https://graphql.anilist.co"
         self.tags = {}
         self.genres = {}
-        for tag in self.send_request("query{MediaTagCollection {name}}", {})["data"]["MediaTagCollection"]:
-            self.tags[tag["name"].lower()] = tag["name"]
-        for genre in self.send_request("query{GenreCollection}", {})["data"]["GenreCollection"]:
-            self.genres[genre.lower()] = genre
-
-    @retry(stop_max_attempt_number=6, wait_fixed=10000)
-    def post(self, query, variables):
-        return requests.post(self.url, json={"query": query, "variables": variables})
+        self.tags = {t["name"].lower(): t["name"] for t in self._request(tag_query, {})["data"]["MediaTagCollection"]}
+        self.genres = {g.lower(): g for g in self._request(genre_query, {})["data"]["GenreCollection"]}
 
     @retry(stop_max_attempt_number=2, retry_on_exception=util.retry_if_not_failed)
-    def send_request(self, query, variables):
-        response = self.post(query, variables)
+    def _request(self, query, variables):
+        response = requests.post(self.url, json={"query": query, "variables": variables})
         json_obj = response.json()
         if "errors" in json_obj:
             if json_obj['errors'][0]['message'] == "Too Many Requests.":
@@ -51,14 +46,14 @@ class AniListAPI:
             time.sleep(0.4)
         return json_obj
 
-    def anilist_id(self, anilist_id):
+    def _validate(self, anilist_id):
         query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}"
-        media = self.send_request(query, {"id": anilist_id})["data"]["Media"]
+        media = self._request(query, {"id": anilist_id})["data"]["Media"]
         if media["id"]:
             return media["id"], media["title"]["english" if media["title"]["english"] else "romaji"]
         raise Failed(f"AniList Error: No AniList ID found for {anilist_id}")
 
-    def get_pagenation(self, query, limit=0, variables=None):
+    def _pagenation(self, query, limit=0, variables=None):
         anilist_ids = []
         count = 0
         page_num = 0
@@ -68,7 +63,7 @@ class AniListAPI:
         while next_page:
             page_num += 1
             variables["page"] = page_num
-            json_obj = self.send_request(query, variables)
+            json_obj = self._request(query, variables)
             next_page = json_obj["data"]["Page"]["pageInfo"]["hasNextPage"]
             for media in json_obj["data"]["Page"]["media"]:
                 if media["id"]:
@@ -80,7 +75,7 @@ class AniListAPI:
                 break
         return anilist_ids
 
-    def top_rated(self, limit):
+    def _top_rated(self, limit):
         query = """
             query ($page: Int) {
                 Page(page: $page) {
@@ -89,9 +84,9 @@ class AniListAPI:
                 }
             }
         """
-        return self.get_pagenation(query, limit=limit)
+        return self._pagenation(query, limit=limit)
 
-    def popular(self, limit):
+    def _popular(self, limit):
         query = """
             query ($page: Int) {
                 Page(page: $page) {
@@ -100,9 +95,9 @@ class AniListAPI:
                 }
             }
         """
-        return self.get_pagenation(query, limit=limit)
+        return self._pagenation(query, limit=limit)
 
-    def season(self, season, year, sort, limit):
+    def _season(self, season, year, sort, limit):
         query = """
             query ($page: Int, $season: MediaSeason, $year: Int, $sort: [MediaSort]) {
                 Page(page: $page){
@@ -112,9 +107,9 @@ class AniListAPI:
             }
         """
         variables = {"season": season.upper(), "year": year, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self.get_pagenation(query, limit=limit, variables=variables)
+        return self._pagenation(query, limit=limit, variables=variables)
 
-    def genre(self, genre, sort, limit):
+    def _genre(self, genre, sort, limit):
         query = """
             query ($page: Int, $genre: String, $sort: [MediaSort]) {
                 Page(page: $page){
@@ -124,9 +119,9 @@ class AniListAPI:
             }
         """
         variables = {"genre": genre, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self.get_pagenation(query, limit=limit, variables=variables)
+        return self._pagenation(query, limit=limit, variables=variables)
 
-    def tag(self, tag, sort, limit):
+    def _tag(self, tag, sort, limit):
         query = """
             query ($page: Int, $tag: String, $sort: [MediaSort]) {
                 Page(page: $page){
@@ -136,9 +131,9 @@ class AniListAPI:
             }
         """
         variables = {"tag": tag, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self.get_pagenation(query, limit=limit, variables=variables)
+        return self._pagenation(query, limit=limit, variables=variables)
 
-    def studio(self, studio_id):
+    def _studio(self, studio_id):
         query = """
             query ($page: Int, $id: Int) {
                 Studio(id: $id) {
@@ -156,7 +151,7 @@ class AniListAPI:
         name = None
         while next_page:
             page_num += 1
-            json_obj = self.send_request(query, {"id": studio_id, "page": page_num})
+            json_obj = self._request(query, {"id": studio_id, "page": page_num})
             if not name:
                 name = json_obj["data"]["Studio"]["name"]
             next_page = json_obj["data"]["Studio"]["media"]["pageInfo"]["hasNextPage"]
@@ -165,7 +160,7 @@ class AniListAPI:
                     anilist_ids.append(media["id"])
         return anilist_ids, name
 
-    def relations(self, anilist_id, ignore_ids=None):
+    def _relations(self, anilist_id, ignore_ids=None):
         query = """
             query ($id: Int) {
                 Media(id: $id) {
@@ -182,9 +177,9 @@ class AniListAPI:
         name = ""
         if not ignore_ids:
             ignore_ids = [anilist_id]
-            anilist_id, name = self.anilist_id(anilist_id)
+            anilist_id, name = self._validate(anilist_id)
             anilist_ids.append(anilist_id)
-        json_obj = self.send_request(query, {"id": anilist_id})
+        json_obj = self._request(query, {"id": anilist_id})
         edges = [media["node"]["id"] for media in json_obj["data"]["Media"]["relations"]["edges"]
                  if media["relationType"] not in ["CHARACTER", "OTHER"] and media["node"]["type"] == "ANIME"]
         for media in json_obj["data"]["Media"]["relations"]["nodes"]:
@@ -194,7 +189,7 @@ class AniListAPI:
                 anilist_ids.append(media["id"])
         for next_id in new_anilist_ids:
-            new_relation_ids, ignore_ids, _ = self.relations(next_id, ignore_ids=ignore_ids)
+            new_relation_ids, ignore_ids, _ = self._relations(next_id, ignore_ids=ignore_ids)
             anilist_ids.extend(new_relation_ids)
         return anilist_ids, ignore_ids, name
@@ -215,48 +210,40 @@ class AniListAPI:
             if studio: query = "query ($id: Int) {Studio(id: $id) {name}}"
             else: query = "query ($id: Int) {Media(id: $id) {id}}"
             try:
-                self.send_request(query, {"id": anilist_id})
+                self._request(query, {"id": anilist_id})
                 anilist_values.append(anilist_id)
             except Failed as e: logger.error(e)
         if len(anilist_values) > 0:
             return anilist_values
         raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}")
 
-    def get_items(self, method, data, language, status_message=True):
-        if status_message:
-            logger.debug(f"Data: {data}")
+    def get_items(self, method, data):
+        logger.debug(f"Data: {data}")
         pretty = util.pretty_names[method] if method in util.pretty_names else method
         if method == "anilist_id":
-            anilist_id, name = self.anilist_id(data)
+            anilist_id, name = self._validate(data)
             anilist_ids = [anilist_id]
-            if status_message:
-                logger.info(f"Processing {pretty}: ({data}) {name}")
+            logger.info(f"Processing {pretty}: ({data}) {name}")
         elif method in ["anilist_popular", "anilist_top_rated"]:
-            anilist_ids = self.popular(data) if method == "anilist_popular" else self.top_rated(data)
-            if status_message:
-                logger.info(f"Processing {pretty}: {data} Anime")
+            anilist_ids = self._popular(data) if method == "anilist_popular" else self._top_rated(data)
+            logger.info(f"Processing {pretty}: {data} Anime")
         elif method == "anilist_season":
-            anilist_ids = self.season(data["season"], data["year"], data["sort_by"], data["limit"])
-            if status_message:
-                logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
+            anilist_ids = self._season(data["season"], data["year"], data["sort_by"], data["limit"])
+            logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
         elif method == "anilist_genre":
-            anilist_ids = self.genre(data["genre"], data["sort_by"], data["limit"])
-            if status_message:
-                logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Genre: {data['genre']} sorted by {pretty_names[data['sort_by']]}")
+            anilist_ids = self._genre(data["genre"], data["sort_by"], data["limit"])
+            logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Genre: {data['genre']} sorted by {pretty_names[data['sort_by']]}")
         elif method == "anilist_tag":
-            anilist_ids = self.tag(data["tag"], data["sort_by"], data["limit"])
-            if status_message:
-                logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag: {data['tag']} sorted by {pretty_names[data['sort_by']]}")
+            anilist_ids = self._tag(data["tag"], data["sort_by"], data["limit"])
+            logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag: {data['tag']} sorted by {pretty_names[data['sort_by']]}")
         elif method in ["anilist_studio", "anilist_relations"]:
-            if method == "anilist_studio": anilist_ids, name = self.studio(data)
-            else: anilist_ids, _, name = self.relations(data)
-            if status_message:
-                logger.info(f"Processing {pretty}: ({data}) {name} ({len(anilist_ids)} Anime)")
+            if method == "anilist_studio": anilist_ids, name = self._studio(data)
+            else: anilist_ids, _, name = self._relations(data)
+            logger.info(f"Processing {pretty}: ({data}) {name} ({len(anilist_ids)} Anime)")
         else:
             raise Failed(f"AniList Error: Method {method} not supported")
-        movie_ids, show_ids = self.config.Arms.anilist_to_ids(anilist_ids, language)
-        if status_message:
-            logger.debug(f"AniList IDs Found: {anilist_ids}")
-            logger.debug(f"Shows Found: {show_ids}")
-            logger.debug(f"Movies Found: {movie_ids}")
+        movie_ids, show_ids = self.config.Convert.anilist_to_ids(anilist_ids)
+        logger.debug(f"AniList IDs Found: {anilist_ids}")
+        logger.debug(f"Shows Found: {show_ids}")
+        logger.debug(f"Movies Found: {movie_ids}")
         return movie_ids, show_ids
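A hedged sketch of the corresponding AniList calls after this change: `get_items()` drops the `language` and `status_message` parameters, and the private helpers (`_season`, `_genre`, `_tag`, ...) are reached through the same method/data pairs as before. The values are illustrative.

```python
# Assumes a fully built Config object named `config`.
movie_ids, show_ids = config.AniList.get_items("anilist_top_rated", 10)
movie_ids, show_ids = config.AniList.get_items(
    "anilist_season", {"season": "winter", "year": 2021, "sort_by": "popular", "limit": 0})
```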

@@ -1,161 +0,0 @@
import logging, requests
from lxml import html
from modules import util
from modules.util import Failed
from retrying import retry
logger = logging.getLogger("Plex Meta Manager")
class ArmsAPI:
def __init__(self, config):
self.config = config
self.arms_url = "https://relations.yuna.moe/api/ids"
self.anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"
self.AniDBIDs = self._get_anidb()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _get_anidb(self):
return html.fromstring(requests.get(self.anidb_url).content)
def anidb_to_tvdb(self, anidb_id): return self._anidb(anidb_id, "tvdbid")
def anidb_to_imdb(self, anidb_id): return self._anidb(anidb_id, "imdbid")
def _anidb(self, input_id, to_id):
ids = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{input_id}')]/@{to_id}")
if len(ids) > 0:
try:
if len(ids[0]) > 0:
return ids[0].split(",") if to_id == "imdbid" else int(ids[0])
raise ValueError
except ValueError:
raise Failed(f"Arms Error: No {util.pretty_ids[to_id]} ID found for AniDB ID: {input_id}")
else:
raise Failed(f"Arms Error: AniDB ID: {input_id} not found")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, ids):
return requests.post(self.arms_url, json=ids).json()
def mal_to_anidb(self, mal_id):
anime_ids = self._arms_ids(mal_ids=mal_id)
if anime_ids[0] is None:
raise Failed(f"Arms Error: MyAnimeList ID: {mal_id} does not exist")
if anime_ids[0]["anidb"] is None:
raise Failed(f"Arms Error: No AniDB ID for MyAnimeList ID: {mal_id}")
return anime_ids[0]["anidb"]
def anidb_to_ids(self, anidb_list, language):
show_ids = []
movie_ids = []
for anidb_id in anidb_list:
try:
for imdb_id in self.anidb_to_imdb(anidb_id):
tmdb_id, _ = self.imdb_to_ids(imdb_id, language)
if tmdb_id:
movie_ids.append(tmdb_id)
break
else:
raise Failed
except Failed:
try:
tvdb_id = self.anidb_to_tvdb(anidb_id)
if tvdb_id:
show_ids.append(tvdb_id)
except Failed:
logger.error(f"Arms Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
return movie_ids, show_ids
def anilist_to_ids(self, anilist_ids, language):
anidb_ids = []
for id_set in self._arms_ids(anilist_ids=anilist_ids):
if id_set["anidb"] is not None:
anidb_ids.append(id_set["anidb"])
else:
logger.error(f"Arms Error: AniDB ID not found for AniList ID: {id_set['anilist']}")
return self.anidb_to_ids(anidb_ids, language)
def myanimelist_to_ids(self, mal_ids, language):
anidb_ids = []
for id_set in self._arms_ids(mal_ids=mal_ids):
if id_set["anidb"] is not None:
anidb_ids.append(id_set["anidb"])
else:
logger.error(f"Arms Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}")
return self.anidb_to_ids(anidb_ids, language)
def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None):
all_ids = []
def collect_ids(ids, id_name):
if ids:
if isinstance(ids, list):
all_ids.extend([{id_name: a_id} for a_id in ids])
else:
all_ids.append({id_name: ids})
collect_ids(anilist_ids, "anilist")
collect_ids(anidb_ids, "anidb")
collect_ids(mal_ids, "myanimelist")
converted_ids = []
if self.config.Cache:
unconverted_ids = []
for anime_dict in all_ids:
for id_type, anime_id in anime_dict.items():
query_ids, update = self.config.Cache.query_anime_map(anime_id, id_type)
if not update and query_ids:
converted_ids.append(query_ids)
else:
unconverted_ids.append({id_type: anime_id})
else:
unconverted_ids = all_ids
for anime_ids in self._request(unconverted_ids):
if anime_ids:
if self.config.Cache:
self.config.Cache.update_anime(False, anime_ids)
converted_ids.append(anime_ids)
return converted_ids
def imdb_to_ids(self, imdb_id, language):
update_tmdb = False
update_tvdb = False
if self.config.Cache:
tmdb_id, tvdb_id = self.config.Cache.get_ids_from_imdb(imdb_id)
update_tmdb = False
if not tmdb_id:
tmdb_id, update_tmdb = self.config.Cache.get_tmdb_from_imdb(imdb_id)
if update_tmdb:
tmdb_id = None
update_tvdb = False
if not tvdb_id:
tvdb_id, update_tvdb = self.config.Cache.get_tvdb_from_imdb(imdb_id)
if update_tvdb:
tvdb_id = None
else:
tmdb_id = None
tvdb_id = None
from_cache = tmdb_id is not None or tvdb_id is not None
if not tmdb_id and not tvdb_id and self.config.TMDb:
try: tmdb_id = self.config.TMDb.convert_imdb_to_tmdb(imdb_id)
except Failed: pass
if not tmdb_id and not tvdb_id and self.config.TMDb:
try: tvdb_id = self.config.TMDb.convert_imdb_to_tvdb(imdb_id)
except Failed: pass
if not tmdb_id and not tvdb_id and self.config.Trakt:
try: tmdb_id = self.config.Trakt.convert_imdb_to_tmdb(imdb_id)
except Failed: pass
if not tmdb_id and not tvdb_id and self.config.Trakt:
try: tvdb_id = self.config.Trakt.convert_imdb_to_tvdb(imdb_id)
except Failed: pass
if tmdb_id and not from_cache:
try: self.config.TMDb.get_movie(tmdb_id)
except Failed: tmdb_id = None
if tvdb_id and not from_cache:
try: self.config.TVDb.get_series(language, tvdb_id)
except Failed: tvdb_id = None
if not tmdb_id and not tvdb_id:
raise Failed(f"Arms Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
if self.config.Cache:
if tmdb_id and update_tmdb is not False:
self.config.Cache.update_imdb("movie", update_tmdb, imdb_id, tmdb_id)
if tvdb_id and update_tvdb is not False:
self.config.Cache.update_imdb("show", update_tvdb, imdb_id, tvdb_id)
return tmdb_id, tvdb_id
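The deleted ArmsAPI above is superseded by the new Convert class in modules/convert.py (constructed as `Convert(self)` in config.py). Only the two conversion calls visible in this diff are shown in the hedged sketch below; the IDs are illustrative and the rest of Convert's interface is outside this changeset.

```python
# Assumes a fully built Config object named `config`.
movie_ids, show_ids = config.Convert.anidb_to_ids([69, 1530])        # AniDB IDs -> (TMDb movie IDs, TVDb show IDs)
movie_ids, show_ids = config.Convert.anilist_to_ids([5114, 21519])   # AniList IDs -> (TMDb movie IDs, TVDb show IDs)
```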

File diff suppressed because it is too large

@@ -1,7 +1,6 @@
 import logging, os, random, sqlite3
 from contextlib import closing
 from datetime import datetime, timedelta
-from modules.util import Failed
 
 logger = logging.getLogger("Plex Meta Manager")
@@ -11,29 +10,42 @@ class Cache:
         with sqlite3.connect(cache) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
-                cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guids'")
+                cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guid_map'")
                 if cursor.fetchone()[0] == 0:
                     logger.info(f"Initializing cache database at {cache}")
                 else:
                     logger.info(f"Using cache database at {cache}")
+                cursor.execute("DROP TABLE IF EXISTS guids")
+                cursor.execute("DROP TABLE IF EXISTS imdb_map")
                 cursor.execute(
-                    """CREATE TABLE IF NOT EXISTS guids (
+                    """CREATE TABLE IF NOT EXISTS guid_map (
                     INTEGER PRIMARY KEY,
                     plex_guid TEXT UNIQUE,
-                    tmdb_id TEXT,
-                    imdb_id TEXT,
-                    tvdb_id TEXT,
-                    anidb_id TEXT,
-                    expiration_date TEXT,
-                    media_type TEXT)"""
-                )
-                cursor.execute(
-                    """CREATE TABLE IF NOT EXISTS imdb_map (
-                    INTEGER PRIMARY KEY,
-                    imdb_id TEXT UNIQUE,
-                    t_id TEXT,
-                    expiration_date TEXT,
-                    media_type TEXT)"""
-                )
+                    t_id TEXT,
+                    media_type TEXT,
+                    expiration_date TEXT)"""
+                )
+                cursor.execute(
+                    """CREATE TABLE IF NOT EXISTS imdb_to_tmdb_map (
+                    INTEGER PRIMARY KEY,
+                    imdb_id TEXT UNIQUE,
+                    tmdb_id TEXT,
+                    media_type TEXT,
+                    expiration_date TEXT)"""
+                )
+                cursor.execute(
+                    """CREATE TABLE IF NOT EXISTS imdb_to_tvdb_map (
+                    INTEGER PRIMARY KEY,
+                    imdb_id TEXT UNIQUE,
+                    tvdb_id TEXT UNIQUE,
+                    expiration_date TEXT)"""
+                )
+                cursor.execute(
+                    """CREATE TABLE IF NOT EXISTS tmdb_to_tvdb_map (
+                    INTEGER PRIMARY KEY,
+                    tmdb_id TEXT UNIQUE,
+                    tvdb_id TEXT UNIQUE,
+                    expiration_date TEXT)"""
+                )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS letterboxd_map (
@@ -68,145 +80,85 @@ class Cache:
         self.expiration = expiration
         self.cache_path = cache
 
-    def get_ids_from_imdb(self, imdb_id):
-        tmdb_id, tmdb_expired = self.get_tmdb_id("movie", imdb_id=imdb_id)
-        tvdb_id, tvdb_expired = self.get_tvdb_id("show", imdb_id=imdb_id)
-        return tmdb_id, tvdb_id
-
-    def get_tmdb_id(self, media_type, plex_guid=None, imdb_id=None, tvdb_id=None, anidb_id=None):
-        return self.get_id_from(media_type, "tmdb_id", plex_guid=plex_guid, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id)
-
-    def get_imdb_id(self, media_type, plex_guid=None, tmdb_id=None, tvdb_id=None, anidb_id=None):
-        return self.get_id_from(media_type, "imdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id)
-
-    def get_tvdb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, anidb_id=None):
-        return self.get_id_from(media_type, "tvdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, anidb_id=anidb_id)
-
-    def get_anidb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None):
-        return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id)
-
-    def get_id_from(self, media_type, id_from, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None):
-        if plex_guid: return self.get_id(media_type, "plex_guid", id_from, plex_guid)
-        elif tmdb_id: return self.get_id(media_type, "tmdb_id", id_from, tmdb_id)
-        elif imdb_id: return self.get_id(media_type, "imdb_id", id_from, imdb_id)
-        elif tvdb_id: return self.get_id(media_type, "tvdb_id", id_from, tvdb_id)
-        elif anidb_id: return self.get_id(media_type, "anidb_id", id_from, anidb_id)
-        else: return None, None
-
-    def get_id(self, media_type, from_id, to_id, key):
-        id_to_return = None
-        expired = None
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute(f"SELECT * FROM guids WHERE {from_id} = ? AND media_type = ?", (key, media_type))
-                row = cursor.fetchone()
-                if row and row[to_id]:
-                    datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
-                    time_between_insertion = datetime.now() - datetime_object
-                    id_to_return = int(row[to_id])
-                    expired = time_between_insertion.days > self.expiration
-        return id_to_return, expired
-
-    def get_ids(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None):
-        ids_to_return = {}
-        expired = None
-        if plex_guid:
-            key = plex_guid
-            key_type = "plex_guid"
-        elif tmdb_id:
-            key = tmdb_id
-            key_type = "tmdb_id"
-        elif imdb_id:
-            key = imdb_id
-            key_type = "imdb_id"
-        elif tvdb_id:
-            key = tvdb_id
-            key_type = "tvdb_id"
-        else:
-            raise Failed("ID Required")
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute(f"SELECT * FROM guids WHERE {key_type} = ? AND media_type = ?", (key, media_type))
-                row = cursor.fetchone()
-                if row:
-                    if row["plex_guid"]: ids_to_return["plex"] = row["plex_guid"]
-                    if row["tmdb_id"]: ids_to_return["tmdb"] = int(row["tmdb_id"])
-                    if row["imdb_id"]: ids_to_return["imdb"] = row["imdb_id"]
-                    if row["tvdb_id"]: ids_to_return["tvdb"] = int(row["tvdb_id"])
-                    if row["anidb_id"]: ids_to_return["anidb"] = int(row["anidb_id"])
-                    datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
-                    time_between_insertion = datetime.now() - datetime_object
-                    expired = time_between_insertion.days > self.expiration
-        return ids_to_return, expired
-
-    def update_guid(self, media_type, plex_guid, tmdb_id, imdb_id, tvdb_id, anidb_id, expired):
-        expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute("INSERT OR IGNORE INTO guids(plex_guid) VALUES(?)", (plex_guid,))
-                cursor.execute(
-                    """UPDATE guids SET
-                    tmdb_id = ?,
-                    imdb_id = ?,
-                    tvdb_id = ?,
-                    anidb_id = ?,
-                    expiration_date = ?,
-                    media_type = ?
-                    WHERE plex_guid = ?""", (tmdb_id, imdb_id, tvdb_id, anidb_id, expiration_date.strftime("%Y-%m-%d"), media_type, plex_guid))
-                if imdb_id and (tmdb_id or tvdb_id):
-                    cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,))
-                    cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? WHERE imdb_id = ?", (tmdb_id if media_type == "movie" else tvdb_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id))
-
+    def query_guid_map(self, plex_guid):
+        id_to_return = None
+        media_type = None
+        expired = None
+        with sqlite3.connect(self.cache_path) as connection:
+            connection.row_factory = sqlite3.Row
+            with closing(connection.cursor()) as cursor:
+                cursor.execute(f"SELECT * FROM guid_map WHERE plex_guid = ?", (plex_guid,))
+                row = cursor.fetchone()
+                if row:
+                    time_between_insertion = datetime.now() - datetime.strptime(row["expiration_date"], "%Y-%m-%d")
+                    id_to_return = row["t_id"]
+                    media_type = row["media_type"]
+                    expired = time_between_insertion.days > self.expiration
+        return id_to_return, media_type, expired
+
+    def update_guid_map(self, media_type, plex_guid, t_id, expired):
+        self._update_map("guid_map", "plex_guid", plex_guid, "t_id", t_id, expired, media_type=media_type)
-    def get_tmdb_from_imdb(self, imdb_id): return self.query_imdb_map("movie", imdb_id)
-    def get_tvdb_from_imdb(self, imdb_id): return self.query_imdb_map("show", imdb_id)
-
-    def query_imdb_map(self, media_type, imdb_id):
-        id_to_return = None
-        expired = None
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute("SELECT * FROM imdb_map WHERE imdb_id = ? AND media_type = ?", (imdb_id, media_type))
-                row = cursor.fetchone()
-                if row and row["t_id"]:
-                    datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
-                    time_between_insertion = datetime.now() - datetime_object
-                    id_to_return = int(row["t_id"])
-                    expired = time_between_insertion.days > self.expiration
-        return id_to_return, expired
-
-    def update_imdb(self, media_type, expired, imdb_id, t_id):
-        expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,))
-                cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? WHERE imdb_id = ?", (t_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id))
-
-    def query_letterboxd_map(self, letterboxd_id):
-        tmdb_id = None
-        expired = None
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute("SELECT * FROM letterboxd_map WHERE letterboxd_id = ?", (letterboxd_id, ))
-                row = cursor.fetchone()
-                if row and row["tmdb_id"]:
-                    datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
-                    time_between_insertion = datetime.now() - datetime_object
-                    tmdb_id = int(row["tmdb_id"])
-                    expired = time_between_insertion.days > self.expiration
-        return tmdb_id, expired
-
-    def update_letterboxd(self, expired, letterboxd_id, tmdb_id):
-        expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
-        with sqlite3.connect(self.cache_path) as connection:
-            connection.row_factory = sqlite3.Row
-            with closing(connection.cursor()) as cursor:
-                cursor.execute("INSERT OR IGNORE INTO letterboxd_map(letterboxd_id) VALUES(?)", (letterboxd_id,))
-                cursor.execute("UPDATE letterboxd_map SET tmdb_id = ?, expiration_date = ? WHERE letterboxd_id = ?", (tmdb_id, expiration_date.strftime("%Y-%m-%d"), letterboxd_id))
-
+    def query_imdb_to_tmdb_map(self, media_type, _id, imdb=True):
+        from_id = "imdb_id" if imdb else "tmdb_id"
+        to_id = "tmdb_id" if imdb else "imdb_id"
+        return self._query_map("imdb_to_tmdb_map", _id, from_id, to_id, media_type=media_type)
+
+    def update_imdb_to_tmdb_map(self, media_type, expired, imdb_id, tmdb_id):
+        self._update_map("imdb_to_tmdb_map", "imdb_id", imdb_id, "tmdb_id", tmdb_id, expired, media_type=media_type)
+
+    def query_imdb_to_tvdb_map(self, _id, imdb=True):
+        from_id = "imdb_id" if imdb else "tvdb_id"
+        to_id = "tvdb_id" if imdb else "imdb_id"
+        return self._query_map("imdb_to_tvdb_map", _id, from_id, to_id)
+
+    def update_imdb_to_tvdb_map(self, expired, imdb_id, tvdb_id):
+        self._update_map("imdb_to_tvdb_map", "imdb_id", imdb_id, "tvdb_id", tvdb_id, expired)
+
+    def query_tmdb_to_tvdb_map(self, _id, tmdb=True):
+        from_id = "tmdb_id" if tmdb else "tvdb_id"
+        to_id = "tvdb_id" if tmdb else "tmdb_id"
+        return self._query_map("tmdb_to_tvdb_map", _id, from_id, to_id)
+
+    def update_tmdb_to_tvdb_map(self, expired, tmdb_id, tvdb_id):
+        self._update_map("tmdb_to_tvdb_map", "tmdb_id", tmdb_id, "tvdb_id", tvdb_id, expired)
+
+    def query_letterboxd_map(self, letterboxd_id):
+        return self._query_map("letterboxd_map", letterboxd_id, "letterboxd_id", "tmdb_id")
+
+    def update_letterboxd_map(self, expired, letterboxd_id, tmdb_id):
+        self._update_map("letterboxd_map", "letterboxd_id", letterboxd_id, "tmdb_id", tmdb_id, expired)
+
+    def _query_map(self, map_name, _id, from_id, to_id, media_type=None):
+        id_to_return = None
+        expired = None
+        with sqlite3.connect(self.cache_path) as connection:
+            connection.row_factory = sqlite3.Row
+            with closing(connection.cursor()) as cursor:
+                if media_type is None:
+                    cursor.execute(f"SELECT * FROM {map_name} WHERE {from_id} = ?", (_id,))
+                else:
+                    cursor.execute(f"SELECT * FROM {map_name} WHERE {from_id} = ? AND media_type = ?", (_id, media_type))
+                row = cursor.fetchone()
+                if row and row[to_id]:
+                    datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
+                    time_between_insertion = datetime.now() - datetime_object
+                    id_to_return = int(row[to_id])
+                    expired = time_between_insertion.days > self.expiration
+        return id_to_return, expired
+
+    def _update_map(self, map_name, val1_name, val1, val2_name, val2, expired, media_type=None):
+        expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
+        with sqlite3.connect(self.cache_path) as connection:
+            connection.row_factory = sqlite3.Row
+            with closing(connection.cursor()) as cursor:
+                cursor.execute(f"INSERT OR IGNORE INTO {map_name}({val1_name}) VALUES(?)", (val1,))
+                if media_type is None:
+                    sql = f"UPDATE {map_name} SET {val2_name} = ?, expiration_date = ? WHERE {val1_name} = ?"
+                    cursor.execute(sql, (val2, expiration_date.strftime("%Y-%m-%d"), val1))
+                else:
+                    sql = f"UPDATE {map_name} SET {val2_name} = ?, expiration_date = ?{'' if media_type is None else ', media_type = ?'} WHERE {val1_name} = ?"
+                    cursor.execute(sql, (val2, expiration_date.strftime("%Y-%m-%d"), media_type, val1))
 
     def query_omdb(self, imdb_id):
         omdb_dict = {}
@@ -260,7 +212,7 @@ class Cache:
                     expired = time_between_insertion.days > self.expiration
         return ids, expired
 
-    def update_anime(self, expired, anime_ids):
+    def update_anime_map(self, expired, anime_ids):
         expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
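A hedged usage sketch of the new generic map helpers: every *_map table is now read through `_query_map` and written through `_update_map` via thin public wrappers. The Cache instance and all IDs below are illustrative.

```python
# Assumes an initialized Cache instance named `cache`.
cache.update_imdb_to_tmdb_map("movie", False, "tt0133093", 603)
tmdb_id, expired = cache.query_imdb_to_tmdb_map("movie", "tt0133093")

cache.update_tmdb_to_tvdb_map(False, 60059, 248741)
tvdb_id, expired = cache.query_tmdb_to_tvdb_map(60059)

cache.update_guid_map("movie", "plex://movie/5d776825880197001ec90a92", 603, False)
t_id, media_type, expired = cache.query_guid_map("plex://movie/5d776825880197001ec90a92")
```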

@@ -1,10 +1,9 @@
-import glob, logging, os, re, requests, time
+import logging, os
 from modules import util
 from modules.anidb import AniDBAPI
 from modules.anilist import AniListAPI
-from modules.arms import ArmsAPI
-from modules.builder import CollectionBuilder
 from modules.cache import Cache
+from modules.convert import Convert
 from modules.imdb import IMDbAPI
 from modules.letterboxd import LetterboxdAPI
 from modules.mal import MyAnimeListAPI
@@ -17,8 +16,6 @@ from modules.tmdb import TMDbAPI
 from modules.trakttv import TraktAPI
 from modules.tvdb import TVDbAPI
 from modules.util import Failed
-from plexapi.exceptions import BadRequest
-from retrying import retry
 from ruamel import yaml
 
 logger = logging.getLogger("Plex Meta Manager")
@@ -47,7 +44,7 @@ sonarr_series_types = {
     "daily": "Episodes released daily or less frequently that use year-month-day (2017-05-25)",
     "anime": "Episodes released using an absolute episode number"
 }
-mass_genre_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"}
+mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"}
 library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"}
 
 class Config:
@@ -144,7 +141,7 @@ class Config:
                 else: message = f"Path {os.path.abspath(data[attribute])} does not exist"
             elif var_type == "list": return util.get_list(data[attribute])
             elif var_type == "list_path":
-                temp_list = [path for path in util.get_list(data[attribute], split=True) if os.path.exists(os.path.abspath(path))]
+                temp_list = [p for p in util.get_list(data[attribute], split=True) if os.path.exists(os.path.abspath(p))]
                 if len(temp_list) > 0: return temp_list
                 else: message = "No Paths exist"
             elif var_type == "lower_list": return util.get_list(data[attribute], lower=True)
@@ -153,7 +150,7 @@ class Config:
         if var_type == "path" and default and os.path.exists(os.path.abspath(default)):
             return default
         elif var_type == "path" and default:
-            if attribute in data and data[attribute]:
+            if data and attribute in data and data[attribute]:
                 message = f"neither {data[attribute]} or the default path {default} could be found"
             else:
                 message = f"no {text} found and the default path {default} could not be found"
@@ -206,7 +203,7 @@ class Config:
             try: self.tmdb["apikey"] = check_for_attribute(self.data, "apikey", parent="tmdb", throw=True)
             except Failed as e: raise Failed(e)
             self.tmdb["language"] = check_for_attribute(self.data, "language", parent="tmdb", default="en")
-            self.TMDb = TMDbAPI(self.tmdb)
+            self.TMDb = TMDbAPI(self, self.tmdb)
             logger.info(f"TMDb Connection {'Failed' if self.TMDb is None else 'Successful'}")
         else:
             raise Failed("Config Error: tmdb attribute not found")
@@ -265,8 +262,7 @@ class Config:
         self.TVDb = TVDbAPI(self)
         self.IMDb = IMDbAPI(self)
         self.AniDB = AniDBAPI(self)
-        self.Arms = ArmsAPI(self)
-        self.AniDBIDs = self.AniDB.get_AniDB_IDs()
+        self.Convert = Convert(self)
         self.AniList = AniListAPI(self)
         self.Letterboxd = LetterboxdAPI(self)
@@ -278,6 +274,9 @@ class Config:
         self.general["plex"]["url"] = check_for_attribute(self.data, "url", parent="plex", default_is_none=True)
         self.general["plex"]["token"] = check_for_attribute(self.data, "token", parent="plex", default_is_none=True)
         self.general["plex"]["timeout"] = check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60)
+        self.general["plex"]["clean_bundles"] = check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False)
+        self.general["plex"]["empty_trash"] = check_for_attribute(self.data, "empty_trash", parent="plex", var_type="bool", default=False)
+        self.general["plex"]["optimize"] = check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False)
 
         self.general["radarr"] = {}
         self.general["radarr"]["url"] = check_for_attribute(self.data, "url", parent="radarr", default_is_none=True)
@@ -367,20 +366,64 @@ class Config:
                 params["save_missing"] = check_for_attribute(lib, "save_missing", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
 
                 if lib and "mass_genre_update" in lib and lib["mass_genre_update"]:
-                    params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_genre_update_options, default_is_none=True, save=False)
+                    params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False)
+                    if self.OMDb is None and params["mass_genre_update"] == "omdb":
+                        params["mass_genre_update"] = None
+                        logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection")
                 else:
                     params["mass_genre_update"] = None
-                if params["mass_genre_update"] == "omdb" and self.OMDb is None:
-                    params["mass_genre_update"] = None
-                    logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection")
+
+                if lib and "mass_audience_rating_update" in lib and lib["mass_audience_rating_update"]:
+                    params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False)
+                    if self.OMDb is None and params["mass_audience_rating_update"] == "omdb":
+                        params["mass_audience_rating_update"] = None
+                        logger.error("Config Error: mass_audience_rating_update cannot be omdb without a successful OMDb Connection")
+                else:
+                    params["mass_audience_rating_update"] = None
+
+                if lib and "mass_critic_rating_update" in lib and lib["mass_critic_rating_update"]:
+                    params["mass_critic_rating_update"] = check_for_attribute(lib, "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False)
+                    if self.OMDb is None and params["mass_critic_rating_update"] == "omdb":
+                        params["mass_critic_rating_update"] = None
+                        logger.error("Config Error: mass_critic_rating_update cannot be omdb without a successful OMDb Connection")
+                else:
+                    params["mass_critic_rating_update"] = None
 
                 try:
-                    params["metadata_path"] = check_for_attribute(lib, "metadata_path", var_type="path", default=os.path.join(default_dir, f"{library_name}.yml"), throw=True)
+                    if lib and "metadata_path" in lib:
+                        params["metadata_path"] = []
+                        if lib["metadata_path"] is None:
+                            raise Failed("Config Error: metadata_path attribute is blank")
+                        paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]]
+                        for path in paths_to_check:
+                            if isinstance(path, dict):
+                                if "url" in path:
+                                    if path["url"] is None:
+                                        logger.error("Config Error: metadata_path url is blank")
+                                    else:
+                                        params["metadata_path"].append(("URL", path["url"]))
+                                if "git" in path:
+                                    if path["git"] is None:
+                                        logger.error("Config Error: metadata_path git is blank")
+                                    else:
+                                        params["metadata_path"].append(("Git", path['git']))
+                                if "file" in path:
+                                    if path["file"] is None:
+                                        logger.error("Config Error: metadata_path file is blank")
+                                    else:
+                                        params["metadata_path"].append(("File", path['file']))
+                            else:
+                                params["metadata_path"].append(("File", path))
+                    else:
+                        params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"))]
+                    params["default_dir"] = default_dir
                     params["plex"] = {}
                     params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", default=self.general["plex"]["url"], req_default=True, save=False)
                     params["plex"]["token"] = check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False)
                     params["plex"]["timeout"] = check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False)
+                    params["plex"]["clean_bundles"] = check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False)
+                    params["plex"]["empty_trash"] = check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False)
+                    params["plex"]["optimize"] = check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
                     library = PlexAPI(params, self.TMDb, self.TVDb)
                     logger.info(f"{params['name']} Library Connection Successful")
                 except Failed as e:
@@ -388,7 +431,7 @@ class Config:
                     logger.info(f"{params['name']} Library Connection Failed")
                     continue
 
-                if self.general["radarr"]["url"] or "radarr" in lib:
+                if self.general["radarr"]["url"] or (lib and "radarr" in lib):
                     logger.info("")
                     logger.info(f"Connecting to {params['name']} library's Radarr...")
                     radarr_params = {}
@@ -408,7 +451,7 @@
                         util.print_multiline(e, error=True)
                     logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
 
-                if self.general["sonarr"]["url"] or "sonarr" in lib:
+                if self.general["sonarr"]["url"] or (lib and "sonarr" in lib):
                     logger.info("")
                     logger.info(f"Connecting to {params['name']} library's Sonarr...")
                     sonarr_params = {}
@@ -434,7 +477,7 @@
                        util.print_multiline(e, error=True)
                    logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
 
-                if self.general["tautulli"]["url"] or "tautulli" in lib:
+                if self.general["tautulli"]["url"] or (lib and "tautulli" in lib):
                    logger.info("")
                    logger.info(f"Connecting to {params['name']} library's Tautulli...")
                    tautulli_params = {}
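One more hedged illustration: the new per-library clean_bundles / empty_trash / optimize flags fall back to the global plex block the same way the other settings do (check_for_attribute with the global value as the default). The dictionaries below are illustrative, not repository code.

```python
# Global plex block defaults (all three new flags default to False).
general_plex = {"clean_bundles": False, "empty_trash": False, "optimize": False}
# Per-library override, e.g. only one library cleans bundles after each run.
lib_overrides = {"clean_bundles": True}

effective = {key: lib_overrides.get(key, default) for key, default in general_plex.items()}
print(effective)  # {'clean_bundles': True, 'empty_trash': False, 'optimize': False}
```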
@@ -458,441 +501,3 @@ class Config:
         util.separator()
def update_libraries(self, test, requested_collections, resume_from):
for library in self.libraries:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.separator(f"{library.name} Library")
logger.info("")
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = self.map_guids(library)
if not test and not resume_from:
if library.mass_genre_update:
self.mass_metadata(library, movie_map, show_map)
try: library.update_metadata(self.TMDb, test)
except Failed as e: logger.error(e)
logger.info("")
util.separator(f"{library.name} Library {'Test ' if test else ''}Collections")
collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections
if resume_from and resume_from not in collections:
logger.warning(f"Collection: {resume_from} not in {library.name}")
continue
if collections:
for mapping_name, collection_attrs in collections.items():
if test and ("test" not in collection_attrs or collection_attrs["test"] is not True):
no_template_test = True
if "template" in collection_attrs and collection_attrs["template"]:
for data_template in util.get_list(collection_attrs["template"], split=False):
if "name" in data_template \
and data_template["name"] \
and library.templates \
and data_template["name"] in library.templates \
and library.templates[data_template["name"]] \
and "test" in library.templates[data_template["name"]] \
and library.templates[data_template["name"]]["test"] is True:
no_template_test = False
if no_template_test:
continue
try:
if resume_from and resume_from != mapping_name:
continue
elif resume_from == mapping_name:
resume_from = None
logger.info("")
util.separator(f"Resuming Collections")
logger.info("")
util.separator(f"{mapping_name} Collection")
logger.info("")
rating_key_map = {}
try:
builder = CollectionBuilder(self, library, mapping_name, collection_attrs)
except Failed as ef:
util.print_multiline(ef, error=True)
continue
except Exception as ee:
util.print_stacktrace()
logger.error(ee)
continue
try:
collection_obj = library.get_collection(mapping_name)
collection_name = collection_obj.title
except Failed:
collection_obj = None
collection_name = mapping_name
if len(builder.schedule) > 0:
util.print_multiline(builder.schedule, info=True)
logger.info("")
if builder.sync:
logger.info("Sync Mode: sync")
if collection_obj:
for item in collection_obj.items():
rating_key_map[item.ratingKey] = item
else:
logger.info("Sync Mode: append")
for i, f in enumerate(builder.filters):
if i == 0:
logger.info("")
logger.info(f"Collection Filter {f[0]}: {f[1]}")
builder.run_methods(collection_obj, collection_name, rating_key_map, movie_map, show_map)
try:
plex_collection = library.get_collection(collection_name)
except Failed as e:
logger.debug(e)
continue
builder.update_details(plex_collection)
if builder.run_again and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0):
library.run_again.append(builder)
except Exception as e:
util.print_stacktrace()
logger.error(f"Unknown Error: {e}")
if library.assets_for_all is True and not test and not requested_collections:
logger.info("")
util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
logger.info("")
for item in library.get_all():
folder = os.path.basename(os.path.dirname(item.locations[0]) if library.is_movie else item.locations[0])
for ad in library.asset_directory:
if library.asset_folders:
poster_path = os.path.join(ad, folder, "poster.*")
else:
poster_path = os.path.join(ad, f"{folder}.*")
matches = glob.glob(poster_path)
if len(matches) > 0:
item.uploadPoster(filepath=os.path.abspath(matches[0]))
logger.info(f"Detail: asset_directory updated {item.title}'s poster to [file] {os.path.abspath(matches[0])}")
if library.asset_folders:
matches = glob.glob(os.path.join(ad, folder, "background.*"))
if len(matches) > 0:
item.uploadArt(filepath=os.path.abspath(matches[0]))
logger.info(f"Detail: asset_directory updated {item.title}'s background to [file] {os.path.abspath(matches[0])}")
if library.is_show:
for season in item.seasons():
matches = glob.glob(os.path.join(ad, folder, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*"))
if len(matches) > 0:
season_path = os.path.abspath(matches[0])
season.uploadPoster(filepath=season_path)
logger.info(f"Detail: asset_directory updated {item.title} Season {season.seasonNumber}'s poster to [file] {season_path}")
for episode in season.episodes():
matches = glob.glob(os.path.join(ad, folder, f"{episode.seasonEpisode.upper()}.*"))
if len(matches) > 0:
episode_path = os.path.abspath(matches[0])
episode.uploadPoster(filepath=episode_path)
logger.info(f"Detail: asset_directory updated {item.title} {episode.seasonEpisode.upper()}'s poster to [file] {episode_path}")
if library.show_unmanaged is True and not test and not requested_collections:
logger.info("")
util.separator(f"Unmanaged Collections in {library.name} Library")
logger.info("")
unmanaged_count = 0
collections_in_plex = [str(plex_col) for plex_col in collections]
for col in library.get_all_collections():
if col.title not in collections_in_plex:
logger.info(col.title)
unmanaged_count += 1
logger.info("{} Unmanaged Collections".format(unmanaged_count))
else:
logger.info("")
logger.error("No collection to update")
has_run_again = False
for library in self.libraries:
if library.run_again:
has_run_again = True
break
if has_run_again:
logger.info("")
util.separator("Run Again")
logger.info("")
length = 0
for x in range(1, self.general["run_again_delay"] + 1):
length = util.print_return(length, f"Waiting to run again in {self.general['run_again_delay'] - x + 1} minutes")
for y in range(60):
time.sleep(1)
util.print_end(length)
for library in self.libraries:
if library.run_again:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.separator(f"{library.name} Library Run Again")
logger.info("")
collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections
if collections:
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = self.map_guids(library)
for builder in library.run_again:
logger.info("")
util.separator(f"{builder.name} Collection")
logger.info("")
try:
collection_obj = library.get_collection(builder.name)
except Failed as e:
util.print_multiline(e, error=True)
continue
builder.run_collections_again(collection_obj, movie_map, show_map)
def mass_metadata(self, library, movie_map, show_map):
length = 0
logger.info("")
util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
ids = {}
if self.Cache:
ids, expired = self.Cache.get_ids("movie" if library.is_movie else "show", plex_guid=item.guid)
elif library.is_movie:
for tmdb, rating_keys in movie_map.items():
if item.ratingKey in rating_keys:
ids["tmdb"] = tmdb
break
else:
for tvdb, rating_keys in show_map.items():
if item.ratingKey in rating_keys:
ids["tvdb"] = tvdb
break
if library.mass_genre_update:
if library.mass_genre_update == "tmdb":
if "tmdb" not in ids:
util.print_end(length, f"{item.title[:25]:<25} | No TMDb for Guid: {item.guid}")
continue
try:
tmdb_item = self.TMDb.get_movie(ids["tmdb"]) if library.is_movie else self.TMDb.get_show(ids["tmdb"])
except Failed as e:
util.print_end(length, str(e))
continue
new_genres = [genre.name for genre in tmdb_item.genres]
elif library.mass_genre_update in ["omdb", "imdb"]:
if self.OMDb.limit is True:
break
if "imdb" not in ids:
util.print_end(length, f"{item.title[:25]:<25} | No IMDb for Guid: {item.guid}")
continue
try:
omdb_item = self.OMDb.get_omdb(ids["imdb"])
except Failed as e:
util.print_end(length, str(e))
continue
new_genres = omdb_item.genres
else:
raise Failed
item_genres = [genre.tag for genre in item.genres]
display_str = ""
for genre in (g for g in item_genres if g not in new_genres):
item.removeGenre(genre)
display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}"
for genre in (g for g in new_genres if g not in item_genres):
item.addGenre(genre)
display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
if len(display_str) > 0:
util.print_end(length, f"{item.title[:25]:<25} | Genres | {display_str}")
def map_guids(self, library):
movie_map = {}
show_map = {}
length = 0
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
try:
id_type, main_id = self.get_id(item, library, length)
except BadRequest:
util.print_stacktrace()
util.print_end(length, f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} | {item.guid} for {item.title} not found")
continue
if isinstance(main_id, list):
if id_type == "movie":
for m in main_id:
if m in movie_map:
movie_map[m].append(item.ratingKey)
else:
movie_map[m] = [item.ratingKey]
elif id_type == "show":
for m in main_id:
if m in show_map:
show_map[m].append(item.ratingKey)
else:
show_map[m] = [item.ratingKey]
else:
if id_type == "movie":
if main_id in movie_map:
movie_map[main_id].append(item.ratingKey)
else:
movie_map[main_id] = [item.ratingKey]
elif id_type == "show":
if main_id in show_map:
show_map[main_id].append(item.ratingKey)
else:
show_map[main_id] = [item.ratingKey]
util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}")
return movie_map, show_map
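
# Illustrative sketch (not part of this diff): map_guids returns plain dicts keyed by the
# external ID, each holding one or more Plex rating keys, e.g.
# movie_map = {603: [12001], 24428: [12002, 12003]}   # TMDb ID -> list of Plex ratingKeys (IDs hypothetical)
# show_map = {73244: [33001]}                          # TVDb ID -> list of Plex ratingKeys (IDs hypothetical)
# so a collection builder can look up every library item that matches a TMDb/TVDb result.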
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get_guids(self, item):
return item.guids
def get_id(self, item, library, length):
expired = None
tmdb_id = None
imdb_id = None
tvdb_id = None
anidb_id = None
mal_id = None
error_message = None
if self.Cache:
if library.is_movie: tmdb_id, expired = self.Cache.get_tmdb_id("movie", plex_guid=item.guid)
else: tvdb_id, expired = self.Cache.get_tvdb_id("show", plex_guid=item.guid)
if not tvdb_id and library.is_show:
tmdb_id, expired = self.Cache.get_tmdb_id("show", plex_guid=item.guid)
anidb_id, expired = self.Cache.get_anidb_id("show", plex_guid=item.guid)
if expired or (not tmdb_id and library.is_movie) or (not tvdb_id and not tmdb_id and library.is_show):
guid = requests.utils.urlparse(item.guid)
item_type = guid.scheme.split(".")[-1]
check_id = guid.netloc
if item_type == "plex" and check_id == "movie":
try:
for guid_tag in self.get_guids(item):
url_parsed = requests.utils.urlparse(guid_tag.id)
if url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc)
elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc
except requests.exceptions.ConnectionError:
util.print_stacktrace()
logger.error(f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | No External GUIDs found for {item.title}")
return None, None
elif item_type == "plex" and check_id == "show":
try:
for guid_tag in self.get_guids(item):
url_parsed = requests.utils.urlparse(guid_tag.id)
if url_parsed.scheme == "tvdb": tvdb_id = int(url_parsed.netloc)
elif url_parsed.scheme == "imdb": imdb_id = url_parsed.netloc
elif url_parsed.scheme == "tmdb": tmdb_id = int(url_parsed.netloc)
except requests.exceptions.ConnectionError:
util.print_stacktrace()
logger.error(f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | No External GUIDs found for {item.title}")
return None, None
elif item_type == "imdb": imdb_id = check_id
elif item_type == "thetvdb": tvdb_id = int(check_id)
elif item_type == "themoviedb": tmdb_id = int(check_id)
elif item_type == "hama":
if check_id.startswith("tvdb"): tvdb_id = int(re.search("-(.*)", check_id).group(1))
elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1)
else: error_message = f"Hama Agent ID: {check_id} not supported"
elif item_type == "myanimelist": mal_id = check_id
elif item_type == "local": error_message = "No match in Plex"
else: error_message = f"Agent {item_type} not supported"
if not error_message:
if mal_id and not anidb_id:
try: anidb_id = self.Arms.mal_to_anidb(mal_id)
except Failed: pass
if anidb_id and not tvdb_id:
try: tvdb_id = self.Arms.anidb_to_tvdb(anidb_id)
except Failed: pass
if anidb_id and not imdb_id:
try: imdb_id = self.Arms.anidb_to_imdb(anidb_id)
except Failed: pass
if not tmdb_id and imdb_id:
if isinstance(imdb_id, list):
tmdb_id = []
new_imdb_id = []
for imdb in imdb_id:
try:
tmdb_id.append(self.TMDb.convert_imdb_to_tmdb(imdb))
new_imdb_id.append(imdb)
except Failed:
if self.Trakt:
try:
tmdb_id.append(self.Trakt.convert_imdb_to_tmdb(imdb))
new_imdb_id.append(imdb)
except Failed:
continue
else:
continue
imdb_id = new_imdb_id
else:
try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id)
except Failed: pass
if not tmdb_id and self.Trakt:
try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id)
except Failed: pass
if not tmdb_id and tvdb_id and library.is_show:
try: tmdb_id = self.TMDb.convert_tvdb_to_tmdb(tvdb_id)
except Failed: pass
if not tmdb_id and self.Trakt:
try: tmdb_id = self.Trakt.convert_tvdb_to_tmdb(tvdb_id)
except Failed: pass
if not imdb_id and tmdb_id and library.is_movie:
try: imdb_id = self.TMDb.convert_tmdb_to_imdb(tmdb_id)
except Failed: pass
if not imdb_id and self.Trakt:
try: imdb_id = self.Trakt.convert_tmdb_to_imdb(tmdb_id)
except Failed: pass
if not imdb_id and tvdb_id and library.is_show:
try: imdb_id = self.TMDb.convert_tvdb_to_imdb(tvdb_id)
except Failed: pass
if not imdb_id and self.Trakt:
try: imdb_id = self.Trakt.convert_tvdb_to_imdb(tvdb_id)
except Failed: pass
if not tvdb_id and library.is_show:
if tmdb_id:
try: tvdb_id = self.TMDb.convert_tmdb_to_tvdb(tmdb_id)
except Failed: pass
if not tvdb_id and self.Trakt:
try: tvdb_id = self.Trakt.convert_tmdb_to_tvdb(tmdb_id)
except Failed: pass
if not tvdb_id and imdb_id:
try: tvdb_id = self.TMDb.convert_imdb_to_tvdb(imdb_id)
except Failed: pass
if not tvdb_id and self.Trakt:
try: tvdb_id = self.Trakt.convert_imdb_to_tvdb(imdb_id)
except Failed: pass
if (not tmdb_id and library.is_movie) or (not tvdb_id and not (anidb_id and tmdb_id) and library.is_show):
service_name = "TMDb ID" if library.is_movie else "TVDb ID"
if self.Trakt: api_name = "TMDb or Trakt"
else: api_name = "TMDb"
if tmdb_id and imdb_id: id_name = f"TMDb ID: {tmdb_id} or IMDb ID: {imdb_id}"
elif imdb_id and tvdb_id: id_name = f"IMDb ID: {imdb_id} or TVDb ID: {tvdb_id}"
elif tmdb_id: id_name = f"TMDb ID: {tmdb_id}"
elif imdb_id: id_name = f"IMDb ID: {imdb_id}"
elif tvdb_id: id_name = f"TVDb ID: {tvdb_id}"
else: id_name = None
if anidb_id and not tmdb_id and not tvdb_id: error_message = f"Unable to convert AniDB ID: {anidb_id} to TMDb ID or TVDb ID"
elif id_name: error_message = f"Unable to convert {id_name} to {service_name} using {api_name}"
else: error_message = f"No ID to convert to {service_name}"
if self.Cache and ((tmdb_id and library.is_movie) or ((tvdb_id or (anidb_id and tmdb_id)) and library.is_show)):
if not isinstance(tmdb_id, list): tmdb_id = [tmdb_id]
if not isinstance(imdb_id, list): imdb_id = [imdb_id]
for i in range(len(tmdb_id)):
try: imdb_value = imdb_id[i]
except IndexError: imdb_value = None
util.print_end(length, f"Cache | {'^' if expired is True else '+'} | {item.guid:<46} | {tmdb_id[i] if tmdb_id[i] else 'None':<6} | {imdb_value if imdb_value else 'None':<10} | {tvdb_id if tvdb_id else 'None':<6} | {anidb_id if anidb_id else 'None':<5} | {item.title}")
self.Cache.update_guid("movie" if library.is_movie else "show", item.guid, tmdb_id[i], imdb_value, tvdb_id, anidb_id, expired)
if tmdb_id and library.is_movie: return "movie", tmdb_id
elif tvdb_id and library.is_show: return "show", tvdb_id
elif anidb_id and tmdb_id: return "movie", tmdb_id
else:
util.print_end(length, f"{'Cache | ! |' if self.Cache else 'Mapping Error:'} {item.guid:<46} | {error_message} for {item.title}")
return None, None
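
# Illustrative sketch (not part of this diff): get_id above relies on the agent being encoded
# in the Plex GUID. requests.utils.urlparse is the standard urlparse re-export used by the
# script, so a legacy agent GUID splits into an agent suffix and the external ID.
# The sample GUID below is hypothetical.
import requests
guid = requests.utils.urlparse("com.plexapp.agents.thetvdb://73244?lang=en")
item_type = guid.scheme.split(".")[-1]   # "thetvdb"
check_id = guid.netloc                   # "73244"
print(item_type, check_id)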

@@ -0,0 +1,360 @@
import logging, re, requests
from lxml import html
from modules import util
from modules.util import Failed
from retrying import retry
logger = logging.getLogger("Plex Meta Manager")
class Convert:
def __init__(self, config):
self.config = config
self.arms_url = "https://relations.yuna.moe/api/ids"
self.anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"
self.AniDBIDs = self._get_anidb()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _get_anidb(self):
return html.fromstring(requests.get(self.anidb_url).content)
def _anidb(self, input_id, to_id, fail=False):
ids = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{input_id}')]/@{to_id}")
if len(ids) > 0:
try:
if len(ids[0]) > 0:
return util.get_list(ids[0]) if to_id == "imdbid" else int(ids[0])
raise ValueError
except ValueError:
fail_text = f"Convert Error: No {util.pretty_ids[to_id]} ID found for AniDB ID: {input_id}"
else:
fail_text = f"Convert Error: AniDB ID: {input_id} not found"
if fail:
raise Failed(fail_text)
return [] if to_id == "imdbid" else None
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, ids):
return requests.post(self.arms_url, json=ids).json()
def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None):
all_ids = []
def collect_ids(ids, id_name):
if ids:
if isinstance(ids, list):
all_ids.extend([{id_name: a_id} for a_id in ids])
else:
all_ids.append({id_name: ids})
collect_ids(anilist_ids, "anilist")
collect_ids(anidb_ids, "anidb")
collect_ids(mal_ids, "myanimelist")
converted_ids = []
unconverted_ids = []
unconverted_id_sets = []
for anime_dict in all_ids:
if self.config.Cache:
for id_type, anime_id in anime_dict.items():
query_ids, expired = self.config.Cache.query_anime_map(anime_id, id_type)
if query_ids and not expired:
converted_ids.append(query_ids)
else:
unconverted_ids.append({id_type: anime_id})
if len(unconverted_ids) == 100:
unconverted_id_sets.append(unconverted_ids)
unconverted_ids = []
else:
unconverted_ids.append(anime_dict)
if len(unconverted_ids) == 100:
unconverted_id_sets.append(unconverted_ids)
unconverted_ids = []
for unconverted_id_set in unconverted_id_sets:
for anime_ids in self._request(unconverted_id_set):
if anime_ids:
if self.config.Cache:
self.config.Cache.update_anime_map(False, anime_ids)
converted_ids.append(anime_ids)
return converted_ids
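
# Illustrative sketch (not part of this diff): _arms_ids batches lookups against the
# relations.yuna.moe endpoint used above. Assuming that API's shape, the request is a JSON
# list of single-key dicts and the response mirrors it with the other IDs filled in.
# The IDs and the exact response keys shown below are assumptions.
import requests
payload = [{"anilist": 1}, {"myanimelist": 5114}]   # hypothetical IDs
mappings = requests.post("https://relations.yuna.moe/api/ids", json=payload).json()
# e.g. mappings[0] -> {"anilist": 1, "anidb": ..., "myanimelist": ...} (shape assumed)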
def anidb_to_ids(self, anidb_list):
show_ids = []
movie_ids = []
for anidb_id in anidb_list:
imdb_ids = self.anidb_to_imdb(anidb_id)
tmdb_ids = []
if imdb_ids:
for imdb_id in imdb_ids:
tmdb_id = self.imdb_to_tmdb(imdb_id)
if tmdb_id:
tmdb_ids.append(tmdb_id)
tvdb_id = self.anidb_to_tvdb(anidb_id)
if tvdb_id:
show_ids.append(tvdb_id)
if tmdb_ids:
movie_ids.extend(tmdb_ids)
if not tvdb_id and not tmdb_ids:
logger.error(f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
return movie_ids, show_ids
def anilist_to_ids(self, anilist_ids):
anidb_ids = []
for id_set in self._arms_ids(anilist_ids=anilist_ids):
if id_set["anidb"] is not None:
anidb_ids.append(id_set["anidb"])
else:
logger.error(f"Convert Error: AniDB ID not found for AniList ID: {id_set['anilist']}")
return self.anidb_to_ids(anidb_ids)
def myanimelist_to_ids(self, mal_ids):
anidb_ids = []
for id_set in self._arms_ids(mal_ids=mal_ids):
if id_set["anidb"] is not None:
anidb_ids.append(id_set["anidb"])
else:
logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}")
return self.anidb_to_ids(anidb_ids)
def anidb_to_tvdb(self, anidb_id, fail=False):
return self._anidb(anidb_id, "tvdbid", fail=fail)
def anidb_to_imdb(self, anidb_id, fail=False):
return self._anidb(anidb_id, "imdbid", fail=fail)
def tmdb_to_imdb(self, tmdb_id, is_movie=True, fail=False):
media_type = "movie" if is_movie else "show"
expired = False
if self.config.Cache and is_movie:
cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(media_type, tmdb_id, imdb=False)
if cache_id and not expired:
return cache_id
imdb_id = None
try:
imdb_id = self.config.TMDb.convert_from(tmdb_id, "imdb_id", is_movie)
except Failed:
if self.config.Trakt:
try:
imdb_id = self.config.Trakt.convert(tmdb_id, "tmdb", "imdb", "movie" if is_movie else "show")
except Failed:
pass
if fail and imdb_id is None:
raise Failed(f"Convert Error: No IMDb ID Found for TMDb ID: {tmdb_id}")
if self.config.Cache and imdb_id:
self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id)
return imdb_id
def imdb_to_tmdb(self, imdb_id, is_movie=True, fail=False):
media_type = "movie" if is_movie else "show"
expired = False
if self.config.Cache and is_movie:
cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(media_type, imdb_id, imdb=True)
if cache_id and not expired:
return cache_id
tmdb_id = None
try:
tmdb_id = self.config.TMDb.convert_to(imdb_id, "imdb_id", is_movie)
except Failed:
if self.config.Trakt:
try:
tmdb_id = self.config.Trakt.convert(imdb_id, "imdb", "tmdb", media_type)
except Failed:
pass
if fail and tmdb_id is None:
raise Failed(f"Convert Error: No TMDb ID Found for IMDb ID: {imdb_id}")
if self.config.Cache and tmdb_id:
self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id)
return tmdb_id
def tmdb_to_tvdb(self, tmdb_id, fail=False):
expired = False
if self.config.Cache:
cache_id, expired = self.config.Cache.query_tmdb_to_tvdb_map(tmdb_id, tmdb=True)
if cache_id and not expired:
return cache_id
tvdb_id = None
try:
tvdb_id = self.config.TMDb.convert_from(tmdb_id, "tvdb_id", False)
except Failed:
if self.config.Trakt:
try:
tvdb_id = self.config.Trakt.convert(tmdb_id, "tmdb", "tvdb", "show")
except Failed:
pass
if fail and tvdb_id is None:
raise Failed(f"Convert Error: No TVDb ID Found for TMDb ID: {tmdb_id}")
if self.config.Cache and tvdb_id:
self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id)
return tvdb_id
def tvdb_to_tmdb(self, tvdb_id, fail=False):
expired = False
if self.config.Cache:
cache_id, expired = self.config.Cache.query_tmdb_to_tvdb_map(tvdb_id, tmdb=False)
if cache_id and not expired:
return cache_id
tmdb_id = None
try:
tmdb_id = self.config.TMDb.convert_to(tvdb_id, "tvdb_id", False)
except Failed:
if self.config.Trakt:
try:
tmdb_id = self.config.Trakt.convert(tvdb_id, "tvdb", "tmdb", "show")
except Failed:
pass
if fail and tmdb_id is None:
raise Failed(f"Convert Error: No TMDb ID Found for TVDb ID: {tvdb_id}")
if self.config.Cache and tmdb_id:
self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id)
return tmdb_id
def tvdb_to_imdb(self, tvdb_id, fail=False):
expired = False
if self.config.Cache:
cache_id, expired = self.config.Cache.query_imdb_to_tvdb_map(tvdb_id, imdb=False)
if cache_id and not expired:
return cache_id
imdb_id = None
try:
imdb_id = self.tmdb_to_imdb(self.tvdb_to_tmdb(tvdb_id), False)
except Failed:
if self.config.Trakt:
try:
imdb_id = self.config.Trakt.convert(tvdb_id, "tvdb", "imdb", "show")
except Failed:
pass
if fail and imdb_id is None:
raise Failed(f"Convert Error: No IMDb ID Found for TVDb ID: {tvdb_id}")
if self.config.Cache and imdb_id:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return imdb_id
def imdb_to_tvdb(self, imdb_id, fail=False):
expired = False
if self.config.Cache:
cache_id, expired = self.config.Cache.query_imdb_to_tvdb_map(imdb_id, imdb=True)
if cache_id and not expired:
return cache_id
tvdb_id = None
try:
tvdb_id = self.tmdb_to_tvdb(self.imdb_to_tmdb(imdb_id, False))
except Failed:
if self.config.Trakt:
try:
tvdb_id = self.config.Trakt.convert(imdb_id, "imdb", "tvdb", "show")
except Failed:
pass
if fail and tvdb_id is None:
raise Failed(f"Convert Error: No TVDb ID Found for IMDb ID: {imdb_id}")
if self.config.Cache and tvdb_id:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return tvdb_id
def get_id(self, item, library, length):
expired = None
if self.config.Cache:
cache_id, media_type, expired = self.config.Cache.query_guid_map(item.guid)
if cache_id and not expired:
media_id_type = "movie" if "movie" in media_type else "show"
return media_id_type, util.get_list(cache_id, int_list=True)
try:
tmdb_id = None
imdb_id = None
tvdb_id = None
anidb_id = None
guid = requests.utils.urlparse(item.guid)
item_type = guid.scheme.split(".")[-1]
check_id = guid.netloc
if item_type == "plex":
tmdb_id = []
imdb_id = []
tvdb_id = []
try:
for guid_tag in library.get_guids(item):
url_parsed = requests.utils.urlparse(guid_tag.id)
if url_parsed.scheme == "tvdb": tvdb_id.append(int(url_parsed.netloc))
elif url_parsed.scheme == "imdb": imdb_id.append(url_parsed.netloc)
elif url_parsed.scheme == "tmdb": tmdb_id.append(int(url_parsed.netloc))
except requests.exceptions.ConnectionError:
util.print_stacktrace()
raise Failed("No External GUIDs found")
elif item_type == "imdb": imdb_id = check_id
elif item_type == "thetvdb": tvdb_id = int(check_id)
elif item_type == "themoviedb": tmdb_id = int(check_id)
elif item_type == "hama":
if check_id.startswith("tvdb"): tvdb_id = int(re.search("-(.*)", check_id).group(1))
elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1)
else: raise Failed(f"Hama Agent ID: {check_id} not supported")
elif item_type == "myanimelist":
anime_ids = self._arms_ids(mal_ids=check_id)
if anime_ids[0] and anime_ids[0]["anidb"]: anidb_id = anime_ids[0]["anidb"]
else: raise Failed(f"Unable to convert MyAnimeList ID: {check_id} to AniDB ID")
elif item_type == "local": raise Failed("No match in Plex")
else: raise Failed(f"Agent {item_type} not supported")
if anidb_id:
tvdb_id = self.anidb_to_tvdb(anidb_id)
if not tvdb_id:
imdb_id = self.anidb_to_imdb(anidb_id)
if not imdb_id and not tvdb_id:
raise Failed(f"Unable to convert AniDB ID: {anidb_id} to TVDb ID or IMDb ID")
if not tmdb_id and imdb_id:
if isinstance(imdb_id, list):
tmdb_id = []
for imdb in imdb_id:
try:
tmdb_id.append(self.imdb_to_tmdb(imdb, fail=True))
except Failed:
continue
else:
tmdb_id = self.imdb_to_tmdb(imdb_id)
if not tmdb_id:
raise Failed(f"Unable to convert IMDb ID: {util.compile_list(imdb_id)} to TMDb ID")
if not anidb_id and not tvdb_id and tmdb_id and library.is_show:
if isinstance(tmdb_id, list):
tvdb_id = []
for tmdb in tmdb_id:
try:
tvdb_id.append(self.tmdb_to_tvdb(tmdb, fail=True))
except Failed:
continue
else:
tvdb_id = self.tmdb_to_tvdb(tmdb_id)
if not tvdb_id:
raise Failed(f"Unable to convert TMDb ID: {util.compile_list(tmdb_id)} to TVDb ID")
if tvdb_id:
if isinstance(tvdb_id, list):
new_tvdb_id = []
for tvdb in tvdb_id:
try:
new_tvdb_id.append(int(tvdb))
except ValueError:
continue
tvdb_id = new_tvdb_id
else:
try:
tvdb_id = int(tvdb_id)
except ValueError:
tvdb_id = None
def update_cache(cache_ids, id_type, guid_type):
if self.config.Cache:
cache_ids = util.compile_list(cache_ids)
util.print_end(length, f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}")
self.config.Cache.update_guid_map(guid_type, item.guid, cache_ids, expired)
if tmdb_id and library.is_movie:
update_cache(tmdb_id, "TMDb", "movie")
return "movie", tmdb_id
elif tvdb_id and library.is_show:
update_cache(tvdb_id, "TVDb", "show")
return "show", tvdb_id
elif anidb_id and tmdb_id and library.is_show:
update_cache(tmdb_id, "TMDb", "show_movie")
return "movie", tmdb_id
else:
raise Failed(f"No ID to convert")
except Failed as e:
util.print_end(length, f"Mapping Error | {item.guid:<46} | {e} for {item.title}")
return None, None
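
# Illustrative sketch (not part of this diff): every conversion helper in Convert follows the
# same shape - check the cache table, fall back to TMDb, then to Trakt, then write the result
# back to the cache. This is a hypothetical, simplified version of that pattern; Failed is the
# exception imported from modules.util above, and the four callables are stand-ins.
def convert_with_cache(cache_lookup, tmdb_lookup, trakt_lookup, cache_store):
    cached, expired = cache_lookup()           # returns (value, expired) like the query_*_map methods
    if cached and not expired:
        return cached
    result = None
    try:
        result = tmdb_lookup()                 # primary source
    except Failed:
        if trakt_lookup:
            try:
                result = trakt_lookup()        # optional fallback
            except Failed:
                pass
    if result:
        cache_store(result, expired)           # refresh the cache row
    return result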

@@ -21,12 +21,12 @@ class IMDbAPI:
imdb_url = imdb_url.strip()
if not imdb_url.startswith(self.urls["list"]) and not imdb_url.startswith(self.urls["search"]) and not imdb_url.startswith(self.urls["keyword"]):
raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{self.urls['list']} (For Lists)\n{self.urls['search']} (For Searches)\n{self.urls['keyword']} (For Keyword Searches)")
- total, _ = self.get_total(self.fix_url(imdb_url), language)
+ total, _ = self._total(self._fix_url(imdb_url), language)
if total > 0:
return imdb_url
raise Failed(f"IMDb Error: {imdb_url} failed to parse")
- def fix_url(self, imdb_url):
+ def _fix_url(self, imdb_url):
if imdb_url.startswith(self.urls["list"]):
try: list_id = re.search("(\\d+)", str(imdb_url)).group(1)
except AttributeError: raise Failed(f"IMDb Error: Failed to parse List ID from {imdb_url}")
@@ -36,10 +36,10 @@ class IMDbAPI:
else:
return imdb_url
- def get_total(self, imdb_url, language):
+ def _total(self, imdb_url, language):
header = {"Accept-Language": language}
if imdb_url.startswith(self.urls["keyword"]):
- results = self.send_request(imdb_url, header).xpath("//div[@class='desc']/text()")
+ results = self._request(imdb_url, header).xpath("//div[@class='desc']/text()")
total = None
for result in results:
if "title" in result:
@@ -52,15 +52,15 @@ class IMDbAPI:
raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
return total, 50
else:
- try: results = self.send_request(imdb_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "")
+ try: results = self._request(imdb_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "")
except IndexError: raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}")
try: total = int(re.findall("(\\d+) title", results)[0])
except IndexError: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
return total, 250
- def get_imdb_ids_from_url(self, imdb_url, language, limit):
- current_url = self.fix_url(imdb_url)
- total, item_count = self.get_total(current_url, language)
+ def _ids_from_url(self, imdb_url, language, limit):
+ current_url = self._fix_url(imdb_url)
+ total, item_count = self._total(current_url, language)
header = {"Accept-Language": language}
length = 0
imdb_ids = []
@@ -76,9 +76,9 @@ class IMDbAPI:
start_num = (i - 1) * item_count + 1
length = util.print_return(length, f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}")
if imdb_url.startswith(self.urls["keyword"]):
- response = self.send_request(f"{current_url}&page={i}", header)
+ response = self._request(f"{current_url}&page={i}", header)
else:
- response = self.send_request(f"{current_url}&count={remainder if i == num_of_pages else item_count}&start={start_num}", header)
+ response = self._request(f"{current_url}&count={remainder if i == num_of_pages else item_count}&start={start_num}", header)
if imdb_url.startswith(self.urls["keyword"]) and i == num_of_pages:
imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst")[:remainder])
else:
@@ -88,39 +88,39 @@ class IMDbAPI:
else: raise Failed(f"IMDb Error: No IMDb IDs Found at {imdb_url}")
@retry(stop_max_attempt_number=6, wait_fixed=10000)
- def send_request(self, url, header):
+ def _request(self, url, header):
return html.fromstring(requests.get(url, headers=header).content)
- def get_items(self, method, data, language, status_message=True):
+ def get_items(self, method, data, language):
pretty = util.pretty_names[method] if method in util.pretty_names else method
- if status_message:
- logger.debug(f"Data: {data}")
+ logger.debug(f"Data: {data}")
show_ids = []
movie_ids = []
if method == "imdb_id":
- if status_message:
- logger.info(f"Processing {pretty}: {data}")
- tmdb_id, tvdb_id = self.config.Arms.imdb_to_ids(data, language)
+ logger.info(f"Processing {pretty}: {data}")
+ tmdb_id = self.config.Convert.imdb_to_tmdb(data)
+ tvdb_id = self.config.Convert.imdb_to_tvdb(data)
+ if not tmdb_id and not tvdb_id:
+ logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {data}")
if tmdb_id: movie_ids.append(tmdb_id)
if tvdb_id: show_ids.append(tvdb_id)
elif method == "imdb_list":
- if status_message:
- status = f"{data['limit']} Items at " if data['limit'] > 0 else ''
- logger.info(f"Processing {pretty}: {status}{data['url']}")
- imdb_ids = self.get_imdb_ids_from_url(data["url"], language, data["limit"])
+ status = f"{data['limit']} Items at " if data['limit'] > 0 else ''
+ logger.info(f"Processing {pretty}: {status}{data['url']}")
+ imdb_ids = self._ids_from_url(data["url"], language, data["limit"])
total_ids = len(imdb_ids)
length = 0
for i, imdb_id in enumerate(imdb_ids, 1):
length = util.print_return(length, f"Converting IMDb ID {i}/{total_ids}")
- try:
- tmdb_id, tvdb_id = self.config.Arms.imdb_to_ids(imdb_id, language)
- if tmdb_id: movie_ids.append(tmdb_id)
- if tvdb_id: show_ids.append(tvdb_id)
- except Failed as e: logger.warning(e)
+ tmdb_id = self.config.Convert.imdb_to_tmdb(imdb_id)
+ tvdb_id = self.config.Convert.imdb_to_tvdb(imdb_id)
+ if not tmdb_id and not tvdb_id:
+ logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
+ if tmdb_id: movie_ids.append(tmdb_id)
+ if tvdb_id: show_ids.append(tvdb_id)
util.print_end(length, f"Processed {total_ids} IMDb IDs")
else:
raise Failed(f"IMDb Error: Method {method} not supported")
- if status_message:
- logger.debug(f"TMDb IDs Found: {movie_ids}")
- logger.debug(f"TVDb IDs Found: {show_ids}")
+ logger.debug(f"TMDb IDs Found: {movie_ids}")
+ logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids

@@ -14,15 +14,11 @@ class LetterboxdAPI:
self.url = "https://letterboxd.com"
@retry(stop_max_attempt_number=6, wait_fixed=10000)
- def send_request(self, url, language):
+ def _request(self, url, language):
return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content)
- def get_list_description(self, list_url, language):
- descriptions = self.send_request(list_url, language).xpath("//meta[@property='og:description']/@content")
- return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None
- def parse_list(self, list_url, language):
- response = self.send_request(list_url, language)
+ def _parse_list(self, list_url, language):
+ response = self._request(list_url, language)
letterboxd_ids = response.xpath("//div[@class='poster film-poster really-lazy-load']/@data-film-id")
items = []
for letterboxd_id in letterboxd_ids:
@@ -30,14 +26,11 @@ class LetterboxdAPI:
items.append((letterboxd_id, slugs[0]))
next_url = response.xpath("//a[@class='next']/@href")
if len(next_url) > 0:
- items.extend(self.parse_list(f"{self.url}{next_url[0]}", language))
+ items.extend(self._parse_list(f"{self.url}{next_url[0]}", language))
return items
- def get_tmdb_from_slug(self, slug, language):
- return self.get_tmdb(f"{self.url}{slug}", language)
- def get_tmdb(self, letterboxd_url, language):
- response = self.send_request(letterboxd_url, language)
+ def _tmdb(self, letterboxd_url, language):
+ response = self._request(letterboxd_url, language)
ids = response.xpath("//a[@data-track-action='TMDb']/@href")
if len(ids) > 0 and ids[0]:
if "themoviedb.org/movie" in ids[0]:
@@ -45,32 +38,36 @@ class LetterboxdAPI:
raise Failed(f"Letterboxd Error: TMDb Movie ID not found in {ids[0]}")
raise Failed(f"Letterboxd Error: TMDb Movie ID not found at {letterboxd_url}")
- def get_items(self, method, data, language, status_message=True):
+ def get_list_description(self, list_url, language):
+ descriptions = self._request(list_url, language).xpath("//meta[@property='og:description']/@content")
+ return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None
+ def get_items(self, method, data, language):
pretty = util.pretty_names[method] if method in util.pretty_names else method
movie_ids = []
- if status_message:
- logger.info(f"Processing {pretty}: {data}")
- items = self.parse_list(data, language)
+ logger.info(f"Processing {pretty}: {data}")
+ items = self._parse_list(data, language)
total_items = len(items)
- if total_items == 0:
- raise Failed(f"Letterboxd Error: No List Items found in {data}")
- length = 0
- for i, item in enumerate(items, 1):
- length = util.print_return(length, f"Finding TMDb ID {i}/{total_items}")
- tmdb_id = None
- expired = None
- if self.config.Cache:
- tmdb_id, expired = self.config.Cache.query_letterboxd_map(item[0])
- if not tmdb_id or expired is not False:
- try:
- tmdb_id = self.get_tmdb_from_slug(item[1], language)
- except Failed as e:
- logger.error(e)
- continue
- if self.config.Cache:
- self.config.Cache.update_letterboxd(expired, item[0], tmdb_id)
- movie_ids.append(tmdb_id)
- util.print_end(length, f"Processed {total_items} TMDb IDs")
- if status_message:
- logger.debug(f"TMDb IDs Found: {movie_ids}")
+ if total_items > 0:
+ length = 0
+ for i, item in enumerate(items, 1):
+ letterboxd_id, slug = item
+ length = util.print_return(length, f"Finding TMDb ID {i}/{total_items}")
+ tmdb_id = None
+ expired = None
+ if self.config.Cache:
+ tmdb_id, expired = self.config.Cache.query_letterboxd_map(letterboxd_id)
+ if not tmdb_id or expired is not False:
+ try:
+ tmdb_id = self._tmdb(f"{self.url}{slug}", language)
+ except Failed as e:
+ logger.error(e)
+ continue
+ if self.config.Cache:
+ self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id)
+ movie_ids.append(tmdb_id)
+ util.print_end(length, f"Processed {total_items} TMDb IDs")
+ else:
+ logger.error(f"Letterboxd Error: No List Items found in {data}")
+ logger.debug(f"TMDb IDs Found: {movie_ids}")
return movie_ids, []

@@ -1,4 +1,4 @@
- import json, logging, re, requests, secrets, webbrowser
+ import logging, re, requests, secrets, webbrowser
from modules import util
from modules.util import Failed, TimeoutExpired
from retrying import retry
@@ -87,11 +87,11 @@ class MyAnimeListAPI:
self.client_secret = params["client_secret"]
self.config_path = params["config_path"]
self.authorization = authorization
- if not self.save_authorization(self.authorization):
- if not self.refresh_authorization():
- self.get_authorization()
- def get_authorization(self):
+ if not self._save(self.authorization):
+ if not self._refresh():
+ self._authorization()
+ def _authorization(self):
code_verifier = secrets.token_urlsafe(100)[:128]
url = f"{self.urls['oauth_authorize']}?response_type=code&client_id={self.client_id}&code_challenge={code_verifier}"
logger.info("")
@@ -114,21 +114,21 @@ class MyAnimeListAPI:
"code_verifier": code_verifier,
"grant_type": "authorization_code"
}
- new_authorization = self.oauth_request(data)
+ new_authorization = self._oauth(data)
if "error" in new_authorization:
raise Failed("MyAnimeList Error: Invalid code")
- if not self.save_authorization(new_authorization):
+ if not self._save(new_authorization):
raise Failed("MyAnimeList Error: New Authorization Failed")
- def check_authorization(self, authorization):
+ def _check(self, authorization):
try:
- self.send_request(self.urls["suggestions"], authorization=authorization)
+ self._request(self.urls["suggestions"], authorization=authorization)
return True
except Failed as e:
logger.debug(e)
return False
- def refresh_authorization(self):
+ def _refresh(self):
if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]:
logger.info("Refreshing Access Token...")
data = {
@@ -137,12 +137,12 @@ class MyAnimeListAPI:
"refresh_token": self.authorization["refresh_token"],
"grant_type": "refresh_token"
}
- refreshed_authorization = self.oauth_request(data)
- return self.save_authorization(refreshed_authorization)
+ refreshed_authorization = self._oauth(data)
+ return self._save(refreshed_authorization)
return False
- def save_authorization(self, authorization):
- if authorization is not None and "access_token" in authorization and authorization["access_token"] and self.check_authorization(authorization):
+ def _save(self, authorization):
+ if authorization is not None and "access_token" in authorization and authorization["access_token"] and self._check(authorization):
if self.authorization != authorization:
yaml.YAML().allow_duplicate_keys = True
config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
@@ -159,69 +159,62 @@ class MyAnimeListAPI:
return False
@retry(stop_max_attempt_number=6, wait_fixed=10000)
- def oauth_request(self, data):
+ def _oauth(self, data):
return requests.post(self.urls["oauth_token"], data).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
- def send_request(self, url, authorization=None):
+ def _request(self, url, authorization=None):
new_authorization = authorization if authorization else self.authorization
response = requests.get(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"}).json()
if "error" in response: raise Failed(f"MyAnimeList Error: {response['error']}")
else: return response
- def request_and_parse_mal_ids(self, url):
- data = self.send_request(url)
+ def _parse_request(self, url):
+ data = self._request(url)
return [d["node"]["id"] for d in data["data"]] if "data" in data else []
- def get_username(self):
- return self.send_request(f"{self.urls['user']}/@me")["name"]
- def get_ranked(self, ranking_type, limit):
+ def _username(self):
+ return self._request(f"{self.urls['user']}/@me")["name"]
+ def _ranked(self, ranking_type, limit):
url = f"{self.urls['ranking']}?ranking_type={ranking_type}&limit={limit}"
- return self.request_and_parse_mal_ids(url)
- def get_season(self, season, year, sort_by, limit):
+ return self._parse_request(url)
+ def _season(self, season, year, sort_by, limit):
url = f"{self.urls['season']}/{year}/{season}?sort={sort_by}&limit={limit}"
- return self.request_and_parse_mal_ids(url)
- def get_suggestions(self, limit):
+ return self._parse_request(url)
+ def _suggestions(self, limit):
url = f"{self.urls['suggestions']}?limit={limit}"
- return self.request_and_parse_mal_ids(url)
- def get_userlist(self, username, status, sort_by, limit):
+ return self._parse_request(url)
+ def _userlist(self, username, status, sort_by, limit):
final_status = "" if status == "all" else f"status={status}&"
url = f"{self.urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}"
- return self.request_and_parse_mal_ids(url)
- def get_items(self, method, data, language, status_message=True):
- if status_message:
- logger.debug(f"Data: {data}")
+ return self._parse_request(url)
+ def get_items(self, method, data):
+ logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method
if method == "mal_id":
mal_ids = [data]
- if status_message:
- logger.info(f"Processing {pretty}: {data}")
+ logger.info(f"Processing {pretty}: {data}")
elif method in mal_ranked_name:
- mal_ids = self.get_ranked(mal_ranked_name[method], data)
- if status_message:
- logger.info(f"Processing {pretty}: {data} Anime")
+ mal_ids = self._ranked(mal_ranked_name[method], data)
+ logger.info(f"Processing {pretty}: {data} Anime")
elif method == "mal_season":
- mal_ids = self.get_season(data["season"], data["year"], data["sort_by"], data["limit"])
- if status_message:
- logger.info(f"Processing {pretty}: {data['limit']} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
+ mal_ids = self._season(data["season"], data["year"], data["sort_by"], data["limit"])
+ logger.info(f"Processing {pretty}: {data['limit']} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
elif method == "mal_suggested":
- mal_ids = self.get_suggestions(data)
- if status_message:
- logger.info(f"Processing {pretty}: {data} Anime")
+ mal_ids = self._suggestions(data)
+ logger.info(f"Processing {pretty}: {data} Anime")
elif method == "mal_userlist":
- mal_ids = self.get_userlist(data["username"], data["status"], data["sort_by"], data["limit"])
- if status_message:
- logger.info(f"Processing {pretty}: {data['limit']} Anime from {self.get_username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}")
+ mal_ids = self._userlist(data["username"], data["status"], data["sort_by"], data["limit"])
+ logger.info(f"Processing {pretty}: {data['limit']} Anime from {self._username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}")
else:
raise Failed(f"MyAnimeList Error: Method {method} not supported")
- movie_ids, show_ids = self.config.Arms.myanimelist_to_ids(mal_ids, language)
- if status_message:
- logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
- logger.debug(f"Shows Found: {show_ids}")
- logger.debug(f"Movies Found: {movie_ids}")
+ movie_ids, show_ids = self.config.Convert.myanimelist_to_ids(mal_ids)
+ logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
+ logger.debug(f"Shows Found: {show_ids}")
+ logger.debug(f"Movies Found: {movie_ids}")
return movie_ids, show_ids

@@ -0,0 +1,379 @@
import logging, os, re, requests
from datetime import datetime
from modules import plex, util
from modules.util import Failed
from plexapi.exceptions import NotFound
from ruamel import yaml
logger = logging.getLogger("Plex Meta Manager")
class Metadata:
def __init__(self, library, file_type, path):
self.library = library
self.type = file_type
self.path = path
self.github_base = "https://raw.githubusercontent.com/meisnate12/Plex-Meta-Manager-Configs/master/"
logger.info("")
logger.info(f"Loading Metadata {file_type}: {path}")
def get_dict(attribute, attr_data, check_list=None):
if attribute in attr_data:
if attr_data[attribute]:
if isinstance(attr_data[attribute], dict):
if check_list:
new_dict = {}
for a_name, a_data in attr_data[attribute].items():
if a_name in check_list:
logger.error(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {a_name}")
else:
new_dict[a_name] = a_data
return new_dict
else:
return attr_data[attribute]
else:
logger.warning(f"Config Warning: {attribute} must be a dictionary")
else:
logger.warning(f"Config Warning: {attribute} attribute is blank")
return None
try:
if file_type in ["URL", "Git"]:
content_path = path if file_type == "URL" else f"{self.github_base}{path}.yml"
response = requests.get(content_path)
if response.status_code >= 400:
raise Failed(f"URL Error: No file found at {content_path}")
content = response.content
elif os.path.exists(os.path.abspath(path)):
content = open(path, encoding="utf-8")
else:
raise Failed(f"File Error: File does not exist {path}")
data, ind, bsi = yaml.util.load_yaml_guess_indent(content)
self.metadata = get_dict("metadata", data, library.metadatas)
self.templates = get_dict("templates", data)
self.collections = get_dict("collections", data, library.collections)
if self.metadata is None and self.collections is None:
raise Failed("YAML Error: metadata or collections attribute is required")
logger.info(f"Metadata File Loaded Successfully")
except yaml.scanner.ScannerError as ye:
raise Failed(f"YAML Error: {util.tab_new_lines(ye)}")
except Exception as e:
util.print_stacktrace()
raise Failed(f"YAML Error: {e}")
def get_collections(self, requested_collections):
if requested_collections:
return {c: self.collections[c] for c in util.get_list(requested_collections) if c in self.collections}
else:
return self.collections
def update_metadata(self, TMDb, test):
logger.info("")
util.separator(f"Running Metadata")
logger.info("")
if not self.metadata:
raise Failed("No metadata to edit")
for mapping_name, meta in self.metadata.items():
methods = {mm.lower(): mm for mm in meta}
if test and ("test" not in methods or meta[methods["test"]] is not True):
continue
updated = False
edits = {}
advance_edits = {}
def add_edit(name, current, group, alias, key=None, value=None, var_type="str"):
if value or name in alias:
if value or group[alias[name]]:
if key is None: key = name
if value is None: value = group[alias[name]]
try:
if var_type == "date":
final_value = util.check_date(value, name, return_string=True, plex_date=True)
elif var_type == "float":
final_value = util.check_number(value, name, number_type="float", minimum=0, maximum=10)
else:
final_value = value
if str(current) != str(final_value):
edits[f"{key}.value"] = final_value
edits[f"{key}.locked"] = 1
logger.info(f"Detail: {name} updated to {final_value}")
except Failed as ee:
logger.error(ee)
else:
logger.error(f"Metadata Error: {name} attribute is blank")
def add_advanced_edit(attr, obj, group, alias, show_library=False, new_agent=False):
key, options = plex.advance_keys[attr]
if attr in alias:
if new_agent and self.library.agent not in plex.new_plex_agents:
logger.error(f"Metadata Error: {attr} attribute only works for with the New Plex Movie Agent and New Plex TV Agent")
elif show_library and not self.library.is_show:
logger.error(f"Metadata Error: {attr} attribute only works for show libraries")
elif group[alias[attr]]:
method_data = str(group[alias[attr]]).lower()
if method_data not in options:
logger.error(f"Metadata Error: {group[alias[attr]]} {attr} attribute invalid")
elif getattr(obj, key) != options[method_data]:
advance_edits[key] = options[method_data]
logger.info(f"Detail: {attr} updated to {method_data}")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
def edit_tags(attr, obj, group, alias, key=None, extra=None, movie_library=False):
if key is None:
key = f"{attr}s"
if attr in alias and f"{attr}.sync" in alias:
logger.error(f"Metadata Error: Cannot use {attr} and {attr}.sync together")
elif attr in alias or f"{attr}.sync" in alias:
attr_key = attr if attr in alias else f"{attr}.sync"
if movie_library and not self.library.is_movie:
logger.error(f"Metadata Error: {attr_key} attribute only works for movie libraries")
elif group[alias[attr_key]] or extra:
item_tags = [item_tag.tag for item_tag in getattr(obj, key)]
input_tags = []
if group[alias[attr_key]]:
input_tags.extend(util.get_list(group[alias[attr_key]]))
if extra:
input_tags.extend(extra)
if f"{attr}.sync" in alias:
remove_method = getattr(obj, f"remove{attr.capitalize()}")
for tag in (t for t in item_tags if t not in input_tags):
updated = True
remove_method(tag)
logger.info(f"Detail: {attr.capitalize()} {tag} removed")
add_method = getattr(obj, f"add{attr.capitalize()}")
for tag in (t for t in input_tags if t not in item_tags):
updated = True
add_method(tag)
logger.info(f"Detail: {attr.capitalize()} {tag} added")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
def set_image(attr, obj, group, alias, poster=True, url=True):
if group[alias[attr]]:
message = f"{'poster' if poster else 'background'} to [{'URL' if url else 'File'}] {group[alias[attr]]}"
self.library.upload_image(obj, group[alias[attr]], poster=poster, url=url)
logger.info(f"Detail: {attr} updated {message}")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
def set_images(obj, group, alias):
if "url_poster" in alias:
set_image("url_poster", obj, group, alias)
elif "file_poster" in alias:
set_image("file_poster", obj, group, alias, url=False)
if "url_background" in alias:
set_image("url_background", obj, group, alias, poster=False)
elif "file_background" in alias:
set_image("file_background", obj, group, alias, poster=False, url=False)
logger.info("")
util.separator()
logger.info("")
year = None
if "year" in methods:
year = util.check_number(meta[methods["year"]], "year", minimum=1800, maximum=datetime.now().year + 1)
title = mapping_name
if "title" in methods:
if meta[methods["title"]] is None:
logger.error("Metadata Error: title attribute is blank")
else:
title = meta[methods["title"]]
item = self.library.search_item(title, year=year)
if item is None:
item = self.library.search_item(f"{title} (SUB)", year=year)
if item is None and "alt_title" in methods:
if meta[methods["alt_title"]] is None:
logger.error("Metadata Error: alt_title attribute is blank")
else:
alt_title = meta["alt_title"]
item = self.library.search_item(alt_title, year=year)
if item is None:
logger.error(f"Plex Error: Item {mapping_name} not found")
logger.error(f"Skipping {mapping_name}")
continue
item_type = "Movie" if self.library.is_movie else "Show"
logger.info(f"Updating {item_type}: {title}...")
tmdb_item = None
tmdb_is_movie = None
if ("tmdb_show" in methods or "tmdb_id" in methods) and "tmdb_movie" in methods:
logger.error("Metadata Error: Cannot use tmdb_movie and tmdb_show when editing the same metadata item")
if "tmdb_show" in methods or "tmdb_id" in methods or "tmdb_movie" in methods:
try:
if "tmdb_show" in methods or "tmdb_id" in methods:
data = meta[methods["tmdb_show" if "tmdb_show" in methods else "tmdb_id"]]
if data is None:
logger.error("Metadata Error: tmdb_show attribute is blank")
else:
tmdb_is_movie = False
tmdb_item = TMDb.get_show(util.regex_first_int(data, "Show"))
elif "tmdb_movie" in methods:
if meta[methods["tmdb_movie"]] is None:
logger.error("Metadata Error: tmdb_movie attribute is blank")
else:
tmdb_is_movie = True
tmdb_item = TMDb.get_movie(util.regex_first_int(meta[methods["tmdb_movie"]], "Movie"))
except Failed as e:
logger.error(e)
originally_available = None
original_title = None
rating = None
studio = None
tagline = None
summary = None
genres = []
if tmdb_item:
originally_available = tmdb_item.release_date if tmdb_is_movie else tmdb_item.first_air_date
if tmdb_item and tmdb_is_movie is True and tmdb_item.original_title != tmdb_item.title:
original_title = tmdb_item.original_title
elif tmdb_item and tmdb_is_movie is False and tmdb_item.original_name != tmdb_item.name:
original_title = tmdb_item.original_name
rating = tmdb_item.vote_average
if tmdb_is_movie is True and tmdb_item.production_companies:
studio = tmdb_item.production_companies[0].name
elif tmdb_is_movie is False and tmdb_item.networks:
studio = tmdb_item.networks[0].name
tagline = tmdb_item.tagline if len(tmdb_item.tagline) > 0 else None
summary = tmdb_item.overview
genres = [genre.name for genre in tmdb_item.genres]
edits = {}
add_edit("title", item.title, meta, methods, value=title)
add_edit("sort_title", item.titleSort, meta, methods, key="titleSort")
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods,
key="originallyAvailableAt", value=originally_available, var_type="date")
add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float")
add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float")
add_edit("content_rating", item.contentRating, meta, methods, key="contentRating")
add_edit("original_title", item.originalTitle, meta, methods, key="originalTitle", value=original_title)
add_edit("studio", item.studio, meta, methods, value=studio)
add_edit("tagline", item.tagline, meta, methods, value=tagline)
add_edit("summary", item.summary, meta, methods, value=summary)
self.library.edit_item(item, mapping_name, item_type, edits)
advance_edits = {}
add_advanced_edit("episode_sorting", item, meta, methods, show_library=True)
add_advanced_edit("keep_episodes", item, meta, methods, show_library=True)
add_advanced_edit("delete_episodes", item, meta, methods, show_library=True)
add_advanced_edit("season_display", item, meta, methods, show_library=True)
add_advanced_edit("episode_ordering", item, meta, methods, show_library=True)
add_advanced_edit("metadata_language", item, meta, methods, new_agent=True)
add_advanced_edit("use_original_title", item, meta, methods, new_agent=True)
self.library.edit_item(item, mapping_name, item_type, advance_edits, advanced=True)
edit_tags("genre", item, meta, methods, extra=genres)
edit_tags("label", item, meta, methods)
edit_tags("collection", item, meta, methods)
edit_tags("country", item, meta, methods, key="countries", movie_library=True)
edit_tags("director", item, meta, methods, movie_library=True)
edit_tags("producer", item, meta, methods, movie_library=True)
edit_tags("writer", item, meta, methods, movie_library=True)
logger.info(f"{item_type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
set_images(item, meta, methods)
if "seasons" in methods and self.library.is_show:
if meta[methods["seasons"]]:
for season_id in meta[methods["seasons"]]:
updated = False
logger.info("")
logger.info(f"Updating season {season_id} of {mapping_name}...")
if isinstance(season_id, int):
season = None
for s in item.seasons():
if s.index == season_id:
season = s
break
if season is None:
logger.error(f"Metadata Error: Season: {season_id} not found")
else:
season_dict = meta[methods["seasons"]][season_id]
season_methods = {sm.lower(): sm for sm in season_dict}
if "title" in season_methods and season_dict[season_methods["title"]]:
title = season_dict[season_methods["title"]]
else:
title = season.title
if "sub" in season_methods:
if season_dict[season_methods["sub"]] is None:
logger.error("Metadata Error: sub attribute is blank")
elif season_dict[season_methods["sub"]] is True and "(SUB)" not in title:
title = f"{title} (SUB)"
elif season_dict[season_methods["sub"]] is False and title.endswith(" (SUB)"):
title = title[:-6]
else:
logger.error("Metadata Error: sub attribute must be True or False")
edits = {}
add_edit("title", season.title, season_dict, season_methods, value=title)
add_edit("summary", season.summary, season_dict, season_methods)
self.library.edit_item(season, season_id, "Season", edits)
set_images(season, season_dict, season_methods)
else:
logger.error(f"Metadata Error: Season: {season_id} invalid, it must be an integer")
logger.info(f"Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
else:
logger.error("Metadata Error: seasons attribute is blank")
elif "seasons" in methods:
logger.error("Metadata Error: seasons attribute only works for show libraries")
if "episodes" in methods and self.library.is_show:
if meta[methods["episodes"]]:
for episode_str in meta[methods["episodes"]]:
updated = False
logger.info("")
match = re.search("[Ss]\\d+[Ee]\\d+", episode_str)
if match:
output = match.group(0)[1:].split("E" if "E" in match.group(0) else "e")
season_id = int(output[0])
episode_id = int(output[1])
logger.info(f"Updating episode S{season_id}E{episode_id} of {mapping_name}...")
try:
episode = item.episode(season=season_id, episode=episode_id)
except NotFound:
logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found")
else:
episode_dict = meta[methods["episodes"]][episode_str]
episode_methods = {em.lower(): em for em in episode_dict}
if "title" in episode_methods and episode_dict[episode_methods["title"]]:
title = episode_dict[episode_methods["title"]]
else:
title = episode.title
if "sub" in episode_dict:
if episode_dict[episode_methods["sub"]] is None:
logger.error("Metadata Error: sub attribute is blank")
elif episode_dict[episode_methods["sub"]] is True and "(SUB)" not in title:
title = f"{title} (SUB)"
elif episode_dict[episode_methods["sub"]] is False and title.endswith(" (SUB)"):
title = title[:-6]
else:
logger.error("Metadata Error: sub attribute must be True or False")
edits = {}
add_edit("title", episode.title, episode_dict, episode_methods, value=title)
add_edit("sort_title", episode.titleSort, episode_dict, episode_methods,
key="titleSort")
add_edit("rating", episode.rating, episode_dict, episode_methods)
add_edit("originally_available", str(episode.originallyAvailableAt)[:-9],
episode_dict, episode_methods, key="originallyAvailableAt")
add_edit("summary", episode.summary, episode_dict, episode_methods)
self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits)
edit_tags("director", episode, episode_dict, episode_methods)
edit_tags("writer", episode, episode_dict, episode_methods)
set_images(episode, episode_dict, episode_methods)
logger.info(f"Episode S{episode_id}E{season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
else:
logger.error(f"Metadata Error: episode {episode_str} invalid must have S##E## format")
else:
logger.error("Metadata Error: episodes attribute is blank")
elif "episodes" in methods:
logger.error("Metadata Error: episodes attribute only works for show libraries")

File diff suppressed because it is too large

@ -38,7 +38,7 @@ class RadarrAPI:
def get_profile_id(self, profile_name): def get_profile_id(self, profile_name):
profiles = "" profiles = ""
for profile in self.send_get("qualityProfile" if self.version == "v3" else "profile"): for profile in self._get("qualityProfile" if self.version == "v3" else "profile"):
if len(profiles) > 0: if len(profiles) > 0:
profiles += ", " profiles += ", "
profiles += profile["name"] profiles += profile["name"]
@ -47,19 +47,19 @@ class RadarrAPI:
raise Failed(f"Radarr Error: quality_profile: {profile_name} does not exist in radarr. Profiles available: {profiles}") raise Failed(f"Radarr Error: quality_profile: {profile_name} does not exist in radarr. Profiles available: {profiles}")
def get_tags(self): def get_tags(self):
return {tag["label"]: tag["id"] for tag in self.send_get("tag")} return {tag["label"]: tag["id"] for tag in self._get("tag")}
def add_tags(self, tags): def add_tags(self, tags):
added = False added = False
for label in tags: for label in tags:
if label not in self.tags: if str(label).lower() not in self.tags:
added = True added = True
self.send_post("tag", {"label": str(label)}) self._post("tag", {"label": str(label).lower()})
if added: if added:
self.tags = self.get_tags() self.tags = self.get_tags()
def lookup(self, tmdb_id): def lookup(self, tmdb_id):
results = self.send_get("movie/lookup", params={"term": f"tmdb:{tmdb_id}"}) results = self._get("movie/lookup", params={"term": f"tmdb:{tmdb_id}"})
if results: if results:
return results[0] return results[0]
else: else:
@ -78,7 +78,7 @@ class RadarrAPI:
search = options["search"] if "search" in options else self.search search = options["search"] if "search" in options else self.search
if tags: if tags:
self.add_tags(tags) self.add_tags(tags)
tag_nums = [self.tags[label] for label in tags if label in self.tags] tag_nums = [self.tags[label.lower()] for label in tags if label.lower() in self.tags]
for tmdb_id in tmdb_ids: for tmdb_id in tmdb_ids:
try: try:
movie_info = self.lookup(tmdb_id) movie_info = self.lookup(tmdb_id)
@ -105,7 +105,7 @@ class RadarrAPI:
} }
if tag_nums: if tag_nums:
url_json["tags"] = tag_nums url_json["tags"] = tag_nums
response = self.send_post("movie", url_json) response = self._post("movie", url_json)
if response.status_code < 400: if response.status_code < 400:
logger.info(f"Added to Radarr | {tmdb_id:<6} | {movie_info['title']}") logger.info(f"Added to Radarr | {tmdb_id:<6} | {movie_info['title']}")
add_count += 1 add_count += 1
@ -118,7 +118,7 @@ class RadarrAPI:
logger.info(f"{add_count} Movie{'s' if add_count > 1 else ''} added to Radarr") logger.info(f"{add_count} Movie{'s' if add_count > 1 else ''} added to Radarr")
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_get(self, url, params=None): def _get(self, url, params=None):
url_params = {"apikey": f"{self.token}"} url_params = {"apikey": f"{self.token}"}
if params: if params:
for param in params: for param in params:
@ -126,5 +126,5 @@ class RadarrAPI:
return requests.get(f"{self.base_url}{url}", params=url_params).json() return requests.get(f"{self.base_url}{url}", params=url_params).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_post(self, url, url_json): def _post(self, url, url_json):
return requests.post(f"{self.base_url}{url}", json=url_json, params={"apikey": f"{self.token}"}) return requests.post(f"{self.base_url}{url}", json=url_json, params={"apikey": f"{self.token}"})

@ -58,7 +58,7 @@ class SonarrAPI:
endpoint = "languageProfile" endpoint = "languageProfile"
else: else:
endpoint = "profile" endpoint = "profile"
for profile in self.send_get(endpoint): for profile in self._get(endpoint):
if len(profiles) > 0: if len(profiles) > 0:
profiles += ", " profiles += ", "
profiles += profile["name"] profiles += profile["name"]
@ -67,19 +67,19 @@ class SonarrAPI:
raise Failed(f"Sonarr Error: {profile_type}: {profile_name} does not exist in sonarr. Profiles available: {profiles}") raise Failed(f"Sonarr Error: {profile_type}: {profile_name} does not exist in sonarr. Profiles available: {profiles}")
def get_tags(self): def get_tags(self):
return {tag["label"]: tag["id"] for tag in self.send_get("tag")} return {tag["label"]: tag["id"] for tag in self._get("tag")}
def add_tags(self, tags): def add_tags(self, tags):
added = False added = False
for label in tags: for label in tags:
if label not in self.tags: if str(label).lower() not in self.tags:
added = True added = True
self.send_post("tag", {"label": str(label)}) self._post("tag", {"label": str(label).lower()})
if added: if added:
self.tags = self.get_tags() self.tags = self.get_tags()
def lookup(self, tvdb_id): def lookup(self, tvdb_id):
results = self.send_get("series/lookup", params={"term": f"tvdb:{tvdb_id}"}) results = self._get("series/lookup", params={"term": f"tvdb:{tvdb_id}"})
if results: if results:
return results[0] return results[0]
else: else:
@ -101,7 +101,7 @@ class SonarrAPI:
cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search
if tags: if tags:
self.add_tags(tags) self.add_tags(tags)
tag_nums = [self.tags[label] for label in tags if label in self.tags] tag_nums = [self.tags[label.lower()] for label in tags if label.lower() in self.tags]
for tvdb_id in tvdb_ids: for tvdb_id in tvdb_ids:
try: try:
show_info = self.lookup(tvdb_id) show_info = self.lookup(tvdb_id)
@ -135,7 +135,7 @@ class SonarrAPI:
} }
if tag_nums: if tag_nums:
url_json["tags"] = tag_nums url_json["tags"] = tag_nums
response = self.send_post("series", url_json) response = self._post("series", url_json)
if response.status_code < 400: if response.status_code < 400:
logger.info(f"Added to Sonarr | {tvdb_id:<6} | {show_info['title']}") logger.info(f"Added to Sonarr | {tvdb_id:<6} | {show_info['title']}")
add_count += 1 add_count += 1
@ -152,7 +152,7 @@ class SonarrAPI:
logger.info(f"{add_count} Show{'s' if add_count > 1 else ''} added to Sonarr") logger.info(f"{add_count} Show{'s' if add_count > 1 else ''} added to Sonarr")
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_get(self, url, params=None): def _get(self, url, params=None):
url_params = {"apikey": f"{self.token}"} url_params = {"apikey": f"{self.token}"}
if params: if params:
for param in params: for param in params:
@ -160,5 +160,5 @@ class SonarrAPI:
return requests.get(f"{self.base_url}{url}", params=url_params).json() return requests.get(f"{self.base_url}{url}", params=url_params).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_post(self, url, url_json): def _post(self, url, url_json):
return requests.post(f"{self.base_url}{url}", json=url_json, params={"apikey": f"{self.token}"}) return requests.post(f"{self.base_url}{url}", json=url_json, params={"apikey": f"{self.token}"})

@ -1,6 +1,8 @@
import logging, requests import logging, requests
from modules import util from modules import util
from modules.util import Failed from modules.util import Failed
from plexapi.exceptions import BadRequest, NotFound
from plexapi.video import Movie, Show
from retrying import retry from retrying import retry
logger = logging.getLogger("Plex Meta Manager") logger = logging.getLogger("Plex Meta Manager")
@ -19,17 +21,11 @@ class TautulliAPI:
self.url = params["url"] self.url = params["url"]
self.apikey = params["apikey"] self.apikey = params["apikey"]
def get_popular(self, library, time_range=30, stats_count=20, stats_count_buffer=20, status_message=True): def get_items(self, library, params):
return self.get_items(library, time_range=time_range, stats_count=stats_count, list_type="popular", stats_count_buffer=stats_count_buffer, status_message=status_message) query_size = int(params["list_size"]) + int(params["list_buffer"])
logger.info(f"Processing Tautulli Most {params['list_type'].capitalize()}: {params['list_size']} {'Movies' if library.is_movie else 'Shows'}")
def get_top(self, library, time_range=30, stats_count=20, stats_count_buffer=20, status_message=True): response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={params['list_days']}&stats_count={query_size}")
return self.get_items(library, time_range=time_range, stats_count=stats_count, list_type="top", stats_count_buffer=stats_count_buffer, status_message=status_message) stat_id = f"{'popular' if params['list_type'] == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}"
def get_items(self, library, time_range=30, stats_count=20, list_type="popular", stats_count_buffer=20, status_message=True):
if status_message:
logger.info(f"Processing Tautulli Most {'Popular' if list_type == 'popular' else 'Watched'}: {stats_count} {'Movies' if library.is_movie else 'Shows'}")
response = self.send_request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={time_range}&stats_count={int(stats_count) + int(stats_count_buffer)}")
stat_id = f"{'popular' if list_type == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}"
items = None items = None
for entry in response["response"]["data"]: for entry in response["response"]["data"]:
@ -39,17 +35,26 @@ class TautulliAPI:
if items is None: if items is None:
raise Failed("Tautulli Error: No Items found in the response") raise Failed("Tautulli Error: No Items found in the response")
section_id = self.get_section_id(library.name) section_id = self._section_id(library.name)
rating_keys = [] rating_keys = []
count = 0 count = 0
for item in items: for item in items:
if item["section_id"] == section_id and count < int(stats_count): if item["section_id"] == section_id and count < int(params['list_size']):
rating_keys.append(item["rating_key"]) try:
library.fetchItem(int(item["rating_key"]))
rating_keys.append(item["rating_key"])
except (BadRequest, NotFound):
new_item = library.exact_search(item["title"], year=item["year"])
if new_item:
rating_keys.append(new_item[0].ratingKey)
else:
logger.error(f"Plex Error: Item {item} not found")
continue
count += 1 count += 1
return rating_keys return rating_keys
def get_section_id(self, library_name): def _section_id(self, library_name):
response = self.send_request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names") response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_library_names")
section_id = None section_id = None
for entry in response["response"]["data"]: for entry in response["response"]["data"]:
if entry["section_name"] == library_name: if entry["section_name"] == library_name:
@ -59,6 +64,6 @@ class TautulliAPI:
else: raise Failed(f"Tautulli Error: No Library named {library_name} in the response") else: raise Failed(f"Tautulli Error: No Library named {library_name} in the response")
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url): def _request(self, url):
logger.debug(f"Tautulli URL: {url.replace(self.apikey, '################################')}") logger.debug(f"Tautulli URL: {url.replace(self.apikey, '################################')}")
return requests.get(url).json() return requests.get(url).json()
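With get_popular and get_top folded into a single get_items call, the caller now passes a params dict. A hedged sketch of the new call (tautulli and library are placeholder names for a configured TautulliAPI instance and the library wrapper it expects):

params = {"list_type": "popular", "list_days": 30, "list_size": 20, "list_buffer": 20}
rating_keys = tautulli.get_items(library, params)  # returns the Plex rating keys of the matched items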

@ -1,346 +0,0 @@
import logging
from modules import util
from modules.config import Config
from modules.util import Failed
logger = logging.getLogger("Plex Meta Manager")
def run_tests(default_dir):
try:
config = Config(default_dir)
logger.info("")
util.separator("Mapping Tests")
for library in config.libraries:
config.map_guids(library)
anidb_tests(config)
imdb_tests(config)
mal_tests(config)
tautulli_tests(config)
tmdb_tests(config)
trakt_tests(config)
tvdb_tests(config)
util.separator("Finished All Plex Meta Manager Tests")
except KeyboardInterrupt:
util.separator("Canceled Plex Meta Manager Tests")
def anidb_tests(config):
if config.AniDB:
util.separator("AniDB Tests")
try:
config.AniDB.get_items("anidb_id", 69, "en", status_message=False)
logger.info("Success | Get AniDB ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get AniDB ID: {e}")
try:
config.AniDB.get_items("anidb_relation", 69, "en", status_message=False)
logger.info("Success | Get AniDB Relation")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get AniDB Relation: {e}")
try:
config.AniDB.get_items("anidb_popular", 30, "en", status_message=False)
logger.info("Success | Get AniDB Popular")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get AniDB Popular: {e}")
try:
config.AniDB.validate_anidb_list(["69", "112"], "en")
logger.info("Success | Validate AniDB List")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Validate AniDB List: {e}")
else:
util.separator("AniDB Not Configured")
def imdb_tests(config):
if config.IMDb:
util.separator("IMDb Tests")
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/search/title/?groups=top_1000", "limit": 0}, "en", status_message=False)
if len(tmdb_ids) == 1000: logger.info("Success | IMDb URL get TMDb IDs")
else: logger.error(f"Failure | IMDb URL get TMDb IDs: {len(tmdb_ids)} Should be 1000")
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/list/ls026173135/", "limit": 0}, "en", status_message=False)
if len(tmdb_ids) == 250: logger.info("Success | IMDb URL get TMDb IDs")
else: logger.error(f"Failure | IMDb URL get TMDb IDs: {len(tmdb_ids)} Should be 250")
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_id", "tt0814243", "en", status_message=False)
if len(tmdb_ids) == 1: logger.info("Success | IMDb ID get TMDb IDs")
else: logger.error(f"Failure | IMDb ID get TMDb IDs: {len(tmdb_ids)} Should be 1")
else:
util.separator("IMDb Not Configured")
def mal_tests(config):
if config.MyAnimeList:
util.separator("MyAnimeList Tests")
mal_list_tests = [
("mal_all", 10),
("mal_airing", 10),
("mal_upcoming", 10),
("mal_tv", 10),
("mal_movie", 10),
("mal_ova", 10),
("mal_special", 10),
("mal_popular", 10),
("mal_favorite", 10),
("mal_suggested", 10),
("mal_userlist", {"limit": 10, "username": "@me", "status": "completed", "sort_by": "list_score"}),
("mal_season", {"limit": 10, "season": "fall", "year": 2020, "sort_by": "anime_score"})
]
for mal_list_test in mal_list_tests:
try:
config.MyAnimeList.get_items(mal_list_test[0], mal_list_test[1], status_message=False)
logger.info(f"Success | Get Anime using {util.pretty_names[mal_list_test[0]]}")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get Anime using {util.pretty_names[mal_list_test[0]]}: {e}")
else:
util.separator("MyAnimeList Not Configured")
def tautulli_tests(config):
if config.libraries[0].Tautulli:
util.separator("Tautulli Tests")
try:
config.libraries[0].Tautulli.get_section_id(config.libraries[0].name)
logger.info("Success | Get Section ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get Section ID: {e}")
try:
config.libraries[0].Tautulli.get_popular(config.libraries[0], status_message=False)
logger.info("Success | Get Popular")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get Popular: {e}")
try:
config.libraries[0].Tautulli.get_top(config.libraries[0], status_message=False)
logger.info("Success | Get Top")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get Top: {e}")
else:
util.separator("Tautulli Not Configured")
def tmdb_tests(config):
if config.TMDb:
util.separator("TMDb Tests")
try:
config.TMDb.convert_imdb_to_tmdb("tt0076759")
logger.info("Success | Convert IMDb to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert IMDb to TMDb: {e}")
try:
config.TMDb.convert_tmdb_to_imdb(11)
logger.info("Success | Convert TMDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TMDb to IMDb: {e}")
try:
config.TMDb.convert_imdb_to_tvdb("tt0458290")
logger.info("Success | Convert IMDb to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert IMDb to TVDb: {e}")
try:
config.TMDb.convert_tvdb_to_imdb(83268)
logger.info("Success | Convert TVDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TVDb to IMDb: {e}")
tmdb_list_tests = [
([11], "Movie"),
([4194], "Show"),
([10], "Collection"),
([1], "Person"),
([1], "Company"),
([2739], "Network"),
([8136], "List")
]
for tmdb_list_test in tmdb_list_tests:
try:
config.TMDb.validate_tmdb_list(tmdb_list_test[0], tmdb_type=tmdb_list_test[1])
logger.info(f"Success | Get TMDb {tmdb_list_test[1]}")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get TMDb {tmdb_list_test[1]}: {e}")
tmdb_list_tests = [
("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, True),
("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, False),
("tmdb_company", 1, True),
("tmdb_company", 1, False),
("tmdb_network", 2739, False),
("tmdb_keyword", 180547, True),
("tmdb_keyword", 180547, False),
("tmdb_now_playing", 10, True),
("tmdb_popular", 10, True),
("tmdb_popular", 10, False),
("tmdb_top_rated", 10, True),
("tmdb_top_rated", 10, False),
("tmdb_trending_daily", 10, True),
("tmdb_trending_daily", 10, False),
("tmdb_trending_weekly", 10, True),
("tmdb_trending_weekly", 10, False),
("tmdb_list", 7068209, True),
("tmdb_list", 7068209, False),
("tmdb_movie", 11, True),
("tmdb_collection", 10, True),
("tmdb_show", 4194, False)
]
for tmdb_list_test in tmdb_list_tests:
try:
config.TMDb.get_items(tmdb_list_test[0], tmdb_list_test[1], tmdb_list_test[2], status_message=False)
logger.info(f"Success | Get {'Movies' if tmdb_list_test[2] else 'Shows'} using {util.pretty_names[tmdb_list_test[0]]}")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get {'Movies' if tmdb_list_test[2] else 'Shows'} using {util.pretty_names[tmdb_list_test[0]]}: {e}")
else:
util.separator("TMDb Not Configured")
def trakt_tests(config):
if config.Trakt:
util.separator("Trakt Tests")
try:
config.Trakt.convert_imdb_to_tmdb("tt0076759")
logger.info("Success | Convert IMDb to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert IMDb to TMDb: {e}")
try:
config.Trakt.convert_tmdb_to_imdb(11)
logger.info("Success | Convert TMDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TMDb to IMDb: {e}")
try:
config.Trakt.convert_imdb_to_tvdb("tt0458290")
logger.info("Success | Convert IMDb to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert IMDb to TVDb: {e}")
try:
config.Trakt.convert_tvdb_to_imdb(83268)
logger.info("Success | Convert TVDb to IMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TVDb to IMDb: {e}")
try:
config.Trakt.convert_tmdb_to_tvdb(11)
logger.info("Success | Convert TMDb to TVDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TMDb to TVDb: {e}")
try:
config.Trakt.convert_tvdb_to_tmdb(83268)
logger.info("Success | Convert TVDb to TMDb")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Convert TVDb to TMDb: {e}")
try:
config.Trakt.validate_trakt_list(["https://trakt.tv/users/movistapp/lists/christmas-movies"])
logger.info("Success | Get List")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get List: {e}")
try:
config.Trakt.validate_trakt_watchlist(["me"], True)
logger.info("Success | Get Watchlist Movies")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get Watchlist Movies: {e}")
try:
config.Trakt.validate_trakt_watchlist(["me"], False)
logger.info("Success | Get Watchlist Shows")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get Watchlist Shows: {e}")
trakt_list_tests = [
("trakt_list", "https://trakt.tv/users/movistapp/lists/christmas-movies", True),
("trakt_trending", 10, True),
("trakt_trending", 10, False),
("trakt_watchlist", "me", True),
("trakt_watchlist", "me", False)
]
for trakt_list_test in trakt_list_tests:
try:
config.Trakt.get_items(trakt_list_test[0], trakt_list_test[1], trakt_list_test[2], status_message=False)
logger.info(f"Success | Get {'Movies' if trakt_list_test[2] else 'Shows'} using {util.pretty_names[trakt_list_test[0]]}")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | Get {'Movies' if trakt_list_test[2] else 'Shows'} using {util.pretty_names[trakt_list_test[0]]}: {e}")
else:
util.separator("Trakt Not Configured")
def tvdb_tests(config):
if config.TVDb:
util.separator("TVDb Tests")
tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/arrowverse", "en", status_message=False)
if len(tvdb_ids) == 10 and len(tmdb_ids) == 0: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs")
else: logger.error(f"Failure | TVDb URL get TVDb IDs and TMDb IDs: {len(tvdb_ids)} Should be 10 and {len(tmdb_ids)} Should be 0")
tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/6957", "en", status_message=False)
if len(tvdb_ids) == 4 and len(tmdb_ids) == 2: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs")
else: logger.error(f"Failure | TVDb URL get TVDb IDs and TMDb IDs: {len(tvdb_ids)} Should be 4 and {len(tmdb_ids)} Should be 2")
try:
config.TVDb.get_items("tvdb_show", "https://www.thetvdb.com/series/arrow", "en", status_message=False)
logger.info("Success | TVDb URL get TVDb Series ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | TVDb URL get TVDb Series ID: {e}")
try:
config.TVDb.get_items("tvdb_show", 279121, "en", status_message=False)
logger.info("Success | TVDb ID get TVDb Series ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | TVDb ID get TVDb Series ID: {e}")
try:
config.TVDb.get_items("tvdb_movie", "https://www.thetvdb.com/movies/the-lord-of-the-rings-the-fellowship-of-the-ring", "en", status_message=False)
logger.info("Success | TVDb URL get TVDb Movie ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | TVDb URL get TVDb Movie ID: {e}")
try:
config.TVDb.get_items("tvdb_movie", 107, "en", status_message=False)
logger.info("Success | TVDb ID get TVDb Movie ID")
except Failed as e:
util.print_stacktrace()
logger.error(f"Failure | TVDb ID get TVDb Movie ID: {e}")
else:
util.separator("TVDb Not Configured")

@ -109,7 +109,8 @@ discover_tv_sort = [
] ]
class TMDbAPI: class TMDbAPI:
def __init__(self, params): def __init__(self, config, params):
self.config = config
self.TMDb = tmdbv3api.TMDb() self.TMDb = tmdbv3api.TMDb()
self.TMDb.api_key = params["apikey"] self.TMDb.api_key = params["apikey"]
self.TMDb.language = params["language"] self.TMDb.language = params["language"]
@ -131,29 +132,22 @@ class TMDbAPI:
self.image_url = "https://image.tmdb.org/t/p/original" self.image_url = "https://image.tmdb.org/t/p/original"
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_from_tmdb(self, tmdb_id, convert_to, is_movie): def convert_from(self, tmdb_id, convert_to, is_movie):
try: try:
id_to_return = self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to] id_to_return = self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to]
if not id_to_return or (convert_to == "tvdb_id" and id_to_return == 0): if not id_to_return or (convert_to == "tvdb_id" and id_to_return == 0):
raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}") raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}")
return id_to_return return id_to_return
except TMDbException: except TMDbException:
raise Failed(f"TMDb Error: {'Movie' if is_movie else 'Show'} TMDb ID: {tmdb_id} not found") raise Failed(f"TMDb Error: TMDb {'Movie' if is_movie else 'Show'} ID: {tmdb_id} not found")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_to_tmdb(self, external_id, external_source, is_movie): def convert_to(self, external_id, external_source, is_movie):
search_results = self.Movie.external(external_id=external_id, external_source=external_source) search_results = self.Movie.external(external_id=external_id, external_source=external_source)
search = search_results["movie_results" if is_movie else "tv_results"] search = search_results["movie_results" if is_movie else "tv_results"]
if len(search) == 1: return int(search[0]["id"]) if len(search) == 1: return int(search[0]["id"])
else: raise Failed(f"TMDb Error: No TMDb ID found for {external_source.upper().replace('B_', 'b ')} {external_id}") else: raise Failed(f"TMDb Error: No TMDb ID found for {external_source.upper().replace('B_', 'b ')} {external_id}")
def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_from_tmdb(tmdb_id, "imdb_id", is_movie)
def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_to_tmdb(imdb_id, "imdb_id", is_movie)
def convert_tmdb_to_tvdb(self, tmdb_id): return self.convert_from_tmdb(tmdb_id, "tvdb_id", False)
def convert_tvdb_to_tmdb(self, tvdb_id): return self.convert_to_tmdb(tvdb_id, "tvdb_id", False)
def convert_tvdb_to_imdb(self, tvdb_id): return self.convert_tmdb_to_imdb(self.convert_tvdb_to_tmdb(tvdb_id), False)
def convert_imdb_to_tvdb(self, imdb_id): return self.convert_tmdb_to_tvdb(self.convert_imdb_to_tmdb(imdb_id, False))
def get_movie_show_or_collection(self, tmdb_id, is_movie): def get_movie_show_or_collection(self, tmdb_id, is_movie):
if is_movie: if is_movie:
try: return self.get_collection(tmdb_id) try: return self.get_collection(tmdb_id)
@ -183,22 +177,22 @@ class TMDbAPI:
except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}") except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_person_credits(self, tmdb_id): def _person_credits(self, tmdb_id):
try: return self.Person.combined_credits(tmdb_id) try: return self.Person.combined_credits(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}") except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_company(self, tmdb_id): def _company(self, tmdb_id):
try: return self.Company.details(tmdb_id) try: return self.Company.details(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Company found for TMDb ID {tmdb_id}: {e}") except TMDbException as e: raise Failed(f"TMDb Error: No Company found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_network(self, tmdb_id): def _network(self, tmdb_id):
try: return self.Network.details(tmdb_id) try: return self.Network.details(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Network found for TMDb ID {tmdb_id}: {e}") except TMDbException as e: raise Failed(f"TMDb Error: No Network found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_keyword(self, tmdb_id): def _keyword(self, tmdb_id):
try: return self.Keyword.details(tmdb_id) try: return self.Keyword.details(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Keyword found for TMDb ID {tmdb_id}: {e}") except TMDbException as e: raise Failed(f"TMDb Error: No Keyword found for TMDb ID {tmdb_id}: {e}")
@ -207,17 +201,17 @@ class TMDbAPI:
try: return self.List.details(tmdb_id, all_details=True) try: return self.List.details(tmdb_id, all_details=True)
except TMDbException as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}") except TMDbException as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}")
def get_credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False): def _credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False):
movie_ids = [] movie_ids = []
show_ids = [] show_ids = []
actor_credits = self.get_person_credits(tmdb_id) actor_credits = self._person_credits(tmdb_id)
if actor: if actor:
for credit in actor_credits.cast: for credit in actor_credits.cast:
if credit.media_type == "movie": if credit.media_type == "movie":
movie_ids.append(credit.id) movie_ids.append(credit.id)
elif credit.media_type == "tv": elif credit.media_type == "tv":
try: try:
show_ids.append(self.convert_tmdb_to_tvdb(credit.id)) show_ids.append(self.config.Convert.tmdb_to_tvdb(credit.id, fail=True))
except Failed as e: except Failed as e:
logger.warning(e) logger.warning(e)
for credit in actor_credits.crew: for credit in actor_credits.crew:
@ -229,12 +223,12 @@ class TMDbAPI:
movie_ids.append(credit.id) movie_ids.append(credit.id)
elif credit.media_type == "tv": elif credit.media_type == "tv":
try: try:
show_ids.append(self.convert_tmdb_to_tvdb(credit.id)) show_ids.append(self.config.Convert.tmdb_to_tvdb(credit.id, fail=True))
except Failed as e: except Failed as e:
logger.warning(e) logger.warning(e)
return movie_ids, show_ids return movie_ids, show_ids
def get_pagenation(self, method, amount, is_movie): def _pagenation(self, method, amount, is_movie):
ids = [] ids = []
count = 0 count = 0
for x in range(int(amount / 20) + 1): for x in range(int(amount / 20) + 1):
@ -246,15 +240,16 @@ class TMDbAPI:
else: raise Failed(f"TMDb Error: {method} method not supported") else: raise Failed(f"TMDb Error: {method} method not supported")
for tmdb_item in tmdb_items: for tmdb_item in tmdb_items:
try: try:
ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id)) ids.append(tmdb_item.id if is_movie else self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True))
count += 1 count += 1
except Failed: except Failed as e:
logger.error(e)
pass pass
if count == amount: break if count == amount: break
if count == amount: break if count == amount: break
return ids return ids
def get_discover(self, attrs, amount, is_movie): def _discover(self, attrs, amount, is_movie):
ids = [] ids = []
count = 0 count = 0
for date_attr in discover_dates: for date_attr in discover_dates:
@ -269,17 +264,35 @@ class TMDbAPI:
tmdb_items = self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs) tmdb_items = self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
for tmdb_item in tmdb_items: for tmdb_item in tmdb_items:
try: try:
ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id)) ids.append(tmdb_item.id if is_movie else self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True))
count += 1 count += 1
except Failed: except Failed as e:
logger.error(e)
pass pass
if count == amount: break if count == amount: break
if count == amount: break if count == amount: break
return ids, amount return ids, amount
def get_items(self, method, data, is_movie, status_message=True): def validate_tmdb_list(self, tmdb_list, tmdb_type):
if status_message: tmdb_values = []
logger.debug(f"Data: {data}") for tmdb_id in tmdb_list:
try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_type))
except Failed as e: logger.error(e)
if len(tmdb_values) == 0: raise Failed(f"TMDb Error: No valid TMDb IDs in {tmdb_list}")
return tmdb_values
def validate_tmdb(self, tmdb_id, tmdb_type):
if tmdb_type == "Movie": self.get_movie(tmdb_id)
elif tmdb_type == "Show": self.get_show(tmdb_id)
elif tmdb_type == "Collection": self.get_collection(tmdb_id)
elif tmdb_type == "Person": self.get_person(tmdb_id)
elif tmdb_type == "Company": self._company(tmdb_id)
elif tmdb_type == "Network": self._network(tmdb_id)
elif tmdb_type == "List": self.get_list(tmdb_id)
return tmdb_id
def get_items(self, method, data, is_movie):
logger.debug(f"Data: {data}")
pretty = util.pretty_names[method] if method in util.pretty_names else method pretty = util.pretty_names[method] if method in util.pretty_names else method
media_type = "Movie" if is_movie else "Show" media_type = "Movie" if is_movie else "Show"
movie_ids = [] movie_ids = []
@ -291,32 +304,30 @@ class TMDbAPI:
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]: if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
tmdb_id = int(data) tmdb_id = int(data)
if method == "tmdb_company": if method == "tmdb_company":
tmdb_name = str(self.get_company(tmdb_id).name) tmdb_name = str(self._company(tmdb_id).name)
attrs = {"with_companies": tmdb_id} attrs = {"with_companies": tmdb_id}
elif method == "tmdb_network": elif method == "tmdb_network":
tmdb_name = str(self.get_network(tmdb_id).name) tmdb_name = str(self._network(tmdb_id).name)
attrs = {"with_networks": tmdb_id} attrs = {"with_networks": tmdb_id}
elif method == "tmdb_keyword": elif method == "tmdb_keyword":
tmdb_name = str(self.get_keyword(tmdb_id).name) tmdb_name = str(self._keyword(tmdb_id).name)
attrs = {"with_keywords": tmdb_id} attrs = {"with_keywords": tmdb_id}
limit = 0 limit = 0
else: else:
attrs = data.copy() attrs = data.copy()
limit = int(attrs.pop("limit")) limit = int(attrs.pop("limit"))
if is_movie: movie_ids, amount = self.get_discover(attrs, limit, is_movie) if is_movie: movie_ids, amount = self._discover(attrs, limit, is_movie)
else: show_ids, amount = self.get_discover(attrs, limit, is_movie) else: show_ids, amount = self._discover(attrs, limit, is_movie)
if status_message: if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]: logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})")
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})") elif method == "tmdb_discover":
elif method == "tmdb_discover": logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}")
logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}") for attr, value in attrs.items():
for attr, value in attrs.items(): logger.info(f" {attr}: {value}")
logger.info(f" {attr}: {value}")
elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]: elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]:
if is_movie: movie_ids = self.get_pagenation(method, data, is_movie) if is_movie: movie_ids = self._pagenation(method, data, is_movie)
else: show_ids = self.get_pagenation(method, data, is_movie) else: show_ids = self._pagenation(method, data, is_movie)
if status_message: logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
else: else:
tmdb_id = int(data) tmdb_id = int(data)
if method == "tmdb_list": if method == "tmdb_list":
@ -326,7 +337,7 @@ class TMDbAPI:
if tmdb_item.media_type == "movie": if tmdb_item.media_type == "movie":
movie_ids.append(tmdb_item.id) movie_ids.append(tmdb_item.id)
elif tmdb_item.media_type == "tv": elif tmdb_item.media_type == "tv":
try: show_ids.append(self.convert_tmdb_to_tvdb(tmdb_item.id)) try: show_ids.append(self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True))
except Failed: pass except Failed: pass
elif method == "tmdb_movie": elif method == "tmdb_movie":
tmdb_name = str(self.get_movie(tmdb_id).title) tmdb_name = str(self.get_movie(tmdb_id).title)
@ -338,38 +349,19 @@ class TMDbAPI:
movie_ids.append(tmdb_item["id"]) movie_ids.append(tmdb_item["id"])
elif method == "tmdb_show": elif method == "tmdb_show":
tmdb_name = str(self.get_show(tmdb_id).name) tmdb_name = str(self.get_show(tmdb_id).name)
show_ids.append(self.convert_tmdb_to_tvdb(tmdb_id)) show_ids.append(self.config.Convert.tmdb_to_tvdb(tmdb_id, fail=True))
else: else:
tmdb_name = str(self.get_person(tmdb_id).name) tmdb_name = str(self.get_person(tmdb_id).name)
if method == "tmdb_actor": movie_ids, show_ids = self.get_credits(tmdb_id, actor=True) if method == "tmdb_actor": movie_ids, show_ids = self._credits(tmdb_id, actor=True)
elif method == "tmdb_director": movie_ids, show_ids = self.get_credits(tmdb_id, director=True) elif method == "tmdb_director": movie_ids, show_ids = self._credits(tmdb_id, director=True)
elif method == "tmdb_producer": movie_ids, show_ids = self.get_credits(tmdb_id, producer=True) elif method == "tmdb_producer": movie_ids, show_ids = self._credits(tmdb_id, producer=True)
elif method == "tmdb_writer": movie_ids, show_ids = self.get_credits(tmdb_id, writer=True) elif method == "tmdb_writer": movie_ids, show_ids = self._credits(tmdb_id, writer=True)
elif method == "tmdb_crew": movie_ids, show_ids = self.get_credits(tmdb_id, crew=True) elif method == "tmdb_crew": movie_ids, show_ids = self._credits(tmdb_id, crew=True)
else: raise Failed(f"TMDb Error: Method {method} not supported") else: raise Failed(f"TMDb Error: Method {method} not supported")
if status_message and len(movie_ids) > 0: if len(movie_ids) > 0:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})") logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})")
if status_message and not is_movie and len(show_ids) > 0: if not is_movie and len(show_ids) > 0:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})") logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})")
if status_message: logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TMDb IDs Found: {movie_ids}") logger.debug(f"TVDb IDs Found: {show_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids return movie_ids, show_ids
def validate_tmdb_list(self, tmdb_list, tmdb_type):
tmdb_values = []
for tmdb_id in tmdb_list:
try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_type))
except Failed as e: logger.error(e)
if len(tmdb_values) == 0: raise Failed(f"TMDb Error: No valid TMDb IDs in {tmdb_list}")
return tmdb_values
def validate_tmdb(self, tmdb_id, tmdb_type):
if tmdb_type == "Movie": self.get_movie(tmdb_id)
elif tmdb_type == "Show": self.get_show(tmdb_id)
elif tmdb_type == "Collection": self.get_collection(tmdb_id)
elif tmdb_type == "Person": self.get_person(tmdb_id)
elif tmdb_type == "Company": self.get_company(tmdb_id)
elif tmdb_type == "Network": self.get_network(tmdb_id)
elif tmdb_type == "List": self.get_list(tmdb_id)
return tmdb_id
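validate_tmdb_list and validate_tmdb now sit ahead of get_items and simply re-use the getter for each type. A minimal sketch of how they are called, mirroring the removed test module (config stands in for a loaded Config object with TMDb configured):

valid_movie_ids = config.TMDb.validate_tmdb_list([11], tmdb_type="Movie")  # raises Failed if no ID validates
valid_list_ids = config.TMDb.validate_tmdb_list([8136], tmdb_type="List")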

@ -37,11 +37,11 @@ class TraktAPI:
self.config_path = params["config_path"] self.config_path = params["config_path"]
self.authorization = authorization self.authorization = authorization
Trakt.configuration.defaults.client(self.client_id, self.client_secret) Trakt.configuration.defaults.client(self.client_id, self.client_secret)
if not self.save_authorization(self.authorization): if not self._save(self.authorization):
if not self.refresh_authorization(): if not self._refresh():
self.get_authorization() self._authorization()
def get_authorization(self): def _authorization(self):
url = Trakt["oauth"].authorize_url(self.redirect_uri) url = Trakt["oauth"].authorize_url(self.redirect_uri)
logger.info(f"Navigate to: {url}") logger.info(f"Navigate to: {url}")
logger.info("If you get an OAuth error your client_id or client_secret is invalid") logger.info("If you get an OAuth error your client_id or client_secret is invalid")
@ -52,10 +52,10 @@ class TraktAPI:
new_authorization = Trakt["oauth"].token(pin, self.redirect_uri) new_authorization = Trakt["oauth"].token(pin, self.redirect_uri)
if not new_authorization: if not new_authorization:
raise Failed("Trakt Error: Invalid trakt pin. If you're sure you typed it in correctly your client_id or client_secret may be invalid") raise Failed("Trakt Error: Invalid trakt pin. If you're sure you typed it in correctly your client_id or client_secret may be invalid")
if not self.save_authorization(new_authorization): if not self._save(new_authorization):
raise Failed("Trakt Error: New Authorization Failed") raise Failed("Trakt Error: New Authorization Failed")
def check_authorization(self, authorization): def _check(self, authorization):
try: try:
with Trakt.configuration.oauth.from_response(authorization, refresh=True): with Trakt.configuration.oauth.from_response(authorization, refresh=True):
if Trakt["users/settings"].get(): if Trakt["users/settings"].get():
@ -63,15 +63,15 @@ class TraktAPI:
except ValueError: pass except ValueError: pass
return False return False
def refresh_authorization(self): def _refresh(self):
if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]: if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]:
logger.info("Refreshing Access Token...") logger.info("Refreshing Access Token...")
refreshed_authorization = Trakt["oauth"].token_refresh(self.authorization["refresh_token"], self.redirect_uri) refreshed_authorization = Trakt["oauth"].token_refresh(self.authorization["refresh_token"], self.redirect_uri)
return self.save_authorization(refreshed_authorization) return self._save(refreshed_authorization)
return False return False
def save_authorization(self, authorization): def _save(self, authorization):
if authorization and self.check_authorization(authorization): if authorization and self._check(authorization):
if self.authorization != authorization: if self.authorization != authorization:
yaml.YAML().allow_duplicate_keys = True yaml.YAML().allow_duplicate_keys = True
config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path)) config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
@ -90,30 +90,23 @@ class TraktAPI:
return True return True
return False return False
def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_id(tmdb_id, "tmdb", "imdb", "movie" if is_movie else "show")
def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_id(imdb_id, "imdb", "tmdb", "movie" if is_movie else "show")
def convert_tmdb_to_tvdb(self, tmdb_id): return self.convert_id(tmdb_id, "tmdb", "tvdb", "show")
def convert_tvdb_to_tmdb(self, tvdb_id): return self.convert_id(tvdb_id, "tvdb", "tmdb", "show")
def convert_tvdb_to_imdb(self, tvdb_id): return self.convert_id(tvdb_id, "tvdb", "imdb", "show")
def convert_imdb_to_tvdb(self, imdb_id): return self.convert_id(imdb_id, "imdb", "tvdb", "show")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_id(self, external_id, from_source, to_source, media_type): def convert(self, external_id, from_source, to_source, media_type):
lookup = Trakt["search"].lookup(external_id, from_source, media_type) lookup = Trakt["search"].lookup(external_id, from_source, media_type)
if lookup: if lookup:
lookup = lookup[0] if isinstance(lookup, list) else lookup lookup = lookup[0] if isinstance(lookup, list) else lookup
if lookup.get_key(to_source): if lookup.get_key(to_source):
return lookup.get_key(to_source) if to_source == "imdb" else int(lookup.get_key(to_source)) return lookup.get_key(to_source) if to_source == "imdb" else int(lookup.get_key(to_source))
raise Failed(f"No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID {external_id}") raise Failed(f"Trakt Error: No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID: {external_id}")
def collection(self, data, is_movie): def collection(self, data, is_movie):
return self.user_list("collection", data, is_movie) return self._user_list("collection", data, is_movie)
def watchlist(self, data, is_movie): def _watchlist(self, data, is_movie):
return self.user_list("watchlist", data, is_movie) return self._user_list("watchlist", data, is_movie)
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed) @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def user_list(self, list_type, data, is_movie): def _user_list(self, list_type, data, is_movie):
items = Trakt[f"users/{data}/{list_type}"].movies() if is_movie else Trakt[f"users/{data}/{list_type}"].shows() items = Trakt[f"users/{data}/{list_type}"].movies() if is_movie else Trakt[f"users/{data}/{list_type}"].shows()
if items is None: raise Failed("Trakt Error: No List found") if items is None: raise Failed("Trakt Error: No List found")
else: return [i for i in items] else: return [i for i in items]
@ -126,16 +119,16 @@ class TraktAPI:
else: return trakt_list else: return trakt_list
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url): def _request(self, url):
return requests.get(url, headers={"Content-Type": "application/json", "trakt-api-version": "2", "trakt-api-key": self.client_id}).json() return requests.get(url, headers={"Content-Type": "application/json", "trakt-api-version": "2", "trakt-api-key": self.client_id}).json()
def get_collection(self, username, is_movie): def _collection(self, username, is_movie):
items = self.send_request(f"{self.base_url}/users/{username}/collection/{'movies' if is_movie else 'shows'}") items = self._request(f"{self.base_url}/users/{username}/collection/{'movies' if is_movie else 'shows'}")
if is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], [] if is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], []
else: return [], [item["show"]["ids"]["tvdb"] for item in items] else: return [], [item["show"]["ids"]["tvdb"] for item in items]
def get_pagenation(self, pagenation, amount, is_movie): def _pagenation(self, pagenation, amount, is_movie):
items = self.send_request(f"{self.base_url}/{'movies' if is_movie else 'shows'}/{pagenation}?limit={amount}") items = self._request(f"{self.base_url}/{'movies' if is_movie else 'shows'}/{pagenation}?limit={amount}")
if pagenation == "popular" and is_movie: return [item["ids"]["tmdb"] for item in items], [] if pagenation == "popular" and is_movie: return [item["ids"]["tmdb"] for item in items], []
elif pagenation == "popular": return [], [item["ids"]["tvdb"] for item in items] elif pagenation == "popular": return [], [item["ids"]["tvdb"] for item in items]
elif is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], [] elif is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], []
@ -146,9 +139,9 @@ class TraktAPI:
for value in values: for value in values:
try: try:
if trakt_type == "watchlist" and is_movie is not None: if trakt_type == "watchlist" and is_movie is not None:
self.watchlist(value, is_movie) self._watchlist(value, is_movie)
elif trakt_type == "collection" and is_movie is not None: elif trakt_type == "collection" and is_movie is not None:
self.get_collection(value, is_movie) self._collection(value, is_movie)
else: else:
self.standard_list(value) self.standard_list(value)
trakt_values.append(value) trakt_values.append(value)
@ -163,33 +156,31 @@ class TraktAPI:
raise Failed(f"Trakt Error: No valid Trakt Lists in {values}") raise Failed(f"Trakt Error: No valid Trakt Lists in {values}")
return trakt_values return trakt_values
def get_items(self, method, data, is_movie, status_message=True): def get_items(self, method, data, is_movie):
if status_message: logger.debug(f"Data: {data}")
logger.debug(f"Data: {data}")
pretty = self.aliases[method] if method in self.aliases else method pretty = self.aliases[method] if method in self.aliases else method
media_type = "Movie" if is_movie else "Show" media_type = "Movie" if is_movie else "Show"
if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]: if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:
movie_ids, show_ids = self.get_pagenation(method[6:], data, is_movie) movie_ids, show_ids = self._pagenation(method[6:], data, is_movie)
if status_message: logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
elif method == "trakt_collection": elif method == "trakt_collection":
movie_ids, show_ids = self.get_collection(data, is_movie) movie_ids, show_ids = self._collection(data, is_movie)
if status_message: logger.info(f"Processing {pretty} {media_type}s for {data}")
logger.info(f"Processing {pretty} {media_type}s for {data}")
else: else:
show_ids = [] show_ids = []
movie_ids = [] movie_ids = []
if method == "trakt_watchlist": trakt_items = self.watchlist(data, is_movie) if method == "trakt_watchlist": trakt_items = self._watchlist(data, is_movie)
elif method == "trakt_list": trakt_items = self.standard_list(data).items() elif method == "trakt_list": trakt_items = self.standard_list(data).items()
else: raise Failed(f"Trakt Error: Method {method} not supported") else: raise Failed(f"Trakt Error: Method {method} not supported")
if status_message: logger.info(f"Processing {pretty}: {data}") logger.info(f"Processing {pretty}: {data}")
for trakt_item in trakt_items: for trakt_item in trakt_items:
if isinstance(trakt_item, Movie): movie_ids.append(int(trakt_item.get_key("tmdb"))) if isinstance(trakt_item, Movie):
elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids: show_ids.append(int(trakt_item.pk[1])) movie_ids.append(int(trakt_item.get_key("tmdb")))
elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids: show_ids.append(int(trakt_item.show.pk[1])) elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids:
if status_message: show_ids.append(int(trakt_item.pk[1]))
logger.debug(f"Trakt {media_type} Found: {trakt_items}") elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids:
if status_message: show_ids.append(int(trakt_item.show.pk[1]))
logger.debug(f"TMDb IDs Found: {movie_ids}") logger.debug(f"Trakt {media_type} Found: {trakt_items}")
logger.debug(f"TVDb IDs Found: {show_ids}") logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids return movie_ids, show_ids
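The per-pair convert_* wrappers above are replaced by a single convert(external_id, from_source, to_source, media_type) call. An illustrative sketch, where trakt is a placeholder for an authorized TraktAPI instance:

tmdb_id = trakt.convert("tt0076759", "imdb", "tmdb", "movie")  # raises Failed when no match is found
tvdb_id = trakt.convert(4194, "tmdb", "tvdb", "show")          # non-IMDb targets come back as ints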

@ -25,7 +25,7 @@ class TVDbObj:
else: else:
raise Failed(f"TVDb Error: {tvdb_url} must begin with {TVDb.movies_url if is_movie else TVDb.series_url}") raise Failed(f"TVDb Error: {tvdb_url} must begin with {TVDb.movies_url if is_movie else TVDb.series_url}")
response = TVDb.send_request(tvdb_url, language) response = TVDb._request(tvdb_url, language)
results = response.xpath(f"//*[text()='TheTVDB.com {self.media_type} ID']/parent::node()/span/text()") results = response.xpath(f"//*[text()='TheTVDB.com {self.media_type} ID']/parent::node()/span/text()")
if len(results) > 0: if len(results) > 0:
self.id = int(results[0]) self.id = int(results[0])
@ -57,15 +57,17 @@ class TVDbObj:
if len(results) > 0: if len(results) > 0:
try: try:
tmdb_id = util.regex_first_int(results[0], "TMDb ID") tmdb_id = util.regex_first_int(results[0], "TMDb ID")
except Failed as e: except Failed:
logger.error(e) pass
if not tmdb_id: if tmdb_id is None:
results = response.xpath("//*[text()='IMDB']/@href") results = response.xpath("//*[text()='IMDB']/@href")
if len(results) > 0: if len(results) > 0:
try: try:
tmdb_id, _ = TVDb.config.Arms.imdb_to_ids(util.get_id_from_imdb_url(results[0]), language) tmdb_id = TVDb.config.Convert.imdb_to_tmdb(util.get_id_from_imdb_url(results[0]), fail=True)
except Failed as e: except Failed:
logger.error(e) pass
if tmdb_id is None:
raise Failed(f"TVDB Error: No TMDb ID found for {self.title}")
self.tmdb_id = tmdb_id self.tmdb_id = tmdb_id
self.tvdb_url = tvdb_url self.tvdb_url = tvdb_url
self.language = language self.language = language
@ -104,16 +106,16 @@ class TVDbAPI:
         return TVDbObj(tvdb_url, language, True, self)
     def get_list_description(self, tvdb_url, language):
-        description = self.send_request(tvdb_url, language).xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()")
+        description = self._request(tvdb_url, language).xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()")
         return description[0] if len(description) > 0 and len(description[0]) > 0 else ""
-    def get_tvdb_ids_from_url(self, tvdb_url, language):
+    def _ids_from_url(self, tvdb_url, language):
         show_ids = []
         movie_ids = []
         tvdb_url = tvdb_url.strip()
         if tvdb_url.startswith((self.list_url, self.alt_list_url)):
             try:
-                items = self.send_request(tvdb_url, language).xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']")
+                items = self._request(tvdb_url, language).xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']")
                 for item in items:
                     title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0]
                     item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0]
@ -143,26 +145,24 @@ class TVDbAPI:
raise Failed(f"TVDb Error: {tvdb_url} must begin with {self.list_url}") raise Failed(f"TVDb Error: {tvdb_url} must begin with {self.list_url}")
@retry(stop_max_attempt_number=6, wait_fixed=10000) @retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_request(self, url, language): def _request(self, url, language):
return html.fromstring(requests.get(url, headers={"Accept-Language": language}).content) return html.fromstring(requests.get(url, headers={"Accept-Language": language}).content)
def get_items(self, method, data, language, status_message=True): def get_items(self, method, data, language):
pretty = util.pretty_names[method] if method in util.pretty_names else method pretty = util.pretty_names[method] if method in util.pretty_names else method
show_ids = [] show_ids = []
movie_ids = [] movie_ids = []
if status_message: logger.info(f"Processing {pretty}: {data}")
logger.info(f"Processing {pretty}: {data}")
if method == "tvdb_show": if method == "tvdb_show":
show_ids.append(self.get_series(language, data).id) show_ids.append(self.get_series(language, data).id)
elif method == "tvdb_movie": elif method == "tvdb_movie":
movie_ids.append(self.get_movie(language, data).id) movie_ids.append(self.get_movie(language, data).tmdb_id)
elif method == "tvdb_list": elif method == "tvdb_list":
tmdb_ids, tvdb_ids = self.get_tvdb_ids_from_url(data, language) tmdb_ids, tvdb_ids = self._ids_from_url(data, language)
movie_ids.extend(tmdb_ids) movie_ids.extend(tmdb_ids)
show_ids.extend(tvdb_ids) show_ids.extend(tvdb_ids)
else: else:
raise Failed(f"TVDb Error: Method {method} not supported") raise Failed(f"TVDb Error: Method {method} not supported")
if status_message: logger.debug(f"TMDb IDs Found: {movie_ids}")
logger.debug(f"TMDb IDs Found: {movie_ids}") logger.debug(f"TVDb IDs Found: {show_ids}")
logger.debug(f"TVDb IDs Found: {show_ids}")
return movie_ids, show_ids return movie_ids, show_ids
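
Both the Trakt and TVDb `get_items` methods return the same `(movie_ids, show_ids)` pair, where movies are TMDb IDs and shows are TVDb IDs; note that `tvdb_movie` now yields the resolved `tmdb_id` rather than the raw TVDb movie ID. A small illustrative helper for combining several such pairs (this `merge_id_lists` function is hypothetical, not in the codebase):

```python
def merge_id_lists(*pairs):
    """Merge (movie_ids, show_ids) pairs from several builder sources, keeping order and dropping duplicates."""
    movie_ids, show_ids = [], []
    for movies, shows in pairs:
        movie_ids.extend(m for m in movies if m not in movie_ids)
        show_ids.extend(s for s in shows if s not in show_ids)
    return movie_ids, show_ids

print(merge_id_lists(([603, 604], [121361]), ([603], [81189])))
# -> ([603, 604], [121361, 81189])
```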

@ -1,5 +1,6 @@
 import logging, re, signal, sys, time, traceback
 from datetime import datetime
+from plexapi.exceptions import BadRequest, NotFound, Unauthorized
 try:
     import msvcrt
@ -19,6 +20,9 @@ class Failed(Exception):
 def retry_if_not_failed(exception):
     return not isinstance(exception, Failed)
+def retry_if_not_plex(exception):
+    return not isinstance(exception, (BadRequest, NotFound, Unauthorized))
 separating_character = "="
 screen_width = 100
@ -207,11 +211,22 @@ def get_bool(method_name, method_data):
     else:
         raise Failed(f"Collection Error: {method_name} attribute: {method_data} invalid must be either true or false")
-def get_list(data, lower=False, split=True):
+def compile_list(data):
+    if isinstance(data, list):
+        text = ""
+        for item in data:
+            text += f"{',' if len(text) > 0 else ''}{item}"
+        return text
+    else:
+        return data
+def get_list(data, lower=False, split=True, int_list=False):
     if isinstance(data, list): return data
     elif isinstance(data, dict): return [data]
     elif split is False: return [str(data)]
     elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
+    elif int_list is True: return [int(d.strip()) for d in str(data).split(",")]
     else: return [d.strip() for d in str(data).split(",")]
 def get_int_list(data, id_type):
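
The new `compile_list` flattens a YAML list back into a comma-separated string, while `get_list` gains an `int_list` mode for attributes that expect numeric IDs. A few hedged usage examples based on the definitions above:

```python
# Reproduced from the definitions above so the demo is self-contained.
def compile_list(data):
    if isinstance(data, list):
        text = ""
        for item in data:
            text += f"{',' if len(text) > 0 else ''}{item}"
        return text
    else:
        return data

def get_list(data, lower=False, split=True, int_list=False):
    if isinstance(data, list): return data
    elif isinstance(data, dict): return [data]
    elif split is False: return [str(data)]
    elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
    elif int_list is True: return [int(d.strip()) for d in str(data).split(",")]
    else: return [d.strip() for d in str(data).split(",")]

print(compile_list(["Action", "Comedy"]))      # -> "Action,Comedy"
print(compile_list("Action"))                  # -> "Action" (non-lists pass through)
print(get_list(" 550, 603 ", int_list=True))   # -> [550, 603]
print(get_list("Drama, Sci-Fi", lower=True))   # -> ['drama', 'sci-fi']
```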

@ -2,14 +2,16 @@ import argparse, logging, os, re, sys, time
 from datetime import datetime
 try:
     import schedule
-    from modules import tests, util
+    from modules import util
+    from modules.builder import CollectionBuilder
     from modules.config import Config
+    from modules.util import Failed
+    from plexapi.exceptions import BadRequest
 except ModuleNotFoundError:
     print("Error: Requirements are not installed")
     sys.exit(0)
 parser = argparse.ArgumentParser()
-parser.add_argument("--my-tests", dest="tests", help=argparse.SUPPRESS, action="store_true", default=False)
 parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
 parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
 parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
@ -34,7 +36,6 @@ def check_bool(env_str, default):
     else:
         return default
-my_tests = check_bool("PMM_TESTS", args.tests)
 test = check_bool("PMM_TEST", args.test)
 debug = check_bool("PMM_DEBUG", args.debug)
 run = check_bool("PMM_RUN", args.run)
@ -76,7 +77,7 @@ file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(lev
 cmd_handler = logging.StreamHandler()
 cmd_handler.setFormatter(logging.Formatter("| %(message)-100s |"))
-cmd_handler.setLevel(logging.DEBUG if my_tests or test or debug else logging.INFO)
+cmd_handler.setLevel(logging.DEBUG if test or debug else logging.INFO)
 logger.addHandler(cmd_handler)
 logger.addHandler(file_handler)
@ -91,35 +92,311 @@ util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_
util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ") util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ")
util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ") util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ")
util.centered(" |___/ ") util.centered(" |___/ ")
util.centered(" Version: 1.8.0 ") util.centered(" Version: 1.9.0 ")
util.separator() util.separator()
if my_tests: def start(config_path, is_test, daily, requested_collections, requested_libraries, resume_from):
tests.run_tests(default_dir)
sys.exit(0)
def start(config_path, is_test, daily, collections_to_run, libraries_to_run, resume_from):
if daily: start_type = "Daily " if daily: start_type = "Daily "
elif is_test: start_type = "Test " elif is_test: start_type = "Test "
elif collections_to_run: start_type = "Collections " elif requested_collections: start_type = "Collections "
elif libraries_to_run: start_type = "Libraries " elif requested_libraries: start_type = "Libraries "
else: start_type = "" else: start_type = ""
start_time = datetime.now() start_time = datetime.now()
util.separator(f"Starting {start_type}Run") util.separator(f"Starting {start_type}Run")
try: try:
config = Config(default_dir, config_path, libraries_to_run) config = Config(default_dir, config_path, requested_libraries)
config.update_libraries(is_test, collections_to_run, resume_from) update_libraries(config, is_test, requested_collections, resume_from)
except Exception as e: except Exception as e:
util.print_stacktrace() util.print_stacktrace()
logger.critical(e) logger.critical(e)
logger.info("") logger.info("")
util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}") util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
def update_libraries(config, is_test, requested_collections, resume_from):
for library in config.libraries:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.separator(f"{library.name} Library")
logger.info("")
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = map_guids(config, library)
if not is_test and not resume_from and library.mass_update:
mass_metadata(config, library, movie_map, show_map)
for metadata in library.metadata_files:
logger.info("")
util.separator(f"Running Metadata File\n{metadata.path}")
if not is_test and not resume_from:
try:
metadata.update_metadata(config.TMDb, is_test)
except Failed as e:
logger.error(e)
logger.info("")
util.separator(f"{'Test ' if is_test else ''}Collections")
collections_to_run = metadata.get_collections(requested_collections)
if resume_from and resume_from not in collections_to_run:
logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}")
continue
if collections_to_run:
resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map)
if library.show_unmanaged is True and not is_test and not requested_collections:
logger.info("")
util.separator(f"Unmanaged Collections in {library.name} Library")
logger.info("")
unmanaged_count = 0
collections_in_plex = [str(plex_col) for plex_col in library.collections]
for col in library.get_all_collections():
if col.title not in collections_in_plex:
logger.info(col.title)
unmanaged_count += 1
logger.info("{} Unmanaged Collections".format(unmanaged_count))
if library.assets_for_all is True and not is_test and not requested_collections:
logger.info("")
util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
logger.info("")
for item in library.get_all():
library.update_item_from_assets(item)
has_run_again = False
for library in config.libraries:
if library.run_again:
has_run_again = True
break
if has_run_again:
logger.info("")
util.separator("Run Again")
logger.info("")
length = 0
for x in range(1, config.general["run_again_delay"] + 1):
length = util.print_return(length, f"Waiting to run again in {config.general['run_again_delay'] - x + 1} minutes")
for y in range(60):
time.sleep(1)
util.print_end(length)
for library in config.libraries:
if library.run_again:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.separator(f"{library.name} Library Run Again")
logger.info("")
movie_map, show_map = map_guids(config, library)
for builder in library.run_again:
logger.info("")
util.separator(f"{builder.name} Collection")
logger.info("")
try:
builder.run_collections_again(movie_map, show_map)
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
used_url = []
for library in config.libraries:
if library.url not in used_url:
used_url.append(library.url)
if library.empty_trash:
library.query(library.PlexServer.library.emptyTrash)
if library.clean_bundles:
library.query(library.PlexServer.library.cleanBundles)
if library.optimize:
library.query(library.PlexServer.library.optimize)
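
When any collection needs a second pass, `update_libraries` waits `run_again_delay` minutes, overwriting a single status line while it counts down. A rough standalone sketch of that kind of in-place countdown, using a bare carriage return instead of the project's `util.print_return` helper and a shortened delay for the demo:

```python
import sys, time

def wait_to_run_again(delay_minutes, tick_seconds=1):
    """Count down delay_minutes on one rewritten status line, like the Run Again wait."""
    for remaining in range(delay_minutes, 0, -1):
        message = f"Waiting to run again in {remaining} minute{'s' if remaining > 1 else ''}"
        sys.stdout.write(f"\r{message:<60}")
        sys.stdout.flush()
        time.sleep(tick_seconds)   # the real loop sleeps 60 x 1 second per minute
    sys.stdout.write("\r" + " " * 60 + "\r")

wait_to_run_again(3)   # demo: three short "minutes"
```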
def map_guids(config, library):
movie_map = {}
show_map = {}
length = 0
logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
try:
id_type, main_id = config.Convert.get_id(item, library, length)
except BadRequest:
util.print_stacktrace()
util.print_end(length, f"Mapping Error: | {item.guid} for {item.title} not found")
continue
if not isinstance(main_id, list):
main_id = [main_id]
if id_type == "movie":
for m in main_id:
if m in movie_map: movie_map[m].append(item.ratingKey)
else: movie_map[m] = [item.ratingKey]
elif id_type == "show":
for m in main_id:
if m in show_map: show_map[m].append(item.ratingKey)
else: show_map[m] = [item.ratingKey]
util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}")
return movie_map, show_map
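
`map_guids` produces plain dictionaries from an external ID (TMDb for movies, TVDb for shows) to the list of Plex ratingKeys that carry it, which is also why `mass_metadata` can later reverse-search them. A small sketch of building and reverse-searching such a map (the sample IDs and rating keys are made up):

```python
from collections import defaultdict

def build_map(pairs):
    """pairs: iterable of (external_id, rating_key) -> {external_id: [rating_key, ...]}"""
    id_map = defaultdict(list)
    for external_id, rating_key in pairs:
        id_map[external_id].append(rating_key)
    return dict(id_map)

movie_map = build_map([(603, 101), (603, 102), (550, 103)])   # e.g. two editions of the same film
print(movie_map)                                               # -> {603: [101, 102], 550: [103]}

# Reverse lookup, the way mass_metadata finds the TMDb ID for a Plex item:
rating_key = 102
tmdb_id = next((tmdb for tmdb, keys in movie_map.items() if rating_key in keys), None)
print(tmdb_id)                                                 # -> 603
```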
def mass_metadata(config, library, movie_map, show_map):
length = 0
logger.info("")
util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
tmdb_id = None
tvdb_id = None
imdb_id = None
if config.Cache:
t_id, guid_media_type, _ = config.Cache.config.Cache.query_guid_map(item.guid)
if t_id:
if "movie" in guid_media_type:
tmdb_id = t_id
else:
tvdb_id = t_id
if not tmdb_id and not tvdb_id:
for tmdb, rating_keys in movie_map.items():
if item.ratingKey in rating_keys:
tmdb_id = tmdb
break
if not tmdb_id and not tvdb_id and library.is_show:
for tvdb, rating_keys in show_map.items():
if item.ratingKey in rating_keys:
tvdb_id = tvdb
break
if tmdb_id:
imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
elif tvdb_id:
tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id)
imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
tmdb_item = None
if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
if tmdb_id:
try:
tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
except Failed as e:
util.print_end(length, str(e))
else:
util.print_end(length, f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}")
omdb_item = None
if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]:
if config.OMDb.limit is False:
if imdb_id:
try:
omdb_item = config.OMDb.get_omdb(imdb_id)
except Failed as e:
util.print_end(length, str(e))
else:
util.print_end(length, f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")
if not tmdb_item and not omdb_item:
continue
if library.mass_genre_update:
try:
if tmdb_item and library.mass_genre_update == "tmdb":
new_genres = [genre.name for genre in tmdb_item.genres]
elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]:
new_genres = omdb_item.genres
else:
raise Failed
item_genres = [genre.tag for genre in item.genres]
display_str = ""
for genre in (g for g in item_genres if g not in new_genres):
library.query_data(item.removeGenre, genre)
display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}"
for genre in (g for g in new_genres if g not in item_genres):
library.query_data(item.addGenre, genre)
display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
if len(display_str) > 0:
util.print_end(length, f"{item.title[:25]:<25} | Genres | {display_str}")
except Failed:
pass
if library.mass_audience_rating_update or library.mass_critic_rating_update:
try:
if tmdb_item and library.mass_genre_update == "tmdb":
new_rating = tmdb_item.vote_average
elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]:
new_rating = omdb_item.imdb_rating
else:
raise Failed
if new_rating is None:
util.print_end(length, f"{item.title[:25]:<25} | No Rating Found")
else:
if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating):
library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
util.print_end(length, f"{item.title[:25]:<25} | Audience Rating | {new_rating}")
if library.mass_critic_rating_update and str(item.rating) != str(new_rating):
library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
util.print_end(length, f"{item.title[:25]:<25} | Critic Rating | {new_rating}")
except Failed:
pass
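
The mass genre edit computes which tags to drop and which to add so only the delta touches Plex, and builds a `-genre`/`+genre` display string along the way. The same logic in isolation, with the Plex `item` interactions replaced by plain lists:

```python
def genre_delta(current_genres, new_genres):
    """Return (to_remove, to_add, display) the way the mass genre update reports its edits."""
    to_remove = [g for g in current_genres if g not in new_genres]
    to_add = [g for g in new_genres if g not in current_genres]
    display = ", ".join([f"-{g}" for g in to_remove] + [f"+{g}" for g in to_add])
    return to_remove, to_add, display

remove, add, display = genre_delta(["Action", "Thriller"], ["Action", "Science Fiction"])
print(display)   # -> "-Thriller, +Science Fiction"
```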
def run_collection(config, library, metadata, requested_collections, is_test, resume_from, movie_map, show_map):
for mapping_name, collection_attrs in requested_collections.items():
if is_test and ("test" not in collection_attrs or collection_attrs["test"] is not True):
no_template_test = True
if "template" in collection_attrs and collection_attrs["template"]:
for data_template in util.get_list(collection_attrs["template"], split=False):
if "name" in data_template \
and data_template["name"] \
and metadata.templates \
and data_template["name"] in metadata.templates \
and metadata.templates[data_template["name"]] \
and "test" in metadata.templates[data_template["name"]] \
and metadata.templates[data_template["name"]]["test"] is True:
no_template_test = False
if no_template_test:
continue
try:
if resume_from and resume_from != mapping_name:
continue
elif resume_from == mapping_name:
resume_from = None
logger.info("")
util.separator(f"Resuming Collections")
logger.info("")
util.separator(f"{mapping_name} Collection")
logger.info("")
builder = CollectionBuilder(config, library, metadata, mapping_name, collection_attrs)
if len(builder.schedule) > 0:
util.print_multiline(builder.schedule, info=True)
logger.info("")
logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
if len(builder.filters) > 0:
logger.info("")
for filter_key, filter_value in builder.filters:
logger.info(f"Collection Filter {filter_key}: {filter_value}")
if not builder.smart_url:
builder.collect_rating_keys(movie_map, show_map)
logger.info("")
if len(builder.rating_keys) > 0:
builder.add_to_collection(movie_map)
if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0:
builder.run_missing()
if builder.sync and len(builder.rating_keys) > 0:
builder.sync_collection()
logger.info("")
builder.update_details()
if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):
library.run_again.append(builder)
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
except Exception as e:
util.print_stacktrace()
logger.error(f"Unknown Error: {e}")
return resume_from
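
`run_collection` implements resuming by skipping every collection until the requested name appears, then clearing `resume_from` so that collection and everything after it run normally. The control flow reduced to its essentials (collection names are placeholders):

```python
def collections_to_process(names, resume_from=None):
    """Yield collections in order, skipping everything before resume_from."""
    for name in names:
        if resume_from and resume_from != name:
            continue             # still before the resume point
        resume_from = None       # found it; process this and everything after
        yield name

print(list(collections_to_process(["Marvel", "Pixar", "Oscars"], resume_from="Pixar")))
# -> ['Pixar', 'Oscars']
```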
 try:
     if run or test or collections or libraries or resume:
         start(config_file, test, False, collections, libraries, resume)
     else:
-        length = 0
+        time_length = 0
         schedule.every().day.at(time_to_run).do(start, config_file, False, True, None, None, None)
         while True:
             schedule.run_pending()
@ -132,7 +409,7 @@ try:
             time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
             time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
-            length = util.print_return(length, f"Current Time: {current} | {time_str} until the daily run at {time_to_run}")
+            time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the daily run at {time_to_run}")
             time.sleep(1)
 except KeyboardInterrupt:
     util.separator("Exiting Plex Meta Manager")
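
Outside of an immediate run, test, collections, libraries, or resume invocation, the script registers one daily job with the `schedule` library and then polls it in a sleep loop. A pared-down sketch of that pattern (the job body is a stub standing in for `start(...)`):

```python
import time
import schedule   # third-party package, already in the project's requirements

def daily_job():
    print("starting daily run")   # stand-in for start(config_file, False, True, None, None, None)

schedule.every().day.at("03:00").do(daily_job)

while True:
    schedule.run_pending()   # fires daily_job when 03:00 comes around
    time.sleep(1)            # the real loop also prints a countdown to the next run
```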
