add Folder option for metadata files and cleanup

pull/362/head
meisnate12 3 years ago
parent 26eb57587f
commit 0cc9d81283

@@ -9,22 +9,33 @@ builders = [
     "anilist_season", "anilist_studio", "anilist_tag", "anilist_top_rated"
 ]
 pretty_names = {"score": "Average Score", "popular": "Popularity"}
+search_translation = {
+    "season": "MediaSeason", "seasonYear": "Int", "isAdult": "Boolean",
+    "startDate_greater": "FuzzyDateInt", "startDate_lesser": "FuzzyDateInt", "endDate_greater": "FuzzyDateInt", "endDate_lesser": "FuzzyDateInt",
+    "format_in": "[MediaFormat]", "format_not_in": "[MediaFormat]", "status_in": "[MediaStatus]", "status_not_in": "[MediaStatus]",
+    "episodes_greater": "Int", "episodes_lesser": "Int", "duration_greater": "Int", "duration_lesser": "Int",
+    "genre_in": "[String]", "genre_not_in": "[String]", "tag_in": "[String]", "tag_not_in": "[String]",
+    "averageScore_greater": "Int", "averageScore_lesser": "Int", "popularity_greater": "Int", "popularity_lesser": "Int"
+}
 base_url = "https://graphql.anilist.co"
-tag_query = "query{MediaTagCollection {name}}"
+tag_query = "query{MediaTagCollection {name, category}}"
 genre_query = "query{GenreCollection}"

 class AniList:
     def __init__(self, config):
         self.config = config
         self.tags = {}
-        self.genres = {}
-        self.tags = {t["name"].lower(): t["name"] for t in self._request(tag_query, {})["data"]["MediaTagCollection"]}
-        self.genres = {g.lower(): g for g in self._request(genre_query, {})["data"]["GenreCollection"]}
+        self.categories = {}
+        for media_tag in self._request(tag_query, {})["data"]["MediaTagCollection"]:
+            self.tags[media_tag["name"].lower().replace(" ", "-")] = media_tag["name"]
+            self.categories[media_tag["category"].lower().replace(" ", "-")] = media_tag["category"]
+        self.genres = {g.lower().replace(" ", "-"): g for g in self._request(genre_query, {})["data"]["GenreCollection"]}

     def _request(self, query, variables):
         response = self.config.post(base_url, json={"query": query, "variables": variables})
         json_obj = response.json()
         if "errors" in json_obj:
+            logger.debug(json_obj)
             if json_obj['errors'][0]['message'] == "Too Many Requests.":
                 if "Retry-After" in response.headers:
                     time.sleep(int(response.headers["Retry-After"]))
@@ -35,7 +46,7 @@ class AniList:
             time.sleep(0.4)
         return json_obj

-    def _validate(self, anilist_id):
+    def _validate_id(self, anilist_id):
         query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}"
         media = self._request(query, {"id": anilist_id})["data"]["Media"]
         if media["id"]:
@@ -65,62 +76,31 @@ class AniList:
         return anilist_ids

     def _top_rated(self, limit):
-        query = """
-            query ($page: Int) {
-              Page(page: $page) {
-                pageInfo {hasNextPage}
-                media(averageScore_greater: 3, sort: SCORE_DESC, type: ANIME) {id}
-              }
-            }
-        """
-        return self._pagenation(query, limit=limit)
+        return self._search(limit=limit, averageScore_greater=3)

     def _popular(self, limit):
-        query = """
-            query ($page: Int) {
-              Page(page: $page) {
-                pageInfo {hasNextPage}
-                media(popularity_greater: 1000, sort: POPULARITY_DESC, type: ANIME) {id}
-              }
-            }
-        """
-        return self._pagenation(query, limit=limit)
+        return self._search(sort="popular", limit=limit, popularity_greater=1000)

     def _season(self, season, year, sort, limit):
-        query = """
-            query ($page: Int, $season: MediaSeason, $year: Int, $sort: [MediaSort]) {
-              Page(page: $page){
-                pageInfo {hasNextPage}
-                media(season: $season, seasonYear: $year, type: ANIME, sort: $sort){id}
-              }
-            }
-        """
-        variables = {"season": season.upper(), "year": year, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
+        return self._search(sort=sort, limit=limit, season=season.upper(), year=year)
+
+    def _search(self, sort="score", limit=0, **kwargs):
+        query_vars = "$page: Int, $sort: [MediaSort]"
+        media_vars = "sort: $sort, type: ANIME"
+        variables = {"sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
+        for key, value in kwargs.items():
+            query_vars += f", ${key}: {search_translation[key]}"
+            media_vars += f", {key}: ${key}"
+            variables[key] = value
+        query = f"query ({query_vars}) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}"
+        logger.info(query)
         return self._pagenation(query, limit=limit, variables=variables)

     def _genre(self, genre, sort, limit):
-        query = """
-            query ($page: Int, $genre: String, $sort: [MediaSort]) {
-              Page(page: $page){
-                pageInfo {hasNextPage}
-                media(genre: $genre, sort: $sort){id}
-              }
-            }
-        """
-        variables = {"genre": genre, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
+        return self._search(sort=sort, limit=limit, genre=genre)

     def _tag(self, tag, sort, limit):
-        query = """
-            query ($page: Int, $tag: String, $sort: [MediaSort]) {
-              Page(page: $page){
-                pageInfo {hasNextPage}
-                media(tag: $tag, sort: $sort){id}
-              }
-            }
-        """
-        variables = {"tag": tag, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
+        return self._search(sort=sort, limit=limit, tag=tag)

     def _studio(self, studio_id):
         query = """
@@ -166,7 +146,7 @@ class AniList:
         name = ""
         if not ignore_ids:
             ignore_ids = [anilist_id]
-            anilist_id, name = self._validate(anilist_id)
+            anilist_id, name = self._validate_id(anilist_id)
             anilist_ids.append(anilist_id)
         json_obj = self._request(query, {"id": anilist_id})
         edges = [media["node"]["id"] for media in json_obj["data"]["Media"]["relations"]["edges"]
@@ -183,22 +163,26 @@ class AniList:
         return anilist_ids, ignore_ids, name

+    def validate_tag(self, tag):
+        return self._validate(tag, self.tags, "Tag")
+
+    def validate_category(self, category):
+        return self._validate(category, self.categories, "Category")
+
     def validate_genre(self, genre):
-        if genre.lower() in self.genres:
-            return self.genres[genre.lower()]
-        raise Failed(f"AniList Error: Genre: {genre} does not exist")
+        return self._validate(genre, self.genres, "Genre")

-    def validate_tag(self, tag):
-        if tag.lower() in self.tags:
-            return self.tags[tag.lower()]
-        raise Failed(f"AniList Error: Tag: {tag} does not exist")
+    def _validate(self, data, options, name):
+        data_check = data.lower().replace(" / ", "-").replace(" ", "-")
+        if data_check in options:
+            return options[data_check]
+        raise Failed(f"AniList Error: {name}: {data} does not exist\nOptions: {', '.join([v for k, v in options.items()])}")

     def validate_anilist_ids(self, anilist_ids, studio=False):
         anilist_id_list = util.get_int_list(anilist_ids, "AniList ID")
         anilist_values = []
+        query = f"query ($id: Int) {{{'Studio(id: $id) {name}' if studio else 'Media(id: $id) {id}'}}}"
         for anilist_id in anilist_id_list:
-            if studio: query = "query ($id: Int) {Studio(id: $id) {name}}"
-            else: query = "query ($id: Int) {Media(id: $id) {id}}"
             try:
                 self._request(query, {"id": anilist_id})
                 anilist_values.append(anilist_id)
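Note: the new _validate helper matches user input against the slug-style keys built in __init__ (lowercased, with spaces and " / " turned into hyphens). A small self-contained sketch with made-up options, using ValueError in place of the project's Failed exception:

options = {"slice-of-life": "Slice of Life", "sci-fi": "Sci-Fi"}

def validate(data, options, name):
    data_check = data.lower().replace(" / ", "-").replace(" ", "-")
    if data_check in options:
        return options[data_check]
    raise ValueError(f"{name}: {data} does not exist\nOptions: {', '.join(options.values())}")

print(validate("Slice of Life", options, "Tag"))   # Slice of Life
print(validate("Mecha", options, "Genre"))         # raises ValueError listing the valid options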
@@ -210,7 +194,7 @@ class AniList:
     def get_anilist_ids(self, method, data):
         if method == "anilist_id":
             logger.info(f"Processing AniList ID: {data}")
-            anilist_id, name = self._validate(data)
+            anilist_id, name = self._validate_id(data)
             anilist_ids = [anilist_id]
         elif method == "anilist_popular":
             logger.info(f"Processing AniList Popular: {data} Anime")

@@ -384,21 +384,16 @@ class Config:
                 paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]]
                 for path in paths_to_check:
                     if isinstance(path, dict):
-                        if "url" in path:
-                            if path["url"] is None:
-                                logger.error("Config Error: metadata_path url is blank")
-                            else:
-                                params["metadata_path"].append(("URL", path["url"]))
-                        if "git" in path:
-                            if path["git"] is None:
-                                logger.error("Config Error: metadata_path git is blank")
-                            else:
-                                params["metadata_path"].append(("Git", path['git']))
-                        if "file" in path:
-                            if path["file"] is None:
-                                logger.error("Config Error: metadata_path file is blank")
-                            else:
-                                params["metadata_path"].append(("File", path['file']))
+                        def check_dict(attr, name):
+                            if attr in path:
+                                if path[attr] is None:
+                                    logger.error(f"Config Error: metadata_path {attr} is blank")
+                                else:
+                                    params["metadata_path"].append((name, path[attr]))
+                        check_dict("url", "URL")
+                        check_dict("git", "Git")
+                        check_dict("file", "File")
+                        check_dict("folder", "Folder")
                     else:
                         params["metadata_path"].append(("File", path))
             else:
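Note: a standalone sketch of the check_dict closure above, run against a made-up path dictionary (the values are hypothetical and print stands in for logger.error):

path = {"file": "config/Movies.yml", "folder": "config/metadata", "url": None}
params = {"metadata_path": []}

def check_dict(attr, name):
    if attr in path:                     # closure reads path/params from the enclosing scope
        if path[attr] is None:
            print(f"Config Error: metadata_path {attr} is blank")
        else:
            params["metadata_path"].append((name, path[attr]))

check_dict("url", "URL")       # prints the "blank" error for the None url
check_dict("git", "Git")       # no git key, so nothing happens
check_dict("file", "File")
check_dict("folder", "Folder")
print(params["metadata_path"])   # [('File', 'config/Movies.yml'), ('Folder', 'config/metadata')]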

@@ -296,7 +296,7 @@ class Convert:
             if tvdb:
                 tvdb_id.append(tvdb)
         if not tvdb_id:
-            raise Failed(f"Unable to convert TMDb ID: {util.compile_list(tmdb_id)} to TVDb ID")
+            raise Failed(f"Unable to convert TMDb ID: {', '.join(tmdb_id)} to TVDb ID")

         if not imdb_id and tvdb_id:
             for tvdb in tvdb_id:
@@ -306,8 +306,8 @@ class Convert:
         def update_cache(cache_ids, id_type, imdb_in, guid_type):
             if self.config.Cache:
-                cache_ids = util.compile_list(cache_ids)
-                imdb_in = util.compile_list(imdb_in) if imdb_in else None
+                cache_ids = ",".join(cache_ids)
+                imdb_in = ",".join(imdb_in) if imdb_in else None
                 ids = f"{item.guid:<46} | {id_type} ID: {cache_ids:<7} | IMDb ID: {str(imdb_in):<10}"
                 logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}"))
                 self.config.Cache.update_guid_map(item.guid, cache_ids, imdb_in, expired, guid_type)

@@ -240,7 +240,20 @@ class Plex:
         self.metadatas = []
         self.metadata_files = []
+        metadata = []
         for file_type, metadata_file in params["metadata_path"]:
+            if file_type == "folder":
+                if os.path.isdir(metadata_file):
+                    yml_files = util.glob_filter(os.path.join(metadata_file, "*.yml"))
+                    if yml_files:
+                        metadata.extend([("File", yml) for yml in yml_files])
+                    else:
+                        logger.error(f"Config Error: No YAML (.yml) files found in {metadata_file}")
+                else:
+                    logger.error(f"Config Error: Folder not found: {metadata_file}")
+            else:
+                metadata.append((file_type, metadata_file))
+        for file_type, metadata_file in metadata:
             try:
                 meta_obj = Metadata(config, self, file_type, metadata_file)
                 if meta_obj.collections:
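Note: a sketch of the folder expansion above using the standard library's glob in place of util.glob_filter (a stand-in, not the repo's helper). The paths and the ("Folder", ...) entry are hypothetical, and this sketch compares the type case-insensitively.

import glob
import os

metadata_path = [("Folder", "config/metadata"), ("File", "config/Movies.yml")]  # example input
metadata = []
for file_type, metadata_file in metadata_path:
    if file_type.lower() == "folder":
        if os.path.isdir(metadata_file):
            yml_files = glob.glob(os.path.join(metadata_file, "*.yml"))          # stand-in for util.glob_filter
            if yml_files:
                metadata.extend([("File", yml) for yml in yml_files])            # one File entry per discovered .yml
            else:
                print(f"Config Error: No YAML (.yml) files found in {metadata_file}")
        else:
            print(f"Config Error: Folder not found: {metadata_file}")
    else:
        metadata.append((file_type, metadata_file))
# metadata now holds a ("File", path) tuple per discovered .yml plus the explicitly listed file.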
@@ -747,11 +760,11 @@ class Plex:
             if _add:
                 updated = True
                 self.query_data(getattr(obj, f"add{attr.capitalize()}"), _add)
-                logger.info(f"Detail: {attr.capitalize()} {util.compile_list(_add)} added to {obj.title}")
+                logger.info(f"Detail: {attr.capitalize()} {','.join(_add)} added to {obj.title}")
             if _remove:
                 updated = True
                 self.query_data(getattr(obj, f"remove{attr.capitalize()}"), _remove)
-                logger.info(f"Detail: {attr.capitalize()} {util.compile_list(_remove)} removed to {obj.title}")
+                logger.info(f"Detail: {attr.capitalize()} {','.join(_remove)} removed to {obj.title}")
         return updated

     def update_item_from_assets(self, item, overlay=None, create=False):

@@ -69,15 +69,6 @@ def add_dict_list(keys, value, dict_map):
     else:
         dict_map[key] = [value]

-def compile_list(data):
-    if isinstance(data, list):
-        text = ""
-        for item in data:
-            text += f"{',' if len(text) > 0 else ''}{item}"
-        return text
-    else:
-        return data
-
 def get_list(data, lower=False, split=True, int_list=False):
     if data is None: return None
     elif isinstance(data, list): return data
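Note: the removed compile_list helper was a hand-rolled comma join, so the call sites in this commit switch to str.join directly. Illustrative values only:

values = ["27205", "157336"]
print(",".join(values))    # 27205,157336   (same output compile_list gave for a list of strings)
print(", ".join(values))   # 27205, 157336  (used where a spaced, human-readable list is logged)
# str.join requires string items, whereas compile_list also accepted ints via its f-string.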

@@ -368,17 +368,13 @@ def mass_metadata(config, library, items):
                     raise Failed
                 item_genres = [genre.tag for genre in item.genres]
                 display_str = ""
-                add_genre = []
-                for genre in (g for g in new_genres if g not in item_genres):
-                    add_genre.append(genre)
-                    display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
+                add_genre = [genre for genre in (g for g in new_genres if g not in item_genres)]
                 if len(add_genre) > 0:
+                    display_str += f"+{', +'.join(add_genre)}"
                     library.query_data(item.addGenre, add_genre)
-                remove_genre = []
-                for genre in (g for g in item_genres if g not in new_genres):
-                    remove_genre.append(genre)
-                    display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}"
+                remove_genre = [genre for genre in (g for g in item_genres if g not in new_genres)]
                 if len(remove_genre) > 0:
+                    display_str += f"-{', -'.join(remove_genre)}"
                     library.query_data(item.removeGenre, remove_genre)
                 if len(display_str) > 0:
                     logger.info(util.adjust_space(f"{item.title[:25]:<25} | Genres | {display_str}"))
@@ -568,7 +564,7 @@ try:
                 minutes = int((seconds % 3600) // 60)
                 time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
                 time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
-                util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} {util.compile_list(times_to_run)}")
+                util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}")
                 time.sleep(60)
     except KeyboardInterrupt:
         util.separator("Exiting Plex Meta Manager")
