Merge pull request #487 from meisnate12/develop

v1.13.2
meisnate12 committed 3 years ago via GitHub
commit e9992ae694

@ -24,6 +24,11 @@ jobs:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
@ -34,5 +39,6 @@ jobs:
with:
context: ./
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:develop

@ -3,8 +3,6 @@ name: Docker Latest Release
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
@ -22,6 +20,11 @@ jobs:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
@ -32,5 +35,6 @@ jobs:
with:
context: ./
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:latest

@ -21,6 +21,11 @@ jobs:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
@ -35,5 +40,6 @@ jobs:
with:
context: ./
file: ./Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:${{ steps.get_version.outputs.VERSION }}

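Taken together, the three workflow hunks above make the same change in the develop, latest, and release workflows: a QEMU setup step is added ahead of Buildx, and the final build step gains a platforms key, so every image is now built for both linux/amd64 and linux/arm64 (the latest workflow additionally drops its pull_request trigger). Assembled, the relevant steps look roughly like the sketch below; the uses: line of the build step falls outside the hunks, so docker/build-push-action is an assumption here:

    - name: Set up QEMU
      uses: docker/setup-qemu-action@master
      with:
        platforms: all
    - name: Set up Docker Buildx
      id: buildx
      uses: docker/setup-buildx-action@v1
    - name: Build and push                # assumed step name
      uses: docker/build-push-action@v2   # assumption: not shown in the hunks
      with:
        context: ./
        file: ./Dockerfile
        platforms: linux/amd64,linux/arm64
        push: true
        tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:latest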
@ -24,7 +24,7 @@ The script works with most Metadata agents including the new Plex Movie Agent, N
## IBRACORP Video Walkthrough
[IBRACORP](https://ibracorp.io/) made a video walkthough for installing Plex Meta Manager on Unraid. While you might not be using Unraid the video goes over many key accepts of Plex Meta Manager and can be a great place to start learning how to use the script.
[IBRACORP](https://ibracorp.io/) made a video walkthrough for installing Plex Meta Manager on Unraid. While you might not be using Unraid, the video goes over many key aspects of Plex Meta Manager and can be a great place to start learning how to use the script.
[![Plex Meta Manager](https://img.youtube.com/vi/dF69MNoot3w/0.jpg)](https://www.youtube.com/watch?v=dF69MNoot3w "Plex Meta Manager")
@ -33,6 +33,6 @@ The script works with most Metadata agents including the new Plex Movie Agent, N
* Before posting on GitHub about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/TsdpsFYqqm).
* If you're getting an Error or have an Enhancement post in the [Issues](https://github.com/meisnate12/Plex-Meta-Manager/issues).
* If you have a configuration question post in the [Discussions](https://github.com/meisnate12/Plex-Meta-Manager/discussions).
* To see user submitted Metadata configuration files, and you to even add your own, go to the [Plex Meta Manager Configs](https://github.com/meisnate12/Plex-Meta-Manager-Configs).
* Pull Request are welcome but please submit them to the develop branch.
* To see user submitted Metadata configuration files, and even add your own, go to the [Plex Meta Manager Configs](https://github.com/meisnate12/Plex-Meta-Manager-Configs).
* Pull Requests are welcome but please submit them to the develop branch.
* If you wish to contribute to the Wiki please fork and send a pull request on the [Plex Meta Manager Wiki Repository](https://github.com/meisnate12/Plex-Meta-Manager-Wiki).

@ -1 +1 @@
1.13.1
1.13.2

@ -18,25 +18,25 @@ settings: # Can be individually specified
cache_expiration: 60
asset_directory: config/assets
asset_folders: true
create_asset_folders: false
sync_mode: append
show_unmanaged: true
show_filtered: false
show_missing: true
show_missing_assets: true
save_missing: true
run_again_delay: 2
released_missing_only: false
create_asset_folders: false
missing_only_released: false
only_filter_missing: false
collection_minimum: 1
delete_below_minimum: true
delete_not_scheduled: false
tvdb_language: eng
webhooks: # Can be individually specified per library as well
error:
run_start:
run_end:
collection_creation:
collection_addition:
collection_removal:
collection_changes:
plex: # Can be individually specified per library as well; REQUIRED for the script to run
url: http://192.168.1.12:32400
token: ####################

@ -37,7 +37,7 @@ method_alias = {
"producers": "producer",
"writers": "writer",
"years": "year", "show_year": "year", "show_years": "year",
"show_title": "title",
"show_title": "title", "filter": "filters",
"seasonyear": "year", "isadult": "adult", "startdate": "start", "enddate": "end", "averagescore": "score",
"minimum_tag_percentage": "min_tag_percent", "minimumtagrank": "min_tag_percent", "minimum_tag_rank": "min_tag_percent",
"anilist_tag": "anilist_search", "anilist_genre": "anilist_search", "anilist_season": "anilist_search",
@ -80,19 +80,20 @@ poster_details = ["url_poster", "tmdb_poster", "tmdb_profile", "tvdb_poster", "f
background_details = ["url_background", "tmdb_background", "tvdb_background", "file_background"]
boolean_details = [
"visible_library", "visible_home", "visible_shared", "show_filtered", "show_missing", "save_missing",
"missing_only_released", "delete_below_minimum"
"missing_only_released", "only_filter_missing", "delete_below_minimum"
]
string_details = ["sort_title", "content_rating", "name_mapping"]
ignored_details = [
"smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "test",
"smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "test", "delete_not_scheduled",
"tmdb_person", "build_collection", "collection_order", "collection_level", "validate_builders", "collection_name"
]
notification_details = ["collection_creation_webhooks", "collection_addition_webhooks", "collection_removal_webhooks"]
details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details
details = ["ignore_ids", "ignore_imdb_ids", "server_preroll", "collection_changes_webhooks", "collection_mode", "collection_order",
"collection_level", "collection_minimum", "label"] + boolean_details + string_details
collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \
poster_details + background_details + summary_details + string_details
item_bool_details = ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh"]
item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay"] + item_bool_details + list(plex.item_advance_keys.keys())
none_details = ["label.sync", "item_label.sync"]
radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"]
sonarr_details = [
"sonarr_add", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series",
@ -146,11 +147,12 @@ show_only_filters = ["first_episode_aired", "last_episode_aired", "network"]
smart_invalid = ["collection_order", "collection_level"]
smart_url_invalid = ["filters", "run_again", "sync_mode", "show_filtered", "show_missing", "save_missing", "smart_label"] + radarr_details + sonarr_details
custom_sort_builders = [
"tmdb_list", "tmdb_popular", "tmdb_now_playing", "tmdb_top_rated",
"plex_search", "tmdb_list", "tmdb_popular", "tmdb_now_playing", "tmdb_top_rated",
"tmdb_trending_daily", "tmdb_trending_weekly", "tmdb_discover",
"tvdb_list", "imdb_list", "stevenlu_popular", "anidb_popular",
"trakt_list", "trakt_trending", "trakt_popular", "trakt_boxoffice",
"trakt_collected_daily", "trakt_collected_weekly", "trakt_collected_monthly", "trakt_collected_yearly", "trakt_collected_all",
"flixpatrol_url", "flixpatrol_demographics", "flixpatrol_popular", "flixpatrol_top",
"trakt_recommended_daily", "trakt_recommended_weekly", "trakt_recommended_monthly", "trakt_recommended_yearly", "trakt_recommended_all",
"trakt_watched_daily", "trakt_watched_weekly", "trakt_watched_monthly", "trakt_watched_yearly", "trakt_watched_all",
"tautulli_popular", "tautulli_watched", "letterboxd_list", "icheckmovies_list",
@ -177,11 +179,11 @@ class CollectionBuilder:
"show_missing": self.library.show_missing,
"save_missing": self.library.save_missing,
"missing_only_released": self.library.missing_only_released,
"only_filter_missing": self.library.only_filter_missing,
"create_asset_folders": self.library.create_asset_folders,
"delete_below_minimum": self.library.delete_below_minimum,
"collection_creation_webhooks": self.library.collection_creation_webhooks,
"collection_addition_webhooks": self.library.collection_addition_webhooks,
"collection_removal_webhooks": self.library.collection_removal_webhooks,
"delete_not_scheduled": self.library.delete_not_scheduled,
"collection_changes_webhooks": self.library.collection_changes_webhooks
}
self.item_details = {}
self.radarr_details = {}
@ -204,6 +206,9 @@ class CollectionBuilder:
self.summaries = {}
self.schedule = ""
self.minimum = self.library.collection_minimum
self.ignore_ids = [i for i in self.library.ignore_ids]
self.ignore_imdb_ids = [i for i in self.library.ignore_imdb_ids]
self.server_preroll = None
self.current_time = datetime.now()
self.current_year = self.current_time.year
self.exists = False
@ -268,17 +273,21 @@ class CollectionBuilder:
optional = []
if "optional" in template:
if template["optional"]:
if isinstance(template["optional"], list):
for op in template["optional"]:
if op not in default:
optional.append(op)
else:
logger.warning(f"Template Warning: variable {op} cannot be optional if it has a default")
else:
optional.append(str(template["optional"]))
for op in util.get_list(template["optional"]):
if op not in default:
optional.append(str(op))
else:
logger.warning(f"Template Warning: variable {op} cannot be optional if it has a default")
else:
raise Failed("Collection Error: template sub-attribute optional is blank")
if "move_collection_prefix" in template:
if template["move_collection_prefix"]:
for op in util.get_list(template["move_collection_prefix"]):
variables["collection_name"] = variables["collection_name"].replace(f"{str(op).strip()} ", "") + f", {str(op).strip()}"
else:
raise Failed("Collection Error: template sub-attribute move_collection_prefix is blank")
def check_data(_data):
if isinstance(_data, dict):
final_data = {}
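The new move_collection_prefix template sub-attribute added above rewrites the collection name so a leading article sorts last: each listed prefix is stripped from the front of collection_name and re-appended after a comma. A minimal hypothetical template showing the effect (the template and collection names are placeholders):

    templates:
      network:
        move_collection_prefix: The
    collections:
      The CW:
        template: {name: network}
        # the collection is created as "CW, The"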
@ -324,7 +333,7 @@ class CollectionBuilder:
return final_data
for method_name, attr_data in template.items():
if method_name not in self.data and method_name not in ["default", "optional"]:
if method_name not in self.data and method_name not in ["default", "optional", "move_collection_prefix"]:
if attr_data is None:
logger.error(f"Template Error: template attribute {method_name} is blank")
continue
@ -334,6 +343,12 @@ class CollectionBuilder:
except Failed:
continue
if "delete_not_scheduled" in methods:
logger.debug("")
logger.debug("Validating Method: delete_not_scheduled")
logger.debug(f"Value: {data[methods['delete_not_scheduled']]}")
self.details["delete_not_scheduled"] = util.parse("delete_not_scheduled", self.data, datatype="bool", methods=methods, default=False)
if "schedule" in methods:
logger.debug("")
logger.debug("Validating Method: schedule")
@ -349,7 +364,7 @@ class CollectionBuilder:
run_time = str(schedule).lower()
if run_time.startswith(("day", "daily")):
skip_collection = False
elif run_time.startswith(("hour", "week", "month", "year")):
elif run_time.startswith(("hour", "week", "month", "year", "range")):
match = re.search("\\(([^)]+)\\)", run_time)
if not match:
logger.error(f"Collection Error: failed to parse schedule: {schedule}")
@ -384,21 +399,47 @@ class CollectionBuilder:
except ValueError:
logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be an integer between 1 and 31")
elif run_time.startswith("year"):
match = re.match("^(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])$", param)
if not match:
try:
if "/" in param:
opt = param.split("/")
month = int(opt[0])
day = int(opt[1])
self.schedule += f"\nScheduled yearly on {util.pretty_months[month]} {util.make_ordinal(day)}"
if self.current_time.month == month and (self.current_time.day == day or (self.current_time.day == last_day.day and day > last_day.day)):
skip_collection = False
else:
raise ValueError
except ValueError:
logger.error(f"Collection Error: yearly schedule attribute {schedule} invalid must be in the MM/DD format i.e. yearly(11/22)")
elif run_time.startswith("range"):
match = re.match("^(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])-(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])$", param)
if not match:
logger.error(f"Collection Error: range schedule attribute {schedule} invalid must be in the MM/DD-MM/DD format i.e. range(12/01-12/25)")
continue
month = int(match.group(1))
day = int(match.group(2))
self.schedule += f"\nScheduled yearly on {util.pretty_months[month]} {util.make_ordinal(day)}"
if self.current_time.month == month and (self.current_time.day == day or (self.current_time.day == last_day.day and day > last_day.day)):
month_start = int(match.group(1))
day_start = int(match.group(2))
month_end = int(match.group(3))
day_end = int(match.group(4))
check = datetime.strptime(f"{self.current_time.month}/{self.current_time.day}", "%m/%d")
start = datetime.strptime(f"{month_start}/{day_start}", "%m/%d")
end = datetime.strptime(f"{month_end}/{day_end}", "%m/%d")
self.schedule += f"\nScheduled between {util.pretty_months[month_start]} {util.make_ordinal(day_start)} and {util.pretty_months[month_end]} {util.make_ordinal(day_end)}"
if start <= check <= end if start < end else check <= end or check >= start:
skip_collection = False
else:
logger.error(f"Collection Error: schedule attribute {schedule} invalid")
if len(self.schedule) == 0:
skip_collection = False
if skip_collection:
raise NotScheduled(f"{self.schedule}\n\nCollection {self.name} not scheduled to run")
suffix = ""
if self.details["delete_not_scheduled"]:
try:
self.obj = self.library.get_collection(self.name)
self.delete_collection()
suffix = f" and was deleted"
except Failed:
suffix = f" and could not be found to delete"
raise NotScheduled(f"{self.schedule}\n\nCollection {self.name} not scheduled to run{suffix}")
self.collectionless = "plex_collectionless" in methods
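Two scheduling features land in the hunk above: a range(MM/DD-MM/DD) window (the start < end check also handles windows that wrap the new year) and delete_not_scheduled, which deletes an existing collection whenever its schedule says it should not run. A hypothetical seasonal collection using both (the list URL is a placeholder):

    collections:
      Holiday Movies:
        imdb_list: https://www.imdb.com/list/ls000000000/   # placeholder list
        schedule: range(12/01-12/25)
        delete_not_scheduled: true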
@ -548,7 +589,7 @@ class CollectionBuilder:
logger.debug(f"Value: {method_data}")
try:
if method_data is None and method_name in all_builders + plex.searches: raise Failed(f"Collection Error: {method_final} attribute is blank")
elif method_data is None: logger.warning(f"Collection Warning: {method_final} attribute is blank")
elif method_data is None and method_final not in none_details: logger.warning(f"Collection Warning: {method_final} attribute is blank")
elif not self.config.Trakt and "trakt" in method_name: raise Failed(f"Collection Error: {method_final} requires Trakt to be configured")
elif not self.library.Radarr and "radarr" in method_name: raise Failed(f"Collection Error: {method_final} requires Radarr to be configured")
elif not self.library.Sonarr and "sonarr" in method_name: raise Failed(f"Collection Error: {method_final} requires Sonarr to be configured")
@ -558,7 +599,7 @@ class CollectionBuilder:
elif self.library.is_show and method_name in movie_only_builders: raise Failed(f"Collection Error: {method_final} attribute only works for movie libraries")
elif self.library.is_show and method_name in plex.movie_only_searches: raise Failed(f"Collection Error: {method_final} plex search only works for movie libraries")
elif self.library.is_movie and method_name in plex.show_only_searches: raise Failed(f"Collection Error: {method_final} plex search only works for show libraries")
elif self.parts_collection and method_name not in parts_collection_valid: raise Failed(f"Collection Error: {method_final} attribute does not work with Collection Level: {self.details['collection_level'].capitalize()}")
elif self.parts_collection and method_name not in parts_collection_valid: raise Failed(f"Collection Error: {method_final} attribute does not work with Collection Level: {self.collection_level.capitalize()}")
elif self.smart and method_name in smart_invalid: raise Failed(f"Collection Error: {method_final} attribute only works with normal collections")
elif self.collectionless and method_name not in collectionless_details: raise Failed(f"Collection Error: {method_final} attribute does not work for Collectionless collection")
elif self.smart_url and method_name in all_builders + smart_url_invalid: raise Failed(f"Collection Error: {method_final} builder not allowed when using smart_filter")
@ -706,16 +747,22 @@ class CollectionBuilder:
raise Failed(f"Collection Error: {method_data} collection_mode invalid\n\tdefault (Library default)\n\thide (Hide Collection)\n\thide_items (Hide Items in this Collection)\n\tshow_items (Show this Collection and its Items)")
elif method_name == "collection_minimum":
self.minimum = util.parse(method_name, method_data, datatype="int", minimum=1)
elif method_name == "server_preroll":
self.server_preroll = util.parse(method_name, method_data)
elif method_name == "ignore_ids":
self.ignore_ids.extend(util.parse(method_name, method_data, datatype="intlist"))
elif method_name == "ignore_imdb_ids":
self.ignore_imdb_ids.extend(util.parse(method_name, method_data, datatype="list"))
elif method_name == "label":
if "label" in methods and "label.sync" in methods:
raise Failed("Collection Error: Cannot use label and label.sync together")
if "label.remove" in methods and "label.sync" in methods:
raise Failed("Collection Error: Cannot use label.remove and label.sync together")
if method_final == "label" and "label_sync_mode" in methods and self.data[methods["label_sync_mode"]] == "sync":
self.details["label.sync"] = util.get_list(method_data)
self.details["label.sync"] = util.get_list(method_data) if method_data else []
else:
self.details[method_final] = util.get_list(method_data)
elif method_name in notification_details:
self.details[method_final] = util.get_list(method_data) if method_data else []
elif method_name == "collection_changes_webhooks":
self.details[method_name] = util.parse(method_name, method_data, datatype="list")
elif method_name in boolean_details:
default = self.details[method_name] if method_name in self.details else None
@ -729,7 +776,7 @@ class CollectionBuilder:
raise Failed(f"Collection Error: Cannot use item_label and item_label.sync together")
if "item_label.remove" in methods and "item_label.sync" in methods:
raise Failed(f"Collection Error: Cannot use item_label.remove and item_label.sync together")
self.item_details[method_final] = util.get_list(method_data)
self.item_details[method_final] = util.get_list(method_data) if method_data else []
elif method_name in ["item_radarr_tag", "item_sonarr_tag"]:
if method_name in methods and f"{method_name}.sync" in methods:
raise Failed(f"Collection Error: Cannot use {method_name} and {method_name}.sync together")
@ -824,6 +871,7 @@ class CollectionBuilder:
elif self.current_time.month in [3, 4, 5]: current_season = "spring"
elif self.current_time.month in [6, 7, 8]: current_season = "summer"
else: current_season = "fall"
default_year = self.current_year + 1 if self.current_time.month == 12 else self.current_year
for dict_data, dict_methods in util.parse(method_name, method_data, datatype="dictlist"):
new_dictionary = {}
for search_method, search_data in dict_data.items():
@ -833,10 +881,10 @@ class CollectionBuilder:
elif search_attr == "season":
new_dictionary[search_attr] = util.parse(search_attr, search_data, parent=method_name, default=current_season, options=util.seasons)
if "year" not in dict_methods:
logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {self.current_year} by default")
new_dictionary["year"] = self.current_year
logger.warning(f"Collection Warning: {method_name} year attribute not found using this year: {default_year} by default")
new_dictionary["year"] = default_year
elif search_attr == "year":
new_dictionary[search_attr] = util.parse(search_attr, search_data, datatype="int", parent=method_name, default=self.current_year, minimum=1917, maximum=self.current_year + 1)
new_dictionary[search_attr] = util.parse(search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1)
elif search_data is None:
raise Failed(f"Collection Error: {method_name} {search_final} attribute is blank")
elif search_attr == "adult":
@ -1195,10 +1243,11 @@ class CollectionBuilder:
if id_type == "ratingKey":
rating_keys.append(input_id)
elif id_type == "tmdb" and not self.parts_collection:
if input_id in self.library.movie_map:
rating_keys.extend(self.library.movie_map[input_id])
elif input_id not in self.missing_movies:
self.missing_movies.append(input_id)
if input_id not in self.ignore_ids:
if input_id in self.library.movie_map:
rating_keys.extend(self.library.movie_map[input_id])
elif input_id not in self.missing_movies:
self.missing_movies.append(input_id)
elif id_type in ["tvdb", "tmdb_show"] and not self.parts_collection:
if id_type == "tmdb_show":
try:
@ -1206,27 +1255,29 @@ class CollectionBuilder:
except Failed as e:
logger.error(e)
continue
if input_id in self.library.show_map:
rating_keys.extend(self.library.show_map[input_id])
elif input_id not in self.missing_shows:
self.missing_shows.append(input_id)
if input_id not in self.ignore_ids:
if input_id in self.library.show_map:
rating_keys.extend(self.library.show_map[input_id])
elif input_id not in self.missing_shows:
self.missing_shows.append(input_id)
elif id_type == "imdb" and not self.parts_collection:
if input_id in self.library.imdb_map:
rating_keys.extend(self.library.imdb_map[input_id])
else:
if self.do_missing:
try:
tmdb_id, tmdb_type = self.config.Convert.imdb_to_tmdb(input_id, fail=True)
if tmdb_type == "movie":
if tmdb_id not in self.missing_movies:
self.missing_movies.append(tmdb_id)
else:
tvdb_id = self.config.Convert.tmdb_to_tvdb(tmdb_id, fail=True)
if tvdb_id not in self.missing_shows:
self.missing_shows.append(tvdb_id)
except Failed as e:
logger.error(e)
continue
if input_id not in self.ignore_imdb_ids:
if input_id in self.library.imdb_map:
rating_keys.extend(self.library.imdb_map[input_id])
else:
if self.do_missing:
try:
tmdb_id, tmdb_type = self.config.Convert.imdb_to_tmdb(input_id, fail=True)
if tmdb_type == "movie":
if tmdb_id not in self.missing_movies:
self.missing_movies.append(tmdb_id)
else:
tvdb_id = self.config.Convert.tmdb_to_tvdb(tmdb_id, fail=True)
if tvdb_id not in self.missing_shows:
self.missing_shows.append(tvdb_id)
except Failed as e:
logger.error(e)
continue
elif id_type == "tvdb_season" and self.collection_level == "season":
show_id, season_num = input_id.split("_")
show_id = int(show_id)
@ -1562,7 +1613,7 @@ class CollectionBuilder:
else:
self.library.alter_collection(current, name, smart_label_collection=self.smart_label_collection)
amount_added += 1
if self.details["collection_addition_webhooks"]:
if self.details["collection_changes_webhooks"]:
if self.library.is_movie and current.ratingKey in self.library.movie_rating_key_map:
add_id = self.library.movie_rating_key_map[current.ratingKey]
elif self.library.is_show and current.ratingKey in self.library.show_rating_key_map:
@ -1586,7 +1637,7 @@ class CollectionBuilder:
self.library.reload(item)
logger.info(f"{self.name} Collection | - | {self.item_title(item)}")
self.library.alter_collection(item, self.name, smart_label_collection=self.smart_label_collection, add=False)
if self.details["collection_removal_webhooks"]:
if self.details["collection_changes_webhooks"]:
if self.library.is_movie and item.ratingKey in self.library.movie_rating_key_map:
remove_id = self.library.movie_rating_key_map[item.ratingKey]
elif self.library.is_show and item.ratingKey in self.library.show_rating_key_map:
@ -1639,7 +1690,7 @@ class CollectionBuilder:
return True
def check_filters(self, current, display):
if self.filters or self.tmdb_filters:
if (self.filters or self.tmdb_filters) and not self.details["only_filter_missing"]:
util.print_return(f"Filtering {display} {current.title}")
if self.tmdb_filters:
if current.ratingKey not in self.library.movie_rating_key_map and current.ratingKey not in self.library.show_rating_key_map:
@ -2113,17 +2164,12 @@ class CollectionBuilder:
previous = key
def send_notifications(self):
if self.obj and (
(self.details["collection_creation_webhooks"] and self.created) or
(self.details["collection_addition_webhooks"] and len(self.notification_additions) > 0) or
(self.details["collection_removal_webhooks"] and len(self.notification_removals) > 0)
):
if self.obj and self.details["collection_changes_webhooks"] and \
(self.created or len(self.notification_additions) > 0 or len(self.notification_removals) > 0):
self.obj.reload()
try:
self.library.Webhooks.collection_hooks(
self.details["collection_creation_webhooks"] +
self.details["collection_addition_webhooks"] +
self.details["collection_removal_webhooks"],
self.details["collection_changes_webhooks"],
self.obj,
created=self.created,
additions=self.notification_additions,

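On the collection side, the builder changes above add per-collection ignore_ids and ignore_imdb_ids (extending the library-level lists before IDs are gathered) and fold the three creation/addition/removal webhook attributes into a single collection_changes_webhooks list. A hypothetical collection combining them (IDs and URL are placeholders):

    collections:
      Trending Now:
        trakt_trending: 30
        ignore_ids: 1399                        # placeholder TMDb/TVDb ID
        ignore_imdb_ids: tt0944947              # placeholder IMDb ID
        collection_changes_webhooks: https://example.com/hook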
@ -44,7 +44,7 @@ class Config:
self.default_dir = default_dir
self.test_mode = attrs["test"] if "test" in attrs else False
self.trace_mode = attrs["trace"] if "trace" in attrs else False
self.run_start_time = attrs["time"]
self.start_time = attrs["time_obj"]
self.run_hour = datetime.strptime(attrs["time"], "%H:%M").hour
self.requested_collections = util.get_list(attrs["collections"]) if "collections" in attrs else None
self.requested_libraries = util.get_list(attrs["libraries"]) if "libraries" in attrs else None
@ -84,9 +84,28 @@ class Config:
replace_attr(new_config["libraries"][library], "show_filtered", "plex")
replace_attr(new_config["libraries"][library], "show_missing", "plex")
replace_attr(new_config["libraries"][library], "save_missing", "plex")
if new_config["libraries"][library] and "webhooks" in new_config["libraries"][library] and "collection_changes" not in new_config["libraries"][library]["webhooks"]:
changes = []
def hooks(attr):
if attr in new_config["libraries"][library]["webhooks"]:
changes.extend([w for w in util.get_list(new_config["libraries"][library]["webhooks"].pop(attr), split=False) if w not in changes])
hooks("collection_creation")
hooks("collection_addition")
hooks("collection_removal")
new_config["libraries"][library]["webhooks"]["collection_changes"] = changes if changes else None
if "libraries" in new_config: new_config["libraries"] = new_config.pop("libraries")
if "settings" in new_config: new_config["settings"] = new_config.pop("settings")
if "webhooks" in new_config: new_config["webhooks"] = new_config.pop("webhooks")
if "webhooks" in new_config:
temp = new_config.pop("webhooks")
changes = []
def hooks(attr):
if attr in temp:
changes.extend([w for w in util.get_list(temp.pop(attr), split=False) if w not in changes])
hooks("collection_creation")
hooks("collection_addition")
hooks("collection_removal")
temp["collection_changes"] = changes if changes else None
new_config["webhooks"] = temp
if "plex" in new_config: new_config["plex"] = new_config.pop("plex")
if "tmdb" in new_config: new_config["tmdb"] = new_config.pop("tmdb")
if "tautulli" in new_config: new_config["tautulli"] = new_config.pop("tautulli")
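The two migration blocks above rewrite old configs on load: the per-event webhook keys are popped (per library and globally), deduplicated, and merged into the new collection_changes key. In effect, a config like the first sketch below becomes the second (a single placeholder URL assumed):

    # before migration
    webhooks:
      collection_creation: https://example.com/hook
      collection_addition: https://example.com/hook
      collection_removal: https://example.com/hook

    # after migration
    webhooks:
      collection_changes: https://example.com/hook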
@ -124,8 +143,9 @@ class Config:
elif attribute not in loaded_config[parent]: loaded_config[parent][attribute] = default
else: endline = ""
yaml.round_trip_dump(loaded_config, open(self.config_path, "w"), indent=None, block_seq_indent=2)
if default_is_none and var_type in ["list", "int_list"]: return []
elif data[attribute] is None:
if default_is_none and var_type == "list": return []
if default_is_none and var_type in ["list", "int_list"]: return []
elif default_is_none: return None
else: message = f"{text} is blank"
elif var_type == "url":
@ -141,8 +161,19 @@ class Config:
if os.path.exists(os.path.abspath(data[attribute])): return data[attribute]
else: message = f"Path {os.path.abspath(data[attribute])} does not exist"
elif var_type == "list": return util.get_list(data[attribute], split=False)
elif var_type == "int_list": return util.get_list(data[attribute], int_list=True)
elif var_type == "list_path":
temp_list = [p for p in util.get_list(data[attribute], split=False) if os.path.exists(os.path.abspath(p))]
temp_list = []
warning_message = ""
for p in util.get_list(data[attribute], split=False):
if os.path.exists(os.path.abspath(p)):
temp_list.append(p)
else:
if len(warning_message) > 0:
warning_message += "\n"
warning_message += f"Config Warning: Path does not exist: {os.path.abspath(p)}"
if do_print and warning_message:
    util.print_multiline(warning_message)
if len(temp_list) > 0: return temp_list
else: message = "No Paths exist"
elif var_type == "lower_list": return util.get_list(data[attribute], lower=True)
@ -184,27 +215,30 @@ class Config:
"cache_expiration": check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60),
"asset_directory": check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")], default_is_none=True),
"asset_folders": check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True),
"assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False, save=False, do_print=False),
"create_asset_folders": check_for_attribute(self.data, "create_asset_folders", parent="settings", var_type="bool", default=False),
"show_missing_season_assets": check_for_attribute(self.data, "show_missing_season_assets", parent="settings", var_type="bool", default=False),
"sync_mode": check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes),
"collection_minimum": check_for_attribute(self.data, "collection_minimum", parent="settings", var_type="int", default=1),
"delete_below_minimum": check_for_attribute(self.data, "delete_below_minimum", parent="settings", var_type="bool", default=False),
"delete_not_scheduled": check_for_attribute(self.data, "delete_not_scheduled", parent="settings", var_type="bool", default=False),
"run_again_delay": check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0),
"missing_only_released": check_for_attribute(self.data, "missing_only_released", parent="settings", var_type="bool", default=False),
"only_filter_missing": check_for_attribute(self.data, "only_filter_missing", parent="settings", var_type="bool", default=False),
"show_unmanaged": check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True),
"show_filtered": check_for_attribute(self.data, "show_filtered", parent="settings", var_type="bool", default=False),
"show_missing": check_for_attribute(self.data, "show_missing", parent="settings", var_type="bool", default=True),
"show_missing_assets": check_for_attribute(self.data, "show_missing_assets", parent="settings", var_type="bool", default=True),
"save_missing": check_for_attribute(self.data, "save_missing", parent="settings", var_type="bool", default=True),
"missing_only_released": check_for_attribute(self.data, "missing_only_released", parent="settings", var_type="bool", default=False),
"create_asset_folders": check_for_attribute(self.data, "create_asset_folders", parent="settings", var_type="bool", default=False),
"collection_minimum": check_for_attribute(self.data, "collection_minimum", parent="settings", var_type="int", default=1),
"delete_below_minimum": check_for_attribute(self.data, "delete_below_minimum", parent="settings", var_type="bool", default=False),
"tvdb_language": check_for_attribute(self.data, "tvdb_language", parent="settings", default="default")
"tvdb_language": check_for_attribute(self.data, "tvdb_language", parent="settings", default="default"),
"ignore_ids": check_for_attribute(self.data, "ignore_ids", parent="settings", var_type="int_list", default_is_none=True),
"ignore_imdb_ids": check_for_attribute(self.data, "ignore_imdb_ids", parent="settings", var_type="list", default_is_none=True),
"assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False, save=False, do_print=False)
}
self.webhooks = {
"error": check_for_attribute(self.data, "error", parent="webhooks", var_type="list", default_is_none=True),
"run_start": check_for_attribute(self.data, "run_start", parent="webhooks", var_type="list", default_is_none=True),
"run_end": check_for_attribute(self.data, "run_end", parent="webhooks", var_type="list", default_is_none=True),
"collection_creation": check_for_attribute(self.data, "collection_creation", parent="webhooks", var_type="list", default_is_none=True),
"collection_addition": check_for_attribute(self.data, "collection_addition", parent="webhooks", var_type="list", default_is_none=True),
"collection_removal": check_for_attribute(self.data, "collection_removal", parent="webhooks", var_type="list", default_is_none=True),
"collection_changes": check_for_attribute(self.data, "collection_changes", parent="webhooks", var_type="list", default_is_none=True)
}
if self.general["cache"]:
util.separator()
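Alongside the reordered keys, the settings block gains global ignore lists plus the new only_filter_missing and delete_not_scheduled toggles. A hypothetical settings fragment exercising them (IDs are placeholders):

    settings:
      only_filter_missing: false
      delete_not_scheduled: false
      ignore_ids: 1399, 60735                 # placeholder TMDb/TVDb IDs
      ignore_imdb_ids: tt0944947              # placeholder IMDb ID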
@ -231,7 +265,7 @@ class Config:
self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory)
try:
self.Webhooks.start_time_hooks(self.run_start_time)
self.Webhooks.start_time_hooks(self.start_time)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
@ -408,15 +442,20 @@ class Config:
params["show_missing_assets"] = check_for_attribute(lib, "show_missing_assets", parent="settings", var_type="bool", default=self.general["show_missing_assets"], do_print=False, save=False)
params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
params["missing_only_released"] = check_for_attribute(lib, "missing_only_released", parent="settings", var_type="bool", default=self.general["missing_only_released"], do_print=False, save=False)
params["only_filter_missing"] = check_for_attribute(lib, "only_filter_missing", parent="settings", var_type="bool", default=self.general["only_filter_missing"], do_print=False, save=False)
params["create_asset_folders"] = check_for_attribute(lib, "create_asset_folders", parent="settings", var_type="bool", default=self.general["create_asset_folders"], do_print=False, save=False)
params["show_missing_season_assets"] = check_for_attribute(lib, "show_missing_season_assets", parent="settings", var_type="bool", default=self.general["show_missing_season_assets"], do_print=False, save=False)
params["collection_minimum"] = check_for_attribute(lib, "collection_minimum", parent="settings", var_type="int", default=self.general["collection_minimum"], do_print=False, save=False)
params["delete_below_minimum"] = check_for_attribute(lib, "delete_below_minimum", parent="settings", var_type="bool", default=self.general["delete_below_minimum"], do_print=False, save=False)
params["delete_not_scheduled"] = check_for_attribute(lib, "delete_not_scheduled", parent="settings", var_type="bool", default=self.general["delete_not_scheduled"], do_print=False, save=False)
params["delete_unmanaged_collections"] = check_for_attribute(lib, "delete_unmanaged_collections", parent="settings", var_type="bool", default=False, do_print=False, save=False)
params["delete_collections_with_less"] = check_for_attribute(lib, "delete_collections_with_less", parent="settings", var_type="int", default_is_none=True, do_print=False, save=False)
params["ignore_ids"] = check_for_attribute(lib, "ignore_ids", parent="settings", var_type="int_list", default_is_none=True, do_print=False, save=False)
params["ignore_ids"].extend([i for i in self.general["ignore_ids"] if i not in params["ignore_ids"]])
params["ignore_imdb_ids"] = check_for_attribute(lib, "ignore_imdb_ids", parent="settings", var_type="list", default_is_none=True, do_print=False, save=False)
params["ignore_imdb_ids"].extend([i for i in self.general["ignore_imdb_ids"] if i not in params["ignore_imdb_ids"]])
params["error_webhooks"] = check_for_attribute(lib, "error", parent="webhooks", var_type="list", default=self.webhooks["error"], do_print=False, save=False, default_is_none=True)
params["collection_creation_webhooks"] = check_for_attribute(lib, "collection_creation", parent="webhooks", var_type="list", default=self.webhooks["collection_creation"], do_print=False, save=False, default_is_none=True)
params["collection_addition_webhooks"] = check_for_attribute(lib, "collection_addition", parent="webhooks", var_type="list", default=self.webhooks["collection_addition"], do_print=False, save=False, default_is_none=True)
params["collection_removal_webhooks"] = check_for_attribute(lib, "collection_removal", parent="webhooks", var_type="list", default=self.webhooks["collection_removal"], do_print=False, save=False, default_is_none=True)
params["collection_changes_webhooks"] = check_for_attribute(lib, "collection_changes", parent="webhooks", var_type="list", default=self.webhooks["collection_changes"], do_print=False, save=False, default_is_none=True)
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", parent="settings", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=False)
params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=False)
@ -425,6 +464,8 @@ class Config:
params["split_duplicates"] = check_for_attribute(lib, "split_duplicates", var_type="bool", default=False, save=False, do_print=False)
params["radarr_add_all"] = check_for_attribute(lib, "radarr_add_all", var_type="bool", default=False, save=False, do_print=False)
params["sonarr_add_all"] = check_for_attribute(lib, "sonarr_add_all", var_type="bool", default=False, save=False, do_print=False)
params["tmdb_collections"] = None
params["genre_mapper"] = None
if lib and "operations" in lib and lib["operations"]:
if isinstance(lib["operations"], dict):
@ -448,6 +489,27 @@ class Config:
params["radarr_add_all"] = check_for_attribute(lib["operations"], "radarr_add_all", var_type="bool", default=False, save=False)
if "sonarr_add_all" in lib["operations"]:
params["sonarr_add_all"] = check_for_attribute(lib["operations"], "sonarr_add_all", var_type="bool", default=False, save=False)
if "tmdb_collections" in lib["operations"]:
params["tmdb_collections"] = {"exclude_ids": [], "remove_suffix": None, "template": {"tmdb_collection_details": "<<collection_id>>"}}
if lib["operations"]["tmdb_collections"] and isinstance(lib["operations"]["tmdb_collections"], dict):
params["tmdb_collections"]["exclude_ids"] = check_for_attribute(lib["operations"]["tmdb_collections"], "exclude_ids", var_type="int_list", default_is_none=True, save=False)
params["tmdb_collections"]["remove_suffix"] = check_for_attribute(lib["operations"]["tmdb_collections"], "remove_suffix", default_is_none=True, save=False)
if "template" in lib["operations"]["tmdb_collections"] and lib["operations"]["tmdb_collections"]["template"] and isinstance(lib["operations"]["tmdb_collections"]["template"], dict):
params["tmdb_collections"]["template"] = lib["operations"]["tmdb_collections"]["template"]
else:
logger.warning("Config Warning: Using default template for tmdb_collections")
else:
logger.error("Config Error: tmdb_collections blank using default settings")
if params["tmdb_collections"]["remove_suffix"]:
params["tmdb_collections"]["remove_suffix"] = params["tmdb_collections"]["remove_suffix"].strip()
if "genre_mapper" in lib["operations"]:
if lib["operations"]["genre_mapper"] and isinstance(lib["operations"]["genre_mapper"], dict):
params["genre_mapper"] = {}
for new_genre, old_genres in lib["operations"]["genre_mapper"].items():
for old_genre in util.get_list(old_genres, split=False):
params["genre_mapper"][old_genre] = new_genre
else:
logger.error("Config Error: genre_mapper is blank")
else:
logger.error("Config Error: operations must be a dictionary")
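In the operations hunk above, tmdb_collections builds a collection for every TMDb collection found in the library (falling back to the default tmdb_collection_details template), and genre_mapper is inverted on load so each old genre points at its replacement. A hypothetical operations block (IDs and genre names are placeholders):

    libraries:
      Movies:
        operations:
          tmdb_collections:
            exclude_ids: 86311                # placeholder TMDb collection ID
            remove_suffix: "Collection"
            template:
              tmdb_collection_details: <<collection_id>>
          genre_mapper:
            Comedy: Stand-Up                  # items tagged "Stand-Up" become "Comedy"
            Romance:
              - Romantic Comedy
              - Romantic Drama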
@ -500,7 +562,6 @@ class Config:
"optimize": check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
}
library = Plex(self, params)
logger.info("")
logger.info(f"{display_name} Library Connection Successful")
except Failed as e:
self.errors.append(e)

@ -228,7 +228,8 @@ class Convert:
if check_id.startswith("tvdb"):
tvdb_id.append(int(re.search("-(.*)", check_id).group(1)))
elif check_id.startswith("anidb"):
anidb_id = int(re.search("-(.*)", check_id).group(1))
anidb_str = str(re.search("-(.*)", check_id).group(1))
anidb_id = int(anidb_str[1:] if anidb_str[0] == "a" else anidb_str)
library.anidb_map[anidb_id] = item.ratingKey
else:
raise Failed(f"Hama Agent ID: {check_id} not supported")
@ -304,8 +305,8 @@ class Convert:
logger.debug(f"TMDb: {tmdb_id}, IMDb: {imdb_id}, TVDb: {tvdb_id}")
raise Failed(f"No ID to convert")
except Failed as e:
logger.info(util.adjust_space(f"Mapping Error | {item.guid:<46} | {e} for {item.title}"))
logger.info(util.adjust_space(f'Mapping Error | {item.guid:<46} | {e} for "{item.title}"'))
except BadRequest:
util.print_stacktrace()
logger.info(util.adjust_space(f"Mapping Error | {item.guid:<46} | Bad Request for {item.title}"))
logger.info(util.adjust_space(f'Mapping Error | {item.guid:<46} | Bad Request for "{item.title}"'))
return None, None, None

@ -1,5 +1,4 @@
import logging
from datetime import datetime, timedelta
from modules import util
from modules.util import Failed

@ -34,20 +34,27 @@ class Library(ABC):
self.name = params["name"]
self.original_mapping_name = params["mapping_name"]
self.metadata_path = params["metadata_path"]
self.asset_directory = params["asset_directory"]
self.asset_directory = params["asset_directory"] if params["asset_directory"] else []
self.default_dir = params["default_dir"]
self.mapping_name, output = util.validate_filename(self.original_mapping_name)
self.image_table_name = self.config.Cache.get_image_table_name(self.original_mapping_name) if self.config.Cache else None
self.missing_path = os.path.join(self.default_dir, f"{self.original_mapping_name}_missing.yml")
self.missing_path = os.path.join(self.default_dir, f"{self.mapping_name}_missing.yml")
self.asset_folders = params["asset_folders"]
self.create_asset_folders = params["create_asset_folders"]
self.show_missing_season_assets = params["show_missing_season_assets"]
self.sync_mode = params["sync_mode"]
self.collection_minimum = params["collection_minimum"]
self.delete_below_minimum = params["delete_below_minimum"]
self.delete_not_scheduled = params["delete_not_scheduled"]
self.missing_only_released = params["missing_only_released"]
self.show_unmanaged = params["show_unmanaged"]
self.show_filtered = params["show_filtered"]
self.show_missing = params["show_missing"]
self.show_missing_assets = params["show_missing_assets"]
self.save_missing = params["save_missing"]
self.missing_only_released = params["missing_only_released"]
self.create_asset_folders = params["create_asset_folders"]
self.only_filter_missing = params["only_filter_missing"]
self.ignore_ids = params["ignore_ids"]
self.ignore_imdb_ids = params["ignore_imdb_ids"]
self.assets_for_all = params["assets_for_all"]
self.delete_unmanaged_collections = params["delete_unmanaged_collections"]
self.delete_collections_with_less = params["delete_collections_with_less"]
@ -57,17 +64,18 @@ class Library(ABC):
self.mass_trakt_rating_update = params["mass_trakt_rating_update"]
self.radarr_add_all = params["radarr_add_all"]
self.sonarr_add_all = params["sonarr_add_all"]
self.collection_minimum = params["collection_minimum"]
self.delete_below_minimum = params["delete_below_minimum"]
self.tmdb_collections = params["tmdb_collections"]
self.genre_mapper = params["genre_mapper"]
self.error_webhooks = params["error_webhooks"]
self.collection_creation_webhooks = params["collection_creation_webhooks"]
self.collection_addition_webhooks = params["collection_addition_webhooks"]
self.collection_removal_webhooks = params["collection_removal_webhooks"]
self.collection_changes_webhooks = params["collection_changes_webhooks"]
self.split_duplicates = params["split_duplicates"] # TODO: Here or just in Plex?
self.clean_bundles = params["plex"]["clean_bundles"] # TODO: Here or just in Plex?
self.empty_trash = params["plex"]["empty_trash"] # TODO: Here or just in Plex?
self.optimize = params["plex"]["optimize"] # TODO: Here or just in Plex?
self.library_operation = self.assets_for_all or self.delete_unmanaged_collections or self.delete_collections_with_less \
or self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update \
or self.mass_trakt_rating_update or self.radarr_add_all or self.sonarr_add_all \
or self.tmdb_collections or self.genre_mapper
metadata = []
for file_type, metadata_file in self.metadata_path:
if file_type == "Folder":
@ -92,9 +100,9 @@ class Library(ABC):
except Failed as e:
util.print_multiline(e, error=True)
if len(self.metadata_files) == 0:
if len(self.metadata_files) == 0 and not self.library_operation:
logger.info("")
raise Failed("Metadata File Error: No valid metadata files found")
raise Failed("Config Error: No valid metadata files or library operations found")
if self.asset_directory:
logger.info("")

@ -15,8 +15,6 @@ class Metadata:
self.library = library
self.type = file_type
self.path = path
logger.info("")
logger.info(f"Loading Metadata {file_type}: {path}")
def get_dict(attribute, attr_data, check_list=None):
if check_list is None:
check_list = []
@ -35,30 +33,37 @@ class Metadata:
else:
logger.warning(f"Config Warning: {attribute} attribute is blank")
return None
try:
if file_type in ["URL", "Git"]:
content_path = path if file_type == "URL" else f"{github_base}{path}.yml"
response = self.config.get(content_path)
if response.status_code >= 400:
raise Failed(f"URL Error: No file found at {content_path}")
content = response.content
elif os.path.exists(os.path.abspath(path)):
content = open(path, encoding="utf-8")
else:
raise Failed(f"File Error: File does not exist {path}")
data, ind, bsi = yaml.util.load_yaml_guess_indent(content)
self.metadata = get_dict("metadata", data, library.metadatas)
self.templates = get_dict("templates", data)
self.collections = get_dict("collections", data, library.collections)
if self.metadata is None and self.collections is None:
raise Failed("YAML Error: metadata or collections attribute is required")
logger.info(f"Metadata File Loaded Successfully")
except yaml.scanner.ScannerError as ye:
raise Failed(f"YAML Error: {util.tab_new_lines(ye)}")
except Exception as e:
util.print_stacktrace()
raise Failed(f"YAML Error: {e}")
if file_type == "Data":
self.metadata = None
self.collections = get_dict("collections", path, library.collections)
self.templates = get_dict("templates", path)
else:
try:
logger.info("")
logger.info(f"Loading Metadata {file_type}: {path}")
if file_type in ["URL", "Git"]:
content_path = path if file_type == "URL" else f"{github_base}{path}.yml"
response = self.config.get(content_path)
if response.status_code >= 400:
raise Failed(f"URL Error: No file found at {content_path}")
content = response.content
elif os.path.exists(os.path.abspath(path)):
content = open(path, encoding="utf-8")
else:
raise Failed(f"File Error: File does not exist {path}")
data, ind, bsi = yaml.util.load_yaml_guess_indent(content)
self.metadata = get_dict("metadata", data, library.metadatas)
self.templates = get_dict("templates", data)
self.collections = get_dict("collections", data, library.collections)
if self.metadata is None and self.collections is None:
raise Failed("YAML Error: metadata or collections attribute is required")
logger.info(f"Metadata File Loaded Successfully")
except yaml.scanner.ScannerError as ye:
raise Failed(f"YAML Error: {util.tab_new_lines(ye)}")
except Exception as e:
util.print_stacktrace()
raise Failed(f"YAML Error: {e}")
def get_collections(self, requested_collections):
if requested_collections:
@ -139,7 +144,7 @@ class Metadata:
if extra:
add_tags.extend(extra)
remove_tags = util.get_list(group[alias[f"{attr}.remove"]]) if f"{attr}.remove" in alias else None
sync_tags = util.get_list(group[alias[f"{attr}.sync"]]) if f"{attr}.sync" in alias else None
sync_tags = util.get_list(group[alias[f"{attr}.sync"]] if group[alias[f"{attr}.sync"]] else []) if f"{attr}.sync" in alias else None
return self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
return False

@ -25,6 +25,7 @@ class Notifiarr:
def get_url(self, path):
url = f"{dev_url if self.develop else base_url}{'notification/test' if self.test else f'{path}{self.apikey}'}"
logger.debug(url.replace(self.apikey, "APIKEY"))
if self.config.trace_mode:
logger.debug(url.replace(self.apikey, "APIKEY"))
params = {"event": "pmm" if self.test else "collections"}
return url, params

@ -260,6 +260,13 @@ class Plex(Library):
self.is_other = self.agent == "com.plexapp.agents.none"
if self.is_other:
self.type = "Video"
if self.tmdb_collections and self.is_show:
self.tmdb_collections = None
logger.error("Config Error: tmdb_collections only work with Movie Libraries.")
def set_server_preroll(self, preroll):
self.PlexServer.settings.get('cinemaTrailersPrerollID').set(preroll)
self.PlexServer.settings.save()
def get_all_collections(self):
return self.search(libtype="collection")
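set_server_preroll above backs the new server_preroll collection detail parsed in builder.py: it writes Plex's cinemaTrailersPrerollID server setting. Combined with the new range scheduling, this allows seasonal prerolls; a hypothetical entry (the path is a placeholder, and build_collection: false is assumed here to skip building an actual collection):

    collections:
      Holiday Preroll:
        schedule: range(12/01-12/25)
        server_preroll: /prerolls/holiday.mp4
        build_collection: false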
@ -592,7 +599,7 @@ class Plex(Library):
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
display = ""
key = builder.filter_translation[attr] if attr in builder.filter_translation else attr
if add_tags or remove_tags or sync_tags:
if add_tags or remove_tags or sync_tags is not None:
_add_tags = add_tags if add_tags else []
_remove_tags = [t.lower() for t in remove_tags] if remove_tags else []
_sync_tags = [t.lower() for t in sync_tags] if sync_tags else []
@ -602,7 +609,7 @@ class Plex(Library):
except BadRequest:
_item_tags = []
_add = [f"{t[:1].upper()}{t[1:]}" for t in _add_tags + _sync_tags if t.lower() not in _item_tags]
_remove = [t for t in _item_tags if (_sync_tags and t not in _sync_tags) or t in _remove_tags]
_remove = [t for t in _item_tags if (sync_tags is not None and t not in _sync_tags) or t in _remove_tags]
if _add:
self.query_data(getattr(obj, f"add{attr.capitalize()}"), _add)
display += f"+{', +'.join(_add)}"
@ -644,6 +651,8 @@ class Plex(Library):
if poster or background:
self.upload_images(item, poster=poster, background=background, overlay=overlay)
if self.is_show:
missing_assets = ""
found_season = False
for season in self.query(item.seasons):
season_name = f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}"
if item_dir:
@ -652,11 +661,14 @@ class Plex(Library):
else:
season_poster_filter = os.path.join(ad, f"{name}_{season_name}.*")
season_background_filter = os.path.join(ad, f"{name}_{season_name}_background.*")
matches = util.glob_filter(season_poster_filter)
season_poster = None
season_background = None
matches = util.glob_filter(season_poster_filter)
if len(matches) > 0:
season_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Season {season.seasonNumber}'s ", is_url=False)
found_season = True
elif season.seasonNumber > 0:
missing_assets += f"\nMissing Season {season.seasonNumber} Poster"
matches = util.glob_filter(season_background_filter)
if len(matches) > 0:
season_background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Season {season.seasonNumber}'s ", is_poster=False, is_url=False)
@ -671,6 +683,8 @@ class Plex(Library):
if len(matches) > 0:
episode_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} {episode.seasonEpisode.upper()}'s ", is_url=False)
self.upload_images(episode, poster=episode_poster)
if self.show_missing_season_assets and found_season and missing_assets:
util.print_multiline(f"Missing Season Posters for {item.title}{missing_assets}", info=True)
if not poster and overlay:
self.upload_images(item, overlay=overlay)
if create and self.asset_folders and not found_folder:

@ -19,6 +19,7 @@ class Radarr:
try:
self.api = RadarrAPI(self.url, self.token, session=self.config.session)
self.api.respect_list_exclusions_when_adding()
self.api._validate_add_options(params["root_folder_path"], params["quality_profile"])
except ArrException as e:
raise Failed(e)
self.add = params["add"]
@ -53,10 +54,21 @@ class Radarr:
tags = options["tag"] if "tag" in options else self.tag
search = options["search"] if "search" in options else self.search
arr_paths = {}
arr_ids = {}
for movie in self.api.all_movies():
if movie.path:
arr_paths[movie.path] = movie.tmdbId
arr_ids[movie.tmdbId] = movie
added = []
exists = []
skipped = []
invalid = []
movies = []
path_lookup = {}
mismatched = {}
path_in_use = {}
for i, item in enumerate(tmdb_ids, 1):
path = item[1] if isinstance(item, tuple) else None
tmdb_id = item[0] if isinstance(item, tuple) else item
@ -64,11 +76,24 @@ class Radarr:
if self.config.Cache:
_id = self.config.Cache.query_radarr_adds(tmdb_id, self.library.original_mapping_name)
if _id:
exists.append(item)
skipped.append(item)
continue
try:
if tmdb_id in arr_ids:
exists.append(arr_ids[tmdb_id])
continue
if path in arr_paths:
mismatched[path] = tmdb_id
continue
movie = self.api.get_movie(tmdb_id=tmdb_id)
movies.append((movie, path) if path else movie)
if f"{folder}/{movie.folder}" in arr_paths:
path_in_use[f"{folder}/{movie.folder}"] = tmdb_id
continue
if path:
movies.append((movie, path))
path_lookup[path] = tmdb_id
else:
movies.append(movie)
except ArrException:
invalid.append(item)
if len(movies) == 100 or len(tmdb_ids) == i:
@ -90,18 +115,37 @@ class Radarr:
self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name)
logger.info(f"{len(added)} Movie{'s' if len(added) > 1 else ''} added to Radarr")
if len(exists) > 0:
if len(exists) > 0 or len(skipped) > 0:
logger.info("")
for movie in exists:
logger.info(f"Already in Radarr | {movie.tmdbId:<6} | {movie.title}")
if self.config.Cache:
self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name)
logger.info(f"{len(exists)} Movie{'s' if len(exists) > 1 else ''} already existing in Radarr")
if len(exists) > 0:
for movie in exists:
logger.info(f"Already in Radarr | {movie.tmdbId:<6} | {movie.title}")
if self.config.Cache:
self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name)
if len(skipped) > 0:
for movie in skipped:
logger.info(f"Skipped: In Cache | {movie}")
logger.info(f"{len(exists) + len(skipped)} Movie{'s' if len(exists) + len(skipped) > 1 else ''} already exist in Radarr")
if len(mismatched) > 0:
logger.info("")
logger.info("Items in Plex that have already been added to Radarr but under a different TMDb ID than in Plex")
for path, tmdb_id in mismatched.items():
logger.info(f"Plex TMDb ID: {tmdb_id:<7} | Radarr TMDb ID: {arr_paths[path]:<7} | Path: {path}")
logger.info(f"{len(mismatched)} Movie{'s' if len(mismatched) > 1 else ''} with mismatched TMDb IDs")
if len(path_in_use) > 0:
logger.info("")
logger.info("TMDb IDs that cannot be added to Radarr because the path they will use is already in use by a different TMDb ID")
for path, tmdb_id in path_in_use.items():
logger.info(f"TMDb ID: {tmdb_id:<7} | Radarr TMDb ID: {arr_paths[path]:<7} | Path: {path}")
logger.info(f"{len(path_in_use)} Movie{'s' if len(path_in_use) > 1 else ''} with paths already in use by other TMDb IDs")
if len(invalid) > 0:
logger.info("")
for tmdb_id in invalid:
logger.info(f"Invalid TMDb ID | {tmdb_id}")
logger.info(f"{len(invalid)} Movie{'s' if len(invalid) > 1 else ''} with Invalid IDs")
return len(added)

@ -37,6 +37,7 @@ class Sonarr:
try:
self.api = SonarrAPI(self.url, self.token, session=self.config.session)
self.api.respect_list_exclusions_when_adding()
self.api._validate_add_options(params["root_folder_path"], params["quality_profile"], params["language_profile"])
except ArrException as e:
raise Failed(e)
self.add = params["add"]
@ -79,10 +80,21 @@ class Sonarr:
search = options["search"] if "search" in options else self.search
cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search
arr_paths = {}
arr_ids = {}
for series in self.api.all_series():
if series.path:
arr_paths[series.path] = series.tvdbId
arr_ids[series.tvdbId] = series
added = []
exists = []
skipped = []
invalid = []
shows = []
path_lookup = {}
mismatched = {}
path_in_use = {}
for i, item in enumerate(tvdb_ids, 1):
path = item[1] if isinstance(item, tuple) else None
tvdb_id = item[0] if isinstance(item, tuple) else item
@ -90,11 +102,24 @@ class Sonarr:
if self.config.Cache:
_id = self.config.Cache.query_sonarr_adds(tvdb_id, self.library.original_mapping_name)
if _id:
exists.append(item)
skipped.append(item)
continue
try:
if tvdb_id in arr_ids:
exists.append(arr_ids[tvdb_id])
continue
if path in arr_paths:
mismatched[path] = tvdb_id
continue
show = self.api.get_series(tvdb_id=tvdb_id)
shows.append((show, path) if path else show)
if f"{folder}/{show.folder}" in arr_paths:
path_in_use[f"{folder}/{show.folder}"] = tvdb_id
continue
if path:
shows.append((show, path))
path_lookup[path] = tvdb_id
else:
shows.append(show)
except ArrException:
invalid.append(item)
if len(shows) == 100 or len(tvdb_ids) == i:
@ -116,18 +141,37 @@ class Sonarr:
self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name)
logger.info(f"{len(added)} Series added to Sonarr")
if len(exists) > 0:
if len(exists) > 0 or len(skipped) > 0:
logger.info("")
for series in exists:
logger.info(f"Already in Sonarr | {series.tvdbId:<6} | {series.title}")
if self.config.Cache:
self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name)
logger.info(f"{len(exists)} Series already existing in Sonarr")
if len(exists) > 0:
for series in exists:
logger.info(f"Already in Sonarr | {series.tvdbId:<6} | {series.title}")
if self.config.Cache:
self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name)
if len(skipped) > 0:
for series in skipped:
logger.info(f"Skipped: In Cache | {series}")
logger.info(f"{len(exists) + len(skipped)} Series already exist in Sonarr")
if len(mismatched) > 0:
logger.info("")
logger.info("Items in Plex that have already been added to Sonarr but under a different TVDb ID then in Plex")
for path, tvdb_id in mismatched.items():
logger.info(f"Plex TVDb ID: {tvdb_id:<7} | Sonarr TVDb ID: {arr_paths[path]:<7} | Path: {path}")
logger.info(f"{len(mismatched)} Series with mismatched TVDb IDs")
if len(path_in_use) > 0:
logger.info("")
logger.info("TVDb IDs that cannot be added to Sonarr because the path they will use is already in use by a different TVDb ID")
for path, tvdb_id in path_in_use.items():
logger.info(f"TVDb ID: {tvdb_id:<7} | Sonarr TVDb ID: {arr_paths[path]:<7} | Path: {path}")
logger.info(f"{len(path_in_use)} Series with paths already in use by other TVDb IDs")
if len(invalid) > 0:
logger.info("")
for tvdb_id in invalid:
logger.info(f"Invalid TVDb ID | {tvdb_id}")
logger.info(f"{len(invalid)} Series with Invalid IDs")
return len(added)
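The pre-loop added to Sonarr builds two lookup maps over everything the server already tracks, so the existence and path checks inside the loop are O(1). A small sketch of the same idea with a stand-in Series class (the class and sample data are illustrative):

# Sketch: the two lookup maps the Sonarr hunk builds before the add loop.
# arr_paths answers "is this folder already claimed, and by which ID?";
# arr_ids answers "is this TVDb ID already in Sonarr?".
class Series:
    def __init__(self, tvdbId, path, title):
        self.tvdbId, self.path, self.title = tvdbId, path, title

all_series = [Series(81189, "/tv/Breaking Bad", "Breaking Bad")]
arr_paths, arr_ids = {}, {}
for series in all_series:
    if series.path:
        arr_paths[series.path] = series.tvdbId
    arr_ids[series.tvdbId] = series

print(81189 in arr_ids)                   # True  -> goes to the "exists" bucket
print(arr_paths.get("/tv/Breaking Bad"))  # 81189 -> feeds the mismatch check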

@ -202,6 +202,8 @@ class Trakt:
values = util.get_list(trakt_lists, split=False)
trakt_values = []
for value in values:
if isinstance(value, dict):
raise Failed("Trakt Error: List cannot be a dictionary")
try:
if trakt_type == "list":
self._user_list(value)
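The new Trakt guard rejects dictionary values before any lookup is attempted, so a malformed YAML mapping fails fast with a clear message. A self-contained sketch of that validation, with a local Failed stand-in for the module's exception:

# Sketch: fail fast on mapping values before resolving Trakt lists.
# Failed here is a local stand-in for modules.util.Failed.
class Failed(Exception):
    pass

def validate_trakt_values(values):
    for value in values:
        if isinstance(value, dict):
            raise Failed("Trakt Error: List cannot be a dictionary")
    return values

validate_trakt_values(["https://trakt.tv/users/x/lists/y"])  # passes
try:
    validate_trakt_values([{"url": "https://trakt.tv/..."}])
except Failed as e:
    print(e)  # Trakt Error: List cannot be a dictionary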

@ -200,7 +200,10 @@ def separator(text=None, space=True, border=True, debug=False):
if text:
text_list = text.split("\n")
for t in text_list:
logger.info(f"|{sep}{centered(t, sep=sep)}{sep}|")
if debug:
logger.debug(f"|{sep}{centered(t, sep=sep)}{sep}|")
else:
logger.info(f"|{sep}{centered(t, sep=sep)}{sep}|")
if border and debug:
logger.debug(border_text)
elif border:
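The separator() change routes the same formatted line to either debug or info depending on a flag, instead of duplicating the format string. A tiny sketch of that pattern using the stdlib logging module:

# Sketch: pick the log level per call, as separator(debug=...) now does.
import logging

logging.basicConfig(level=logging.DEBUG, format="%(message)s")
logger = logging.getLogger("pmm-sketch")

def log_line(text, debug=False):
    (logger.debug if debug else logger.info)(f"| {text} |")

log_line("Starting Run")                      # emitted at INFO
log_line("--config: config.yml", debug=True)  # emitted at DEBUG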
@ -330,6 +333,13 @@ def parse(attribute, data, datatype=None, methods=None, parent=None, default=Non
if value:
return [v for v in value if v] if isinstance(value, list) else [str(value)]
return []
elif datatype == "intlist":
if value:
try:
return [int(v) for v in value if v] if isinstance(value, list) else [int(value)]
except ValueError:
pass
return []
elif datatype == "dictlist":
final_list = []
for dict_data in get_list(value):
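The new intlist branch coerces a scalar or a list into a list of ints, skipping falsy entries and degrading to an empty list on bad input. A sketch of just that branch as a free function:

# Sketch: the "intlist" coercion added to parse() above.
def parse_intlist(value):
    if value:
        try:
            return [int(v) for v in value if v] if isinstance(value, list) else [int(value)]
        except ValueError:
            pass
    return []

print(parse_intlist("5"))             # [5]
print(parse_intlist([1, "2", None]))  # [1, 2]
print(parse_intlist("not a number"))  # []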

@ -43,12 +43,13 @@ class Webhooks:
def start_time_hooks(self, start_time):
if self.run_start_webhooks:
self._request(self.run_start_webhooks, {"start_time": start_time})
self._request(self.run_start_webhooks, {"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S")})
def end_time_hooks(self, start_time, run_time, stats):
def end_time_hooks(self, start_time, end_time, run_time, stats):
if self.run_end_webhooks:
self._request(self.run_end_webhooks, {
"start_time": start_time.strftime("%Y-%m-%dT%H:%M:%SZ"),
"start_time": start_time.strftime("%Y-%m-%d %H:%M:%S"),
"end_time": end_time.strftime("%Y-%m-%d %H:%M:%S"),
"run_time": run_time,
"collections_created": stats["created"],
"collections_modified": stats["modified"],

@ -6,6 +6,7 @@ try:
from modules import util
from modules.builder import CollectionBuilder
from modules.config import Config
from modules.meta import Metadata
from modules.util import Failed, NotScheduled
except ModuleNotFoundError:
print("Requirements Error: Requirements are not installed")
@ -50,21 +51,21 @@ def get_arg(env_str, default, arg_bool=False, arg_int=False):
else:
return default
test = get_arg("PMM_TEST", args.test, arg_bool=True)
debug = get_arg("PMM_DEBUG", args.debug, arg_bool=True)
trace = get_arg("PMM_TRACE", args.trace, arg_bool=True)
config_file = get_arg("PMM_CONFIG", args.config)
times = get_arg("PMM_TIME", args.times)
run = get_arg("PMM_RUN", args.run, arg_bool=True)
no_countdown = get_arg("PMM_NO_COUNTDOWN", args.no_countdown, arg_bool=True)
no_missing = get_arg("PMM_NO_MISSING", args.no_missing, arg_bool=True)
library_only = get_arg("PMM_LIBRARIES_ONLY", args.library_only, arg_bool=True)
test = get_arg("PMM_TEST", args.test, arg_bool=True)
collection_only = get_arg("PMM_COLLECTIONS_ONLY", args.collection_only, arg_bool=True)
library_only = get_arg("PMM_LIBRARIES_ONLY", args.library_only, arg_bool=True)
collections = get_arg("PMM_COLLECTIONS", args.collections)
libraries = get_arg("PMM_LIBRARIES", args.libraries)
resume = get_arg("PMM_RESUME", args.resume)
times = get_arg("PMM_TIME", args.times)
no_countdown = get_arg("PMM_NO_COUNTDOWN", args.no_countdown, arg_bool=True)
no_missing = get_arg("PMM_NO_MISSING", args.no_missing, arg_bool=True)
divider = get_arg("PMM_DIVIDER", args.divider)
screen_width = get_arg("PMM_WIDTH", args.width, arg_int=True)
config_file = get_arg("PMM_CONFIG", args.config)
debug = get_arg("PMM_DEBUG", args.debug, arg_bool=True)
trace = get_arg("PMM_TRACE", args.trace, arg_bool=True)
stats = {}
util.separating_character = divider[0]
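Each flag above resolves from its PMM_* environment variable first and falls back to the argparse value. A simplified, illustrative re-implementation of that resolution order; the boolean string parsing here is an assumption, not the source's exact logic:

# Sketch: environment variable wins, otherwise the CLI/argparse default.
import os

def get_arg(env_str, default, arg_bool=False, arg_int=False):
    env_value = os.environ.get(env_str)
    if env_value is None:
        return default
    if arg_bool:
        return env_value.lower() in ("1", "true", "t", "yes", "y")
    if arg_int:
        return int(env_value)
    return str(env_value)

os.environ["PMM_DEBUG"] = "true"
print(get_arg("PMM_DEBUG", False, arg_bool=True))  # True (env var wins)
print(get_arg("PMM_WIDTH", 100, arg_int=True))     # 100 (falls back to default)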
@ -135,6 +136,24 @@ def start(attrs):
start_time = datetime.now()
if "time" not in attrs:
attrs["time"] = start_time.strftime("%H:%M")
attrs["time_obj"] = start_time
util.separator(debug=True)
logger.debug(f"--config (PMM_CONFIG): {config_file}")
logger.debug(f"--time (PMM_TIME): {times}")
logger.debug(f"--run (PMM_RUN): {run}")
logger.debug(f"--run-tests (PMM_TEST): {test}")
logger.debug(f"--collections-only (PMM_COLLECTIONS_ONLY): {collection_only}")
logger.debug(f"--libraries-only (PMM_LIBRARIES_ONLY): {library_only}")
logger.debug(f"--run-collections (PMM_COLLECTIONS): {collections}")
logger.debug(f"--run-libraries (PMM_LIBRARIES): {libraries}")
logger.debug(f"--resume (PMM_RESUME): {resume}")
logger.debug(f"--no-countdown (PMM_NO_COUNTDOWN): {no_countdown}")
logger.debug(f"--no-missing (PMM_NO_MISSING): {no_missing}")
logger.debug(f"--divider (PMM_DIVIDER): {divider}")
logger.debug(f"--width (PMM_WIDTH): {screen_width}")
logger.debug(f"--debug (PMM_DEBUG): {debug}")
logger.debug(f"--trace (PMM_TRACE): {trace}")
logger.debug("")
util.separator(f"Starting {start_type}Run")
config = None
global stats
@ -152,10 +171,11 @@ def start(attrs):
util.print_stacktrace()
util.print_multiline(e, critical=True)
logger.info("")
run_time = str(datetime.now() - start_time).split('.')[0]
end_time = datetime.now()
run_time = str(end_time - start_time).split('.')[0]
if config:
try:
config.Webhooks.end_time_hooks(start_time, run_time, stats)
config.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
@ -178,12 +198,48 @@ def update_libraries(config):
plexapi.server.TIMEOUT = library.timeout
logger.info("")
util.separator(f"{library.name} Library")
items = None
logger.debug("")
logger.debug(f"Mapping Name: {library.original_mapping_name}")
logger.debug(f"Folder Name: {library.mapping_name}")
logger.debug(f"Missing Path: {library.missing_path}")
for ad in library.asset_directory:
logger.debug(f"Asset Directory: {ad}")
logger.debug(f"Asset Folders: {library.asset_folders}")
logger.debug(f"Create Asset Folders: {library.create_asset_folders}")
logger.debug(f"Sync Mode: {library.sync_mode}")
logger.debug(f"Collection Minimum: {library.collection_minimum}")
logger.debug(f"Delete Below Minimum: {library.delete_below_minimum}")
logger.debug(f"Delete Not Scheduled: {library.delete_not_scheduled}")
logger.debug(f"Missing Only Released: {library.missing_only_released}")
logger.debug(f"Only Filter Missing: {library.only_filter_missing}")
logger.debug(f"Show Unmanaged: {library.show_unmanaged}")
logger.debug(f"Show Filtered: {library.show_filtered}")
logger.debug(f"Show Missing: {library.show_missing}")
logger.debug(f"Show Missing Assets: {library.show_missing_assets}")
logger.debug(f"Save Missing: {library.save_missing}")
logger.debug(f"Assets For All: {library.assets_for_all}")
logger.debug(f"Delete Collections With Less: {library.delete_collections_with_less}")
logger.debug(f"Delete Unmanaged Collections: {library.delete_unmanaged_collections}")
logger.debug(f"Mass Genre Update: {library.mass_genre_update}")
logger.debug(f"Mass Audience Rating Update: {library.mass_audience_rating_update}")
logger.debug(f"Mass Critic Rating Update: {library.mass_critic_rating_update}")
logger.debug(f"Mass Trakt Rating Update: {library.mass_trakt_rating_update}")
logger.debug(f"Split Duplicates: {library.split_duplicates}")
logger.debug(f"Radarr Add All: {library.radarr_add_all}")
logger.debug(f"Sonarr Add All: {library.sonarr_add_all}")
logger.debug(f"TMDb Collections: {library.tmdb_collections}")
logger.debug(f"Genre Mapper: {library.genre_mapper}")
logger.debug(f"Clean Bundles: {library.clean_bundles}")
logger.debug(f"Empty Trash: {library.empty_trash}")
logger.debug(f"Optimize: {library.optimize}")
logger.debug(f"Timeout: {library.timeout}")
if not library.is_other:
logger.info("")
util.separator(f"Mapping {library.name} Library", space=False, border=False)
logger.info("")
items = library.map_guids()
library.map_guids()
for metadata in library.metadata_files:
logger.info("")
util.separator(f"Running Metadata File\n{metadata.path}")
@ -215,7 +271,7 @@ def update_libraries(config):
builder.sort_collection()
if not config.test_mode and not collection_only:
library_operations(config, library, items=items)
library_operations(config, library)
logger.removeHandler(library_handler)
except Exception as e:
@ -278,10 +334,26 @@ def update_libraries(config):
if library.optimize:
library.query(library.PlexServer.library.optimize)
def library_operations(config, library, items=None):
def library_operations(config, library):
logger.info("")
util.separator(f"{library.name} Library Operations")
logger.info("")
logger.debug(f"Assets For All: {library.assets_for_all}")
logger.debug(f"Delete Collections With Less: {library.delete_collections_with_less}")
logger.debug(f"Delete Unmanaged Collections: {library.delete_unmanaged_collections}")
logger.debug(f"Mass Genre Update: {library.mass_genre_update}")
logger.debug(f"Mass Audience Rating Update: {library.mass_audience_rating_update}")
logger.debug(f"Mass Critic Rating Update: {library.mass_critic_rating_update}")
logger.debug(f"Mass Trakt Rating Update: {library.mass_trakt_rating_update}")
logger.debug(f"Split Duplicates: {library.split_duplicates}")
logger.debug(f"Radarr Add All: {library.radarr_add_all}")
logger.debug(f"Sonarr Add All: {library.sonarr_add_all}")
logger.debug(f"TMDb Collections: {library.tmdb_collections}")
logger.debug(f"Genre Mapper: {library.genre_mapper}")
tmdb_operation = library.assets_for_all or library.mass_genre_update or library.mass_audience_rating_update \
or library.mass_critic_rating_update or library.mass_trakt_rating_update \
or library.tmdb_collections or library.radarr_add_all or library.sonarr_add_all
logger.debug(f"TMDb Operation: {tmdb_operation}")
if library.split_duplicates:
items = library.search(**{"duplicate": True})
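The new tmdb_operation flag collapses the per-library toggles into a single boolean that gates the expensive library.get_all() call, so items are fetched once and only when at least one TMDb-driven operation is enabled. A sketch with an illustrative Lib stand-in:

# Sketch: one gate for all TMDb-driven library operations.
# Lib and its attribute values are illustrative stand-ins.
class Lib:
    assets_for_all = False
    mass_genre_update = None
    mass_audience_rating_update = None
    mass_critic_rating_update = "tmdb"
    mass_trakt_rating_update = False
    tmdb_collections = None
    radarr_add_all = False
    sonarr_add_all = False

def needs_tmdb_pass(lib):
    return bool(lib.assets_for_all or lib.mass_genre_update
                or lib.mass_audience_rating_update or lib.mass_critic_rating_update
                or lib.mass_trakt_rating_update or lib.tmdb_collections
                or lib.radarr_add_all or lib.sonarr_add_all)

print(needs_tmdb_pass(Lib()))  # True -> fetch items once and reuse them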
@ -289,12 +361,11 @@ def library_operations(config, library, items=None):
item.split()
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Splitting"))
if library.assets_for_all or library.mass_genre_update or library.mass_audience_rating_update or \
library.mass_critic_rating_update or library.mass_trakt_rating_update or library.radarr_add_all or library.sonarr_add_all:
if items is None:
items = library.get_all()
if tmdb_operation:
items = library.get_all()
radarr_adds = []
sonarr_adds = []
tmdb_collections = {}
trakt_ratings = config.Trakt.user_ratings(library.is_movie) if library.mass_trakt_rating_update else []
for i, item in enumerate(items, 1):
@ -344,7 +415,7 @@ def library_operations(config, library, items=None):
sonarr_adds.append((tvdb_id, f"{path.replace(library.Sonarr.plex_path, library.Sonarr.sonarr_path)}/"))
tmdb_item = None
if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
if library.tmdb_collections or library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
if tvdb_id and not tmdb_id:
tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id)
if tmdb_id:
@ -383,8 +454,8 @@ def library_operations(config, library, items=None):
else:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}"))
if not tmdb_item and not omdb_item and not tvdb_item:
continue
if library.tmdb_collections and tmdb_item and tmdb_item.belongs_to_collection:
tmdb_collections[tmdb_item.belongs_to_collection.id] = tmdb_item.belongs_to_collection.name
if library.mass_genre_update:
try:
@ -431,6 +502,18 @@ def library_operations(config, library, items=None):
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))
except Failed:
pass
if library.genre_mapper:
try:
adds = []
deletes = []
library.reload(item)
for genre in item.genres:
if genre.tag in library.genre_mapper:
deletes.append(genre.tag)
adds.append(library.genre_mapper[genre.tag])
library.edit_tags("genre", item, add_tags=adds, remove_tags=deletes)
except Failed:
pass
if library.Radarr and library.radarr_add_all:
try:
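The genre_mapper pass above collects matched tags to delete and their mapped replacements to add, then applies both in a single edit_tags call. A sketch of that mapping step over plain lists and dicts, without the Plex reload/edit plumbing:

# Sketch: compute the add/delete lists the genre_mapper pass builds.
def map_genres(current_genres, genre_mapper):
    adds, deletes = [], []
    for genre in current_genres:
        if genre in genre_mapper:
            deletes.append(genre)
            adds.append(genre_mapper[genre])
    return adds, deletes

adds, deletes = map_genres(["Sci-Fi", "Action"], {"Sci-Fi": "Science Fiction"})
print(adds, deletes)  # ['Science Fiction'] ['Sci-Fi']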
@ -444,6 +527,22 @@ def library_operations(config, library, items=None):
except Failed as e:
logger.error(e)
if tmdb_collections:
logger.info("")
util.separator(f"Starting TMDb Collections")
logger.info("")
metadata = Metadata(config, library, "Data", {
"collections": {
_n.replace(library.tmdb_collections["remove_suffix"], "").strip() if library.tmdb_collections["remove_suffix"] else _n:
{"template": {"name": "TMDb Collection", "collection_id": _i}}
for _i, _n in tmdb_collections.items() if int(_i) not in library.tmdb_collections["exclude_ids"]
},
"templates": {
"TMDb Collection": library.tmdb_collections["template"]
}
})
run_collection(config, library, metadata, metadata.get_collections(None))
if library.delete_collections_with_less is not None or library.delete_unmanaged_collections:
logger.info("")
suffix = ""
@ -541,7 +640,7 @@ def run_collection(config, library, metadata, requested_collections):
builder = CollectionBuilder(config, library, metadata, mapping_name, no_missing, collection_attrs)
logger.info("")
util.separator(f"Building {mapping_name} Collection", space=False, border=False)
util.separator(f"Running {mapping_name} Collection", space=False, border=False)
if len(builder.schedule) > 0:
util.print_multiline(builder.schedule, info=True)
@ -552,7 +651,7 @@ def run_collection(config, library, metadata, requested_collections):
items_added = 0
items_removed = 0
if not builder.smart_url:
if not builder.smart_url and builder.builders:
logger.info("")
logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
@ -594,7 +693,7 @@ def run_collection(config, library, metadata, requested_collections):
stats["sonarr"] += sonarr_add
run_item_details = True
if builder.build_collection:
if builder.build_collection and builder.builders:
try:
builder.load_collection()
if builder.created:
@ -612,9 +711,14 @@ def run_collection(config, library, metadata, requested_collections):
library.run_sort.append(builder)
# builder.sort_collection()
if builder.server_preroll is not None:
library.set_server_preroll(builder.server_preroll)
logger.info("")
logger.info(f"Plex Server Movie pre-roll video updated to {builder.server_preroll}")
builder.send_notifications()
if builder.item_details and run_item_details:
if builder.item_details and run_item_details and builder.builders:
try:
builder.load_collection_items()
except Failed:

@ -1,6 +1,6 @@
PlexAPI==4.8.0
tmdbv3api==1.7.6
arrapi==1.2.7
arrapi==1.2.8
lxml==4.6.4
requests==2.26.0
ruamel.yaml==0.17.17
