Merge pull request #465 from meisnate12/develop

v1.13.1
meisnate12 3 years ago committed by GitHub
commit 0ea91d60e7

@ -22,6 +22,12 @@ The script works with most Metadata agents including the new Plex Movie Agent, N
3. After that you can start updating Metadata and building automatic Collections by creating a [Metadata File](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Metadata-File) for each Library you want to interact with.
4. Explore the [Wiki](https://github.com/meisnate12/Plex-Meta-Manager/wiki) to see all the different Collection Builders that can be used to create collections.
## IBRACORP Video Walkthrough
[IBRACORP](https://ibracorp.io/) made a video walkthrough for installing Plex Meta Manager on Unraid. While you might not be using Unraid, the video goes over many key aspects of Plex Meta Manager and can be a great place to start learning how to use the script.
[![Plex Meta Manager](https://img.youtube.com/vi/dF69MNoot3w/0.jpg)](https://www.youtube.com/watch?v=dF69MNoot3w "Plex Meta Manager")
## Support
* Before posting on GitHub about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/TsdpsFYqqm).

@ -1 +1 @@
1.12.2-develop1115
1.13.1

@ -37,14 +37,14 @@ webhooks: # Can be individually specified
collection_creation:
collection_addition:
collection_removal:
plex: # Can be individually specified per library as well
plex: # Can be individually specified per library as well; REQUIRED for the script to run
url: http://192.168.1.12:32400
token: ####################
timeout: 60
clean_bundles: false
empty_trash: false
optimize: false
tmdb:
tmdb: # REQUIRED for the script to run
apikey: ################################
language: en
tautulli: # Can be individually specified per library as well
@ -67,6 +67,8 @@ radarr: # Can be individually specified
quality_profile: HD-1080p
tag:
search: false
radarr_path:
plex_path:
sonarr: # Can be individually specified per library as well
url: http://192.168.1.12:8989
token: ################################
@ -80,6 +82,8 @@ sonarr: # Can be individually specified
tag:
search: false
cutoff_search: false
sonarr_path:
plex_path:
trakt:
client_id: ################################################################
client_secret: ################################################################
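
The new radarr_path/plex_path (and sonarr_path/plex_path) pairs cover setups where Plex and Radarr/Sonarr see the same files under different mounts. A minimal sketch, assuming hypothetical container paths, of the prefix swap the script performs before handing a folder to Radarr:

import os

plex_path = "/media/movies"                                   # hypothetical: path as the Plex container sees it
radarr_path = "/data/media/movies"                            # hypothetical: the same folder as Radarr sees it
plex_location = "/media/movies/Some Movie (2020)/Some Movie (2020).mkv"

folder = os.path.dirname(plex_location)                       # Plex-side folder of the item
print(f"{folder.replace(plex_path, radarr_path)}/")           # -> /data/media/movies/Some Movie (2020)/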

@ -1,6 +1,6 @@
import logging, os, re
from datetime import datetime, timedelta
from modules import anidb, anilist, icheckmovies, imdb, letterboxd, mal, plex, radarr, sonarr, stevenlu, tautulli, tmdb, trakt, tvdb, util
from modules import anidb, anilist, flixpatrol, icheckmovies, imdb, letterboxd, mal, plex, radarr, sonarr, stevenlu, tautulli, tmdb, trakt, tvdb, util
from modules.util import Failed, ImageData, NotScheduled
from PIL import Image
from plexapi.exceptions import BadRequest, NotFound
@ -63,8 +63,9 @@ filter_translation = {
"writer": "writers"
}
modifier_alias = {".greater": ".gt", ".less": ".lt"}
all_builders = anidb.builders + anilist.builders + icheckmovies.builders + imdb.builders + letterboxd.builders + \
mal.builders + plex.builders + stevenlu.builders + tautulli.builders + tmdb.builders + trakt.builders + tvdb.builders
all_builders = anidb.builders + anilist.builders + flixpatrol.builders + icheckmovies.builders + imdb.builders + \
letterboxd.builders + mal.builders + plex.builders + stevenlu.builders + tautulli.builders + \
tmdb.builders + trakt.builders + tvdb.builders
show_only_builders = ["tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_show", "tvdb_show_details", "collection_level"]
movie_only_builders = [
"letterboxd_list", "letterboxd_list_details", "icheckmovies_list", "icheckmovies_list_details", "stevenlu_popular",
@ -90,7 +91,8 @@ notification_details = ["collection_creation_webhooks", "collection_addition_web
details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details
collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \
poster_details + background_details + summary_details + string_details
item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_refresh"] + list(plex.item_advance_keys.keys())
item_bool_details = ["item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh"]
item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay"] + item_bool_details + list(plex.item_advance_keys.keys())
radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"]
sonarr_details = [
"sonarr_add", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series",
@ -569,6 +571,7 @@ class CollectionBuilder:
elif method_name in sonarr_details: self._sonarr(method_name, method_data)
elif method_name in anidb.builders: self._anidb(method_name, method_data)
elif method_name in anilist.builders: self._anilist(method_name, method_data)
elif method_name in flixpatrol.builders: self._flixpatrol(method_name, method_data)
elif method_name in icheckmovies.builders: self._icheckmovies(method_name, method_data)
elif method_name in letterboxd.builders: self._letterboxd(method_name, method_data)
elif method_name in imdb.builders: self._imdb(method_name, method_data)
@ -609,6 +612,9 @@ class CollectionBuilder:
self.sonarr_details["add"] = False
self.sonarr_details["add_existing"] = False
if self.radarr_details["add_existing"] or self.sonarr_details["add_existing"]:
self.item_details["add_existing"] = True
if self.collectionless:
self.details["collection_mode"] = "hide"
self.sync = True
@ -741,7 +747,7 @@ class CollectionBuilder:
raise Failed("Each Overlay can only be used once per Library")
self.library.overlays.append(method_data)
self.item_details[method_name] = method_data
elif method_name in ["item_assets", "revert_overlay", "item_refresh"]:
elif method_name in item_bool_details:
if util.parse(method_name, method_data, datatype="bool", default=False):
self.item_details[method_name] = True
elif method_name in plex.item_advance_keys:
@ -857,6 +863,38 @@ class CollectionBuilder:
new_dictionary["limit"] = util.parse("limit", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name)
self.builders.append((method_name, new_dictionary))
def _flixpatrol(self, method_name, method_data):
if method_name.startswith("flixpatrol_url"):
flixpatrol_lists = self.config.FlixPatrol.validate_flixpatrol_lists(method_data, self.language, self.library.is_movie)
for flixpatrol_list in flixpatrol_lists:
self.builders.append(("flixpatrol_url", flixpatrol_list))
elif method_name in flixpatrol.builders:
for dict_data, dict_methods in util.parse(method_name, method_data, datatype="dictlist"):
if method_name == "flixpatrol_demographics":
data = {
"generation": util.parse("generation", dict_data, methods=dict_methods, parent=method_name, options=flixpatrol.generations),
"gender": util.parse("gender", dict_data, methods=dict_methods, parent=method_name, default="all", options=flixpatrol.gender),
"location": util.parse("location", dict_data, methods=dict_methods, parent=method_name, default="world", options=flixpatrol.demo_locations),
"limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10)
}
elif method_name == "flixpatrol_popular":
data = {
"source": util.parse("source", dict_data, methods=dict_methods, parent=method_name, options=flixpatrol.popular),
"time_window": util.parse("time_window", dict_data, methods=dict_methods, parent=method_name, default="today"),
"limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10)
}
elif method_name == "flixpatrol_top":
data = {
"platform": util.parse("platform", dict_data, methods=dict_methods, parent=method_name, options=flixpatrol.platforms),
"location": util.parse("location", dict_data, methods=dict_methods, parent=method_name, default="world", options=flixpatrol.locations),
"time_window": util.parse("time_window", dict_data, methods=dict_methods, parent=method_name, default="today"),
"limit": util.parse("limit", dict_data, datatype="int", methods=dict_methods, parent=method_name, default=10)
}
else:
continue
if self.config.FlixPatrol.validate_flixpatrol_dict(method_name, data, self.language, self.library.is_movie):
self.builders.append((method_name, data))
def _icheckmovies(self, method_name, method_data):
if method_name.startswith("icheckmovies_list"):
icheckmovies_lists = self.config.ICheckMovies.validate_icheckmovies_lists(method_data, self.language)
@ -951,7 +989,8 @@ class CollectionBuilder:
"list_type": "popular" if method_name == "tautulli_popular" else "watched",
"list_days": util.parse("list_days", dict_data, datatype="int", methods=dict_methods, default=30, parent=method_name),
"list_size": util.parse("list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name),
"list_buffer": util.parse("list_buffer", dict_data, datatype="int", methods=dict_methods, default=20, parent=method_name)
"list_buffer": util.parse("list_buffer", dict_data, datatype="int", methods=dict_methods, default=20, parent=method_name),
"list_minimum": util.parse("list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name)
}))
def _tmdb(self, method_name, method_data):
@ -973,10 +1012,10 @@ class CollectionBuilder:
new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, parent=method_name, regex=regex)
elif discover_attr == "sort_by" and self.library.is_movie:
options = tmdb.discover_movie_sort if self.library.is_movie else tmdb.discover_tv_sort
new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, parent=method_name, options=options)
new_dictionary[discover_final] = util.parse(discover_attr, discover_data, parent=method_name, options=options)
elif discover_attr == "certification_country":
if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data:
new_dictionary[discover_attr] = discover_data
new_dictionary[discover_final] = discover_data
else:
raise Failed(f"Collection Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte")
elif discover_attr == "certification":
@ -984,15 +1023,31 @@ class CollectionBuilder:
new_dictionary[discover_final] = discover_data
else:
raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with certification_country")
elif discover_attr in ["include_adult", "include_null_first_air_dates", "screened_theatrically"]:
elif discover_attr == "watch_region":
if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data:
new_dictionary[discover_final] = discover_data
else:
raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types")
elif discover_attr == "with_watch_monetization_types":
if "watch_region" in dict_data:
new_dictionary[discover_final] = util.parse(discover_attr, discover_data, parent=method_name, options=tmdb.discover_monetization_types)
else:
raise Failed(f"Collection Error: {method_name} {discover_final} attribute: must be used with watch_region")
elif discover_attr in tmdb.discover_booleans:
new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="bool", parent=method_name)
elif discover_attr == "vote_average":
new_dictionary[discover_final] = util.parse(discover_final, discover_data, datatype="float", parent=method_name)
elif discover_attr == "with_status":
new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5)
elif discover_attr == "with_type":
new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6)
elif discover_final in tmdb.discover_dates:
new_dictionary[discover_final] = util.validate_date(discover_data, f"{method_name} {discover_final} attribute", return_as="%m/%d/%Y")
elif discover_attr in ["primary_release_year", "year", "first_air_date_year"]:
elif discover_attr in tmdb.discover_years:
new_dictionary[discover_attr] = util.parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1)
elif discover_attr in ["vote_count", "vote_average", "with_runtime"]:
elif discover_attr in tmdb.discover_ints:
new_dictionary[discover_final] = util.parse(discover_final, discover_data, datatype="int", parent=method_name)
elif discover_final in ["with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", "with_keywords", "without_keywords", "with_original_language", "timezone"]:
elif discover_final in tmdb.discover_strings:
new_dictionary[discover_final] = discover_data
elif discover_attr != "limit":
raise Failed(f"Collection Error: {method_name} {discover_final} attribute not supported")
@ -1114,6 +1169,8 @@ class CollectionBuilder:
ids = self.config.TVDb.get_tvdb_ids(method, value)
elif "imdb" in method:
ids = self.config.IMDb.get_imdb_ids(method, value, self.language)
elif "flixpatrol" in method:
ids = self.config.FlixPatrol.get_flixpatrol_ids(method, value, self.language, self.library.is_movie)
elif "icheckmovies" in method:
ids = self.config.ICheckMovies.get_icheckmovies_ids(method, value, self.language)
elif "letterboxd" in method:
@ -1202,7 +1259,7 @@ class CollectionBuilder:
rating_keys = [rating_keys]
total = len(rating_keys)
max_length = len(str(total))
if self.filters and self.details["show_filtered"] is True:
if (self.filters or self.tmdb_filters) and self.details["show_filtered"] is True:
logger.info("")
logger.info("Filtering Builder:")
for i, key in enumerate(rating_keys, 1):
@ -1342,7 +1399,6 @@ class CollectionBuilder:
results = ""
display_add = ""
for og_value, result in validation:
print(og_value, result)
built_arg = build_url_arg(quote(str(result)) if attr in string_filters else result, arg_s=og_value)
display_add += built_arg[1]
results += f"{conjunction if len(results) > 0 else ''}{built_arg[0]}"
@ -1812,8 +1868,8 @@ class CollectionBuilder:
remove_tags = self.item_details["item_label.remove"] if "item_label.remove" in self.item_details else None
sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None
tmdb_ids = []
tvdb_ids = []
tmdb_paths = []
tvdb_paths = []
for item in self.items:
if int(item.ratingKey) in rating_keys and not revert:
rating_keys.remove(int(item.ratingKey))
@ -1823,10 +1879,11 @@ class CollectionBuilder:
except Failed as e:
logger.error(e)
self.library.edit_tags("label", item, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
if item.ratingKey in self.library.movie_rating_key_map:
tmdb_ids.append(self.library.movie_rating_key_map[item.ratingKey])
if item.ratingKey in self.library.show_rating_key_map:
tvdb_ids.append(self.library.show_rating_key_map[item.ratingKey])
path = os.path.dirname(str(item.locations[0])) if self.library.is_movie else str(item.locations[0])
if self.library.Radarr and item.ratingKey in self.library.movie_rating_key_map:
tmdb_paths.append((self.library.movie_rating_key_map[item.ratingKey], f"{path.replace(self.library.Radarr.plex_path, self.library.Radarr.radarr_path)}/"))
if self.library.Sonarr and item.ratingKey in self.library.show_rating_key_map:
tvdb_paths.append((self.library.show_rating_key_map[item.ratingKey], f"{path.replace(self.library.Sonarr.plex_path, self.library.Sonarr.sonarr_path)}/"))
advance_edits = {}
for method_name, method_data in self.item_details.items():
if method_name in plex.item_advance_keys:
@ -1834,20 +1891,29 @@ class CollectionBuilder:
if getattr(item, key) != options[method_data]:
advance_edits[key] = options[method_data]
self.library.edit_item(item, item.title, self.collection_level.capitalize(), advance_edits, advanced=True)
# Locking should come before refreshing since refreshing can change metadata (i.e. if specified to both lock
# background/poster and also refreshing, assume that the current background/poster should be kept)
if "item_lock_background" in self.item_details:
item.lockArt()
if "item_lock_poster" in self.item_details:
item.lockPoster()
if "item_lock_title" in self.item_details:
item.edit(**{"title.locked": 1})
if "item_refresh" in self.item_details:
item.refresh()
if len(tmdb_ids) > 0:
if self.library.Radarr and tmdb_paths:
if "item_radarr_tag" in self.item_details:
self.library.Radarr.edit_tags(tmdb_ids, self.item_details["item_radarr_tag"], self.item_details["apply_tags"])
self.library.Radarr.edit_tags([t[0] if isinstance(t, tuple) else t for t in tmdb_paths], self.item_details["item_radarr_tag"], self.item_details["apply_tags"])
if self.radarr_details["add_existing"]:
self.library.Radarr.add_tmdb(tmdb_ids, **self.radarr_details)
self.library.Radarr.add_tmdb(tmdb_paths, **self.radarr_details)
if len(tvdb_ids) > 0:
if self.library.Sonarr and tvdb_paths:
if "item_sonarr_tag" in self.item_details:
self.library.Sonarr.edit_tags(tvdb_ids, self.item_details["item_sonarr_tag"], self.item_details["apply_tags"])
self.library.Sonarr.edit_tags([t[0] if isinstance(t, tuple) else t for t in tvdb_paths], self.item_details["item_sonarr_tag"], self.item_details["apply_tags"])
if self.sonarr_details["add_existing"]:
self.library.Sonarr.add_tvdb(tvdb_ids, **self.sonarr_details)
self.library.Sonarr.add_tvdb(tvdb_paths, **self.sonarr_details)
for rating_key in rating_keys:
try:
@ -2053,13 +2119,19 @@ class CollectionBuilder:
(self.details["collection_removal_webhooks"] and len(self.notification_removals) > 0)
):
self.obj.reload()
try:
self.library.Webhooks.collection_hooks(
self.details["collection_creation_webhooks"] + self.details["collection_addition_webhooks"] + self.details["collection_removal_webhooks"],
self.details["collection_creation_webhooks"] +
self.details["collection_addition_webhooks"] +
self.details["collection_removal_webhooks"],
self.obj,
created=self.created,
additions=self.notification_additions,
removals=self.notification_removals
)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def run_collections_again(self):
self.obj = self.library.get_collection(self.name)
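
The new item_lock_background/item_lock_poster/item_lock_title details run before item_refresh on purpose: a refresh can overwrite artwork and titles, so locking first keeps the current values. A hedged sketch of the same ordering with plexapi directly (server address, token, library, and title are placeholders):

from plexapi.server import PlexServer

plex = PlexServer("http://192.168.1.12:32400", "TOKEN")       # placeholder server and token
item = plex.library.section("Movies").get("Some Movie")       # placeholder library and title
item.lockPoster()                                             # what item_lock_poster applies
item.lockArt()                                                # what item_lock_background applies
item.edit(**{"title.locked": 1})                              # what item_lock_title applies
item.refresh()                                                # item_refresh now keeps the locked fields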

@ -60,6 +60,14 @@ class Cache:
tmdb_id TEXT,
expiration_date TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS flixpatrol_map (
key INTEGER PRIMARY KEY,
flixpatrol_id TEXT UNIQUE,
tmdb_id TEXT,
media_type TEXT,
expiration_date TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS omdb_data (
key INTEGER PRIMARY KEY,
@ -88,6 +96,18 @@ class Cache:
key INTEGER PRIMARY KEY,
library TEXT UNIQUE)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS radarr_adds (
key INTEGER PRIMARY KEY,
tmdb_id TEXT,
library TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS sonarr_adds (
key INTEGER PRIMARY KEY,
tvdb_id TEXT,
library TEXT)"""
)
cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='image_map'")
if cursor.fetchone()[0] > 0:
cursor.execute(f"SELECT DISTINCT library FROM image_map")
@ -161,6 +181,12 @@ class Cache:
def update_letterboxd_map(self, expired, letterboxd_id, tmdb_id):
self._update_map("letterboxd_map", "letterboxd_id", letterboxd_id, "tmdb_id", tmdb_id, expired)
def query_flixpatrol_map(self, flixpatrol_id, media_type):
return self._query_map("flixpatrol_map", flixpatrol_id, "flixpatrol_id", "tmdb_id", media_type=media_type)
def update_flixpatrol_map(self, expired, flixpatrol_id, tmdb_id, media_type):
self._update_map("flixpatrol_map", "flixpatrol_id", flixpatrol_id, "tmdb_id", tmdb_id, expired, media_type=media_type)
def _query_map(self, map_name, _id, from_id, to_id, media_type=None, return_type=False):
id_to_return = None
expired = None
@ -324,3 +350,31 @@ class Cache:
with closing(connection.cursor()) as cursor:
cursor.execute(f"INSERT OR IGNORE INTO {table_name}(rating_key) VALUES(?)", (rating_key,))
cursor.execute(f"UPDATE {table_name} SET location = ?, compare = ?, overlay = ? WHERE rating_key = ?", (location, compare, overlay, rating_key))
def query_radarr_adds(self, tmdb_id, library):
return self.query_arr_adds(tmdb_id, library, "radarr", "tmdb_id")
def query_sonarr_adds(self, tvdb_id, library):
return self.query_arr_adds(tvdb_id, library, "sonarr", "tvdb_id")
def query_arr_adds(self, t_id, library, arr, id_type):
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute(f"SELECT * FROM {arr}_adds WHERE {id_type} = ? AND library = ?", (t_id, library))
row = cursor.fetchone()
if row and row[id_type]:
return int(row[id_type])
return None
def update_radarr_adds(self, tmdb_id, library):
return self.update_arr_adds(tmdb_id, library, "radarr", "tmdb_id")
def update_sonarr_adds(self, tvdb_id, library):
return self.update_arr_adds(tvdb_id, library, "sonarr", "tvdb_id")
def update_arr_adds(self, t_id, library, arr, id_type):
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute(f"INSERT OR IGNORE INTO {arr}_adds({id_type}, library) VALUES(?, ?)", (t_id, library))

@ -6,6 +6,7 @@ from modules.anidb import AniDB
from modules.anilist import AniList
from modules.cache import Cache
from modules.convert import Convert
from modules.flixpatrol import FlixPatrol
from modules.icheckmovies import ICheckMovies
from modules.imdb import IMDb
from modules.letterboxd import Letterboxd
@ -124,7 +125,8 @@ class Config:
else: endline = ""
yaml.round_trip_dump(loaded_config, open(self.config_path, "w"), indent=None, block_seq_indent=2)
elif data[attribute] is None:
if default_is_none is True: return None
if default_is_none and var_type == "list": return []
elif default_is_none: return None
else: message = f"{text} is blank"
elif var_type == "url":
if data[attribute].endswith(("\\", "/")): return data[attribute][:-1]
@ -180,7 +182,7 @@ class Config:
self.general = {
"cache": check_for_attribute(self.data, "cache", parent="settings", var_type="bool", default=True),
"cache_expiration": check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60),
"asset_directory": check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")]),
"asset_directory": check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")], default_is_none=True),
"asset_folders": check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True),
"assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False, save=False, do_print=False),
"sync_mode": check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes),
@ -228,7 +230,11 @@ class Config:
logger.warning("notifiarr attribute not found")
self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory)
try:
self.Webhooks.start_time_hooks(self.run_start_time)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
self.errors = []
@ -320,8 +326,9 @@ class Config:
self.IMDb = IMDb(self)
self.Convert = Convert(self)
self.AniList = AniList(self)
self.Letterboxd = Letterboxd(self)
self.FlixPatrol = FlixPatrol(self)
self.ICheckMovies = ICheckMovies(self)
self.Letterboxd = Letterboxd(self)
self.StevenLu = StevenLu(self)
util.separator()
@ -346,7 +353,9 @@ class Config:
"availability": check_for_attribute(self.data, "availability", parent="radarr", test_list=radarr.availability_descriptions, default="announced"),
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="radarr", default_is_none=True),
"tag": check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True),
"search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
"search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False),
"radarr_path": check_for_attribute(self.data, "radarr_path", parent="radarr", default_is_none=True),
"plex_path": check_for_attribute(self.data, "plex_path", parent="radarr", default_is_none=True)
}
self.general["sonarr"] = {
"url": check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True),
@ -361,7 +370,9 @@ class Config:
"season_folder": check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True),
"tag": check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True),
"search": check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False),
"cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False)
"cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False),
"sonarr_path": check_for_attribute(self.data, "sonarr_path", parent="sonarr", default_is_none=True),
"plex_path": check_for_attribute(self.data, "plex_path", parent="sonarr", default_is_none=True)
}
self.general["tautulli"] = {
"url": check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True),
@ -495,6 +506,7 @@ class Config:
self.errors.append(e)
util.print_stacktrace()
util.print_multiline(e, error=True)
logger.info("")
logger.info(f"{display_name} Library Connection Failed")
continue
@ -505,7 +517,7 @@ class Config:
logger.info(f"Connecting to {display_name} library's Radarr...")
logger.info("")
try:
library.Radarr = Radarr(self, {
library.Radarr = Radarr(self, library, {
"url": check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False),
"add": check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False),
@ -513,9 +525,11 @@ class Config:
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False),
"monitor": check_for_attribute(lib, "monitor", parent="radarr", var_type="bool", default=self.general["radarr"]["monitor"], save=False),
"availability": check_for_attribute(lib, "availability", parent="radarr", test_list=radarr.availability_descriptions, default=self.general["radarr"]["availability"], save=False),
"quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr",default=self.general["radarr"]["quality_profile"], req_default=True, save=False),
"quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr", default=self.general["radarr"]["quality_profile"], req_default=True, save=False),
"tag": check_for_attribute(lib, "tag", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False),
"search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
"search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False),
"radarr_path": check_for_attribute(lib, "radarr_path", parent="radarr", default=self.general["radarr"]["radarr_path"], default_is_none=True, save=False),
"plex_path": check_for_attribute(lib, "plex_path", parent="radarr", default=self.general["radarr"]["plex_path"], default_is_none=True, save=False)
})
except Failed as e:
self.errors.append(e)
@ -531,7 +545,7 @@ class Config:
logger.info(f"Connecting to {display_name} library's Sonarr...")
logger.info("")
try:
library.Sonarr = Sonarr(self, {
library.Sonarr = Sonarr(self, library, {
"url": check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False),
"add": check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False),
@ -544,7 +558,9 @@ class Config:
"season_folder": check_for_attribute(lib, "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False),
"tag": check_for_attribute(lib, "tag", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False),
"search": check_for_attribute(lib, "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False),
"cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False)
"cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False),
"sonarr_path": check_for_attribute(lib, "sonarr_path", parent="sonarr", default=self.general["sonarr"]["sonarr_path"], default_is_none=True, save=False),
"plex_path": check_for_attribute(lib, "plex_path", parent="sonarr", default=self.general["sonarr"]["plex_path"], default_is_none=True, save=False)
})
except Failed as e:
self.errors.append(e)
@ -560,7 +576,7 @@ class Config:
logger.info(f"Connecting to {display_name} library's Tautulli...")
logger.info("")
try:
library.Tautulli = Tautulli(self, {
library.Tautulli = Tautulli(self, library, {
"url": check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False),
"apikey": check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
})
@ -593,7 +609,11 @@ class Config:
def notify(self, text, library=None, collection=None, critical=True):
for error in util.get_list(text, split=False):
try:
self.Webhooks.error_hooks(error, library=library, collection=collection, critical=critical)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
def get_html(self, url, headers=None, params=None):
return html.fromstring(self.get(url, headers=headers, params=params).content)

@ -198,7 +198,7 @@ class Convert:
check_id = guid.netloc
if self.config.Cache:
cache_id, imdb_check, media_type, expired = self.config.Cache.query_guid_map(item.guid)
if cache_id and not expired:
if (cache_id or imdb_check) and not expired:
media_id_type = "movie" if "movie" in media_type else "show"
if item_type == "hama" and check_id.startswith("anidb"):
anidb_id = int(re.search("-(.*)", check_id).group(1))
@ -291,13 +291,13 @@ class Convert:
logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}"))
self.config.Cache.update_guid_map(item.guid, cache_ids, imdb_in, expired, guid_type)
if tmdb_id and library.is_movie:
if (tmdb_id or imdb_id) and library.is_movie:
update_cache(tmdb_id, "TMDb", imdb_id, "movie")
return "movie", tmdb_id, imdb_id
elif tvdb_id and library.is_show:
elif (tvdb_id or imdb_id) and library.is_show:
update_cache(tvdb_id, "TVDb", imdb_id, "show")
return "show", tvdb_id, imdb_id
elif anidb_id and tmdb_id and library.is_show:
elif anidb_id and (tmdb_id or imdb_id) and library.is_show:
update_cache(tmdb_id, "TMDb", imdb_id, "show_movie")
return "movie", tmdb_id, imdb_id
else:

@ -0,0 +1,160 @@
import logging
from datetime import datetime, timedelta
from modules import util
from modules.util import Failed
logger = logging.getLogger("Plex Meta Manager")
builders = ["flixpatrol_url", "flixpatrol_demographics", "flixpatrol_popular", "flixpatrol_top"]
generations = ["all", "boomers", "x", "y", "z"]
generations_translation = {"all": "all-generations", "boomers": "baby-boomers", "x": "generation-x", "y": "generation-y", "z": "generation-z"}
generations_pretty = {"all": "All generations", "boomers": "Baby Boomers", "x": "Generation X", "y": "Generation Y (Millennials)", "z": "Generation Z"}
gender = ["all", "men", "women"]
demo_locations = ["world", "brazil", "canada", "france", "germany", "india", "mexico", "united_kingdom", "united_states"]
locations = [
"world", "albania", "argentina", "armenia", "australia", "austria", "azerbaijan", "bahamas", "bahrain",
"bangladesh", "belarus", "belgium", "belize", "benin", "bolivia", "bosnia_and_herzegovina", "botswana", "brazil",
"bulgaria", "burkina_faso", "cambodia", "canada", "chile", "colombia", "costa_rica", "croatia", "cyprus",
"czech_republic", "denmark", "dominican_republic", "ecuador", "egypt", "estonia", "finland", "france", "gabon",
"germany", "ghana", "greece", "guatemala", "guinea_bissau", "haiti", "honduras", "hong_kong", "hungary", "iceland",
"india", "indonesia", "ireland", "israel", "italy", "ivory_coast", "jamaica", "japan", "jordan", "kazakhstan",
"kenya", "kuwait", "kyrgyzstan", "laos", "latvia", "lebanon", "lithuania", "luxembourg", "malaysia", "maldives",
"mali", "malta", "mexico", "moldova", "mongolia", "montenegro", "morocco", "mozambique", "namibia", "netherlands",
"new_zealand", "nicaragua", "niger", "nigeria", "north_macedonia", "norway", "oman", "pakistan", "panama",
"papua_new_guinea", "paraguay", "peru", "philippines", "poland", "portugal", "qatar", "romania", "russia",
"rwanda", "salvador", "saudi_arabia", "senegal", "serbia", "singapore", "slovakia", "slovenia", "south_africa",
"south_korea", "spain", "sri_lanka", "sweden", "switzerland", "taiwan", "tajikistan", "tanzania", "thailand",
"togo", "trinidad_and_tobago", "turkey", "turkmenistan", "uganda", "ukraine", "united_arab_emirates",
"united_kingdom", "united_states", "uruguay", "uzbekistan", "venezuela", "vietnam", "zambia", "zimbabwe"
]
popular = ["movie_db", "facebook", "google", "twitter", "twitter_trends", "instagram", "instagram_trends", "youtube", "imdb", "letterboxd", "rotten_tomatoes", "tmdb", "trakt"]
platforms = ["netflix", "hbo", "disney", "amazon", "itunes", "google", "paramount_plus", "hulu", "vudu", "imdb", "amazon_prime", "star_plus"]
base_url = "https://flixpatrol.com"
urls = {
"top10": f"{base_url}/top10/",
"popular_movies": f"{base_url}/popular/movies/",
"popular_shows": f"{base_url}/popular/tv-shows/",
"demographics": f"{base_url}/demographics/"
}
class FlixPatrol:
def __init__(self, config):
self.config = config
def _request(self, url, language, xpath):
if self.config.trace_mode:
logger.debug(f"URL: {url}")
return self.config.get_html(url, headers=util.header(language)).xpath(xpath)
def _tmdb(self, flixpatrol_url, language):
ids = self._request(flixpatrol_url, language, "//script[@type='application/ld+json']/text()")
if len(ids) > 0 and ids[0]:
if "https://www.themoviedb.org" in ids[0]:
return util.regex_first_int(ids[0].split("https://www.themoviedb.org")[1], "TMDB Movie ID")
raise Failed(f"FlixPatrol Error: TMDb Movie ID not found in {ids[0]}")
raise Failed(f"FlixPatrol Error: TMDb Movie ID not found at {flixpatrol_url}")
def _parse_list(self, list_url, language, is_movie):
flixpatrol_urls = []
if list_url.startswith(urls["top10"]):
platform = list_url[len(urls["top10"]):].split("/")[0]
flixpatrol_urls = self._request(
list_url, language,
f"//div[@id='{platform}-{'1' if is_movie else '2'}']//a[@class='hover:underline']/@href"
)
logger.info(flixpatrol_urls)
if not flixpatrol_urls:
flixpatrol_urls = self._request(
list_url, language,
f"//h3[text() = '{'TOP 10 Movies' if is_movie else 'TOP 10 TV Shows'}']/following-sibling::div//a[@class='hover:underline']/@href"
)
logger.info(flixpatrol_urls)
elif list_url.startswith(tuple([v for k, v in urls.items()])):
flixpatrol_urls = self._request(
list_url, language,
f"//a[@class='flex group' and .//span[.='{'Movie' if is_movie else 'TV Show'}']]/@href"
)
return flixpatrol_urls
def validate_flixpatrol_lists(self, flixpatrol_lists, language, is_movie):
valid_lists = []
for flixpatrol_list in util.get_list(flixpatrol_lists, split=False):
list_url = flixpatrol_list.strip()
if not list_url.startswith(tuple([v for k, v in urls.items()])):
fails = "\n".join([f"{v} (For {k.replace('_', ' ').title()})" for k, v in urls.items()])
raise Failed(f"FlixPatrol Error: {list_url} must begin with either:{fails}")
elif len(self._parse_list(list_url, language, is_movie)) > 0:
valid_lists.append(list_url)
else:
raise Failed(f"FlixPatrol Error: {list_url} failed to parse")
return valid_lists
def validate_flixpatrol_dict(self, method, data, language, is_movie):
return len(self.validate_flixpatrol_lists(self.get_url(method, data, is_movie), language, is_movie)) > 0
def get_url(self, method, data, is_movie):
if method == "flixpatrol_demographics":
return f"{urls['demographics']}" \
f"{generations_translation[data['generation']]}/" \
f"{'all-genders' if data['gender'] == 'all' else data['gender']}/" \
f"{data['location'].replace('_', '-')}/"
elif method == "flixpatrol_popular":
return f"{urls['popular_movies'] if is_movie else urls['popular_shows']}" \
f"{data['source'].replace('_', '-')}/" \
f"{util.time_window(data['time_window'])}/"
elif method == "flixpatrol_top":
return f"{urls['top10']}" \
f"{data['platform'].replace('_', '-')}/" \
f"{data['location'].replace('_', '-')}/" \
f"{util.time_window(data['time_window'])}/full/"
elif method == "flixpatrol_url":
return data
else:
raise Failed(f"FlixPatrol Error: Method {method} not supported")
def get_flixpatrol_ids(self, method, data, language, is_movie):
if method == "flixpatrol_demographics":
logger.info("Processing FlixPatrol Demographics:")
logger.info(f"\tGeneration: {generations_pretty[data['generation']]}")
logger.info(f"\tGender: {'All genders' if data['gender'] == 'all' else data['gender'].capitalize()}")
logger.info(f"\tLocation: {data['location'].replace('_', ' ').title()}")
logger.info(f"\tLimit: {data['limit']}")
elif method == "flixpatrol_popular":
logger.info("Processing FlixPatrol Popular:")
logger.info(f"\tSource: {data['source'].replace('_', ' ').title()}")
logger.info(f"\tTime Window: {data['time_window'].replace('_', ' ').title()}")
logger.info(f"\tLimit: {data['limit']}")
elif method == "flixpatrol_top":
logger.info("Processing FlixPatrol Top:")
logger.info(f"\tPlatform: {data['platform'].replace('_', ' ').title()}")
logger.info(f"\tLocation: {data['location'].replace('_', ' ').title()}")
logger.info(f"\tTime Window: {data['time_window'].replace('_', ' ').title()}")
logger.info(f"\tLimit: {data['limit']}")
elif method == "flixpatrol_url":
logger.info(f"Processing FlixPatrol URL: {data}")
url = self.get_url(method, data, is_movie)
items = self._parse_list(url, language, is_movie)
media_type = "movie" if is_movie else "show"
total_items = len(items)
if total_items > 0:
ids = []
for i, item in enumerate(items, 1):
util.print_return(f"Finding TMDb ID {i}/{total_items}")
tmdb_id = None
expired = None
if self.config.Cache:
tmdb_id, expired = self.config.Cache.query_flixpatrol_map(item, media_type)
if not tmdb_id or expired is not False:
try:
tmdb_id = self._tmdb(f"{base_url}{item}", language)
except Failed as e:
logger.error(e)
continue
if self.config.Cache:
self.config.Cache.update_flixpatrol_map(expired, item, tmdb_id, media_type)
ids.append((tmdb_id, "tmdb" if is_movie else "tmdb_show"))
logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs"))
return ids
else:
raise Failed(f"FlixPatrol Error: No List Items found in {data}")

@ -102,6 +102,7 @@ class Library(ABC):
logger.info(f"Using Asset Directory: {ad}")
if output:
logger.info("")
logger.info(output)
def upload_images(self, item, poster=None, background=None, overlay=None):
@ -182,9 +183,6 @@ class Library(ABC):
self.config.Cache.update_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds", item.art, background.compare)
def notify(self, text, collection=None, critical=True):
for error in util.get_list(text, split=False):
self.Webhooks.error_hooks(error, library=self, collection=collection, critical=critical)
self.config.notify(text, library=self, collection=collection, critical=critical)
@abstractmethod

@ -28,4 +28,3 @@ class Notifiarr:
logger.debug(url.replace(self.apikey, "APIKEY"))
params = {"event": "pmm" if self.test else "collections"}
return url, params

@ -40,11 +40,11 @@ class OMDb:
self.config = config
self.apikey = params["apikey"]
self.limit = False
self.get_omdb("tt0080684")
self.get_omdb("tt0080684", ignore_cache=True)
def get_omdb(self, imdb_id):
def get_omdb(self, imdb_id, ignore_cache=False):
expired = None
if self.config.Cache:
if self.config.Cache and not ignore_cache:
omdb_dict, expired = self.config.Cache.query_omdb(imdb_id)
if omdb_dict and expired is False:
return OMDbObj(imdb_id, omdb_dict)
@ -53,7 +53,7 @@ class OMDb:
response = self.config.get(base_url, params={"i": imdb_id, "apikey": self.apikey})
if response.status_code < 400:
omdb = OMDbObj(imdb_id, response.json())
if self.config.Cache:
if self.config.Cache and not ignore_cache:
self.config.Cache.update_omdb(expired, omdb)
return omdb
else:

@ -230,6 +230,7 @@ class Plex(Library):
self.url = params["plex"]["url"]
self.token = params["plex"]["token"]
self.timeout = params["plex"]["timeout"]
logger.info("")
try:
self.PlexServer = PlexServer(baseurl=self.url, token=self.token, session=self.config.session, timeout=self.timeout)
except Unauthorized:
@ -239,9 +240,15 @@ class Plex(Library):
except (requests.exceptions.ConnectionError, ParseError):
util.print_stacktrace()
raise Failed("Plex Error: Plex url is invalid")
self.Plex = next((s for s in self.PlexServer.library.sections() if s.title == params["name"]), None)
self.Plex = None
library_names = []
for s in self.PlexServer.library.sections():
library_names.append(s.title)
if s.title == params["name"]:
self.Plex = s
break
if not self.Plex:
raise Failed(f"Plex Error: Plex Library {params['name']} not found")
raise Failed(f"Plex Error: Plex Library {params['name']} not found. Options: {library_names}")
if self.Plex.type in ["movie", "show"]:
self.type = self.Plex.type.capitalize()
else:

@ -11,8 +11,9 @@ apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"}
availability_descriptions = {"announced": "For Announced", "cinemas": "For In Cinemas", "released": "For Released", "db": "For PreDB"}
class Radarr:
def __init__(self, config, params):
def __init__(self, config, library, params):
self.config = config
self.library = library
self.url = params["url"]
self.token = params["token"]
try:
@ -28,20 +29,56 @@ class Radarr:
self.quality_profile = params["quality_profile"]
self.tag = params["tag"]
self.search = params["search"]
self.radarr_path = params["radarr_path"] if params["radarr_path"] and params["plex_path"] else ""
self.plex_path = params["plex_path"] if params["radarr_path"] and params["plex_path"] else ""
def add_tmdb(self, tmdb_ids, **options):
logger.info("")
util.separator("Adding to Radarr", space=False, border=False)
logger.debug("")
logger.debug(f"TMDb IDs: {tmdb_ids}")
_ids = []
_paths = []
for tmdb_id in tmdb_ids:
if isinstance(tmdb_id, tuple):
_paths.append(tmdb_id)
else:
_ids.append(tmdb_id)
logger.debug(f"Radarr Adds: {_ids if _ids else ''}")
for tmdb_id in _paths:
logger.debug(tmdb_id)
folder = options["folder"] if "folder" in options else self.root_folder_path
monitor = options["monitor"] if "monitor" in options else self.monitor
availability = availability_translation[options["availability"] if "availability" in options else self.availability]
quality_profile = options["quality"] if "quality" in options else self.quality_profile
tags = options["tag"] if "tag" in options else self.tag
search = options["search"] if "search" in options else self.search
added = []
exists = []
invalid = []
movies = []
for i, item in enumerate(tmdb_ids, 1):
path = item[1] if isinstance(item, tuple) else None
tmdb_id = item[0] if isinstance(item, tuple) else item
util.print_return(f"Loading TMDb ID {i}/{len(tmdb_ids)} ({tmdb_id})")
if self.config.Cache:
_id = self.config.Cache.query_radarr_adds(tmdb_id, self.library.original_mapping_name)
if _id:
exists.append(item)
continue
try:
movie = self.api.get_movie(tmdb_id=tmdb_id)
movies.append((movie, path) if path else movie)
except ArrException:
invalid.append(item)
if len(movies) == 100 or len(tmdb_ids) == i:
try:
added, exists, invalid = self.api.add_multiple_movies(tmdb_ids, folder, quality_profile, monitor, search, availability, tags, per_request=100)
_a, _e, _i = self.api.add_multiple_movies(movies, folder, quality_profile, monitor, search,
availability, tags, per_request=100)
added.extend(_a)
exists.extend(_e)
invalid.extend(_i)
movies = []
except Invalid as e:
raise Failed(f"Radarr Error: {e}")
@ -49,12 +86,16 @@ class Radarr:
logger.info("")
for movie in added:
logger.info(f"Added to Radarr | {movie.tmdbId:<6} | {movie.title}")
if self.config.Cache:
self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name)
logger.info(f"{len(added)} Movie{'s' if len(added) > 1 else ''} added to Radarr")
if len(exists) > 0:
logger.info("")
for movie in exists:
logger.info(f"Already in Radarr | {movie.tmdbId:<6} | {movie.title}")
if self.config.Cache:
self.config.Cache.update_radarr_adds(movie.tmdbId, self.library.original_mapping_name)
logger.info(f"{len(exists)} Movie{'s' if len(exists) > 1 else ''} already existing in Radarr")
if len(invalid) > 0:
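
add_tmdb now accepts a mix of bare TMDb IDs and (tmdb_id, path) tuples; the path, produced by the plex_path/radarr_path mapping, is attached to the movie so it lands in its existing folder instead of the root folder. A hedged sketch of what a caller passes (IDs, paths, and the library object are placeholders):

tmdb_ids = [
    12345,                                                    # bare ID -> uses the configured root_folder_path
    (67890, "/data/media/movies/Some Movie (2010)/"),         # (ID, path) -> keep the movie's existing folder
]
library.Radarr.add_tmdb(tmdb_ids, folder="/data/media/movies")  # assumption: an initialized library with Radarr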

@ -29,8 +29,9 @@ monitor_descriptions = {
apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"}
class Sonarr:
def __init__(self, config, params):
def __init__(self, config, library, params):
self.config = config
self.library = library
self.url = params["url"]
self.token = params["token"]
try:
@ -50,12 +51,23 @@ class Sonarr:
self.tag = params["tag"]
self.search = params["search"]
self.cutoff_search = params["cutoff_search"]
self.sonarr_path = params["sonarr_path"] if params["sonarr_path"] and params["plex_path"] else ""
self.plex_path = params["plex_path"] if params["sonarr_path"] and params["plex_path"] else ""
def add_tvdb(self, tvdb_ids, **options):
logger.info("")
util.separator("Adding to Sonarr", space=False, border=False)
logger.debug("")
logger.debug(f"TVDb IDs: {tvdb_ids}")
_ids = []
_paths = []
for tvdb_id in tvdb_ids:
if isinstance(tvdb_id, tuple):
_paths.append(tvdb_id)
else:
_ids.append(tvdb_id)
logger.debug(f"Radarr Adds: {_ids if _ids else ''}")
for tvdb_id in _paths:
logger.debug(tvdb_id)
folder = options["folder"] if "folder" in options else self.root_folder_path
monitor = monitor_translation[options["monitor"] if "monitor" in options else self.monitor]
quality_profile = options["quality"] if "quality" in options else self.quality_profile
@ -66,8 +78,33 @@ class Sonarr:
tags = options["tag"] if "tag" in options else self.tag
search = options["search"] if "search" in options else self.search
cutoff_search = options["cutoff_search"] if "cutoff_search" in options else self.cutoff_search
added = []
exists = []
invalid = []
shows = []
for i, item in enumerate(tvdb_ids, 1):
path = item[1] if isinstance(item, tuple) else None
tvdb_id = item[0] if isinstance(item, tuple) else item
util.print_return(f"Loading TVDb ID {i}/{len(tvdb_ids)} ({tvdb_id})")
if self.config.Cache:
_id = self.config.Cache.query_sonarr_adds(tvdb_id, self.library.original_mapping_name)
if _id:
exists.append(item)
continue
try:
show = self.api.get_series(tvdb_id=tvdb_id)
shows.append((show, path) if path else show)
except ArrException:
invalid.append(item)
if len(shows) == 100 or len(tvdb_ids) == i:
try:
added, exists, invalid = self.api.add_multiple_series(tvdb_ids, folder, quality_profile, language_profile, monitor, season, search, cutoff_search, series, tags, per_request=100)
_a, _e, _i = self.api.add_multiple_series(shows, folder, quality_profile, language_profile, monitor,
season, search, cutoff_search, series, tags, per_request=100)
added.extend(_a)
exists.extend(_e)
invalid.extend(_i)
shows = []
except Invalid as e:
raise Failed(f"Sonarr Error: {e}")
@ -75,12 +112,16 @@ class Sonarr:
logger.info("")
for series in added:
logger.info(f"Added to Sonarr | {series.tvdbId:<6} | {series.title}")
if self.config.Cache:
self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name)
logger.info(f"{len(added)} Series added to Sonarr")
if len(exists) > 0:
logger.info("")
for series in exists:
logger.info(f"Already in Sonarr | {series.tvdbId:<6} | {series.title}")
if self.config.Cache:
self.config.Cache.update_sonarr_adds(series.tvdbId, self.library.original_mapping_name)
logger.info(f"{len(exists)} Series already existing in Sonarr")
if len(invalid) > 0:

@ -11,8 +11,9 @@ logger = logging.getLogger("Plex Meta Manager")
builders = ["tautulli_popular", "tautulli_watched"]
class Tautulli:
def __init__(self, config, params):
def __init__(self, config, library, params):
self.config = config
self.library = library
self.url = params["url"]
self.apikey = params["apikey"]
try:
@ -28,6 +29,7 @@ class Tautulli:
logger.info(f"Processing Tautulli Most {params['list_type'].capitalize()}: {params['list_size']} {'Movies' if library.is_movie else 'Shows'}")
response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={params['list_days']}&stats_count={query_size}")
stat_id = f"{'popular' if params['list_type'] == 'popular' else 'top'}_{'movies' if library.is_movie else 'tv'}"
stat_type = "users_watched" if params['list_type'] == 'popular' else "total_plays"
items = None
for entry in response["response"]["data"]:
@ -39,9 +41,10 @@ class Tautulli:
section_id = self._section_id(library.name)
rating_keys = []
count = 0
for item in items:
if item["section_id"] == section_id and count < int(params['list_size']):
if item["section_id"] == section_id and len(rating_keys) < int(params['list_size']):
if int(item[stat_type]) < params['list_minimum']:
continue
try:
plex_item = library.fetchItem(int(item["rating_key"]))
if not isinstance(plex_item, (Movie, Show)):
@ -53,8 +56,6 @@ class Tautulli:
rating_keys.append(new_item[0].ratingKey)
else:
logger.error(f"Plex Error: Item {item} not found")
continue
count += 1
logger.debug("")
logger.debug(f"{len(rating_keys)} Keys Found: {rating_keys}")
return rating_keys
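
The new list_minimum key filters out Tautulli entries that technically made the home-stats list but were barely watched. A sketch of the params dict the tautulli_watched builder now produces (the list_minimum value is a placeholder):

params = {
    "list_type": "watched",
    "list_days": 30,
    "list_size": 10,
    "list_buffer": 20,
    "list_minimum": 5,    # placeholder: skip titles with fewer than 5 total plays
}
# Entries whose total_plays (or users_watched for tautulli_popular) falls below
# list_minimum are skipped before their rating keys are collected.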

@ -27,18 +27,26 @@ discover_all = [
"year", "primary_release_year", "primary_release_date.gte", "primary_release_date.lte",
"release_date.gte", "release_date.lte", "vote_count.gte", "vote_count.lte",
"vote_average.gte", "vote_average.lte", "with_runtime.gte", "with_runtime.lte",
"with_companies", "with_genres", "without_genres", "with_keywords", "without_keywords", "include_adult",
"timezone", "screened_theatrically", "include_null_first_air_dates", "limit",
"air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte", "first_air_date_year", "with_networks"
"with_companies", "without_companies ", "with_genres", "without_genres", "with_keywords", "without_keywords",
"with_watch_providers", "without_watch_providers", "watch_region", "with_watch_monetization_types", "with_status",
"include_adult", "include_video", "timezone", "screened_theatrically", "include_null_first_air_dates", "limit", "with_type",
"air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte", "first_air_date_year", "with_networks", "with_release_type"
]
discover_movie_only = [
"region", "with_cast", "with_crew", "with_people", "certification_country", "certification",
"year", "primary_release_year", "primary_release_date", "release_date", "include_adult"
"region", "with_cast", "with_crew", "with_people", "certification_country", "certification", "include_video",
"year", "primary_release_year", "primary_release_date", "release_date", "include_adult", "with_release_type"
]
discover_tv_only = [
"timezone", "screened_theatrically", "include_null_first_air_dates",
"air_date", "first_air_date", "first_air_date_year", "with_networks",
"timezone", "screened_theatrically", "include_null_first_air_dates", "air_date",
"first_air_date", "first_air_date_year", "with_networks", "with_status", "with_type",
]
discover_strings = [
"with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", "with_release_type",
"with_keywords", "without_keywords", "with_original_language", "timezone", "with_watch_providers", "without_watch_providers"
]
discover_ints = ["vote_count", "with_runtime"]
discover_years = ["primary_release_year", "year", "first_air_date_year"]
discover_booleans = ["include_adult", "include_video", "include_null_first_air_dates", "screened_theatrically"]
discover_dates = [
"primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte",
"air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte"
@ -49,6 +57,7 @@ discover_movie_sort = [
"vote_average.asc", "vote_average.desc", "vote_count.asc", "vote_count.desc"
]
discover_tv_sort = ["vote_average.desc", "vote_average.asc", "first_air_date.desc", "first_air_date.asc", "popularity.desc", "popularity.asc"]
discover_monetization_types = ["flatrate", "free", "ads", "rent", "buy"]
class TMDb:
def __init__(self, config, params):
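
The builder.py changes above enforce TMDb's own pairing rules for the new attributes: watch_region only makes sense alongside a provider or monetization filter, and with_watch_monetization_types requires watch_region. A hedged sketch of a tmdb_discover attribute dict that passes that validation (the provider ID is a placeholder; real IDs come from TMDb's watch-provider list):

discover = {
    "watch_region": "US",                                     # valid: with_watch_providers is also set
    "with_watch_providers": "8",                              # placeholder provider ID
    "with_watch_monetization_types": "flatrate",              # valid: watch_region is set
    "sort_by": "popularity.desc",
    "limit": 100,
}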

@ -88,7 +88,9 @@ def get_list(data, lower=False, split=True, int_list=False):
elif isinstance(data, dict): return [data]
elif split is False: return [str(data)]
elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
elif int_list is True: return [int(d.strip()) for d in str(data).split(",")]
elif int_list is True:
try: return [int(d.strip()) for d in str(data).split(",")]
except ValueError: return []
else: return [d.strip() for d in str(data).split(",")]
def get_int_list(data, id_type):
@ -251,6 +253,27 @@ def is_locked(filepath):
file_object.close()
return locked
def time_window(time_window):
today = datetime.now()
if time_window == "today":
return f"{today:%Y-%m-%d}"
elif time_window == "yesterday":
return f"{today - timedelta(days=1):%Y-%m-%d}"
elif time_window == "this_week":
return f"{today:%Y-0%V}"
elif time_window == "last_week":
return f"{today - timedelta(weeks=1):%Y-0%V}"
elif time_window == "this_month":
return f"{today:%Y-%m}"
elif time_window == "last_month":
return f"{today.year}-{today.month - 1 or 12}"
elif time_window == "this_year":
return f"{today.year}"
elif time_window == "last_year":
return f"{today.year - 1}"
else:
return time_window
def glob_filter(filter_in):
filter_in = filter_in.translate({ord("["): "[[]", ord("]"): "[]]"}) if "[" in filter_in else filter_in
return glob.glob(filter_in)
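
time_window turns the friendly keywords used by the FlixPatrol builders into the date fragments FlixPatrol URLs expect. A sketch of what the same format strings produce for a hypothetical run date:

from datetime import datetime, timedelta

today = datetime(2021, 11, 21)                                # hypothetical run date
print(f"{today:%Y-%m-%d}")                                    # today      -> 2021-11-21
print(f"{today - timedelta(days=1):%Y-%m-%d}")                # yesterday  -> 2021-11-20
print(f"{today:%Y-0%V}")                                      # this_week  -> 2021-046 (ISO week with the leading 0)
print(f"{today:%Y-%m}")                                       # this_month -> 2021-11
print(f"{today.year}-{today.month - 1 or 12}")                # last_month -> 2021-10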

@ -1,4 +1,5 @@
import logging
from json import JSONDecodeError
from modules.util import Failed
@ -22,14 +23,23 @@ class Webhooks:
logger.debug(f"Webhook: {webhook}")
if webhook == "notifiarr":
url, params = self.notifiarr.get_url("notification/plex/")
for x in range(6):
response = self.config.get(url, json=json, params=params)
if response.status_code < 500:
break
else:
response = self.config.post(webhook, json=json)
try:
response_json = response.json()
if self.config.trace_mode:
logger.debug(f"Response: {response_json}")
if "result" in response_json and response_json["result"] == "error" and "details" in response_json and "response" in response_json["details"]:
raise Failed(f"Notifiarr Error: {response_json['details']['response']}")
if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
except JSONDecodeError:
if response.status_code >= 400:
raise Failed(f"({response.status_code} [{response.reason}])")
def start_time_hooks(self, start_time):
if self.run_start_webhooks:
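
The Notifiarr hook above now retries up to six times and stops as soon as the response is not a server error, then surfaces API failures as Failed instead of letting them bubble up. A generic sketch of that retry shape (the URL and params are placeholders, not the real Notifiarr endpoint):

import requests

session = requests.Session()
for attempt in range(6):
    response = session.get("https://example.com/notification/plex/",  # placeholder endpoint
                           params={"event": "collections"})
    if response.status_code < 500:                            # only retry server-side (5xx) failures
        break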

@ -154,7 +154,11 @@ def start(attrs):
logger.info("")
run_time = str(datetime.now() - start_time).split('.')[0]
if config:
try:
config.Webhooks.end_time_hooks(start_time, run_time, stats)
except Failed as e:
util.print_stacktrace()
logger.error(f"Webhooks Error: {e}")
util.separator(f"Finished {start_type}Run\nRun Time: {run_time}")
logger.removeHandler(file_handler)
@ -333,10 +337,11 @@ def library_operations(config, library, items=None):
except Failed:
pass
path = os.path.dirname(str(item.locations[0])) if library.is_movie else str(item.locations[0])
if library.Radarr and library.radarr_add_all and tmdb_id:
radarr_adds.append(tmdb_id)
radarr_adds.append((tmdb_id, f"{path.replace(library.Radarr.plex_path, library.Radarr.radarr_path)}/"))
if library.Sonarr and library.sonarr_add_all and tvdb_id:
sonarr_adds.append(tvdb_id)
sonarr_adds.append((tvdb_id, f"{path.replace(library.Sonarr.plex_path, library.Sonarr.sonarr_path)}/"))
tmdb_item = None
if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
@ -427,7 +432,6 @@ def library_operations(config, library, items=None):
except Failed:
pass
if library.Radarr and library.radarr_add_all:
try:
library.Radarr.add_tmdb(radarr_adds)
@ -552,10 +556,12 @@ def run_collection(config, library, metadata, requested_collections):
logger.info("")
logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
if len(builder.filters) > 0:
if builder.filters or builder.tmdb_filters:
logger.info("")
for filter_key, filter_value in builder.filters:
logger.info(f"Collection Filter {filter_key}: {filter_value}")
for filter_key, filter_value in builder.tmdb_filters:
logger.info(f"Collection Filter {filter_key}: {filter_value}")
builder.find_rating_keys()

@ -1,6 +1,6 @@
PlexAPI==4.7.2
PlexAPI==4.8.0
tmdbv3api==1.7.6
arrapi==1.2.3
arrapi==1.2.7
lxml==4.6.4
requests==2.26.0
ruamel.yaml==0.17.17
