[28] remove timers

pull/858/head
meisnate12 3 years ago
parent 6163a1f83b
commit d91c6bf635

@@ -1 +1 @@
-1.16.5-develop27
+1.16.5-develop28

@@ -1984,14 +1984,19 @@ class CollectionBuilder:
         return attribute, modifier, final

     def fetch_item(self, item):
-        try:
-            key = item.ratingKey if isinstance(item, (Movie, Show, Season, Episode, Artist, Album, Track)) else int(item)
+        if isinstance(item, (Movie, Show, Season, Episode, Artist, Album, Track)):
+            if item.ratingKey not in self.library.cached_items:
+                self.library.cached_items[item.ratingKey] = (item, False)
+            return item
+        key = int(item)
         if key in self.library.cached_items:
-                return self.library.cached_items[key]
+            cached_item, full_obj = self.library.cached_items[key]
+            return cached_item
+        try:
             current = self.library.fetchItem(key)
             if not isinstance(current, (Movie, Show, Season, Episode, Artist, Album, Track)):
                 raise NotFound
-            self.library.cached_items[key] = current
+            self.library.cached_items[key] = (current, True)
             return current
         except (BadRequest, NotFound):
             raise Failed(f"Plex Error: Item {item} not found")
@@ -2146,6 +2151,7 @@ class CollectionBuilder:
     def check_filters(self, item, display):
         if (self.filters or self.tmdb_filters) and not self.details["only_filter_missing"]:
             logger.ghost(f"Filtering {display} {item.title}")
+            self.library.reload(item)
             if self.tmdb_filters and isinstance(item, (Movie, Show)):
                 if item.ratingKey not in self.library.movie_rating_key_map and item.ratingKey not in self.library.show_rating_key_map:
                     logger.warning(f"Filter Error: No {'TMDb' if self.library.is_movie else 'TVDb'} ID found for {item.title}")

@@ -303,7 +303,7 @@ class Library(ABC):
         logger.info("")
         items = self.get_all()
         for item in items:
-            self.cached_items[item.ratingKey] = item
+            self.cached_items[item.ratingKey] = (item, False)
         return items

     def map_guids(self, items):

@ -67,6 +67,11 @@ class Operations:
reverse_anidb[v] = k reverse_anidb[v] = k
for i, item in enumerate(items, 1): for i, item in enumerate(items, 1):
try:
self.library.reload(item)
except Failed as e:
logger.error(e)
continue
logger.ghost(f"Processing: {i}/{len(items)} {item.title}") logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
if self.library.assets_for_all: if self.library.assets_for_all:
self.library.update_asset2(item) self.library.update_asset2(item)

@@ -175,13 +175,13 @@ class Overlays:
         logger.separator(f"Applying Overlays for the {self.library.name} Library")
         logger.info("")
         for i, (over_key, (item, over_names)) in enumerate(sorted(key_to_overlays.items(), key=lambda io: io[1][0].titleSort), 1):
-            util.check_time("Overlay Start Time")
             try:
                 logger.ghost(f"Overlaying: {i}/{len(key_to_overlays)} {item.title}")
                 image_compare = None
                 overlay_compare = None
                 if self.config.Cache:
                     image, image_compare, _ = self.config.Cache.query_image_map(item.ratingKey, f"{self.library.image_table_name}_overlays")
                     overlay_compare = [] if overlay_compare is None else util.get_list(overlay_compare)
                 has_overlay = any([item_tag.tag.lower() == "overlay" for item_tag in item.labels])
@@ -196,13 +196,11 @@
                         overlay_change = True
                 clean_name, _ = util.validate_filename(item.title)
-                util.check_time("Initial Bit")
                 poster, _, item_dir = self.library.find_assets(
                     name="poster" if self.library.asset_folders else clean_name,
                     folder_name=clean_name if self.library.asset_folders else None,
                     prefix=f"{item.title}'s "
                 )
-                util.check_time("Find Asset Time")

                 has_original = None
                 changed_image = False
@@ -210,22 +208,17 @@
                 if poster:
                     if image_compare and str(poster.compare) != str(image_compare):
                         changed_image = True
-                    util.check_time("Choose Image (From Assets) Time")
                 elif has_overlay:
-                    test = "Backup"
                     if os.path.exists(os.path.join(self.library.overlay_backup, f"{item.ratingKey}.png")):
                         has_original = os.path.join(self.library.overlay_backup, f"{item.ratingKey}.png")
                     elif os.path.exists(os.path.join(self.library.overlay_backup, f"{item.ratingKey}.jpg")):
                         has_original = os.path.join(self.library.overlay_backup, f"{item.ratingKey}.jpg")
                     else:
-                        test = "Online"
                         new_backup = find_poster_url(item)
                         if new_backup is None:
                             new_backup = item.posterUrl
-                    util.check_time(f"Choose Image (From {test}) Time")
                 else:
                     new_backup = item.posterUrl
-                    util.check_time("Choose Image (From Plex) Time")
                 if new_backup:
                     changed_image = True
                     image_response = self.config.get(new_backup)
@@ -238,7 +231,6 @@
                         while util.is_locked(backup_image_path):
                             time.sleep(1)
                         has_original = backup_image_path
-                util.check_time("Find Image Time")

                 poster_compare = None
                 if poster is None and has_original is None:
@@ -246,7 +238,6 @@
                 elif changed_image or overlay_change:
                     new_poster = Image.open(poster.location if poster else has_original).convert("RGBA")
                     temp = os.path.join(self.library.overlay_folder, f"temp.png")
-                    util.check_time("Open Image Time")
                     try:
                         blur_num = 0
                         for over_name in over_names:
@@ -275,5 +266,4 @@
                                                          poster_compare, overlay=','.join(over_names))
             except Failed as e:
                 logger.error(e)
-            util.check_time("Overall Overlay Time", end=True)
         logger.exorcise()

@@ -464,13 +464,18 @@ class Plex(Library):
         collection.sortUpdate(sort=data)

     @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
-    def reload(self, item):
+    def reload(self, item, force=True):
+        is_full = False
+        if item.ratingKey in self.cached_items:
+            cached_item, is_full = self.cached_items[item.ratingKey]
         try:
+            if not is_full:
                 item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False,
                             includeChildren=False, includeConcerts=False, includeExternalMedia=False, includeExtras=False,
                             includeFields=False, includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False,
                             includeOnDeck=False, includePopularLeaves=False, includeRelated=False,
                             includeRelatedCount=0, includeReviews=False, includeStations=False)
+                self.cached_items[item.ratingKey] = (item, True)
         except (BadRequest, NotFound) as e:
             logger.stacktrace()
             raise Failed(f"Item Failed to Load: {e}")
@@ -711,7 +716,6 @@ class Plex(Library):
         for i, item in enumerate(all_items, 1):
             logger.ghost(f"Processing: {i}/{len(all_items)} {item.title}")
             add_item = True
-            self.reload(item)
             for collection in item.collections:
                 if collection.id in collection_indexes:
                     add_item = False

@@ -281,6 +281,7 @@ def update_libraries(config):
             if not operations_only and (library.overlay_files or library.remove_overlays):
                 library.Overlays.run_overlays()

+            if not operations_only and not overlays_only:
                 for metadata in library.metadata_files:
                     metadata_name = metadata.get_file_name()
                     if config.requested_metadata_files and metadata_name not in config.requested_metadata_files:
@ -289,7 +290,7 @@ def update_libraries(config):
continue continue
logger.info("") logger.info("")
logger.separator(f"Running {metadata_name} Metadata File\n{metadata.path}") logger.separator(f"Running {metadata_name} Metadata File\n{metadata.path}")
if not config.test_mode and not config.resume_from and not collection_only and not operations_only and not overlays_only: if not config.test_mode and not config.resume_from and not collection_only:
try: try:
metadata.update_metadata() metadata.update_metadata()
except Failed as e: except Failed as e:
@@ -300,7 +301,7 @@ def update_libraries(config):
                         logger.info("")
                         logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
                         continue
-                    if collections_to_run and not operations_only and not overlays_only:
+                    if collections_to_run:
                         logger.info("")
                         logger.separator(f"{'Test ' if config.test_mode else ''}Collections")
                         logger.remove_library_handler(library.mapping_name)
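
Taken together, the three hunks above restructure the flag handling in update_libraries: assuming the new guard wraps the metadata-file loop (which is what the dropped per-file checks suggest), the operations_only/overlays_only tests are made once instead of being repeated for every metadata file. A condensed sketch of the resulting flow, with unrelated steps elided:

if not operations_only and (library.overlay_files or library.remove_overlays):
    library.Overlays.run_overlays()

if not operations_only and not overlays_only:      # new outer guard
    for metadata in library.metadata_files:
        if not config.test_mode and not config.resume_from and not collection_only:
            metadata.update_metadata()             # flags no longer re-checked here
        collections_to_run = ...                   # details elided in the hunks above
        if collections_to_run:                     # flags no longer re-checked here
            ...                                    # run the collection pass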
