[43] update list_buffer

pull/1404/head
meisnate12 2 years ago
parent 779c32cbe7
commit 2bb5cfc69d

@@ -1 +1 @@
-1.19.0-develop42
+1.19.0-develop43

@@ -1282,7 +1282,7 @@ class CollectionBuilder:
 raise Failed(f"{self.Type} Error: Cannot use item_genre.remove and item_genre.sync together")
 self.item_details[method_final] = util.get_list(method_data) if method_data else []
 elif method_name == "item_edition":
-self.item_details[method_final] = str(method_data) if method_data else ""
+self.item_details[method_final] = str(method_data) if method_data else "" # noqa
 elif method_name == "non_item_remove_label":
 if not method_data:
 raise Failed(f"{self.Type} Error: non_item_remove_label is blank")
@@ -1312,7 +1312,7 @@ class CollectionBuilder:
 elif str(method_data).lower() not in options:
 logger.error(f"Metadata Error: {method_data} {method_name} attribute invalid")
 else:
-self.item_details[method_name] = str(method_data).lower()
+self.item_details[method_name] = str(method_data).lower() # noqa

 def _radarr(self, method_name, method_data):
 if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]:
@@ -1670,12 +1670,12 @@ class CollectionBuilder:
 "list_size": util.parse(self.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name),
 "list_minimum": util.parse(self.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name)
 }
+buff = final_dict["list_size"] * 3
 if self.library.Tautulli.has_section:
-final_dict["list_buffer"] = 0
+buff = 0
 elif "list_buffer" in dict_methods:
-final_dict["list_buffer"] = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=20, parent=method_name)
-else:
-final_dict["list_buffer"] = final_dict["list_size"] * 3
+buff = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name)
+final_dict["list_buffer"] = buff
 self.builders.append((method_name, final_dict))

 def _tmdb(self, method_name, method_data):
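
The headline change: the Tautulli builder's list_buffer no longer defaults to a hard-coded 20 but is derived from list_size. The buffer starts at three times the requested list size, drops to 0 when the library has a matching Tautulli section, and an explicit list_buffer value still wins. A minimal sketch of that resolution order; resolve_list_buffer and parse_int are hypothetical stand-ins for the real util.parse plumbing:

```python
def resolve_list_buffer(list_size, dict_data, dict_methods, has_section, parse_int=int):
    # Default buffer is three times the requested list size (previously a hard-coded 20).
    buff = list_size * 3
    if has_section:
        # A matching Tautulli library section needs no over-fetch buffer at all.
        buff = 0
    elif "list_buffer" in dict_methods:
        # An explicit list_buffer in the builder config overrides the derived default.
        buff = parse_int(dict_data[dict_methods["list_buffer"]])
    return buff

# e.g. resolve_list_buffer(10, {}, {}, has_section=False) -> 30
```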

@@ -166,12 +166,12 @@ class ConfigFile:
 self.data = YAML(self.config_path).data

-def replace_attr(all_data, attr, par):
+def replace_attr(all_data, in_attr, par):
 if "settings" not in all_data:
 all_data["settings"] = {}
-if par in all_data and all_data[par] and attr in all_data[par] and attr not in all_data["settings"]:
-all_data["settings"][attr] = all_data[par][attr]
-del all_data[par][attr]
+if par in all_data and all_data[par] and in_attr in all_data[par] and in_attr not in all_data["settings"]:
+all_data["settings"][in_attr] = all_data[par][in_attr]
+del all_data[par][in_attr]

 if "libraries" not in self.data:
 self.data["libraries"] = {}
 if "settings" not in self.data:
@@ -228,9 +228,9 @@ class ConfigFile:
 self.data["libraries"][library]["operations"]["mass_imdb_parental_labels"] = "mild"
 if "webhooks" in self.data["libraries"][library] and self.data["libraries"][library]["webhooks"] and "collection_changes" not in self.data["libraries"][library]["webhooks"]:
 changes = []
-def hooks(attr):
-if attr in self.data["libraries"][library]["webhooks"]:
-changes.extend([w for w in util.get_list(self.data["libraries"][library]["webhooks"].pop(attr), split=False) if w not in changes])
+def hooks(hook_attr):
+if hook_attr in self.data["libraries"][library]["webhooks"]:
+changes.extend([w for w in util.get_list(self.data["libraries"][library]["webhooks"].pop(hook_attr), split=False) if w not in changes])
 hooks("collection_creation")
 hooks("collection_addition")
 hooks("collection_removal")
@@ -251,9 +251,9 @@ class ConfigFile:
 temp = self.data.pop("webhooks")
 if "changes" not in temp:
 changes = []
-def hooks(attr):
-if attr in temp:
-items = util.get_list(temp.pop(attr), split=False)
+def hooks(hook_attr):
+if hook_attr in temp:
+items = util.get_list(temp.pop(hook_attr), split=False)
 if items:
 changes.extend([w for w in items if w not in changes])
 hooks("collection_creation")
@@ -377,10 +377,10 @@ class ConfigFile:
 raise Failed(f"Config Error: {attribute} attribute must be set under {parent} globally or under this specific Library")
 options = ""
 if test_list:
-for option, description in test_list.items():
+for test_option, test_description in test_list.items():
 if len(options) > 0:
 options = f"{options}\n"
-options = f"{options} {option} ({description})"
+options = f"{options} {test_option} ({test_description})"
 if (default is None and not default_is_none) or throw:
 if len(options) > 0:
 message = message + "\n" + options
@@ -873,16 +873,13 @@ class ConfigFile:
 params["reapply_overlays"] = True
 if "reset_overlays" in file or "reset_overlay" in file:
 attr = f"reset_overlay{'s' if 'reset_overlays' in file else ''}"
-if file[attr] and not isinstance(file[attr], list):
-test_list = [file[attr]]
-else:
-test_list = file[attr]
+reset_options = file[attr] if isinstance(file[attr], list) else [file[attr]]
 final_list = []
-for test_item in test_list:
-if test_item and test_item in reset_overlay_options:
-final_list.append(test_item)
+for reset_option in reset_options:
+if reset_option and reset_option in reset_overlay_options:
+final_list.append(reset_option)
 else:
-final_text = f"Config Error: reset_overlays attribute {test_item} invalid. Options: "
+final_text = f"Config Error: reset_overlays attribute {reset_option} invalid. Options: "
 for option, description in reset_overlay_options.items():
 final_text = f"{final_text}\n {option} ({description})"
 logger.error(final_text)
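
The four-line branch that coerced reset_overlays into a list collapses into one conditional expression. A tiny illustration of the same coercion, with value standing in for file[attr]:

```python
value = "tmdb"  # stand-in for file[attr]; the config may supply a single string or a list
reset_options = value if isinstance(value, list) else [value]
print(reset_options)  # ['tmdb'] -- scalars are wrapped, lists pass through unchanged
```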

@@ -123,7 +123,7 @@ class IMDb:
 imdb_ids = []
 parsed_url = urlparse(imdb_url)
 params = parse_qs(parsed_url.query)
-imdb_base = parsed_url._replace(query=None).geturl()
+imdb_base = parsed_url._replace(query=None).geturl() # noqa
 params.pop("start", None) # noqa
 params.pop("count", None) # noqa
 params.pop("page", None) # noqa

@@ -118,14 +118,14 @@ class Library(ABC):
 self.status = {}
 self.items_library_operation = True if self.assets_for_all or self.mass_genre_update or self.remove_title_parentheses \
 or self.mass_audience_rating_update or self.mass_critic_rating_update or self.mass_user_rating_update \
 or self.mass_episode_audience_rating_update or self.mass_episode_critic_rating_update or self.mass_episode_user_rating_update \
 or self.mass_content_rating_update or self.mass_originally_available_update or self.mass_original_title_update\
 or self.mass_imdb_parental_labels or self.mass_episode_imdb_parental_labels or self.genre_mapper or self.content_rating_mapper or self.mass_studio_update\
 or self.radarr_add_all_existing or self.sonarr_add_all_existing or self.mass_poster_update or self.mass_background_update else False
 self.library_operation = True if self.items_library_operation or self.delete_collections or self.mass_collection_mode \
 or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.show_unmanaged or self.show_unconfigured \
 or self.metadata_backup or self.update_blank_track_titles else False
 self.meta_operations = [i for i in [getattr(self, o) for o in operations.meta_operations] if i]
 self.label_operations = True if self.assets_for_all or self.mass_imdb_parental_labels or self.mass_episode_imdb_parental_labels else False

@@ -189,7 +189,7 @@ class Mdblist:
 logger.info(f"Limit: {data['limit']} items")
 params["limit"] = data["limit"]
 parsed_url = urlparse(data["url"])
-url_base = str(parsed_url._replace(query=None).geturl())
+url_base = str(parsed_url._replace(query=None).geturl()) # noqa
 url_base = url_base if url_base.endswith("/") else f"{url_base}/"
 url_base = url_base if url_base.endswith("json/") else f"{url_base}json/"
 try:

@@ -1,4 +1,4 @@
-import math, operator, os, re, requests
+import math, operator, os, re
 from datetime import datetime
 from modules import plex, ergast, util
 from modules.util import Failed, NotScheduled, YAML
@@ -329,14 +329,14 @@ class DataFile:
 default = {}
 if all_init_defaults:
 var_default = {replace_var(dk, variables): replace_var(dv, variables) for dk, dv in all_init_defaults.items() if dk not in variables}
-for dkey, dvalue in var_default.items():
-final_key = replace_var(dkey, var_default)
+for d_key, d_value in var_default.items():
+final_key = replace_var(d_key, var_default)
 if final_key not in optional and final_key not in variables and final_key not in conditionals:
-default[final_key] = dvalue
-if "<<" in str(dvalue):
-default[f"{final_key}_encoded"] = re.sub(r'<<(.+)>>', r'<<\1_encoded>>', dvalue)
+default[final_key] = d_value
+if "<<" in str(d_value):
+default[f"{final_key}_encoded"] = re.sub(r'<<(.+)>>', r'<<\1_encoded>>', d_value)
 else:
-default[f"{final_key}_encoded"] = util.quote(dvalue)
+default[f"{final_key}_encoded"] = util.quote(d_value)

 if "optional" in template:
 if template["optional"]:
@@ -483,7 +483,7 @@ class DataFile:
 elif f"<<{var}" in str(og_txt):
 final = str(og_txt).replace(f"<<{var}>>", str(actual_value)) if f"<<{var}>>" in str(og_txt) else str(og_txt)
 if f"<<{var}" in final:
-match = re.search(f"<<({var}([+-])(\d+))>>", final)
+match = re.search(f"<<({var}([+-])(\\d+))>>", final)
 if match:
 try:
 final = final.replace(f"<<{match.group(1)}>>", str(int(actual_value) + (int(match.group(3)) * (-1 if match.group(2) == "-" else 1))))
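
The only change above is escaping the backslash: the pattern is built with an ordinary (non-raw) f-string, so \d has to be written \\d for the regex engine to receive it without an invalid-escape warning. A quick standalone check with made-up values:

```python
import re

var, final = "episode", "Episode <<episode+2>>"
# In a non-raw f-string the backslash must be doubled so the compiled pattern contains \d
match = re.search(f"<<({var}([+-])(\\d+))>>", final)
print(match.group(1), match.group(2), match.group(3))  # episode+2 + 2
```
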
@@ -664,7 +664,7 @@ class MetadataFile(DataFile):
 raise Failed(f"Image Section Error: No styles found for section: {section_key}")
 use_key = None
 if f"use_{section_key}" in methods:
-use_key = util.parse("Images", f"use_{section_key}", self.temp_vars, datatype="bool",methods=methods, default=False)
+use_key = util.parse("Images", f"use_{section_key}", self.temp_vars, datatype="bool", methods=methods, default=False)
 logger.info(f"Use {section_key}: {use_key}")
 if use_key is False:
 logger.trace(f"Skipped as use_{section_key} is false")
@@ -842,7 +842,7 @@ class MetadataFile(DataFile):
 all_keys = {}
 auto_list = {}
 for i in tags:
-final_title = self.config.TMDb.TMDb._iso_639_1[str(i.key)].english_name if str(i.key) in self.config.TMDb.TMDb._iso_639_1 else str(i.title)
+final_title = self.config.TMDb.TMDb._iso_639_1[str(i.key)].english_name if str(i.key) in self.config.TMDb.TMDb._iso_639_1 else str(i.title) # noqa
 all_keys[str(i.key)] = final_title
 if all([x not in exclude for x in [final_title, str(i.title), str(i.key)]]):
 auto_list[str(i.key)] = final_title
@@ -1555,7 +1555,7 @@ class MetadataFile(DataFile):
 else:
 values = [loc for loc in i.locations if loc]
 if not values:
 raise Failed(f"Plex Error: No Filepaths found for {i.title}")
 res = re.search(r'(?i)[\[{]edition-([^}\]]*)', values[0])
 check = res.group(1) if res else ""
 if blank_edition and not check:

@@ -5,7 +5,7 @@ from modules.builder import CollectionBuilder
 from modules.util import Failed, FilterFailed, NonExisting, NotScheduled
 from num2words import num2words
 from plexapi.exceptions import BadRequest
-from plexapi.video import Movie, Show, Season, Episode
+from plexapi.video import Season, Episode
 from PIL import Image, ImageFilter

 logger = util.logger
@@ -122,17 +122,17 @@ class Overlays:
 actual = plex.attribute_translation[cache_key] if cache_key in plex.attribute_translation else cache_key
 if not hasattr(item, actual):
 continue
-actual_value = getattr(item, actual)
-if cache_value is None or actual_value is None:
+real_value = getattr(item, actual)
+if cache_value is None or real_value is None:
 continue
 if cache_key in overlay.float_vars:
 cache_value = float(cache_value)
 if cache_key in overlay.int_vars:
 cache_value = int(cache_value)
 if cache_key in overlay.date_vars:
-actual_value = actual_value.strftime("%Y-%m-%d")
-if actual_value != cache_value:
-overlay_change = f"Special Text Changed from {cache_value} to {actual_value}"
+real_value = real_value.strftime("%Y-%m-%d")
+if real_value != cache_value:
+overlay_change = f"Special Text Changed from {cache_value} to {real_value}"
 try:
 poster, background, item_dir, name = self.library.find_item_assets(item)
 if not poster and self.library.assets_for_all:
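
This hunk renames actual_value to real_value in the special-text cache check: the cached overlay variable is coerced to float/int where needed, date attributes are formatted as YYYY-MM-DD, and only then is the comparison made. A compressed sketch of that normalize-then-compare step, using illustrative stand-in values:

```python
from datetime import datetime

cache_value = "2023-01-15"          # value stored when the overlay was last drawn
real_value = datetime(2023, 1, 15)  # value the item reports now
if isinstance(real_value, datetime):
    # date variables are compared as YYYY-MM-DD strings
    real_value = real_value.strftime("%Y-%m-%d")
if real_value != cache_value:
    print(f"Special Text Changed from {cache_value} to {real_value}")
else:
    print("Special Text unchanged; no redraw needed")
```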

@@ -764,9 +764,9 @@ class Plex(Library):
 try:
 tag = next(f for f in self.Plex.listFilters(libtype) if f.filter == tag)
 except StopIteration:
-availableFilters = [f.filter for f in self.Plex.listFilters(libtype)]
+available_filters = [f.filter for f in self.Plex.listFilters(libtype)]
 raise NotFound(f'Unknown filter field "{tag}" for libtype "{libtype}". '
-f'Available filters: {availableFilters}') from None
+f'Available filters: {available_filters}') from None
 items = self.Plex.findItems(self.Plex._server.query(tag.key), FilterChoice)
 if tag.key.endswith("/collection?type=4"):
 keys = [k.key for k in items]

@@ -23,7 +23,7 @@ class Radarr:
 try:
 self.api = RadarrAPI(self.url, self.token, session=self.config.session)
 self.api.respect_list_exclusions_when_adding()
-self.api._validate_add_options(params["root_folder_path"], params["quality_profile"])
+self.api._validate_add_options(params["root_folder_path"], params["quality_profile"]) # noqa
 self.profiles = self.api.quality_profile()
 except ArrException as e:
 raise Failed(e)

@@ -39,7 +39,7 @@ class Sonarr:
 try:
 self.api = SonarrAPI(self.url, self.token, session=self.config.session)
 self.api.respect_list_exclusions_when_adding()
-self.api._validate_add_options(params["root_folder_path"], params["quality_profile"], params["language_profile"])
+self.api._validate_add_options(params["root_folder_path"], params["quality_profile"], params["language_profile"]) # noqa
 self.profiles = self.api.quality_profile()
 except ArrException as e:
 raise Failed(e)
@@ -80,7 +80,7 @@ class Sonarr:
 monitor = monitor_translation[options["monitor"] if "monitor" in options else self.monitor]
 quality_profile = options["quality"] if "quality" in options else self.quality_profile
 language_profile = options["language"] if "language" in options else self.language_profile
-language_profile = language_profile if self.api._raw.v3 else 1
+language_profile = language_profile if self.api._raw.v3 else 1 # noqa
 series_type = options["series"] if "series" in options else self.series_type
 season = options["season"] if "season" in options else self.season_folder
 tags = options["tag"] if "tag" in options else self.tag

@@ -153,9 +153,9 @@ class TMDbShow(TMDBObj):
 self.type = data["type"] if isinstance(data, dict) else data.type
 self.studio = data["studio"] if isinstance(data, dict) else data.networks[0].name if data.networks else None
 self.tvdb_id = data["tvdb_id"] if isinstance(data, dict) else data.tvdb_id
-loop = data.origin_countries if not isinstance(data, dict) else data["countries"].split("|") if data["countries"] else []
+loop = data.origin_countries if not isinstance(data, dict) else data["countries"].split("|") if data["countries"] else [] # noqa
 self.countries = [TMDbCountry(c) for c in loop]
-loop = data.seasons if not isinstance(data, dict) else data["seasons"].split("|") if data["seasons"] else []
+loop = data.seasons if not isinstance(data, dict) else data["seasons"].split("|") if data["seasons"] else [] # noqa
 self.seasons = [TMDbSeason(s) for s in loop]

 if self._tmdb.config.Cache and not ignore_cache:
@@ -180,7 +180,7 @@ class TMDb:
 self.TMDb = TMDbAPIs(self.apikey, language=self.language, session=self.config.session)
 except TMDbException as e:
 raise Failed(f"TMDb Error: {e}")
-self.iso_3166_1 = {iso: i.name for iso, i in self.TMDb._iso_3166_1.items()}
+self.iso_3166_1 = {iso: i.name for iso, i in self.TMDb._iso_3166_1.items()} # noqa

 def convert_from(self, tmdb_id, convert_to, is_movie):
 item = self.get_movie(tmdb_id) if is_movie else self.get_show(tmdb_id)

@@ -87,7 +87,7 @@ class TVDbObj:
 released = parse_page("//strong[text()='First Aired']/parent::li/span/text()[normalize-space()]")
 try:
-self.release_date = datetime.strptime(released, "%B %d, %Y") if released else released
+self.release_date = datetime.strptime(released, "%B %d, %Y") if released else released # noqa
 except ValueError:
 self.release_date = None

@@ -14,7 +14,7 @@ except ModuleNotFoundError:
 windows = False

-logger: MyLogger = None
+logger: MyLogger = None # noqa


 class TimeoutExpired(Exception):
 pass
@@ -308,7 +308,7 @@ def windows_input(prompt, timeout=5):
 sys.stdout.write(f"| {prompt}: ")
 sys.stdout.flush()
 result = []
-start_time = time.time()
+s_time = time.time()
 while True:
 if msvcrt.kbhit():
 char = msvcrt.getwche()
@@ -319,7 +319,7 @@ def windows_input(prompt, timeout=5):
 return out
 elif ord(char) >= 32: #space_char
 result.append(char)
-if (time.time() - start_time) > timeout:
+if (time.time() - s_time) > timeout:
 print("")
 raise TimeoutExpired
@@ -977,5 +977,3 @@ class YAML:
 if self.path:
 with open(self.path, 'w', encoding="utf-8") as fp:
 self.yaml.dump(self.data, fp)
-
-

@@ -232,9 +232,9 @@ class Webhooks:
 for col in row:
 section["fields"].append({"type": "mrkdwn", "text": col[0]})
 section["fields"].append({"type": "plain_text", "text": col[1]})
-new_json["blocks"].append(section)
+new_json["blocks"].append(section) # noqa
 else:
-new_json["blocks"].append({"type": "divider"})
+new_json["blocks"].append({"type": "divider"}) # noqa
 return new_json

 def discord(self, json):
