@@ -737,32 +737,27 @@ def library_operations(config, library):
         logger.info("")
         logger.info(f"Metadata Backup Path: {library.metadata_backup['path']}")
         logger.info("")
-        try:
-            meta, _, _ = yaml.util.load_yaml_guess_indent(open(library.metadata_backup["path"]))
-        except yaml.scanner.ScannerError as e:
-            meta = {}
-            logger.error(f"YAML Error: {util.tab_new_lines(e)}")
-            filename, file_extension = os.path.splitext(library.metadata_backup["path"])
-            i = 1
-            while os.path.exists(f"{filename}{i}{file_extension}"):
-                i += 1
-            os.rename(library.metadata_backup["path"], f"{filename}{i}{file_extension}")
-            logger.error(f"Backup failed to load saving copy to {filename}{i}{file_extension}")
-        if "metadata" not in meta:
-            meta["metadata"] = {}
+        meta = {"metadata": {}}
+        if os.path.exists(library.metadata_backup["path"]):
+            try:
+                meta, _, _ = yaml.util.load_yaml_guess_indent(open(library.metadata_backup["path"]))
+            except yaml.scanner.ScannerError as e:
+                logger.error(f"YAML Error: {util.tab_new_lines(e)}")
+                filename, file_extension = os.path.splitext(library.metadata_backup["path"])
+                i = 1
+                while os.path.exists(f"{filename}{i}{file_extension}"):
+                    i += 1
+                os.rename(library.metadata_backup["path"], f"{filename}{i}{file_extension}")
+                logger.error(f"Backup failed to load saving copy to {filename}{i}{file_extension}")
         items = library.get_all(load=True)
         titles = [i.title for i in items]
         for i, item in enumerate(items, 1):
             logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
             map_key, attrs = library.get_locked_attributes(item, titles)
             if attrs or library.metadata_backup["add_blank_entries"]:
-                def run_dict(save_dict, the_dict):
-                    for kk, vv in the_dict.items():
-                        if isinstance(vv, dict):
-                            run_dict(save_dict[kk], vv)
-                        else:
-                            save_dict[kk] = vv
-                run_dict(meta["metadata"][map_key], attrs)
+                def get_dict(attrs_dict):
+                    return {ak: get_dict(av) if isinstance(av, dict) else av for ak, av in attrs_dict.items()} if isinstance(attrs_dict, dict) else attrs_dict
+                meta["metadata"][map_key] = get_dict(attrs)
         logger.exorcise()
         try:
             yaml.round_trip_dump(meta, open(library.metadata_backup["path"], "w", encoding="utf-8"), block_seq_indent=2)
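
Outside the patch itself, a minimal standalone sketch of what the new get_dict helper does (illustrative only; the sample attrs dict below is made up, not taken from the codebase): it recursively rebuilds a nested locked-attribute dict, so meta["metadata"][map_key] is assigned a fresh copy in one step rather than being merged key by key as run_dict did.

    def get_dict(attrs_dict):
        # Recursively copy nested dicts; leave scalar values untouched.
        return {ak: get_dict(av) if isinstance(av, dict) else av
                for ak, av in attrs_dict.items()} if isinstance(attrs_dict, dict) else attrs_dict

    attrs = {"title": "Example Movie", "seasons": {1: {"title": "Pilot"}}}  # hypothetical sample
    copied = get_dict(attrs)
    assert copied == attrs and copied is not attrs and copied["seasons"] is not attrs["seasons"]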