@@ -6,7 +6,6 @@ try:
    from modules.builder import CollectionBuilder
    from modules.config import Config
    from modules.util import Failed
    from plexapi.exceptions import BadRequest
except ModuleNotFoundError:
    print("Error: Requirements are not installed")
    sys.exit(0)
@@ -18,8 +17,10 @@ parser.add_argument("-t", "--time", dest="time", help="Time to update each day u
parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str)
parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False)
parser.add_argument("-cl", "--collection", "--collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-l", "--library", "--libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-co", "--collection-only", "--collections-only", dest="collection_only", help="Run only collection operations", action="store_true", default=False)
parser.add_argument("-lo", "--library-only", "--libraries-only", dest="library_only", help="Run only library operations", action="store_true", default=False)
parser.add_argument("-rc", "-cl", "--collection", "--collections", "--run-collection", "--run-collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-rl", "-l", "--library", "--libraries", "--run-library", "--run-libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
args = parser.parse_args()
@@ -39,6 +40,8 @@ def check_bool(env_str, default):
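# Each runtime option checks its PMM_* environment variable first and falls back to the matching command-line argument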
test = check_bool("PMM_TEST", args.test)
debug = check_bool("PMM_DEBUG", args.debug)
run = check_bool("PMM_RUN", args.run)
library_only = check_bool("PMM_LIBRARIES_ONLY", args.library_only)
collection_only = check_bool("PMM_COLLECTIONS_ONLY", args.collection_only)
collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections
libraries = os.environ.get("PMM_LIBRARIES") if os.environ.get("PMM_LIBRARIES") else args.libraries
resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume
@@ -71,31 +74,32 @@ def fmt_filter(record):
    record.filename = f"[{record.filename}:{record.lineno}]"
    return True
file_handler = logging.handlers.TimedRotatingFileHandler(os.path.join(default_dir, "logs", "meta.log"), when="midnight", backupCount=10, encoding="utf-8")
file_handler.addFilter(fmt_filter)
file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-100s |"))
cmd_handler = logging.StreamHandler()
cmd_handler.setFormatter(logging.Formatter("| %(message)-100s |"))
cmd_handler.setLevel(logging.DEBUG if test or debug else logging.INFO)
logger.addHandler(cmd_handler)
logger.addHandler(file_handler)
sys.excepthook = util.my_except_hook
util.separator()
util.centered(" ")
util.centered(" ____  _             __  __      _          __  __                                   ")
util.centered("|  _ \\| | _____  __ |  \\/  | ___| |_ __ _  |  \\/  | __ _ _ __   __ _  __ _  ___ _ __ ")
util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|")
util.centered("|  __/| |  __/>  <  | |  | |  __/ || (_| | | |  | | (_| | | | | (_| | (_| |  __/ |   ")
util.centered("|_|   |_|\\___/_/\\_\\ |_|  |_|\\___|\\__\\__,_| |_|  |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_|   ")
util.centered("                                                                      |___/          ")
util.centered("    Version: 1.9.1")
util.separator()
def start(config_path, is_test, daily, requested_collections, requested_libraries, resume_from):
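    # Log this run to its own meta.log; any existing file is rolled over first and up to 10 old logs are kept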
    file_logger = os.path.join(default_dir, "logs", "meta.log")
    should_roll_over = os.path.isfile(file_logger)
    file_handler = logging.handlers.RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
    util.apply_formatter(file_handler)
    file_handler.addFilter(fmt_filter)
    if should_roll_over:
        file_handler.doRollover()
    logger.addHandler(file_handler)
    util.separator()
    util.centered(" ")
    util.centered(" ____  _             __  __      _          __  __                                   ")
    util.centered("|  _ \\| | _____  __ |  \\/  | ___| |_ __ _  |  \\/  | __ _ _ __   __ _  __ _  ___ _ __ ")
    util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|")
    util.centered("|  __/| |  __/>  <  | |  | |  __/ || (_| | | |  | | (_| | | | | (_| | (_| |  __/ |   ")
    util.centered("|_|   |_|\\___/_/\\_\\ |_|  |_|\\___|\\__\\__,_| |_|  |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_|   ")
    util.centered("                                                                      |___/          ")
    util.centered("    Version: 1.9.2")
    util.separator()
    if daily:                       start_type = "Daily "
    elif is_test:                   start_type = "Test "
    elif requested_collections:     start_type = "Collections "
@@ -111,9 +115,19 @@ def start(config_path, is_test, daily, requested_collections, requested_librarie
        logger.critical(e)
    logger.info("")
    util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
    logger.removeHandler(file_handler)
def update_libraries(config, is_test, requested_collections, resume_from):
    for library in config.libraries:
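        # Everything for this library is also written to logs/<mapping_name>/library.log, rolled over on each run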
        os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True)
        col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log")
        should_roll_over = os.path.isfile(col_file_logger)
        library_handler = logging.handlers.RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
        util.apply_formatter(library_handler)
        if should_roll_over:
            library_handler.doRollover()
        logger.addHandler(library_handler)
        os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
        logger.info("")
        util.separator(f"{library.name} Library")
@@ -121,12 +135,12 @@ def update_libraries(config, is_test, requested_collections, resume_from):
        util.separator(f"Mapping {library.name} Library")
        logger.info("")
        movie_map, show_map = map_guids(config, library)
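        # Library-level metadata operations are skipped in test mode, when resuming, or when only collections were requested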
        if not is_test and not resume_from and library.mass_update:
        if not is_test and not resume_from and not collection_only and library.mass_update:
            mass_metadata(config, library, movie_map, show_map)
        for metadata in library.metadata_files:
            logger.info("")
            util.separator(f"Running Metadata File\n{metadata.path}")
            if not is_test and not resume_from:
            if not is_test and not resume_from and not collection_only:
                try:
                    metadata.update_metadata(config.TMDb, is_test)
                except Failed as e:
@@ -137,34 +151,43 @@ def update_libraries(config, is_test, requested_collections, resume_from):
            if resume_from and resume_from not in collections_to_run:
                logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}")
                continue
            if collections_to_run:
            if collections_to_run and not library_only:
                logger.removeHandler(library_handler)
                resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map)
                logger.addHandler(library_handler)
        if library.show_unmanaged is True and not is_test and not requested_collections:
            logger.info("")
            util.separator(f"Unmanaged Collections in {library.name} Library")
            logger.info("")
            unmanaged_count = 0
            collections_in_plex = [str(plex_col) for plex_col in library.collections]
        if not is_test and not requested_collections:
            unmanaged_collections = []
            for col in library.get_all_collections():
                if col.title not in collections_in_plex:
                if col.title not in library.collections:
                    unmanaged_collections.append(col)
            if library.show_unmanaged and not library_only:
                logger.info("")
                util.separator(f"Unmanaged Collections in {library.name} Library")
                logger.info("")
                for col in unmanaged_collections:
                    logger.info(col.title)
                    unmanaged_count += 1
            logger.info("{} Unmanaged Collections".format(unmanaged_count))
                logger.info(f"{len(unmanaged_collections)} Unmanaged Collections")
            if library.assets_for_all and not collection_only:
                logger.info("")
                util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
                logger.info("")
                for col in unmanaged_collections:
                    library.update_item_from_assets(col, collection_mode=True)
                for item in library.get_all():
                    library.update_item_from_assets(item)
        logger.removeHandler(library_handler)
        if library.assets_for_all is True and not is_test and not requested_collections:
            logger.info("")
            util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
            logger.info("")
            for item in library.get_all():
                library.update_item_from_assets(item)
    has_run_again = False
    for library in config.libraries:
        if library.run_again:
            has_run_again = True
            break
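    # Libraries whose collections asked to run again get a second pass below, unless only library operations were requested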
    if has_run_again:
    if has_run_again and not library_only:
        logger.info("")
        util.separator("Run Again")
        logger.info("")
@@ -176,6 +199,11 @@ def update_libraries(config, is_test, requested_collections, resume_from):
        util.print_end(length)
        for library in config.libraries:
            if library.run_again:
                col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log")
                library_handler = logging.handlers.RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8")
                util.apply_formatter(library_handler)
                logger.addHandler(library_handler)
                library_handler.addFilter(fmt_filter)
                os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
                logger.info("")
                util.separator(f"{library.name} Library Run Again")
@@ -190,6 +218,7 @@ def update_libraries(config, is_test, requested_collections, resume_from):
                    except Failed as e:
                        util.print_stacktrace()
                        util.print_multiline(e, error=True)
                logger.removeHandler(library_handler)
    used_url = []
    for library in config.libraries:
@@ -210,22 +239,18 @@ def map_guids(config, library):
    items = library.Plex.all()
    for i, item in enumerate(items, 1):
        length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
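        # Resolve each item's external id and file its ratingKey under that id; items with no resolvable id are skipped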
        try:
            id_type, main_id = config.Convert.get_id(item, library, length)
        except BadRequest:
            util.print_stacktrace()
            util.print_end(length, f"Mapping Error: | {item.guid} for {item.title} not found")
            continue
        if not isinstance(main_id, list):
            main_id = [main_id]
        if id_type == "movie":
            for m in main_id:
                if m in movie_map: movie_map[m].append(item.ratingKey)
                else: movie_map[m] = [item.ratingKey]
        elif id_type == "show":
            for m in main_id:
                if m in show_map: show_map[m].append(item.ratingKey)
                else: show_map[m] = [item.ratingKey]
        id_type, main_id = config.Convert.get_id(item, library, length)
        if main_id:
            if not isinstance(main_id, list):
                main_id = [main_id]
            if id_type == "movie":
                for m in main_id:
                    if m in movie_map: movie_map[m].append(item.ratingKey)
                    else: movie_map[m] = [item.ratingKey]
            elif id_type == "show":
                for m in main_id:
                    if m in show_map: show_map[m].append(item.ratingKey)
                    else: show_map[m] = [item.ratingKey]
    util.print_end(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}")
    return movie_map, show_map
@@ -234,6 +259,8 @@ def mass_metadata(config, library, movie_map, show_map):
    logger.info("")
    util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
    logger.info("")
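    # Collect TMDb/TVDb ids during the loop so matching items can be sent to Radarr/Sonarr in one batch afterwards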
    radarr_adds = []
    sonarr_adds = []
    items = library.Plex.all()
    for i, item in enumerate(items, 1):
        length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
@@ -257,14 +284,16 @@ def mass_metadata(config, library, movie_map, show_map):
                if item.ratingKey in rating_keys:
                    tvdb_id = tvdb
                    break
        if tmdb_id:
            imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
        elif tvdb_id:
            tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id)
            imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
        if library.Radarr and library.radarr_add_all and tmdb_id:
            radarr_adds.append(tmdb_id)
        if library.Sonarr and library.sonarr_add_all and tvdb_id:
            sonarr_adds.append(tvdb_id)
        tmdb_item = None
        if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
            if tvdb_id and not tmdb_id:
                tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id)
            if tmdb_id:
                try:
                    tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
@@ -276,6 +305,10 @@ def mass_metadata(config, library, movie_map, show_map):
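        # OMDb data (for "omdb"/"imdb" mass updates) is only fetched while the OMDb request limit has not been reached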
        omdb_item = None
        if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]:
            if config.OMDb.limit is False:
                if tmdb_id and not imdb_id:
                    imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
                elif tvdb_id and not imdb_id:
                    imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
                if imdb_id:
                    try:
                        omdb_item = config.OMDb.get_omdb(imdb_id)
@@ -327,7 +360,21 @@ def mass_metadata(config, library, movie_map, show_map):
            except Failed:
                pass
    if library.Radarr and library.radarr_add_all:
        try:
            library.Radarr.add_tmdb(radarr_adds)
        except Failed as e:
            logger.error(e)
    if library.Sonarr and library.sonarr_add_all:
        try:
            library.Sonarr.add_tvdb(sonarr_adds)
        except Failed as e:
            logger.error(e)
def run_collection(config, library, metadata, requested_collections, is_test, resume_from, movie_map, show_map):
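    # In test runs, only collections marked test: true (directly or via one of their templates) are processed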
    logger.info("")
    for mapping_name, collection_attrs in requested_collections.items():
        if is_test and ("test" not in collection_attrs or collection_attrs["test"] is not True):
            no_template_test = True
                        no_template_test = False
            if no_template_test:
                continue
        try:
            if resume_from and resume_from != mapping_name:
                continue
            elif resume_from == mapping_name:
                resume_from = None
                logger.info("")
                util.separator(f"Resuming Collections")
        if resume_from and resume_from != mapping_name:
            continue
        elif resume_from == mapping_name:
            resume_from = None
            logger.info("")
            util.separator(f"Resuming Collections")
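        # Each collection logs to its own logs/<library>/collections/<name>/collection.log while it is being built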
        if "name_mapping" in collection_attrs and collection_attrs["name_mapping"]:
            collection_log_name = util.validate_filename(collection_attrs["name_mapping"])
        else:
            collection_log_name = util.validate_filename(mapping_name)
        collection_log_folder = os.path.join(default_dir, "logs", library.mapping_name, "collections", collection_log_name)
        os.makedirs(collection_log_folder, exist_ok=True)
        col_file_logger = os.path.join(collection_log_folder, f"collection.log")
        should_roll_over = os.path.isfile(col_file_logger)
        collection_handler = logging.handlers.RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
        util.apply_formatter(collection_handler)
        if should_roll_over:
            collection_handler.doRollover()
        logger.addHandler(collection_handler)
        try:
            util.separator(f"{mapping_name} Collection")
            logger.info("")
@@ -360,26 +421,27 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
            if len(builder.schedule) > 0:
                util.print_multiline(builder.schedule, info=True)
            logger.info("")
            logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
            if len(builder.filters) > 0:
            if not builder.smart_url:
                logger.info("")
                for filter_key, filter_value in builder.filters:
                    logger.info(f"Collection Filter {filter_key}: {filter_value}")
                logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
                if len(builder.filters) > 0:
                    logger.info("")
                    for filter_key, filter_value in builder.filters:
                        logger.info(f"Collection Filter {filter_key}: {filter_value}")
            if not builder.smart_url:
                builder.collect_rating_keys(movie_map, show_map)
            logger.info("")
            if len(builder.rating_keys) > 0:
            if len(builder.rating_keys) > 0 and builder.build_collection:
                builder.add_to_collection(movie_map)
            if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0:
                builder.run_missing()
            if builder.sync and len(builder.rating_keys) > 0:
            if builder.sync and len(builder.rating_keys) > 0 and builder.build_collection:
                builder.sync_collection()
            logger.info("")
            builder.update_details()
            if builder.build_collection:
                builder.update_details()
            if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):
                library.run_again.append(builder)
@@ -390,6 +452,8 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
        except Exception as e:
            util.print_stacktrace()
            logger.error(f"Unknown Error: {e}")
        logger.info("")
        logger.removeHandler(collection_handler)
    return resume_from
try: