mirror of https://github.com/l3uddz/traktarr
commit e176d51f26
@ -0,0 +1,30 @@
# User-specific stuff:
.idea

## File-based project format:
*.iws

# IntelliJ
/out/

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
*.pyc

# logs
*.log*

# databases
*.db

# configs
*.cfg
*.json

# generators
*.bat

# Pyenv
**/.python-version
@ -0,0 +1,2 @@
# traktarr
Script to add new series & movies to Sonarr/Radarr based on Trakt lists.
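For example, once `config.json` has been filled in, an illustrative invocation would be `python3 traktarr.py shows -t trending -l 10` to add up to 10 trending Trakt shows to Sonarr, or `python3 traktarr.py movies -t popular` to add popular Trakt movies to Radarr; the `-t/--list-type`, `-l/--add-limit` and `-d/--add-delay` options are defined by the Click commands later in this commit.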
@ -0,0 +1 @@
from media import trakt, sonarr, radarr
@ -0,0 +1,109 @@
from urllib.parse import urljoin

import backoff
import requests

from misc.log import logger

log = logger.get_logger(__name__)


def backoff_handler(details):
    log.warning("Backing off {wait:0.1f} seconds after {tries} tries "
                "calling function {target} with args {args} and kwargs "
                "{kwargs}".format(**details))


class Radarr:
    def __init__(self, server_url, api_key):
        self.server_url = server_url
        self.api_key = api_key
        self.headers = {
            'Content-Type': 'application/json',
            'X-Api-Key': self.api_key,
        }

    def validate_api_key(self):
        try:
            # request system status to validate api_key
            req = requests.get(urljoin(self.server_url, 'api/system/status'), headers=self.headers, timeout=30)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200 and 'version' in req.json():
                return True
            return False
        except Exception:
            log.exception("Exception validating api_key: ")
            return False

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_movies(self):
        try:
            # make request
            req = requests.get(urljoin(self.server_url, 'api/movie'), headers=self.headers, timeout=30)
            log.debug("Request URL: %s", req.url)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                resp_json = req.json()
                log.debug("Found %d movies", len(resp_json))
                return resp_json
            else:
                log.error("Failed to retrieve all movies, request response: %d", req.status_code)
        except Exception:
            log.exception("Exception retrieving movies: ")
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_profile_id(self, profile_name):
        try:
            # make request
            req = requests.get(urljoin(self.server_url, 'api/profile'), headers=self.headers, timeout=30)
            log.debug("Request URL: %s", req.url)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                resp_json = req.json()
                for profile in resp_json:
                    if profile['name'].lower() == profile_name.lower():
                        log.debug("Found id of %s profile: %d", profile_name, profile['id'])
                        return profile['id']
                    log.debug("Profile %s with id %d did not match %s", profile['name'], profile['id'], profile_name)
            else:
                log.error("Failed to retrieve all quality profiles, request response: %d", req.status_code)
        except Exception:
            log.exception("Exception retrieving id of profile %s: ", profile_name)
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def add_movie(self, movie_tmdbid, movie_title, movie_year, profile_id, root_folder, search_missing=False):
        try:
            # generate payload
            payload = {
                'tmdbId': movie_tmdbid, 'title': movie_title, 'year': movie_year,
                'qualityProfileId': profile_id, 'images': [],
                'monitored': True, 'rootFolderPath': root_folder,
                'minimumAvailability': 'released', 'titleSlug': '',
                'addOptions': {'ignoreEpisodesWithFiles': False, 'ignoreEpisodesWithoutFiles': False,
                               'searchForMovie': search_missing}
            }

            # make request
            req = requests.post(urljoin(self.server_url, 'api/movie'), json=payload, headers=self.headers, timeout=30)
            log.debug("Request URL: %s", req.url)
            log.debug("Request Payload: %s", payload)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 201 and req.json()['tmdbId'] == movie_tmdbid:
                log.debug("Successfully added %s (%d)", movie_title, movie_tmdbid)
                return True
            elif req.status_code == 401:
                log.error("Failed to add %s (%d), reason: %s", movie_title, movie_tmdbid,
                          req.json()['errorMessage'] if '{' in req.text else "\n{}".format(req.text))
                return False
            else:
                log.error("Failed to add %s (%d), unexpected response:\n%s", movie_title, movie_tmdbid, req.text)
                return False
        except Exception:
            log.exception("Exception adding movie %s (%d): ", movie_title, movie_tmdbid)
            return None
@ -0,0 +1,110 @@
from urllib.parse import urljoin

import backoff
import requests

from misc.log import logger

log = logger.get_logger(__name__)


def backoff_handler(details):
    log.warning("Backing off {wait:0.1f} seconds after {tries} tries "
                "calling function {target} with args {args} and kwargs "
                "{kwargs}".format(**details))


class Sonarr:
    def __init__(self, server_url, api_key):
        self.server_url = server_url
        self.api_key = api_key
        self.headers = {
            'Content-Type': 'application/json',
            'X-Api-Key': self.api_key,
        }

    def validate_api_key(self):
        try:
            # request system status to validate api_key
            req = requests.get(urljoin(self.server_url, 'api/system/status'), headers=self.headers, timeout=30)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200 and 'version' in req.json():
                return True
            return False
        except Exception:
            log.exception("Exception validating api_key: ")
            return False

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_series(self):
        try:
            # make request
            req = requests.get(urljoin(self.server_url, 'api/series'), headers=self.headers, timeout=30)
            log.debug("Request URL: %s", req.url)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                resp_json = req.json()
                log.debug("Found %d series", len(resp_json))
                return resp_json
            else:
                log.error("Failed to retrieve all series, request response: %d", req.status_code)
        except Exception:
            log.exception("Exception retrieving series: ")
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_profile_id(self, profile_name):
        try:
            # make request
            req = requests.get(urljoin(self.server_url, 'api/profile'), headers=self.headers, timeout=30)
            log.debug("Request URL: %s", req.url)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                resp_json = req.json()
                for profile in resp_json:
                    if profile['name'].lower() == profile_name.lower():
                        log.debug("Found id of %s profile: %d", profile_name, profile['id'])
                        return profile['id']
                    log.debug("Profile %s with id %d did not match %s", profile['name'], profile['id'], profile_name)
            else:
                log.error("Failed to retrieve all quality profiles, request response: %d", req.status_code)
        except Exception:
            log.exception("Exception retrieving id of profile %s: ", profile_name)
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def add_series(self, series_tvdbid, series_title, profile_id, root_folder, search_missing=False):
        try:
            # generate payload
            payload = {
                'tvdbId': series_tvdbid, 'title': series_title,
                'qualityProfileId': profile_id, 'images': [],
                'seasons': [], 'seasonFolder': True,
                'monitored': True, 'rootFolderPath': root_folder,
                'addOptions': {'ignoreEpisodesWithFiles': False,
                               'ignoreEpisodesWithoutFiles': False,
                               'searchForMissingEpisodes': search_missing}
            }

            # make request
            req = requests.post(urljoin(self.server_url, 'api/series'), json=payload, headers=self.headers, timeout=30)
            log.debug("Request URL: %s", req.url)
            log.debug("Request Payload: %s", payload)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 201 and req.json()['tvdbId'] == series_tvdbid:
                log.debug("Successfully added %s (%d)", series_title, series_tvdbid)
                return True
            elif req.status_code == 401:
                log.error("Failed to add %s (%d), reason: %s", series_title, series_tvdbid,
                          req.json()['errorMessage'] if '{' in req.text else "\n{}".format(req.text))
                return False
            else:
                log.error("Failed to add %s (%d), unexpected response:\n%s", series_title, series_tvdbid, req.text)
                return False
        except Exception:
            log.exception("Exception adding series %s (%d): ", series_title, series_tvdbid)
            return None
@ -0,0 +1,379 @@
import backoff
import requests

from misc.log import logger

log = logger.get_logger(__name__)


def backoff_handler(details):
    log.warning("Backing off {wait:0.1f} seconds after {tries} tries "
                "calling function {target} with args {args} and kwargs "
                "{kwargs}".format(**details))


class Trakt:
    def __init__(self, api_key):
        self.api_key = api_key
        self.headers = {
            'Content-Type': 'application/json',
            'trakt-api-version': '2',
            'trakt-api-key': self.api_key
        }

    def validate_api_key(self):
        try:
            # request anticipated shows to determine if api_key is valid
            payload = {'extended': 'full', 'limit': 1000}

            # make request
            req = requests.get('https://api.trakt.tv/shows/anticipated', params=payload, headers=self.headers,
                               timeout=30)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                return True
            return False
        except Exception:
            log.exception("Exception validating api_key: ")
            return False

    ############################################################
    # Shows
    ############################################################

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_anticipated_shows(self, limit=1000, languages=None):
        try:
            processed_shows = []

            if languages is None:
                languages = ['en']

            # generate payload
            payload = {'extended': 'full', 'limit': limit, 'page': 1}
            if languages:
                payload['languages'] = ','.join(languages)

            # make request
            while True:
                req = requests.get('https://api.trakt.tv/shows/anticipated', params=payload, headers=self.headers,
                                   timeout=30)
                log.debug("Request URL: %s", req.url)
                log.debug("Request Payload: %s", payload)
                log.debug("Response Code: %d", req.status_code)
                log.debug("Response Page: %d of %d", payload['page'],
                          0 if 'X-Pagination-Page-Count' not in req.headers else int(
                              req.headers['X-Pagination-Page-Count']))

                if req.status_code == 200:
                    resp_json = req.json()

                    for show in resp_json:
                        if show not in processed_shows:
                            processed_shows.append(show)

                    # check if we have fetched the last page, break if so
                    if 'X-Pagination-Page-Count' not in req.headers or not int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There were no more pages to retrieve")
                        break
                    elif payload['page'] >= int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There are no more pages to retrieve results from")
                        break
                    else:
                        log.info("There are %d pages left to retrieve results from",
                                 int(req.headers['X-Pagination-Page-Count']) - payload['page'])
                        payload['page'] += 1

                else:
                    log.error("Failed to retrieve anticipated shows, request response: %d", req.status_code)
                    break

            if len(processed_shows):
                log.debug("Found %d anticipated shows", len(processed_shows))
                return processed_shows
            return None
        except Exception:
            log.exception("Exception retrieving anticipated shows: ")
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_trending_shows(self, limit=1000, languages=None):
        try:
            processed_shows = []

            if languages is None:
                languages = ['en']

            # generate payload
            payload = {'extended': 'full', 'limit': limit, 'page': 1}
            if languages:
                payload['languages'] = ','.join(languages)

            # make request
            while True:
                req = requests.get('https://api.trakt.tv/shows/trending', params=payload, headers=self.headers,
                                   timeout=30)
                log.debug("Request URL: %s", req.url)
                log.debug("Request Payload: %s", payload)
                log.debug("Response Code: %d", req.status_code)
                log.debug("Response Page: %d of %d", payload['page'],
                          0 if 'X-Pagination-Page-Count' not in req.headers else int(
                              req.headers['X-Pagination-Page-Count']))

                if req.status_code == 200:
                    resp_json = req.json()

                    for show in resp_json:
                        if show not in processed_shows:
                            processed_shows.append(show)

                    # check if we have fetched the last page, break if so
                    if 'X-Pagination-Page-Count' not in req.headers or not int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There were no more pages to retrieve")
                        break
                    elif payload['page'] >= int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There are no more pages to retrieve results from")
                        break
                    else:
                        log.info("There are %d pages left to retrieve results from",
                                 int(req.headers['X-Pagination-Page-Count']) - payload['page'])
                        payload['page'] += 1

                else:
                    log.error("Failed to retrieve trending shows, request response: %d", req.status_code)
                    break

            if len(processed_shows):
                log.debug("Found %d trending shows", len(processed_shows))
                return processed_shows
            return None
        except Exception:
            log.exception("Exception retrieving trending shows: ")
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_popular_shows(self, limit=1000, languages=None):
        try:
            processed_shows = []

            if languages is None:
                languages = ['en']

            # generate payload
            payload = {'extended': 'full', 'limit': limit, 'page': 1}
            if languages:
                payload['languages'] = ','.join(languages)

            # make request
            while True:
                req = requests.get('https://api.trakt.tv/shows/popular', params=payload, headers=self.headers,
                                   timeout=30)
                log.debug("Request URL: %s", req.url)
                log.debug("Request Payload: %s", payload)
                log.debug("Response Code: %d", req.status_code)
                log.debug("Response Page: %d of %d", payload['page'],
                          0 if 'X-Pagination-Page-Count' not in req.headers else int(
                              req.headers['X-Pagination-Page-Count']))

                if req.status_code == 200:
                    resp_json = req.json()

                    # process list so it conforms to the standard we expect (e.g. {"show": {.....}})
                    for show in resp_json:
                        if show not in processed_shows:
                            processed_shows.append({'show': show})

                    # check if we have fetched the last page, break if so
                    if 'X-Pagination-Page-Count' not in req.headers or not int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There were no more pages to retrieve")
                        break
                    elif payload['page'] >= int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There are no more pages to retrieve results from")
                        break
                    else:
                        log.info("There are %d pages left to retrieve results from",
                                 int(req.headers['X-Pagination-Page-Count']) - payload['page'])
                        payload['page'] += 1

                else:
                    log.error("Failed to retrieve popular shows, request response: %d", req.status_code)
                    break

            if len(processed_shows):
                log.debug("Found %d popular shows", len(processed_shows))
                return processed_shows
            return None
        except Exception:
            log.exception("Exception retrieving popular shows: ")
            return None

    ############################################################
    # Movies
    ############################################################

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_anticipated_movies(self, limit=1000, languages=None):
        try:
            processed_movies = []

            if languages is None:
                languages = ['en']

            # generate payload
            payload = {'extended': 'full', 'limit': limit, 'page': 1}
            if languages:
                payload['languages'] = ','.join(languages)

            # make request
            while True:
                req = requests.get('https://api.trakt.tv/movies/anticipated', params=payload, headers=self.headers,
                                   timeout=30)
                log.debug("Request URL: %s", req.url)
                log.debug("Request Payload: %s", payload)
                log.debug("Response Code: %d", req.status_code)
                log.debug("Response Page: %d of %d", payload['page'],
                          0 if 'X-Pagination-Page-Count' not in req.headers else int(
                              req.headers['X-Pagination-Page-Count']))

                if req.status_code == 200:
                    resp_json = req.json()

                    for movie in resp_json:
                        if movie not in processed_movies:
                            processed_movies.append(movie)

                    # check if we have fetched the last page, break if so
                    if 'X-Pagination-Page-Count' not in req.headers or not int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There were no more pages to retrieve")
                        break
                    elif payload['page'] >= int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There are no more pages to retrieve results from")
                        break
                    else:
                        log.info("There are %d pages left to retrieve results from",
                                 int(req.headers['X-Pagination-Page-Count']) - payload['page'])
                        payload['page'] += 1

                else:
                    log.error("Failed to retrieve anticipated movies, request response: %d", req.status_code)
                    break

            if len(processed_movies):
                log.debug("Found %d anticipated movies", len(processed_movies))
                return processed_movies
            return None
        except Exception:
            log.exception("Exception retrieving anticipated movies: ")
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_trending_movies(self, limit=1000, languages=None):
        try:
            processed_movies = []

            if languages is None:
                languages = ['en']

            # generate payload
            payload = {'extended': 'full', 'limit': limit, 'page': 1}
            if languages:
                payload['languages'] = ','.join(languages)

            # make request
            while True:
                req = requests.get('https://api.trakt.tv/movies/trending', params=payload, headers=self.headers,
                                   timeout=30)
                log.debug("Request URL: %s", req.url)
                log.debug("Request Payload: %s", payload)
                log.debug("Response Code: %d", req.status_code)
                log.debug("Response Page: %d of %d", payload['page'],
                          0 if 'X-Pagination-Page-Count' not in req.headers else int(
                              req.headers['X-Pagination-Page-Count']))

                if req.status_code == 200:
                    resp_json = req.json()

                    for movie in resp_json:
                        if movie not in processed_movies:
                            processed_movies.append(movie)

                    # check if we have fetched the last page, break if so
                    if 'X-Pagination-Page-Count' not in req.headers or not int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There were no more pages to retrieve")
                        break
                    elif payload['page'] >= int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There are no more pages to retrieve results from")
                        break
                    else:
                        log.info("There are %d pages left to retrieve results from",
                                 int(req.headers['X-Pagination-Page-Count']) - payload['page'])
                        payload['page'] += 1

                else:
                    log.error("Failed to retrieve trending movies, request response: %d", req.status_code)
                    break

            if len(processed_movies):
                log.debug("Found %d trending movies", len(processed_movies))
                return processed_movies
            return None
        except Exception:
            log.exception("Exception retrieving trending movies: ")
            return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_popular_movies(self, limit=1000, languages=None):
        try:
            processed_movies = []

            if languages is None:
                languages = ['en']

            # generate payload
            payload = {'extended': 'full', 'limit': limit, 'page': 1}
            if languages:
                payload['languages'] = ','.join(languages)

            # make request
            while True:
                req = requests.get('https://api.trakt.tv/movies/popular', params=payload, headers=self.headers,
                                   timeout=30)
                log.debug("Request URL: %s", req.url)
                log.debug("Request Payload: %s", payload)
                log.debug("Response Code: %d", req.status_code)
                log.debug("Response Page: %d of %d", payload['page'],
                          0 if 'X-Pagination-Page-Count' not in req.headers else int(
                              req.headers['X-Pagination-Page-Count']))

                if req.status_code == 200:
                    resp_json = req.json()

                    # process list so it conforms to the standard we expect (e.g. {"movie": {.....}})
                    for movie in resp_json:
                        if movie not in processed_movies:
                            processed_movies.append({'movie': movie})

                    # check if we have fetched the last page, break if so
                    if 'X-Pagination-Page-Count' not in req.headers or not int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There were no more pages to retrieve")
                        break
                    elif payload['page'] >= int(req.headers['X-Pagination-Page-Count']):
                        log.debug("There are no more pages to retrieve results from")
                        break
                    else:
                        log.info("There are %d pages left to retrieve results from",
                                 int(req.headers['X-Pagination-Page-Count']) - payload['page'])
                        payload['page'] += 1

                else:
                    log.error("Failed to retrieve popular movies, request response: %d", req.status_code)
                    break

            if len(processed_movies):
                log.debug("Found %d popular movies", len(processed_movies))
                return processed_movies
            return None
        except Exception:
            log.exception("Exception retrieving popular movies: ")
            return None
@ -0,0 +1,2 @@
from misc import config, str, helpers
from misc.log import logger
@ -0,0 +1,101 @@
import json
import os
import sys

from attrdict import AttrDict

config_path = os.path.join(os.path.dirname(sys.argv[0]), 'config.json')
base_config = {
    'core': {
        'debug': False
    },
    'trakt': {
        'api_key': ''
    },
    'sonarr': {
        'url': 'http://localhost:8989',
        'api_key': '',
        'profile': 'WEBDL-1080p',
        'root_folder': '/tv/'
    },
    'radarr': {
        'url': 'http://localhost:8989',
        'api_key': '',
        'profile': 'Remux',
        'root_folder': '/movies/'
    },
    'filters': {
        'shows': {
            'blacklisted_genres': ['animation', 'game-show', 'talk-show', 'home-and-garden', 'children', 'reality',
                                   'anime', 'news', 'documentary', 'special-interest'],
            'blacklisted_networks': ['twitch', 'youtube', 'nickelodeon', 'hallmark', 'reelzchannel', 'disney',
                                     'cnn', 'cbbc', 'the movie network', 'teletoon', 'cartoon network', 'espn',
                                     'yahoo!', 'fox sports'],
            'allowed_countries': ['us', 'gb', 'ca'],
            'blacklisted_min_runtime': 15,
            'blacklisted_min_year': 2000
        },
        'movies': {
            'blacklisted_genres': ['documentary', 'music'],
            'blacklisted_min_runtime': 60,
            'blacklisted_min_year': 2000,
            'blacklist_title_keywords': ['untitled', 'barbie'],
            'allowed_countries': ['us', 'gb', 'ca']
        }
    }
}
cfg = None


class AttrConfig(AttrDict):
    """
    Simple AttrDict subclass to return None when requested attribute does not exist
    """

    def __init__(self, config):
        super().__init__(config)

    def __getattr__(self, item):
        try:
            return super().__getattr__(item)
        except AttributeError:
            pass
        # Default behaviour
        return None


def build_config():
    if not os.path.exists(config_path):
        print("Dumping default config to: %s" % config_path)
        with open(config_path, 'w') as fp:
            json.dump(base_config, fp, sort_keys=True, indent=2)
        return True
    else:
        return False


def dump_config():
    if os.path.exists(config_path):
        with open(config_path, 'w') as fp:
            json.dump(cfg, fp, sort_keys=True, indent=2)
        return True
    else:
        return False


def load_config():
    with open(config_path, 'r') as fp:
        return AttrConfig(json.load(fp))


############################################################
# LOAD CFG
############################################################

# dump/load config
if build_config():
    print("Please edit the default configuration before running again!")
    exit(0)
else:
    cfg = load_config()
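As a minimal illustration (not part of the commit itself), the configuration loaded above is consumed elsewhere through attribute access; nested sections behave like attributes, and a top-level key missing from config.json resolves to None rather than raising, thanks to the AttrConfig override:

    from misc.config import cfg

    print(cfg.sonarr.url)                       # 'http://localhost:8989' by default
    print(cfg.core.debug)                       # False by default; used by misc.log to pick the log level
    print(cfg.filters.shows.allowed_countries)  # ['us', 'gb', 'ca'] by default
    print(cfg.nonexistent_section)              # None instead of AttributeError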
@ -0,0 +1,313 @@
from misc import str as misc_str
from misc.log import logger

log = logger.get_logger(__name__)


############################################################
# SONARR
############################################################

def sonarr_series_to_tvdb_dict(sonarr_series):
    series = {}
    try:
        for tmp in sonarr_series:
            if 'tvdbId' not in tmp:
                log.debug("Could not handle series: %s", tmp['title'])
                continue
            series[tmp['tvdbId']] = tmp
        return series
    except Exception:
        log.exception("Exception processing sonarr series to tvdb dict: ")
        return None


def sonarr_remove_existing_series(sonarr_series, trakt_series):
    new_series_list = []

    if not sonarr_series or not trakt_series:
        log.error("Inappropriate parameters were supplied")
        return None

    try:
        # turn sonarr series result into a dict with tvdb id as keys
        processed_series = sonarr_series_to_tvdb_dict(sonarr_series)
        if not processed_series:
            return None

        # loop list, adding series that do not already exist
        for tmp in trakt_series:
            if 'show' not in tmp or 'ids' not in tmp['show'] or 'tvdb' not in tmp['show']['ids']:
                log.debug("Skipping show because it did not have required fields: %s", tmp)
                continue
            # check if show exists in processed_series
            if tmp['show']['ids']['tvdb'] in processed_series:
                log.debug("Removing existing show: %s", tmp['show']['title'])
                continue

            new_series_list.append(tmp)

        log.debug("Filtered %d trakt shows to %d shows that weren't already in Sonarr", len(trakt_series),
                  len(new_series_list))
        return new_series_list
    except Exception:
        log.exception("Exception removing existing series from trakt list: ")
        return None


def trakt_blacklisted_show_genre(show, genres):
    blacklisted = False
    try:
        if not show['show']['genres']:
            log.debug("%s was blacklisted because it had no genres", show['show']['title'])
            blacklisted = True
        else:
            for genre in genres:
                if genre.lower() in show['show']['genres']:
                    log.debug("%s was blacklisted because it has genre: %s", show['show']['title'], genre)
                    blacklisted = True
                    break

    except Exception:
        log.exception("Exception determining if show has a blacklisted genre %s: ", show)
    return blacklisted


def trakt_blacklisted_show_year(show, earliest_year):
    blacklisted = False
    try:
        year = misc_str.get_year_from_timestamp(show['show']['first_aired'])
        if not year:
            log.debug("%s was blacklisted due to having an unknown first_aired date", show['show']['title'])
            blacklisted = True
        else:
            if year < earliest_year:
                log.debug("%s was blacklisted because it first aired in: %d", show['show']['title'], year)
                blacklisted = True
    except Exception:
        log.exception("Exception determining if show is before earliest_year %s:", show)
    return blacklisted


def trakt_blacklisted_show_country(show, allowed_countries):
    blacklisted = False
    try:
        if not show['show']['country']:
            log.debug("%s was blacklisted because it had no country", show['show']['title'])
            blacklisted = True
        else:
            if show['show']['country'].lower() not in allowed_countries:
                log.debug("%s was blacklisted because it is from country: %s", show['show']['title'],
                          show['show']['country'])
                blacklisted = True

    except Exception:
        log.exception("Exception determining if show was from an allowed country %s: ", show)
    return blacklisted


def trakt_blacklisted_show_network(show, networks):
    blacklisted = False
    try:
        if not show['show']['network']:
            log.debug("%s was blacklisted because it had no network", show['show']['title'])
            blacklisted = True
        else:
            for network in networks:
                if network.lower() in show['show']['network'].lower():
                    log.debug("%s was blacklisted because it is from network: %s", show['show']['title'],
                              show['show']['network'])
                    blacklisted = True
                    break

    except Exception:
        log.exception("Exception determining if show is from a blacklisted network %s: ", show)
    return blacklisted


def trakt_blacklisted_show_runtime(show, lowest_runtime):
    blacklisted = False
    try:
        if not show['show']['runtime'] or not isinstance(show['show']['runtime'], int):
            log.debug("%s was blacklisted because it had no runtime", show['show']['title'])
            blacklisted = True
        elif int(show['show']['runtime']) < lowest_runtime:
            log.debug("%s was blacklisted because it had a runtime of: %d", show['show']['title'],
                      show['show']['runtime'])
            blacklisted = True

    except Exception:
        log.exception("Exception determining if show had sufficient runtime %s: ", show)
    return blacklisted


def trakt_is_show_blacklisted(show, blacklist_settings):
    blacklisted = False
    try:
        if trakt_blacklisted_show_year(show, blacklist_settings.blacklisted_min_year):
            blacklisted = True
        if trakt_blacklisted_show_country(show, blacklist_settings.allowed_countries):
            blacklisted = True
        if trakt_blacklisted_show_genre(show, blacklist_settings.blacklisted_genres):
            blacklisted = True
        if trakt_blacklisted_show_network(show, blacklist_settings.blacklisted_networks):
            blacklisted = True
        if trakt_blacklisted_show_runtime(show, blacklist_settings.blacklisted_min_runtime):
            blacklisted = True
    except Exception:
        log.exception("Exception determining if show was blacklisted %s: ", show)
    return blacklisted


############################################################
# RADARR
############################################################

def radarr_movies_to_tmdb_dict(radarr_movies):
    movies = {}
    try:
        for tmp in radarr_movies:
            if 'tmdbId' not in tmp:
                log.debug("Could not handle movie: %s", tmp['title'])
                continue
            movies[tmp['tmdbId']] = tmp
        return movies
    except Exception:
        log.exception("Exception processing radarr movies to tmdb dict: ")
        return None


def radarr_remove_existing_movies(radarr_movies, trakt_movies):
    new_movies_list = []

    if not radarr_movies or not trakt_movies:
        log.error("Inappropriate parameters were supplied")
        return None

    try:
        # turn radarr movies result into a dict with tmdb id as keys
        processed_movies = radarr_movies_to_tmdb_dict(radarr_movies)
        if not processed_movies:
            return None

        # loop list, adding movies that do not already exist
        for tmp in trakt_movies:
            if 'movie' not in tmp or 'ids' not in tmp['movie'] or 'tmdb' not in tmp['movie']['ids']:
                log.debug("Skipping movie because it did not have required fields: %s", tmp)
                continue
            # check if movie exists in processed_movies
            if tmp['movie']['ids']['tmdb'] in processed_movies:
                log.debug("Removing existing movie: %s", tmp['movie']['title'])
                continue

            new_movies_list.append(tmp)

        log.debug("Filtered %d trakt movies to %d movies that weren't already in Radarr", len(trakt_movies),
                  len(new_movies_list))
        return new_movies_list
    except Exception:
        log.exception("Exception removing existing movies from trakt list: ")
        return None


def trakt_blacklisted_movie_genre(movie, genres):
    blacklisted = False
    try:
        if not movie['movie']['genres']:
            log.debug("%s was blacklisted because it had no genres", movie['movie']['title'])
            blacklisted = True
        else:
            for genre in genres:
                if genre.lower() in movie['movie']['genres']:
                    log.debug("%s was blacklisted because it has genre: %s", movie['movie']['title'], genre)
                    blacklisted = True
                    break

    except Exception:
        log.exception("Exception determining if movie has a blacklisted genre %s: ", movie)
    return blacklisted


def trakt_blacklisted_movie_year(movie, earliest_year):
    blacklisted = False
    try:
        year = movie['movie']['year']
        if year is None or not isinstance(year, int):
            log.debug("%s was blacklisted due to having an unknown year", movie['movie']['title'])
            blacklisted = True
        else:
            if int(year) < earliest_year:
                log.debug("%s was blacklisted because its year is: %d", movie['movie']['title'], int(year))
                blacklisted = True
    except Exception:
        log.exception("Exception determining if movie is before earliest_year %s:", movie)
    return blacklisted


def trakt_blacklisted_movie_country(movie, allowed_countries):
    blacklisted = False
    try:
        if not movie['movie']['country']:
            log.debug("%s was blacklisted because it had no country", movie['movie']['title'])
            blacklisted = True
        else:
            if movie['movie']['country'].lower() not in allowed_countries:
                log.debug("%s was blacklisted because it is from country: %s", movie['movie']['title'],
                          movie['movie']['country'])
                blacklisted = True

    except Exception:
        log.exception("Exception determining if movie was from an allowed country %s: ", movie)
    return blacklisted


def trakt_blacklisted_movie_title(movie, blacklisted_keywords):
    blacklisted = False
    try:
        if not movie['movie']['title']:
            log.debug("Blacklisted movie because it had no title: %s", movie)
            blacklisted = True
        else:
            for keyword in blacklisted_keywords:
                if keyword.lower() in movie['movie']['title'].lower():
                    log.debug("%s was blacklisted because it had title keyword: %s", movie['movie']['title'], keyword)
                    blacklisted = True
                    break

    except Exception:
        log.exception("Exception determining if movie had a blacklisted title %s: ", movie)
    return blacklisted


def trakt_blacklisted_movie_runtime(movie, lowest_runtime):
    blacklisted = False
    try:
        if not movie['movie']['runtime'] or not isinstance(movie['movie']['runtime'], int):
            log.debug("%s was blacklisted because it had no runtime", movie['movie']['title'])
            blacklisted = True
        elif int(movie['movie']['runtime']) < lowest_runtime:
            log.debug("%s was blacklisted because it had a runtime of: %d", movie['movie']['title'],
                      movie['movie']['runtime'])
            blacklisted = True

    except Exception:
        log.exception("Exception determining if movie had sufficient runtime %s: ", movie)
    return blacklisted


def trakt_is_movie_blacklisted(movie, blacklist_settings):
    blacklisted = False
    try:
        if trakt_blacklisted_movie_title(movie, blacklist_settings.blacklist_title_keywords):
            blacklisted = True
        if trakt_blacklisted_movie_year(movie, blacklist_settings.blacklisted_min_year):
            blacklisted = True
        if trakt_blacklisted_movie_country(movie, blacklist_settings.allowed_countries):
            blacklisted = True
        if trakt_blacklisted_movie_genre(movie, blacklist_settings.blacklisted_genres):
            blacklisted = True
        if trakt_blacklisted_movie_runtime(movie, blacklist_settings.blacklisted_min_runtime):
            blacklisted = True
    except Exception:
        log.exception("Exception determining if movie was blacklisted %s: ", movie)
    return blacklisted
@ -0,0 +1,50 @@
import logging
import os
import sys
from logging.handlers import RotatingFileHandler

from misc.config import cfg


class Logger:
    def __init__(self, file_name=None, log_level=logging.DEBUG,
                 log_format='%(asctime)s - %(levelname)-10s - %(name)-35s - %(funcName)-35s - %(message)s'):
        self.log_format = log_format

        # init root_logger
        self.log_formatter = logging.Formatter(log_format)
        self.root_logger = logging.getLogger()
        self.root_logger.setLevel(log_level)

        # disable bloat loggers
        logging.getLogger('urllib3').setLevel(logging.ERROR)

        # init console_logger
        self.console_handler = logging.StreamHandler(sys.stdout)
        self.console_handler.setFormatter(self.log_formatter)
        self.root_logger.addHandler(self.console_handler)

        # init file_logger
        if file_name:
            if os.path.sep not in file_name:
                # file_name was a bare filename, so build a full file_path next to the script
                self.log_file_path = os.path.join(os.path.dirname(sys.argv[0]), file_name)
            else:
                self.log_file_path = file_name

            self.file_handler = RotatingFileHandler(
                self.log_file_path,
                maxBytes=1024 * 1024 * 5,
                backupCount=5
            )
            self.file_handler.setFormatter(self.log_formatter)
            self.root_logger.addHandler(self.file_handler)

        # Set chosen logging level
        self.root_logger.setLevel(log_level)

    def get_logger(self, name):
        return self.root_logger.getChild(name)


logger = Logger('activity.log', logging.DEBUG if cfg.core.debug else logging.INFO)
@ -0,0 +1,15 @@
from misc.log import logger

log = logger.get_logger(__name__)


def get_year_from_timestamp(timestamp):
    year = 0
    try:
        if not timestamp:
            return 0

        year = timestamp[:timestamp.index('-')]
    except Exception:
        log.exception("Exception parsing year from %s: ", timestamp)
    return int(year) if str(year).isdigit() else 0
@ -0,0 +1,4 @@
attrdict==2.0.0
click==6.7
backoff==1.4.3
requests==2.18.4
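These pinned dependencies can be installed in the usual way, e.g. with `pip3 install -r requirements.txt` (Python 3 is assumed, matching the shebang in the main script below).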
@ -0,0 +1,229 @@
#!/usr/bin/env python3
import time

import click

from media.radarr import Radarr
from media.sonarr import Sonarr
from media.trakt import Trakt
from misc import helpers
from misc.config import cfg
from misc.log import logger

############################################################
# INIT
############################################################

# Logging
log = logger.get_logger('traktarr')


# Click
@click.group(help='Add new series/movies to Sonarr & Radarr from Trakt.')
def app():
    pass


############################################################
# SHOWS
############################################################

@app.command(help='Add new series to Sonarr.')
@click.option('--list-type', '-t', type=click.Choice(['anticipated', 'trending', 'popular']),
              help='Trakt list to process.', required=True)
@click.option('--add-limit', '-l', default=0, help='Limit number of series added to Sonarr.', show_default=True)
@click.option('--add-delay', '-d', default=2.5, help='Seconds between each add request to Sonarr.', show_default=True)
def shows(list_type, add_limit=0, add_delay=2.5):
    added_shows = 0

    # validate trakt api_key
    trakt = Trakt(cfg.trakt.api_key)
    if not trakt.validate_api_key():
        log.error("Aborting due to failure to validate Trakt API Key")
        return
    else:
        log.info("Validated Trakt API Key")

    # validate sonarr url & api_key
    sonarr = Sonarr(cfg.sonarr.url, cfg.sonarr.api_key)
    if not sonarr.validate_api_key():
        log.error("Aborting due to failure to validate Sonarr URL / API Key")
        return
    else:
        log.info("Validated Sonarr URL & API Key")

    # retrieve profile id for requested profile
    profile_id = sonarr.get_profile_id(cfg.sonarr.profile)
    if not profile_id or not profile_id > 0:
        log.error("Aborting due to failure to retrieve Profile ID for: %s", cfg.sonarr.profile)
        return
    else:
        log.info("Retrieved Profile ID for %s: %d", cfg.sonarr.profile, profile_id)

    # get sonarr series list
    sonarr_series_list = sonarr.get_series()
    if not sonarr_series_list:
        log.error("Aborting due to failure to retrieve Sonarr series list")
        return
    else:
        log.info("Retrieved Sonarr series list, series found: %d", len(sonarr_series_list))

    # get trakt series list
    trakt_series_list = None
    if list_type.lower() == 'anticipated':
        trakt_series_list = trakt.get_anticipated_shows()
    elif list_type.lower() == 'trending':
        trakt_series_list = trakt.get_trending_shows()
    elif list_type.lower() == 'popular':
        trakt_series_list = trakt.get_popular_shows()
    else:
        log.error("Aborting due to unknown Trakt list type")
        return
    if not trakt_series_list:
        log.error("Aborting due to failure to retrieve Trakt %s series list", list_type)
        return
    else:
        log.info("Retrieved Trakt %s series list, series found: %d", list_type, len(trakt_series_list))

    # build filtered series list without series that exist in sonarr
    processed_series_list = helpers.sonarr_remove_existing_series(sonarr_series_list, trakt_series_list)
    if not processed_series_list:
        log.error("Aborting due to failure to remove existing Sonarr series from retrieved Trakt series list")
        return
    else:
        log.info("Removed existing Sonarr series from Trakt series list, series left to process: %d",
                 len(processed_series_list))

    # loop series_list
    log.info("Processing list now...")
    for series in processed_series_list:
        try:
            # check if series passes our blacklist criteria inspection
            if not helpers.trakt_is_show_blacklisted(series, cfg.filters.shows):
                log.info("Adding: %s | Genres: %s | Network: %s | Country: %s", series['show']['title'],
                         ', '.join(series['show']['genres']), series['show']['network'],
                         series['show']['country'].upper())

                # add show to sonarr
                if sonarr.add_series(series['show']['ids']['tvdb'], series['show']['title'], profile_id,
                                     cfg.sonarr.root_folder, True):
                    log.info("ADDED %s (%d)", series['show']['title'], series['show']['year'])
                    added_shows += 1
                else:
                    log.error("FAILED adding %s (%d)", series['show']['title'], series['show']['year'])

                # stop adding shows, if added_shows >= add_limit
                if add_limit and added_shows >= add_limit:
                    break

                # sleep before adding any more
                time.sleep(add_delay)

        except Exception:
            log.exception("Exception while processing series %s: ", series['show']['title'])

    log.info("Added %d new shows to Sonarr", added_shows)


@app.command(help='Add new movies to Radarr.')
@click.option('--list-type', '-t', type=click.Choice(['anticipated', 'trending', 'popular']),
              help='Trakt list to process.', required=True)
@click.option('--add-limit', '-l', default=0, help='Limit number of movies added to Radarr.', show_default=True)
@click.option('--add-delay', '-d', default=2.5, help='Seconds between each add request to Radarr.', show_default=True)
def movies(list_type, add_limit=0, add_delay=2.5):
    added_movies = 0

    # validate trakt api_key
    trakt = Trakt(cfg.trakt.api_key)
    if not trakt.validate_api_key():
        log.error("Aborting due to failure to validate Trakt API Key")
        return
    else:
        log.info("Validated Trakt API Key")

    # validate radarr url & api_key
    radarr = Radarr(cfg.radarr.url, cfg.radarr.api_key)
    if not radarr.validate_api_key():
        log.error("Aborting due to failure to validate Radarr URL / API Key")
        return
    else:
        log.info("Validated Radarr URL & API Key")

    # retrieve profile id for requested profile
    profile_id = radarr.get_profile_id(cfg.radarr.profile)
    if not profile_id or not profile_id > 0:
        log.error("Aborting due to failure to retrieve Profile ID for: %s", cfg.radarr.profile)
        return
    else:
        log.info("Retrieved Profile ID for %s: %d", cfg.radarr.profile, profile_id)

    # get radarr movies list
    radarr_movie_list = radarr.get_movies()
    if not radarr_movie_list:
        log.error("Aborting due to failure to retrieve Radarr movies list")
        return
    else:
        log.info("Retrieved Radarr movies list, movies found: %d", len(radarr_movie_list))

    # get trakt movies list
    trakt_movies_list = None
    if list_type.lower() == 'anticipated':
        trakt_movies_list = trakt.get_anticipated_movies()
    elif list_type.lower() == 'trending':
        trakt_movies_list = trakt.get_trending_movies()
    elif list_type.lower() == 'popular':
        trakt_movies_list = trakt.get_popular_movies()
    else:
        log.error("Aborting due to unknown Trakt list type")
        return
    if not trakt_movies_list:
        log.error("Aborting due to failure to retrieve Trakt %s movies list", list_type)
        return
    else:
        log.info("Retrieved Trakt %s movies list, movies found: %d", list_type, len(trakt_movies_list))

    # build filtered movies list without movies that exist in radarr
    processed_movies_list = helpers.radarr_remove_existing_movies(radarr_movie_list, trakt_movies_list)
    if not processed_movies_list:
        log.error("Aborting due to failure to remove existing Radarr movies from retrieved Trakt movies list")
        return
    else:
        log.info("Removed existing Radarr movies from Trakt movies list, movies left to process: %d",
                 len(processed_movies_list))

    # loop movies
    log.info("Processing list now...")
    for movie in processed_movies_list:
        try:
            # check if movie passes our blacklist criteria inspection
            if not helpers.trakt_is_movie_blacklisted(movie, cfg.filters.movies):
                log.info("Adding: %s (%d) | Genres: %s | Country: %s", movie['movie']['title'], movie['movie']['year'],
                         ', '.join(movie['movie']['genres']), movie['movie']['country'].upper())
                # add movie to radarr
                if radarr.add_movie(movie['movie']['ids']['tmdb'], movie['movie']['title'], movie['movie']['year'],
                                    profile_id, cfg.radarr.root_folder, True):
                    log.info("ADDED %s (%d)", movie['movie']['title'], movie['movie']['year'])
                    added_movies += 1
                else:
                    log.error("FAILED adding %s (%d)", movie['movie']['title'], movie['movie']['year'])

                # stop adding movies, if added_movies >= add_limit
                if add_limit and added_movies >= add_limit:
                    break

                # sleep before adding any more
                time.sleep(add_delay)

        except Exception:
            log.exception("Exception while processing movie %s: ", movie['movie']['title'])

    log.info("Added %d new movies to Radarr", added_movies)


############################################################
# MAIN
############################################################

if __name__ == "__main__":
    app()