mirror of https://github.com/l3uddz/traktarr
commit e6ac7323af
@@ -0,0 +1,65 @@
from collections.abc import Mapping

from misc.log import logger

log = logger.get_logger(__name__)


def get_response_dict(response, key_field=None, key_value=None):
    found_response = None
    try:
        if isinstance(response, list):
            if not key_field or not key_value:
                found_response = response[0]
            else:
                for result in response:
                    if isinstance(result, dict) and key_field in result and result[key_field] == key_value:
                        found_response = result
                        break

                if not found_response:
                    log.error("Unable to find a result with key %s where the value is %s", key_field, key_value)

        elif isinstance(response, dict):
            found_response = response
        else:
            log.error("Unexpected response instance type of %s for %s", type(response).__name__, response)

    except Exception:
        log.exception("Exception determining response for %s: ", response)
    return found_response


def backoff_handler(details):
    log.warning("Backing off {wait:0.1f} seconds after {tries} tries "
                "calling function {target} with args {args} and kwargs "
                "{kwargs}".format(**details))


def dict_merge(dct, merge_dct):
    # recursively merge merge_dct into dct, mutating and returning dct
    for k, v in merge_dct.items():
        if k in dct and isinstance(dct[k], dict) and isinstance(merge_dct[k], Mapping):
            dict_merge(dct[k], merge_dct[k])
        else:
            dct[k] = merge_dct[k]

    return dct


def unblacklist_genres(genre, blacklisted_genres):
    genres = genre.split(',')
    for allow_genre in genres:
        if allow_genre in blacklisted_genres:
            blacklisted_genres.remove(allow_genre)
    return


def allowed_genres(genre, object_type, trakt_object):
    allowed_object = False
    genres = genre.split(',')

    for item in genres:
        if item.lower() in trakt_object[object_type]['genres']:
            allowed_object = True
            break
    return allowed_object
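A quick sketch of how the two most reusable helpers above behave, using made-up data; it assumes the snippet runs alongside the module above (file paths are not shown in this view):

# dict_merge deep-merges the override into the base dict, mutating and returning the base
base = {'monitored': True, 'addOptions': {'searchForMovie': False}}
merged = dict_merge(base, {'year': 2017, 'addOptions': {'searchForMovie': True}})
# merged == {'monitored': True, 'year': 2017, 'addOptions': {'searchForMovie': True}}

# get_response_dict picks the entry matching key_field/key_value out of a list response
response = [{'tmdbId': 1, 'title': 'A'}, {'tmdbId': 2, 'title': 'B'}]
match = get_response_dict(response, key_field='tmdbId', key_value=2)
# match == {'tmdbId': 2, 'title': 'B'}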
@@ -0,0 +1,50 @@
from misc.log import logger

log = logger.get_logger(__name__)


def movies_to_tmdb_dict(radarr_movies):
    movies = {}
    try:
        for tmp in radarr_movies:
            if 'tmdbId' not in tmp:
                log.debug("Could not handle movie: %s", tmp['title'])
                continue
            movies[tmp['tmdbId']] = tmp
        return movies
    except Exception:
        log.exception("Exception processing Radarr movies to TMDB dict: ")
    return None


def remove_existing_movies(radarr_movies, trakt_movies):
    new_movies_list = []

    if not radarr_movies or not trakt_movies:
        log.error("Inappropriate parameters were supplied")
        return None

    try:
        # turn the radarr movies result into a dict keyed by tmdb id
        processed_movies = movies_to_tmdb_dict(radarr_movies)
        if not processed_movies:
            return None

        # loop the trakt list, keeping movies that do not already exist in radarr
        for tmp in trakt_movies:
            if 'movie' not in tmp or 'ids' not in tmp['movie'] or 'tmdb' not in tmp['movie']['ids']:
                log.debug("Skipping movie because it did not have required fields: %s", tmp)
                continue
            # check if movie exists in processed_movies
            if tmp['movie']['ids']['tmdb'] in processed_movies:
                log.debug("Removing existing movie: %s", tmp['movie']['title'])
                continue

            new_movies_list.append(tmp)

        log.debug("Filtered %d Trakt movies to %d movies that weren't already in Radarr", len(trakt_movies),
                  len(new_movies_list))
        return new_movies_list
    except Exception:
        log.exception("Exception removing existing movies from Trakt list: ")
    return None
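For context, a minimal illustration of the filter above with hypothetical payloads; only the fields the functions actually read are included, and the snippet assumes it runs with the module above:

radarr_movies = [{'tmdbId': 603, 'title': 'The Matrix'}]
trakt_movies = [
    {'movie': {'title': 'The Matrix', 'ids': {'tmdb': 603}}},
    {'movie': {'title': 'Blade Runner 2049', 'ids': {'tmdb': 335984}}},
]

new_movies = remove_existing_movies(radarr_movies, trakt_movies)
# only the Blade Runner 2049 entry remains; The Matrix already exists in Radarr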
@@ -0,0 +1,81 @@
from misc.log import logger

log = logger.get_logger(__name__)


def series_tag_id_from_network(profile_tags, network_tags, network):
    try:
        tags = []
        for tag_name, tag_networks in network_tags.items():
            for tag_network in tag_networks:
                if tag_network.lower() in network.lower() and tag_name.lower() in profile_tags:
                    log.debug("Using %s tag for network: %s", tag_name, network)
                    tags.append(profile_tags[tag_name.lower()])
        if tags:
            return tags
    except Exception:
        log.exception("Exception determining tag to use for network %s: ", network)
    return None


def readable_tag_from_ids(profile_tag_ids, chosen_tag_ids):
    try:
        if not chosen_tag_ids:
            return None

        tags = []
        for tag_name, tag_id in profile_tag_ids.items():
            if tag_id in chosen_tag_ids:
                tags.append(tag_name)
        if tags:
            return tags
    except Exception:
        log.exception("Exception building readable tag name list from ids %s: ", chosen_tag_ids)
    return None


def series_to_tvdb_dict(sonarr_series):
    series = {}
    try:
        for tmp in sonarr_series:
            if 'tvdbId' not in tmp:
                log.debug("Could not handle show: %s", tmp['title'])
                continue
            series[tmp['tvdbId']] = tmp
        return series
    except Exception:
        log.exception("Exception processing Sonarr shows to TVDB dict: ")
    return None


def remove_existing_series(sonarr_series, trakt_series):
    new_series_list = []

    if not sonarr_series or not trakt_series:
        log.error("Inappropriate parameters were supplied")
        return None

    try:
        # turn the sonarr series result into a dict keyed by tvdb id
        processed_series = series_to_tvdb_dict(sonarr_series)
        if not processed_series:
            return None

        # loop the trakt list, keeping shows that do not already exist in sonarr
        for tmp in trakt_series:
            if 'show' not in tmp or 'ids' not in tmp['show'] or 'tvdb' not in tmp['show']['ids']:
                log.debug("Skipping show because it did not have required fields: %s", tmp)
                continue
            # check if show exists in processed_series
            if tmp['show']['ids']['tvdb'] in processed_series:
                log.debug("Removing existing show: %s", tmp['show']['title'])
                continue

            new_series_list.append(tmp)

        log.debug("Filtered %d Trakt shows to %d shows that weren't already in Sonarr", len(trakt_series),
                  len(new_series_list))
        return new_series_list
    except Exception:
        log.exception("Exception removing existing shows from Trakt list: ")
    return None
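The tag helpers above are easiest to read with a small hypothetical example, assuming it runs with the module above:

profile_tags = {'netflix': 1, 'amazon': 2}                     # Sonarr tag name (lowercase) -> tag id
network_tags = {'netflix': ['netflix'], 'amazon': ['amazon']}  # config: tag name -> network keywords

tag_ids = series_tag_id_from_network(profile_tags, network_tags, 'Amazon Prime Video')
# tag_ids == [2] because 'amazon' occurs in the network name and exists as a profile tag

tag_names = readable_tag_from_ids(profile_tags, tag_ids)
# tag_names == ['amazon']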
@@ -0,0 +1,141 @@
import os.path
from abc import ABC, abstractmethod

import backoff
import requests

from helpers.misc import backoff_handler
from helpers import str as misc_str
from helpers import misc
from misc.log import logger

log = logger.get_logger(__name__)


class PVR(ABC):
    def __init__(self, server_url, api_key):
        self.server_url = server_url
        self.api_key = api_key
        self.headers = {
            'Content-Type': 'application/json',
            'X-Api-Key': self.api_key,
        }

    def validate_api_key(self):
        try:
            # request system status to validate api_key
            req = requests.get(
                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), 'api/system/status'),
                headers=self.headers,
                timeout=60
            )
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200 and 'version' in req.json():
                return True
            return False
        except Exception:
            log.exception("Exception validating api_key: ")
            return False

    @abstractmethod
    def get_objects(self):
        pass

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def _get_objects(self, endpoint):
        try:
            # make request
            req = requests.get(
                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), endpoint),
                headers=self.headers,
                timeout=60
            )
            log.debug("Request URL: %s", req.url)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                resp_json = req.json()
                log.debug("Found %d objects", len(resp_json))
                return resp_json
            else:
                log.error("Failed to retrieve all objects, request response: %d", req.status_code)
        except Exception:
            log.exception("Exception retrieving objects: ")
        return None

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def get_profile_id(self, profile_name):
        try:
            # make request
            req = requests.get(
                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), 'api/profile'),
                headers=self.headers,
                timeout=60
            )
            log.debug("Request URL: %s", req.url)
            log.debug("Request Response: %d", req.status_code)

            if req.status_code == 200:
                resp_json = req.json()
                for profile in resp_json:
                    if profile['name'].lower() == profile_name.lower():
                        log.debug("Found id of %s profile: %d", profile_name, profile['id'])
                        return profile['id']
                    log.debug("Profile %s with id %d did not match %s", profile['name'], profile['id'], profile_name)
            else:
                log.error("Failed to retrieve all quality profiles, request response: %d", req.status_code)
        except Exception:
            log.exception("Exception retrieving id of profile %s: ", profile_name)
        return None

    def _prepare_add_object_payload(self, title, title_slug, profile_id, root_folder):
        return {
            'title': title,
            'titleSlug': title_slug,
            'qualityProfileId': profile_id,
            'images': [],
            'monitored': True,
            'rootFolderPath': root_folder,
            'addOptions': {
                'ignoreEpisodesWithFiles': False,
                'ignoreEpisodesWithoutFiles': False,
            }
        }

    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
    def _add_object(self, endpoint, payload, identifier_field, identifier):
        try:
            # make request
            req = requests.post(
                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), endpoint),
                headers=self.headers,
                json=payload,
                timeout=60
            )
            log.debug("Request URL: %s", req.url)
            log.debug("Request Payload: %s", payload)
            log.debug("Request Response Code: %d", req.status_code)
            log.debug("Request Response Text:\n%s", req.text)

            response_json = None
            if 'json' in req.headers['Content-Type'].lower():
                response_json = misc.get_response_dict(req.json(), identifier_field, identifier)

            if (req.status_code == 201 or req.status_code == 200) \
                    and (response_json and identifier_field in response_json) \
                    and response_json[identifier_field] == identifier:
                log.debug("Successfully added %s (%d)", payload['title'], identifier)
                return True
            elif response_json and ('errorMessage' in response_json or 'message' in response_json):
                message = response_json['errorMessage'] if 'errorMessage' in response_json \
                    else response_json['message']

                log.error("Failed to add %s (%d) - status_code: %d, reason: %s", payload['title'], identifier,
                          req.status_code, message)
                return False
            else:
                log.error("Failed to add %s (%d), unexpected response:\n%s", payload['title'], identifier, req.text)
                return False
        except Exception:
            log.exception("Exception adding %s (%d): ", payload['title'], identifier)
            return None
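The intended call pattern for a concrete subclass looks roughly like this (a sketch: the URL, API key, and profile name are placeholders, and Radarr is the subclass added in the next hunk of this commit):

radarr = Radarr('http://localhost:7878/', 'example-api-key')

if radarr.validate_api_key():
    movies = radarr.get_objects()                   # delegates to _get_objects('api/movie'), retried via backoff on None
    profile_id = radarr.get_profile_id('HD-1080p')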
@@ -1,143 +1,28 @@
-import os.path
-
 import backoff
-import requests
 
-from misc import helpers
-from misc import str as misc_str
+from helpers.misc import backoff_handler, dict_merge
+from media.pvr import PVR
 from misc.log import logger
 
 log = logger.get_logger(__name__)
 
 
-def backoff_handler(details):
-    log.warning("Backing off {wait:0.1f} seconds afters {tries} tries "
-                "calling function {target} with args {args} and kwargs "
-                "{kwargs}".format(**details))
-
-
-class Radarr:
-    def __init__(self, server_url, api_key):
-        self.server_url = server_url
-        self.api_key = api_key
-        self.headers = {
-            'Content-Type': 'application/json',
-            'X-Api-Key': self.api_key,
-        }
-
-    def validate_api_key(self):
-        try:
-            # request system status to validate api_key
-            req = requests.get(
-                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), 'api/system/status'),
-                headers=self.headers,
-                timeout=60
-            )
-            log.debug("Request Response: %d", req.status_code)
-
-            if req.status_code == 200 and 'version' in req.json():
-                return True
-            return False
-        except Exception:
-            log.exception("Exception validating api_key: ")
-            return False
-
-    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
-    def get_movies(self):
-        try:
-            # make request
-            req = requests.get(
-                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), 'api/movie'),
-                headers=self.headers,
-                timeout=60
-            )
-            log.debug("Request URL: %s", req.url)
-            log.debug("Request Response: %d", req.status_code)
-
-            if req.status_code == 200:
-                resp_json = req.json()
-                log.debug("Found %d movies", len(resp_json))
-                return resp_json
-            else:
-                log.error("Failed to retrieve all movies, request response: %d", req.status_code)
-        except Exception:
-            log.exception("Exception retrieving movies: ")
-        return None
-
-    @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
-    def get_profile_id(self, profile_name):
-        try:
-            # make request
-            req = requests.get(
-                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), 'api/profile'),
-                headers=self.headers,
-                timeout=60
-            )
-            log.debug("Request URL: %s", req.url)
-            log.debug("Request Response: %d", req.status_code)
-
-            if req.status_code == 200:
-                resp_json = req.json()
-                for profile in resp_json:
-                    if profile['name'].lower() == profile_name.lower():
-                        log.debug("Found id of %s profile: %d", profile_name, profile['id'])
-                        return profile['id']
-                    log.debug("Profile %s with id %d did not match %s", profile['name'], profile['id'], profile_name)
-            else:
-                log.error("Failed to retrieve all quality profiles, request response: %d", req.status_code)
-        except Exception:
-            log.exception("Exception retrieving id of profile %s: ", profile_name)
-        return None
-
+class Radarr(PVR):
+    def get_objects(self):
+        return self._get_objects('api/movie')
+
     @backoff.on_predicate(backoff.expo, lambda x: x is None, max_tries=4, on_backoff=backoff_handler)
     def add_movie(self, movie_tmdbid, movie_title, movie_year, movie_title_slug, profile_id, root_folder,
                   search_missing=False):
-        try:
-            # generate payload
-            payload = {
-                'tmdbId': movie_tmdbid,
-                'title': movie_title,
-                'year': movie_year,
-                'qualityProfileId': profile_id,
-                'images': [],
-                'monitored': True,
-                'rootFolderPath': root_folder,
-                'minimumAvailability': 'released',
-                'titleSlug': movie_title_slug,
-                'addOptions': {
-                    'ignoreEpisodesWithFiles': False,
-                    'ignoreEpisodesWithoutFiles': False,
-                    'searchForMovie': search_missing
-                }
-            }
+        payload = self._prepare_add_object_payload(movie_title, movie_title_slug, profile_id, root_folder)
+
+        payload = dict_merge(payload, {
+            'tmdbId': movie_tmdbid,
+            'year': movie_year,
+            'minimumAvailability': 'released',
+            'addOptions': {
+                'searchForMovie': search_missing
+            }
+        })
 
-            # make request
-            req = requests.post(
-                os.path.join(misc_str.ensure_endswith(self.server_url, "/"), 'api/movie'),
-                headers=self.headers,
-                json=payload,
-                timeout=60
-            )
-            log.debug("Request URL: %s", req.url)
-            log.debug("Request Payload: %s", payload)
-            log.debug("Request Response Code: %d", req.status_code)
-            log.debug("Request Response Text:\n%s", req.text)
-
-            response_json = None
-            if 'json' in req.headers['Content-Type'].lower():
-                response_json = helpers.get_response_dict(req.json(), 'tmdbId', movie_tmdbid)
-
-            if (req.status_code == 201 or req.status_code == 200) and (response_json and 'tmdbId' in response_json) \
-                    and response_json['tmdbId'] == movie_tmdbid:
-                log.debug("Successfully added %s (%d)", movie_title, movie_tmdbid)
-                return True
-            elif response_json and 'message' in response_json:
-                log.error("Failed to add %s (%d) - status_code: %d, reason: %s", movie_title, movie_tmdbid,
-                          req.status_code, response_json['message'])
-                return False
-            else:
-                log.error("Failed to add %s (%d), unexpected response:\n%s", movie_title, movie_tmdbid, req.text)
-                return False
-        except Exception:
-            log.exception("Exception adding movie %s (%d): ", movie_title, movie_tmdbid)
-            return None
+        return self._add_object('api/movie', payload, identifier_field='tmdbId', identifier=movie_tmdbid)
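As a worked illustration of the new payload flow (values are hypothetical, and the snippet assumes a radarr instance as in the earlier sketch): add_movie now starts from the shared _prepare_add_object_payload() base and deep-merges the movie-specific fields with dict_merge, so the default addOptions entries are kept while searchForMovie is added:

payload = radarr._prepare_add_object_payload('The Matrix', 'the-matrix-1999', 1, '/movies/')
payload = dict_merge(payload, {
    'tmdbId': 603,
    'year': 1999,
    'minimumAvailability': 'released',
    'addOptions': {'searchForMovie': True}
})
# payload['addOptions'] == {'ignoreEpisodesWithFiles': False,
#                           'ignoreEpisodesWithoutFiles': False,
#                           'searchForMovie': True}
# _add_object('api/movie', payload, 'tmdbId', 603) then posts it and verifies the echoed tmdbId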
File diff suppressed because it is too large