diff --git a/addon.xml b/addon.xml
index 9e83900..1a5a090 100644
--- a/addon.xml
+++ b/addon.xml
@@ -1,13 +1,11 @@
-
-
diff --git a/libs/actions.py b/libs/actions.py
index 2f2df2b..3040903 100644
--- a/libs/actions.py
+++ b/libs/actions.py
@@ -22,11 +22,8 @@
 from __future__ import absolute_import, unicode_literals
 
-import sys
-import six
-import xbmcgui
-import xbmcplugin
-from six.moves import urllib_parse
+import sys, urllib.parse
+import xbmcgui, xbmcplugin
 from . import tmdb, data_utils
 from .utils import logger, safe_get
 
 try:
@@ -40,7 +37,7 @@
 def find_show(title, year=None):
     # type: (Union[Text, bytes], Optional[Text]) -> None
     """Find a show by title"""
-    if not isinstance(title, six.text_type):
+    if not isinstance(title, str):
         title = title.decode('utf-8')
     logger.debug('Searching for TV show {} ({})'.format(title, year))
     search_results = tmdb.search_show(title, year)
@@ -126,13 +123,13 @@ def get_episode_list(show_id):  # pylint: disable=missing-docstring
     for episode in show_info['episodes']:
         list_item = xbmcgui.ListItem(episode['name'], offscreen=True)
         list_item = data_utils.add_episode_info(list_item, episode, full_info=False)
-        encoded_ids = urllib_parse.urlencode(
+        encoded_ids = urllib.parse.urlencode(
             {'show_id': str(show_info['id']), 'episode_id': str(theindex)}
         )
         theindex = theindex + 1
         # Below "url" is some unique ID string (may be an actual URL to an episode page)
         # that allows to retrieve information about a specific episode.
-        url = urllib_parse.quote(encoded_ids)
+        url = urllib.parse.quote(encoded_ids)
         xbmcplugin.addDirectoryItem(
             HANDLE,
             url=url,
@@ -143,8 +140,8 @@
 
 def get_episode_details(encoded_ids):  # pylint: disable=missing-docstring
     # type: (Text) -> None
-    encoded_ids = urllib_parse.unquote(encoded_ids)
-    decoded_ids = dict(urllib_parse.parse_qsl(encoded_ids))
+    encoded_ids = urllib.parse.unquote(encoded_ids)
+    decoded_ids = dict(urllib.parse.parse_qsl(encoded_ids))
     logger.debug('Getting episode details for {}'.format(decoded_ids))
     episode_info = tmdb.load_episode_info(
         decoded_ids['show_id'], decoded_ids['episode_id']
@@ -184,7 +181,7 @@ def router(paramstring):
     :param paramstring: url-encoded query string
     :raises RuntimeError: on unknown call action
     """
-    params = dict(urllib_parse.parse_qsl(paramstring))
+    params = dict(urllib.parse.parse_qsl(paramstring))
     logger.debug('Called addon with params: {}'.format(sys.argv))
     if params['action'] == 'find':
         find_show(params['title'], params.get('year'))
diff --git a/libs/api_utils.py b/libs/api_utils.py
index ed90ef9..b3ec045 100644
--- a/libs/api_utils.py
+++ b/libs/api_utils.py
@@ -20,9 +20,10 @@
 from __future__ import absolute_import, unicode_literals
 
 import json
+from urllib.request import Request, urlopen
+from urllib.error import URLError
+from urllib.parse import urlencode
 from pprint import pformat
-import requests
-from requests.exceptions import HTTPError
 from . import settings
 from .utils import logger
 try:
@@ -31,33 +32,42 @@
 except ImportError:
     pass
 
-HEADERS = (
-    ('User-Agent', 'Kodi scraper for themoviedb.org by pkscout; pkscout@kodi.tv'),
-    ('Accept', 'application/json'),
-)
-SESSION = requests.Session()
-SESSION.headers.update(dict(HEADERS))
+HEADERS = {}
 
 
 def set_headers(headers):
-    SESSION.headers.update(headers)
+    HEADERS.update(headers)
 
 
-def load_info(url, params=None):
+def load_info(url, params=None, default=None, resp_type = 'json'):
     # type: (Text, Optional[Dict[Text, Union[Text, List[Text]]]]) -> Union[dict, list]
     """
-    Load info from themoviedb
+    Load info from external api
 
     :param url: API endpoint URL
     :param params: URL query params
-    :return: API response
-    :raises requests.exceptions.HTTPError: if any error happens
+    :default: object to return if there is an error
+    :resp_type: what to return to the calling function
+    :return: API response or default on error
     """
-    logger.debug('Calling URL "{}" with params {}'.format(url, params))
-    response = SESSION.get(url, params=params)
-    if not response.ok:
-        response.raise_for_status()
-    json_response = response.json()
+    if params:
+        url = url + '?' + urlencode(params)
+    logger.debug('Calling URL "{}"'.format(url))
+    req = Request(url, headers=HEADERS)
+    try:
+        response = urlopen(req)
+    except URLError as e:
+        if hasattr(e, 'reason'):
+            logger.debug('failed to reach the remote site\nReason: {}'.format(e.reason))
+        elif hasattr(e, 'code'):
+            logger.debug('remote site unable to fulfill the request\nError code: {}'.format(e.code))
+        response = None
+    if response is None:
+        resp = default
+    elif resp_type.lower() == 'json':
+        resp = json.loads(response.read().decode('utf-8'))
+    else:
+        resp = response.read().decode('utf-8')
     if settings.VERBOSELOG:
-        logger.debug('the api response:\n{}'.format(pformat(json_response)))
-    return json_response
+        logger.debug('the api response:\n{}'.format(pformat(resp)))
+    return resp
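A standalone sketch of the request pattern the reworked api_utils.load_info() follows: urllib in place of requests, headers kept in a module-level dict, and a caller-supplied default returned on failure instead of an exception. The URL, header values, and function name below are illustrative, not taken from the add-on.

    import json
    from urllib.request import Request, urlopen
    from urllib.error import URLError
    from urllib.parse import urlencode

    HEADERS = {'User-Agent': 'example-scraper', 'Accept': 'application/json'}

    def fetch(url, params=None, default=None, resp_type='json'):
        # build the query string by hand and send whatever headers were stored
        if params:
            url = url + '?' + urlencode(params)
        try:
            response = urlopen(Request(url, headers=HEADERS))
        except URLError as err:
            print('request failed: {}'.format(err))
            return default
        raw = response.read().decode('utf-8')
        return json.loads(raw) if resp_type == 'json' else raw

    # a failed call yields the default instead of raising
    print(fetch('https://api.example.org/tv/1396', params={'language': 'en-US'}, default={}))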
diff --git a/libs/cache.py b/libs/cache.py
index 411dc4b..03a1e31 100644
--- a/libs/cache.py
+++ b/libs/cache.py
@@ -22,13 +22,9 @@
 from __future__ import absolute_import, unicode_literals
 
-import os
+import os, pickle
 from datetime import datetime, timedelta
-
-from six import PY2, PY3
-from six.moves import cPickle as pickle
-import xbmc
-import xbmcvfs
+import xbmc, xbmcvfs
 
 from .utils import ADDON, logger
@@ -44,8 +40,6 @@
 def _get_cache_directory():  # pylint: disable=missing-docstring
     # type: () -> Text
     profile_dir = xbmc.translatePath(ADDON.getAddonInfo('profile'))
-    if PY2:
-        profile_dir = profile_dir.decode('utf-8')
     cache_dir = os.path.join(profile_dir, 'cache')
     if not xbmcvfs.exists(cache_dir):
         xbmcvfs.mkdir(cache_dir)
@@ -81,9 +75,7 @@ def load_show_info_from_cache(show_id):
     try:
         with open(os.path.join(CACHE_DIR, file_name), 'rb') as fo:
             load_kwargs = {}
-            if PY3:
-                # https://forum.kodi.tv/showthread.php?tid=349813&pid=2970989#pid2970989
-                load_kwargs['encoding'] = 'bytes'
+            load_kwargs['encoding'] = 'bytes'
             cache = pickle.load(fo, **load_kwargs)
         if datetime.now() - cache['timestamp'] > CACHING_DURATION:
             return None
diff --git a/libs/data_utils.py b/libs/data_utils.py
index 699bcca..b727b25 100644
--- a/libs/data_utils.py
+++ b/libs/data_utils.py
@@ -24,7 +24,6 @@
 
 import re, json
 from collections import OrderedDict, namedtuple
-import six
 from .utils import safe_get, logger
 from . import settings
@@ -114,7 +113,7 @@ def _set_unique_ids(ext_ids, list_item):
     # type: (InfoType, ListItem) -> ListItem
     """Extract unique ID in various online databases"""
     unique_ids = {}
-    for key, value in six.iteritems(ext_ids):
+    for key, value in ext_ids.items():
         if key in VALIDEXTIDS and value:
             key = key[:-3]
             unique_ids[key] = str(value)
@@ -146,7 +145,7 @@ def _add_season_info(show_info, list_item):
         if image:
             url = settings.IMAGEROOTURL + image
             list_item.addAvailableArtwork(url, 'poster', season=season['season_number'])
-        for image_type, image_list in six.iteritems(season.get('images', {})):
+        for image_type, image_list in season.get('images', {}).items():
             if image_type == 'posters':
                 destination = 'poster'
             else:
@@ -163,7 +162,7 @@
 def set_show_artwork(show_info, list_item):
     # type: (InfoType, ListItem) -> ListItem
     """Set available images for a show"""
-    for image_type, image_list in six.iteritems(show_info.get('images', {})):
+    for image_type, image_list in show_info.get('images', {}).items():
         if image_type == 'backdrops':
             fanart_list = []
             for image in image_list:
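A small sketch of the cache round trip behind the cache.py change above: with Python 2 support dropped, pickle.load() is now always called with encoding='bytes', presumably so caches written by the older Python 2 builds still load. The file name and payload here are invented.

    import pickle
    from datetime import datetime

    cache = {'timestamp': datetime.now(), 'show_info': {'id': 1396, 'name': 'Example Show'}}
    with open('show-1396.pickle', 'wb') as fw:
        pickle.dump(cache, fw)

    with open('show-1396.pickle', 'rb') as fo:
        # encoding='bytes' is harmless for pickles written by Python 3 and lets
        # pickles written by the old Python 2 builds load as well
        restored = pickle.load(fo, encoding='bytes')
    print(restored['show_info']['name'])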
diff --git a/libs/debugger.py b/libs/debugger.py
index 53b0eaf..fd63eff 100644
--- a/libs/debugger.py
+++ b/libs/debugger.py
@@ -29,7 +29,6 @@
 
 from platform import uname
 from pprint import pformat
-import six
 import xbmc
 
 from .utils import logger
@@ -50,7 +49,7 @@ def _format_vars(variables):
     :return: formatted string with sorted ``var = val`` pairs
     :rtype: str
     """
-    var_list = [(var, val) for var, val in six.iteritems(variables)
+    var_list = [(var, val) for var, val in variables.items()
                 if not (var.startswith('__') or var.endswith('__'))]
     var_list.sort(key=lambda i: i[0])
     lines = []
diff --git a/libs/imdbratings.py b/libs/imdbratings.py
index c82fa55..e8d80e5 100644
--- a/libs/imdbratings.py
+++ b/libs/imdbratings.py
@@ -20,15 +20,13 @@
 
 import re
-import requests
-from requests.exceptions import ConnectionError as RequestsConnectionError, Timeout, RequestException
+from . import api_utils
 
 IMDB_RATINGS_URL = 'https://www.imdb.com/title/{}/'
-
 IMDB_RATING_REGEX = re.compile(r'itemprop="ratingValue".*?>.*?([\d.]+).*?<')
 IMDB_VOTES_REGEX = re.compile(r'itemprop="ratingCount".*?>.*?([\d,]+).*?<')
 
-# get the tv show info via imdb
+
 def get_details(imdb_id):
     if not imdb_id:
         return {}
 
@@ -36,11 +34,8 @@
     return _assemble_imdb_result(votes, rating)
 
 def _get_ratinginfo(imdb_id):
-    try:
-        response = requests.get(IMDB_RATINGS_URL.format(imdb_id))
-    except (Timeout, RequestsConnectionError, RequestException) as ex:
-        return _format_error_message(ex)
-    return _parse_imdb_result(response.text if response and response.status_code == 200 else '')
+    response = api_utils.load_info(IMDB_RATINGS_URL.format(imdb_id), default = '', resp_type='text')
+    return _parse_imdb_result(response)
 
 def _assemble_imdb_result(votes, rating):
     result = {}
@@ -64,9 +59,3 @@ def _parse_imdb_votes(input_html):
     if (match):
         return int(match.group(1).replace(',', ''))
     return None
-
-def _format_error_message(ex):
-    message = type(ex).__name__
-    if hasattr(ex, 'message'):
-        message += ": {0}".format(ex.message)
-    return {'error': message}
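A quick sketch of how the two IMDb regexes (unchanged by this patch) pull the rating and vote count out of the page text that api_utils.load_info(..., resp_type='text') now returns; the HTML fragment is a made-up stand-in for a real IMDb page.

    import re

    IMDB_RATING_REGEX = re.compile(r'itemprop="ratingValue".*?>.*?([\d.]+).*?<')
    IMDB_VOTES_REGEX = re.compile(r'itemprop="ratingCount".*?>.*?([\d,]+).*?<')

    html = ('<span itemprop="ratingValue">8.7</span>'
            '<span itemprop="ratingCount">1,234,567</span>')

    rating_match = IMDB_RATING_REGEX.search(html)
    votes_match = IMDB_VOTES_REGEX.search(html)
    rating = float(rating_match.group(1)) if rating_match else None
    votes = int(votes_match.group(1).replace(',', '')) if votes_match else None
    print(rating, votes)  # 8.7 1234567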
diff --git a/libs/settings.py b/libs/settings.py
index 62401bb..96cba24 100644
--- a/libs/settings.py
+++ b/libs/settings.py
@@ -16,8 +16,7 @@
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
 # pylint: disable=missing-docstring
 
-import json, six, sys
-from six.moves import urllib_parse
+import json, sys, urllib.parse
 from .utils import logger
 from pprint import pformat
 
@@ -41,7 +40,7 @@
     'seasonthumb': 'seasonlandscape'
 }
 try:
-    source_params = dict(urllib_parse.parse_qsl(sys.argv[2]))
+    source_params = dict(urllib.parse.parse_qsl(sys.argv[2]))
 except IndexError:
     source_params = {}
 source_settings = json.loads(source_params.get('pathSettings', {}))
@@ -66,5 +65,5 @@
     RATING_TYPES.append('tmdb')
 FANARTTV_CLIENTKEY = source_settings.get('fanarttv_clientkey', '')
 FANARTTV_ART = {}
-for fanarttv_type, tmdb_type in six.iteritems(FANARTTV_MAPPING):
+for fanarttv_type, tmdb_type in FANARTTV_MAPPING.items():
     FANARTTV_ART[tmdb_type] = source_settings.get('enable_fanarttv_%s' % tmdb_type, False)
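A sketch of the settings hand-off the code above parses: Kodi passes a URL-encoded query string in sys.argv[2] whose pathSettings value is a JSON blob. The keys and values below are invented for illustration.

    import json
    import urllib.parse

    # what Kodi would hand the scraper in sys.argv[2] (values invented)
    argv2 = urllib.parse.urlencode(
        {'pathSettings': json.dumps({'language': 'en-US', 'fanarttv_clientkey': ''})})

    source_params = dict(urllib.parse.parse_qsl(argv2))
    source_settings = json.loads(source_params.get('pathSettings', '{}'))
    print(source_settings['language'])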
diff --git a/libs/tmdb.py b/libs/tmdb.py
index ceb0ab6..c3ee3ff 100644
--- a/libs/tmdb.py
+++ b/libs/tmdb.py
@@ -20,8 +20,6 @@
 from __future__ import absolute_import, unicode_literals
 
-import six
-from requests.exceptions import HTTPError
 from math import floor
 from pprint import pformat
 from . import cache, data_utils, api_utils, settings, imdbratings, traktratings
 
@@ -32,17 +30,24 @@
 except ImportError:
     pass
 
-BASE_URL = 'https://api.themoviedb.org/3/{}?api_key=%s&language=%s' % (settings.TMDB_CLOWNCAR, settings.LANG)
+HEADERS = (
+    ('User-Agent', 'Kodi TV Show scraper by Team Kodi; contact pkscout@kodi.tv'),
+    ('Accept', 'application/json'),
+)
+api_utils.set_headers(dict(HEADERS))
+
+TMDB_PARAMS = {'api_key': settings.TMDB_CLOWNCAR, 'language': settings.LANG}
+BASE_URL = 'https://api.themoviedb.org/3/{}'
 EPISODE_GROUP_URL = BASE_URL.format('tv/episode_group/{}')
 SEARCH_URL = BASE_URL.format('search/tv')
 FIND_URL = BASE_URL.format('find/{}')
 SHOW_URL = BASE_URL.format('tv/{}')
 SEASON_URL = BASE_URL.format('tv/{}/season/{}')
 EPISODE_URL = BASE_URL.format('tv/{}/season/{}/episode/{}')
+FANARTTV_URL = 'https://webservice.fanart.tv/v3/tv/{}'
+FANARTTV_PARAMS = {'api_key': settings.FANARTTV_CLOWNCAR}
 if settings.FANARTTV_CLIENTKEY:
-    FANARTTV_URL = 'https://webservice.fanart.tv/v3/tv/{}?api_key=%s&client_key=%s' % (settings.FANARTTV_CLOWNCAR, settings.FANARTTV_CLIENTKEY)
-else:
-    FANARTTV_URL = 'https://webservice.fanart.tv/v3/tv/{}?api_key=%s' % settings.FANARTTV_CLOWNCAR
+    FANARTTV_PARAMS['client_key'] = settings.FANARTTV_CLIENTKEY
 
 
 def search_show(title, year=None):
@@ -54,24 +59,24 @@
     : param year: the year to search (optional)
     :return: a list with found TV shows
     """
+    params = TMDB_PARAMS
     results = []
     ext_media_id = data_utils.parse_media_id(title)
     if ext_media_id:
         logger.debug('using %s of %s to find show' % (ext_media_id['type'], ext_media_id['title']))
         if ext_media_id['type'] == 'tmdb_id':
             search_url = SHOW_URL.format(ext_media_id['title'])
-            params = {}
         else:
             search_url = FIND_URL.format(ext_media_id['title'])
-            params = {'external_source':ext_media_id['type']}
+            params['external_source'] = ext_media_id['type']
     else:
         logger.debug('using title of %s to find show' % title)
         search_url = SEARCH_URL
-        params = {'query': title}
+        params['query'] = title
         if year:
-            params.update({'first_air_date_year': str(year)})
-    try:
-        resp = api_utils.load_info(search_url, params)
+            params['first_air_date_year'] = str(year)
+    resp = api_utils.load_info(search_url, params=params)
+    if resp is not None:
         if ext_media_id:
             if ext_media_id['type'] == 'tmdb_id':
                 if resp.get('success') == 'false':
                     results = []
                 else:
                     results = [resp]
             else:
                 results = resp.get('tv_results', [])
         else:
             results = resp.get('results', [])
-    except HTTPError as exc:
-        logger.error('themoviedb returned an error: {}'.format(exc))
     return results
@@ -94,11 +97,7 @@
     if ep_grouping is not None:
         logger.debug('Getting episodes with episode grouping of ' + ep_grouping)
         episode_group_url = EPISODE_GROUP_URL.format(ep_grouping)
-        try:
-            custom_order = api_utils.load_info(episode_group_url)
-        except HTTPError as exc:
-            logger.error('themoviedb returned an error: {}'.format(exc))
-            custom_order = None
+        custom_order = api_utils.load_info(episode_group_url, params=TMDB_PARAMS)
     if custom_order is not None:
         show_info['seasons'] = []
         season_num = 1
@@ -122,7 +121,7 @@
     else:
         logger.debug('Getting episodes from standard season list')
         show_info['seasons'] = []
-        for key, value in six.iteritems(season_map):
+        for key, value in season_map.items():
             show_info['seasons'].append(value)
     for season in show_info.get('seasons', []):
         for episode in season.get('episodes', []):
@@ -145,25 +144,17 @@
     if show_info is None:
         logger.debug('no cache file found, loading from scratch')
         show_url = SHOW_URL.format(show_id)
-        params = {}
+        params = TMDB_PARAMS
         params['append_to_response'] = 'credits,content_ratings,external_ids,images'
         params['include_image_language'] = '%s,en,null' % settings.LANG[0:2]
-        try:
-            show_info = api_utils.load_info(show_url, params)
-        except HTTPError as exc:
-            logger.error('themoviedb returned an error: {}'.format(exc))
+        show_info = api_utils.load_info(show_url, params=params)
+        if show_info is None:
             return None
         season_map = {}
+        params['append_to_response'] = 'credits,images'
         for season in show_info.get('seasons', []):
             season_url = SEASON_URL.format(show_id, season['season_number'])
-            params = {}
-            params['append_to_response'] = 'credits,images'
-            params['include_image_language'] = '%s,en,null' % settings.LANG[0:2]
-            try:
-                season_info = api_utils.load_info(season_url, params)
-            except HTTPError as exc:
-                logger.error('themoviedb returned an error: {}'.format(exc))
-                season_info = {}
+            season_info = api_utils.load_info(season_url, params=params, default={})
             season_info['images'] = _sort_image_types(season_info.get('images', {}))
             season_map[str(season['season_number'])] = season_info
         show_info = load_episode_list(show_info, season_map, ep_grouping)
@@ -205,13 +196,11 @@
         return None
     # this ensures we are using the season/ep from the episode grouping if provided
     ep_url = EPISODE_URL.format(show_info['id'], episode_info['org_seasonnum'], episode_info['org_epnum'])
-    params = {}
+    params = TMDB_PARAMS
    params['append_to_response'] = 'credits,external_ids,images'
     params['include_image_language'] = '%s,en,null' % settings.LANG[0:2]
-    try:
-        ep_return = api_utils.load_info(ep_url, params)
-    except HTTPError as exc:
-        logger.error('themoviedb returned an error: {}'.format(exc))
+    ep_return = api_utils.load_info(ep_url, params=params)
+    if ep_return is None:
         return None
     ep_return['images'] = _sort_image_types(ep_return.get('images', {}))
     ep_return['season_number'] = episode_info['season_number']
@@ -266,12 +255,10 @@
             break
     if tvdb_id and artwork_enabled:
         fanarttv_url = FANARTTV_URL.format(tvdb_id)
-        try:
-            artwork = api_utils.load_info(fanarttv_url)
-        except HTTPError as exc:
-            logger.error('fanart.tv returned an error: {}'.format(exc))
+        artwork = api_utils.load_info(fanarttv_url, params=FANARTTV_PARAMS)
+        if artwork is None:
             return show_info
-        for fanarttv_type, tmdb_type in six.iteritems(settings.FANARTTV_MAPPING):
+        for fanarttv_type, tmdb_type in settings.FANARTTV_MAPPING.items():
             if settings.FANARTTV_ART[tmdb_type]:
                 if not show_info['images'].get(tmdb_type) and not tmdb_type.startswith('season'):
                     show_info['images'][tmdb_type] = []
@@ -310,7 +297,7 @@
     image_counts = {}
     image_total = 0
     backdrops_total = 0
-    for image_type, image_list in six.iteritems(show_info.get('images', {})):
+    for image_type, image_list in show_info.get('images', {}).items():
         total = len(image_list)
         if image_type == 'backdrops':
             backdrops_total = backdrops_total + total
@@ -318,7 +305,7 @@
             image_counts[image_type] = {'total':total}
             image_total = image_total + total
     for season in show_info.get('seasons', []):
-        for image_type, image_list in six.iteritems(season.get('images', {})):
+        for image_type, image_list in season.get('images', {}).items():
             total = len(image_list)
             thetype = '%s_%s' % (str(season['season_number']), image_type)
             image_counts[thetype] = {'total':total}
@@ -334,7 +321,7 @@
         reduction = (image_total - settings.MAXIMAGES)/image_total
         logger.error('there are %s non-fanart images' % str(image_total))
         logger.error('that is more than the max of %s, image results will be trimmed by %s' % (str(settings.MAXIMAGES), str(reduction)))
-        for key, value in six.iteritems(image_counts):
+        for key, value in image_counts.items():
             total = value['total']
             reduce = int(floor(total * reduction))
             target = total - reduce
@@ -344,14 +331,14 @@
                 reduce = -1 * reduce
             image_counts[key]['reduce'] = reduce
             logger.debug('%s: %s' % (key, pformat(image_counts[key])))
-        for image_type, image_list in six.iteritems(show_info.get('images', {})):
+        for image_type, image_list in show_info.get('images', {}).items():
             if image_type == 'backdrops':
                 continue # already handled backdrops above
             reduce = image_counts[image_type]['reduce']
             if reduce != 0:
                 del show_info['images'][image_type][reduce:]
         for s in range(len(show_info.get('seasons', []))):
-            for image_type, image_list in six.iteritems(show_info['seasons'][s].get('images', {})):
+            for image_type, image_list in show_info['seasons'][s].get('images', {}).items():
                 thetype = '%s_%s' % (str(show_info['seasons'][s]['season_number']), image_type)
                 reduce = image_counts[thetype]['reduce']
                 if reduce != 0:
@@ -360,7 +347,7 @@
 
 
 def _sort_image_types(imagelist):
-    for image_type, images in six.iteritems(imagelist):
+    for image_type, images in imagelist.items():
         imagelist[image_type] = _image_sort(images)
     return imagelist
 
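A sketch of the new URL handling in tmdb.py: the API key and language now travel as query parameters handed to load_info() instead of being %-formatted into BASE_URL, so per-call extras such as append_to_response simply extend the same dict. The key is a placeholder, and the copy() step here is a choice of the sketch, not something the patch itself does.

    from urllib.parse import urlencode

    TMDB_PARAMS = {'api_key': 'xxxxxxxx', 'language': 'en-US'}  # placeholder key
    SHOW_URL = 'https://api.themoviedb.org/3/tv/{}'

    params = dict(TMDB_PARAMS)  # copied here so the shared defaults stay untouched
    params['append_to_response'] = 'credits,content_ratings,external_ids,images'
    print(SHOW_URL.format(1396) + '?' + urlencode(params))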
diff --git a/libs/traktratings.py b/libs/traktratings.py
index 20839b3..deeedbc 100644
--- a/libs/traktratings.py
+++ b/libs/traktratings.py
@@ -20,8 +20,6 @@
 from __future__ import absolute_import, unicode_literals
 
-import six
-from requests.exceptions import HTTPError
 from . import api_utils, settings
 from .utils import logger
 
 try:
@@ -30,28 +28,29 @@
 except ImportError:
     pass
 
-BASE_URL = 'https://api.trakt.tv/shows/{}'
-SHOW_URL = BASE_URL + '?extended=full'
-EP_URL = BASE_URL + '/seasons/{}/episodes/{}/ratings'
+
 HEADERS = (
+    ('User-Agent', 'Kodi TV Show scraper by Team Kodi; contact pkscout@kodi.tv'),
+    ('Accept', 'application/json'),
     ('trakt-api-key', settings.TRAKT_CLOWNCAR),
     ('trakt-api-version', '2'),
     ('Content-Type', 'application/json'),
 )
 api_utils.set_headers(dict(HEADERS))
 
+SHOW_URL = 'https://api.trakt.tv/shows/{}'
+EP_URL = SHOW_URL + '/seasons/{}/episodes/{}/ratings'
+
 
 def get_details(imdb_id, season=None, episode=None):
     result = {}
     if season and episode:
         url = EP_URL.format(imdb_id, season, episode)
+        params = None
     else:
         url = SHOW_URL.format(imdb_id)
-    try:
-        resp = api_utils.load_info(url)
-    except HTTPError as exc:
-        logger.error('trakt returned an error: {}'.format(exc))
-        resp = {}
+        params = {'extended': 'full'}
+    resp = api_utils.load_info(url, params=params, default={})
     rating =resp.get('rating')
     votes = resp.get('votes')
     if votes and rating:
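A sketch of the Trakt request building shown above: the show endpoint now gets extended=full as a query parameter while the episode ratings endpoint takes none, and a failed lookup falls back to an empty dict via load_info's default. The IMDb id is just an example.

    SHOW_URL = 'https://api.trakt.tv/shows/{}'
    EP_URL = SHOW_URL + '/seasons/{}/episodes/{}/ratings'

    def build_request(imdb_id, season=None, episode=None):
        # mirror of the branch in get_details(): episode lookups need no extra params
        if season and episode:
            return EP_URL.format(imdb_id, season, episode), None
        return SHOW_URL.format(imdb_id), {'extended': 'full'}

    print(build_request('tt0903747'))        # show-level ratings request
    print(build_request('tt0903747', 1, 1))  # episode-level ratings request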
diff --git a/libs/utils.py b/libs/utils.py
index 86068f1..48961b4 100644
--- a/libs/utils.py
+++ b/libs/utils.py
@@ -21,7 +21,6 @@
 from __future__ import absolute_import, unicode_literals
 
 import xbmc
-from six import PY2, text_type, binary_type
 from xbmcaddon import Addon
 
 try:
@@ -39,11 +38,9 @@ class logger:
     @staticmethod
     def log(message, level=xbmc.LOGDEBUG):
         # type: (Text, int) -> None
-        if isinstance(message, binary_type):
+        if isinstance(message, bytes):
             message = message.decode('utf-8')
         message = logger.log_message_prefix + message
-        if PY2 and isinstance(message, text_type):
-            message = message.encode('utf-8')
         xbmc.log(message, level)
 
     @staticmethod
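A sketch of the trimmed-down logger path in utils.py: with the Python 2 branch gone, only the bytes-to-str decode remains before the prefixed message goes to Kodi. print() and the prefix value stand in for xbmc.log() and the add-on's real prefix here.

    log_message_prefix = '[metadata.tvshows.themoviedb.org.python]: '  # illustrative prefix

    def log(message):
        if isinstance(message, bytes):
            message = message.decode('utf-8')
        print(log_message_prefix + message)  # xbmc.log(message, level) in the add-on

    log(b'loaded show info')   # bytes input is decoded first
    log('loaded episode info')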