From e24a5e3b7b95f72fb5d6ef8a1ec5438ded76cac6 Mon Sep 17 00:00:00 2001
From: Adeel Ahmad
Date: Mon, 28 Aug 2017 20:49:24 +0500
Subject: [PATCH 1/6] Restructure high level function fetch_tiles

---
 hips/tiles/fetch.py | 72 ++++++++++++++++++++++-----------------
 1 file changed, 36 insertions(+), 36 deletions(-)

diff --git a/hips/tiles/fetch.py b/hips/tiles/fetch.py
index 3d50fb2..1af267c 100644
--- a/hips/tiles/fetch.py
+++ b/hips/tiles/fetch.py
@@ -75,83 +75,83 @@ def fetch_tiles(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyPropertie
     else:
         raise ValueError(f'Invalid package name: {fetch_package}')
 
-    tiles = fetch_fct(tile_metas, hips_survey, progress_bar, n_parallel, timeout)
+    _tile_urls = tile_urls(tile_metas, hips_survey)
+    response_all = fetch_fct(_tile_urls, hips_survey, progress_bar, n_parallel, timeout)
 
     # Sort tiles to match the tile_meta list
     # TODO: this doesn't seem like a great solution.
-    # Use dict instead?
-    out = []
-    for tile_meta in tile_metas:
-        for tile in tiles:
-            if tile.meta == tile_meta:
-                out.append(tile)
-                continue
-    return out
+    # Use OrderedDict instead?
+    tiles = []
+    for tile_url in _tile_urls:
+        for idx, response in enumerate(response_all):
+            if response['url'] == tile_url:
+                tiles.append(HipsTile(tile_metas[idx], response['raw_data']))
+    return tiles
+
+def tile_urls(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties) -> List[str]:
+    """Retrun list of tile URLs"""
+    return [hips_survey.tile_url(meta) for meta in tile_metas]
 
 
-def fetch_tile_urllib(url: str, meta: HipsTileMeta, timeout: float) -> HipsTile:
+def fetch_tile_urllib(url: str, timeout: float) -> dict:
     """Fetch a HiPS tile asynchronously."""
     with urllib.request.urlopen(url, timeout=timeout) as conn:
-        raw_data = conn.read()
-        return HipsTile(meta, raw_data)
+        return {'raw_data': conn.read(), 'url': url}
 
 
-def tiles_urllib(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties,
-                 progress_bar: bool, n_parallel, timeout: float) -> List[HipsTile]:
+def tiles_urllib(tile_urls: List[str], hips_survey: HipsSurveyProperties,
+                 progress_bar: bool, n_parallel, timeout: float) -> List[dict]:
     """Generator function to fetch HiPS tiles from a remote URL."""
     with concurrent.futures.ThreadPoolExecutor(max_workers=n_parallel) as executor:
         futures = []
-        for meta in tile_metas:
-            url = hips_survey.tile_url(meta)
-            future = executor.submit(fetch_tile_urllib, url, meta, timeout)
+        for url in tile_urls:
+            future = executor.submit(fetch_tile_urllib, url, timeout)
            futures.append(future)
 
         futures = concurrent.futures.as_completed(futures)
         if progress_bar:
             from tqdm import tqdm
-            futures = tqdm(futures, total=len(tile_metas), desc='Fetching tiles')
+            futures = tqdm(futures, total=len(tile_urls), desc='Fetching tiles')
 
-        tiles = []
+        response_all = []
         for future in futures:
-            tiles.append(future.result())
+            response_all.append(future.result())
 
-    return tiles
+    return response_all
 
 
-async def fetch_tile_aiohttp(url: str, meta: HipsTileMeta, session, timeout: float) -> HipsTile:
+async def fetch_tile_aiohttp(url: str, session, timeout: float) -> dict:
    """Fetch a HiPS tile asynchronously using aiohttp."""
    async with session.get(url, timeout=timeout) as response:
-        raw_data = await response.read()
-        return HipsTile(meta, raw_data)
+        return {'raw_data': await response.read(), 'url': url}
 
 
-async def fetch_all_tiles_aiohttp(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties,
-                                  progress_bar: bool, n_parallel: int, timeout: float) -> List[HipsTile]:
+async def fetch_all_tiles_aiohttp(tile_urls: List[str], hips_survey: HipsSurveyProperties,
+                                  progress_bar: bool, n_parallel: int, timeout: float) -> List[dict]:
     """Generator function to fetch HiPS tiles from a remote URL using aiohttp."""
     import aiohttp
 
     connector = aiohttp.TCPConnector(limit=n_parallel)
     async with aiohttp.ClientSession(connector=connector) as session:
         futures = []
-        for meta in tile_metas:
-            url = hips_survey.tile_url(meta)
-            future = asyncio.ensure_future(fetch_tile_aiohttp(url, meta, session, timeout))
+        for url in tile_urls:
+            future = asyncio.ensure_future(fetch_tile_aiohttp(url, session, timeout))
             futures.append(future)
 
         futures = asyncio.as_completed(futures)
         if progress_bar:
             from tqdm import tqdm
-            futures = tqdm(futures, total=len(tile_metas), desc='Fetching tiles')
+            futures = tqdm(futures, total=len(tile_urls), desc='Fetching tiles')
 
-        tiles = []
+        response_all = []
         for future in futures:
-            tiles.append(await future)
+            response_all.append(await future)
 
-    return tiles
+    return response_all
 
 
-def tiles_aiohttp(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties,
-                  progress_bar: bool, n_parallel: int, timeout: float) -> List[HipsTile]:
+def tiles_aiohttp(tile_urls: List[str], hips_survey: HipsSurveyProperties,
+                  progress_bar: bool, n_parallel: int, timeout: float) -> List[dict]:
     return asyncio.get_event_loop().run_until_complete(
-        fetch_all_tiles_aiohttp(tile_metas, hips_survey, progress_bar, n_parallel, timeout)
+        fetch_all_tiles_aiohttp(tile_urls, hips_survey, progress_bar, n_parallel, timeout)
     )
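Note on the re-ordering step in the patch above: the nested loop in fetch_tiles is what the "Use OrderedDict instead?" TODO refers to. Keying the responses on their URL gives the same ordering in a single pass, assuming every tile URL is unique. A minimal standalone sketch (made-up URLs and data, not part of the patch):

    # Hypothetical data standing in for _tile_urls / response_all
    tile_urls = ['http://example.org/Npix1.fits', 'http://example.org/Npix2.fits']
    response_all = [
        {'url': 'http://example.org/Npix2.fits', 'raw_data': b'...'},
        {'url': 'http://example.org/Npix1.fits', 'raw_data': b'...'},
    ]

    # One pass: index responses by URL, then read them back in request order
    by_url = {response['url']: response for response in response_all}
    ordered = [by_url[url] for url in tile_urls]
    assert [r['url'] for r in ordered] == tile_urls
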
From 6c8874c3956aca12539d806fa98889ca078c43e3 Mon Sep 17 00:00:00 2001
From: Adeel Ahmad
Date: Thu, 31 Aug 2017 19:03:38 +0500
Subject: [PATCH 2/6] Replace ThreadPoolExecutor.submit with ThreadPoolExecutor.map function

---
 hips/tiles/fetch.py | 34 +++++-----------------------------
 1 file changed, 5 insertions(+), 29 deletions(-)

diff --git a/hips/tiles/fetch.py b/hips/tiles/fetch.py
index 1af267c..cdd429b 100644
--- a/hips/tiles/fetch.py
+++ b/hips/tiles/fetch.py
@@ -75,24 +75,15 @@ def fetch_tiles(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyPropertie
     else:
         raise ValueError(f'Invalid package name: {fetch_package}')
 
-    _tile_urls = tile_urls(tile_metas, hips_survey)
-    response_all = fetch_fct(_tile_urls, hips_survey, progress_bar, n_parallel, timeout)
+    tile_urls = [hips_survey.tile_url(meta) for meta in tile_metas]
+    response_all = fetch_fct(tile_urls, hips_survey, progress_bar, n_parallel, timeout)
 
-    # Sort tiles to match the tile_meta list
-    # TODO: this doesn't seem like a great solution.
-    # Use OrderedDict instead?
     tiles = []
-    for tile_url in _tile_urls:
-        for idx, response in enumerate(response_all):
-            if response['url'] == tile_url:
-                tiles.append(HipsTile(tile_metas[idx], response['raw_data']))
+    for idx, response in enumerate(response_all):
+        tiles.append(HipsTile(tile_metas[idx], response['raw_data']))
     return tiles
 
-def tile_urls(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyProperties) -> List[str]:
-    """Retrun list of tile URLs"""
-    return [hips_survey.tile_url(meta) for meta in tile_metas]
-
 
 def fetch_tile_urllib(url: str, timeout: float) -> dict:
     """Fetch a HiPS tile asynchronously."""
     with urllib.request.urlopen(url, timeout=timeout) as conn:
@@ -103,22 +94,7 @@ def tiles_urllib(tile_urls: List[str], hips_survey: HipsSurveyProperties,
                  progress_bar: bool, n_parallel, timeout: float) -> List[dict]:
     """Generator function to fetch HiPS tiles from a remote URL."""
     with concurrent.futures.ThreadPoolExecutor(max_workers=n_parallel) as executor:
-        futures = []
-        for url in tile_urls:
-            future = executor.submit(fetch_tile_urllib, url, timeout)
-            futures.append(future)
-
-        futures = concurrent.futures.as_completed(futures)
-        if progress_bar:
-            from tqdm import tqdm
-            futures = tqdm(futures, total=len(tile_urls), desc='Fetching tiles')
-
-        response_all = []
-        for future in futures:
-            response_all.append(future.result())
-
-    return response_all
-
+        return list(executor.map(fetch_tile_urllib, tile_urls, [timeout] * len(tile_urls)))
 
 async def fetch_tile_aiohttp(url: str, session, timeout: float) -> dict:
     """Fetch a HiPS tile asynchronously using aiohttp."""
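The switch to ThreadPoolExecutor.map works because map yields results in the order of its input iterables, so the URL-based re-matching from the previous patch is no longer needed; passing [timeout] * len(tile_urls) simply supplies the same timeout for every call. A small self-contained sketch of that behaviour (dummy fetch function, no network access):

    import concurrent.futures

    def fake_fetch(url: str, timeout: float) -> dict:
        # Stand-in for the real tile download
        return {'url': url, 'timeout': timeout}

    urls = ['tile1', 'tile2', 'tile3']
    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        results = list(executor.map(fake_fetch, urls, [5.0] * len(urls)))

    assert [r['url'] for r in results] == urls  # input order is preserved
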
From 030aa503f4e152d3fc434bfcef9320211466be3c Mon Sep 17 00:00:00 2001
From: Adeel Ahmad
Date: Fri, 18 Jan 2019 14:42:46 +0100
Subject: [PATCH 3/6] Catch URLError and HTTPError exception for timed out and missing tiles

Signed-off-by: Adeel Ahmad
---
 hips/tiles/fetch.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/hips/tiles/fetch.py b/hips/tiles/fetch.py
index cdd429b..0382dd5 100644
--- a/hips/tiles/fetch.py
+++ b/hips/tiles/fetch.py
@@ -1,4 +1,5 @@
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
+import socket
 import asyncio
 import urllib.request
 import concurrent.futures
@@ -84,10 +85,19 @@ def fetch_tiles(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyPropertie
     return tiles
+
 
 
 def fetch_tile_urllib(url: str, timeout: float) -> dict:
     """Fetch a HiPS tile asynchronously."""
-    with urllib.request.urlopen(url, timeout=timeout) as conn:
-        return {'raw_data': conn.read(), 'url': url}
+    try:
+        with urllib.request.urlopen(url, timeout=timeout) as conn:
+            return {'raw_data': conn.read(), 'url': url}
+    except urllib.error.HTTPError as error:
+        if error.code == 404:
+            error.msg = f'Tile not found at:\n{url}'
+            raise
+    except urllib.error.URLError as error:
+        if isinstance(error.reason, socket.timeout):
+            raise TimeoutError(f'The server timed out while fetching the tile at:\n{url}')
 
 
 def tiles_urllib(tile_urls: List[str], hips_survey: HipsSurveyProperties,
@@ -96,6 +106,7 @@ def tiles_urllib(tile_urls: List[str], hips_survey: HipsSurveyProperties,
                  progress_bar: bool, n_parallel, timeout: float) -> List[dict]:
     """Generator function to fetch HiPS tiles from a remote URL."""
     with concurrent.futures.ThreadPoolExecutor(max_workers=n_parallel) as executor:
         return list(executor.map(fetch_tile_urllib, tile_urls, [timeout] * len(tile_urls)))
 
+
 async def fetch_tile_aiohttp(url: str, session, timeout: float) -> dict:
     """Fetch a HiPS tile asynchronously using aiohttp."""
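One detail worth noting in the error handling above: urllib raises HTTPError for responses such as 404 and URLError for lower-level failures such as timeouts, and HTTPError is itself a subclass of URLError, so the more specific except clause has to come first. A tiny illustration (no request is actually made):

    import urllib.error

    error = urllib.error.HTTPError(url='http://example.org/missing.fits', code=404,
                                   msg='Not Found', hdrs=None, fp=None)
    assert isinstance(error, urllib.error.URLError)  # would also match a URLError handler
    print(error.code)  # 404
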
From 3d7f49e7b8b16a80fac903d30ded660b986ec106 Mon Sep 17 00:00:00 2001
From: Adeel Ahmad
Date: Fri, 18 Jan 2019 19:32:56 +0100
Subject: [PATCH 4/6] Handle case for missing HiPS tiles

Signed-off-by: Adeel Ahmad
---
 hips/draw/ui.py     |  2 +-
 hips/tiles/fetch.py | 14 ++++++++++----
 hips/tiles/tile.py  | 36 ++++++++++++++++++++++++++++--------
 3 files changed, 39 insertions(+), 13 deletions(-)

diff --git a/hips/draw/ui.py b/hips/draw/ui.py
index b12ee19..9bcc414 100644
--- a/hips/draw/ui.py
+++ b/hips/draw/ui.py
@@ -147,7 +147,7 @@ def plot(self, show_grid: bool = False) -> None:
 
     def report(self) -> None:
         """Print a brief report for the fetched data."""
-        print (
+        print(
             f"Time for fetching tiles = {self.stats['fetch_time']} seconds\n"
             f"Time for drawing tiles = {self.stats['draw_time']} seconds\n"
             f"Total memory consumed = {self.stats['consumed_memory'] / 1e6} MB\n"
diff --git a/hips/tiles/fetch.py b/hips/tiles/fetch.py
index 0382dd5..f3c63c5 100644
--- a/hips/tiles/fetch.py
+++ b/hips/tiles/fetch.py
@@ -81,7 +81,11 @@ def fetch_tiles(tile_metas: List[HipsTileMeta], hips_survey: HipsSurveyPropertie
 
     tiles = []
     for idx, response in enumerate(response_all):
-        tiles.append(HipsTile(tile_metas[idx], response['raw_data']))
+        try:
+            response['raw_data']
+            tiles.append(HipsTile(tile_metas[idx], response['raw_data']))
+        except KeyError:
+            tiles.append(HipsTile(tile_metas[idx], b'', is_missing=True))
     return tiles
 
 
@@ -92,12 +96,14 @@ def fetch_tile_urllib(url: str, timeout: float) -> dict:
         with urllib.request.urlopen(url, timeout=timeout) as conn:
             return {'raw_data': conn.read(), 'url': url}
     except urllib.error.HTTPError as error:
+        # If the tile is missing, enable the `is_missing` flag in HipsTile.
         if error.code == 404:
-            error.msg = f'Tile not found at:\n{url}'
-            raise
+            print(f'Tile not found at:\n{url}')
+            return {'is_missing': True}
     except urllib.error.URLError as error:
         if isinstance(error.reason, socket.timeout):
-            raise TimeoutError(f'The server timed out while fetching the tile at:\n{url}')
+            print(f'The server timed out while fetching the tile at:\n{url}')
+            return {'is_missing': True}
 
 
 def tiles_urllib(tile_urls: List[str], hips_survey: HipsSurveyProperties,
diff --git a/hips/tiles/tile.py b/hips/tiles/tile.py
index 1fb362c..c29e4a5 100644
--- a/hips/tiles/tile.py
+++ b/hips/tiles/tile.py
@@ -1,6 +1,7 @@
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 from typing import List, Tuple
 from copy import deepcopy
+import socket
 import warnings
 import urllib.request
 from io import BytesIO
@@ -154,6 +155,8 @@ class HipsTile:
         Metadata of HiPS tile
     raw_data : `bytes`
         Raw data (copy of bytes from file)
+    is_missing : `bool`
+        To check whether the tile is missing or not
 
     Examples
     --------
@@ -175,9 +178,10 @@ class HipsTile:
     int16
     """
 
-    def __init__(self, meta: HipsTileMeta, raw_data: bytes) -> None:
+    def __init__(self, meta: HipsTileMeta, raw_data: bytes, is_missing: bool = False) -> None:
         self.meta = meta
         self.raw_data = raw_data
+        self.is_missing = is_missing
         self._data = None
 
     def __eq__(self, other: "HipsTile") -> bool:
@@ -259,8 +263,11 @@ def data(self) -> np.ndarray:
 
         See the `to_numpy` function.
         """
-        if self._data is None:
+        if self._data is None and not self.is_missing:
             self._data = self.to_numpy(self.raw_data, self.meta.file_format)
+        elif self.is_missing:
+            self._data = np.zeros((compute_image_shape(self.meta.width, self.meta.width, self.meta.file_format)),
+                                  dtype=np.uint8)
 
         return self._data
 
@@ -295,6 +302,7 @@ def to_numpy(raw_data: bytes, fmt: str) -> np.ndarray:
         elif fmt in {"jpg", "png"}:
             with Image.open(bio) as image:
                 data = np.array(image)
+                # Flip tile to be consistent with FITS orientation
                 data = np.flipud(data)
         else:
@@ -316,8 +324,12 @@ def read(cls, meta: HipsTileMeta, filename: str = None) -> "HipsTile":
         filename : str
             Filename
         """
-        raw_data = Path(filename).read_bytes()
-        return cls(meta, raw_data)
+        if Path(filename).exists():
+            return cls(meta, Path(filename).read_bytes())
+        else:
+            print(f'Tile not found at:\n{filename}')
+            return cls(meta, b'', is_missing=True)
+
 
     @classmethod
     def fetch(cls, meta: HipsTileMeta, url: str) -> "HipsTile":
@@ -330,10 +342,18 @@ def fetch(cls, meta: HipsTileMeta, url: str) -> "HipsTile":
         url : str
             URL containing HiPS tile
         """
-        with urllib.request.urlopen(url) as response:
-            raw_data = response.read()
-
-        return cls(meta, raw_data)
+        try:
+            with urllib.request.urlopen(url, timeout=10) as response:
+                raw_data = response.read()
+                return cls(meta, raw_data)
+        except urllib.error.HTTPError as error:
+            if error.code == 404:
+                print(f'Tile not found at:\n{url}')
+                return cls(meta, b'', is_missing=True)
+        except urllib.error.URLError as error:
+            if isinstance(error.reason, socket.timeout):
+                print(f'The server timed out while fetching the tile at:\n{url}')
+                return cls(meta, b'', is_missing=True)
 
     def write(self, filename: str) -> None:
         """Write to file.
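With the is_missing flag in place, a missing tile resolves to an all-zero array of the normal tile shape, so drawing code downstream needs no special case. A standalone sketch of that idea (plain NumPy; the channel counts per format are an assumption here, not taken from compute_image_shape itself):

    import numpy as np

    def blank_tile(width: int, file_format: str) -> np.ndarray:
        # Assumed shapes: 2-D for 'fits', 3 channels for 'jpg', 4 for 'png'
        shape = {'fits': (width, width), 'jpg': (width, width, 3), 'png': (width, width, 4)}[file_format]
        return np.zeros(shape, dtype=np.uint8)

    print(blank_tile(512, 'fits').shape)  # (512, 512)
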
""" - if self._data is None: + if self._data is None and not self.is_missing: self._data = self.to_numpy(self.raw_data, self.meta.file_format) + elif self.is_missing: + self._data = np.zeros((compute_image_shape(self.meta.width, self.meta.width, self.meta.file_format)), + dtype=np.uint8) return self._data @@ -295,6 +302,7 @@ def to_numpy(raw_data: bytes, fmt: str) -> np.ndarray: elif fmt in {"jpg", "png"}: with Image.open(bio) as image: data = np.array(image) + # Flip tile to be consistent with FITS orientation data = np.flipud(data) else: @@ -316,8 +324,12 @@ def read(cls, meta: HipsTileMeta, filename: str = None) -> "HipsTile": filename : str Filename """ - raw_data = Path(filename).read_bytes() - return cls(meta, raw_data) + if Path(filename).exists(): + return cls(meta, Path(filename).read_bytes()) + else: + print(f'Tile not found at:\n{filename}') + return cls(meta, b'', is_missing=True) + @classmethod def fetch(cls, meta: HipsTileMeta, url: str) -> "HipsTile": @@ -330,10 +342,18 @@ def fetch(cls, meta: HipsTileMeta, url: str) -> "HipsTile": url : str URL containing HiPS tile """ - with urllib.request.urlopen(url) as response: - raw_data = response.read() - - return cls(meta, raw_data) + try: + with urllib.request.urlopen(url, timeout=10) as response: + raw_data = response.read() + return cls(meta, raw_data) + except urllib.error.HTTPError as error: + if error.code == 404: + print(f'Tile not found at:\n{url}') + return cls(meta, b'', is_missing=True) + except urllib.error.URLError as error: + if isinstance(error.reason, socket.timeout): + print(f'The server timed out while fetching the tile at:\n{url}') + return cls(meta, b'', is_missing=True) def write(self, filename: str) -> None: """Write to file. From f235ae942df72f26e64287329e22461c94ed06e1 Mon Sep 17 00:00:00 2001 From: Adeel Ahmad Date: Sat, 6 Jul 2019 08:17:14 +0200 Subject: [PATCH 5/6] Remove aiohttp package for fetching tiles --- hips/draw/tests/test_paint.py | 4 +-- hips/draw/tests/test_ui.py | 2 +- hips/tiles/fetch.py | 58 ++++------------------------------ hips/tiles/tests/test_fetch.py | 11 ------- 4 files changed, 9 insertions(+), 66 deletions(-) diff --git a/hips/draw/tests/test_paint.py b/hips/draw/tests/test_paint.py index dfb2db8..89a8b2a 100644 --- a/hips/draw/tests/test_paint.py +++ b/hips/draw/tests/test_paint.py @@ -20,7 +20,7 @@ def setup_class(cls): width=2000, height=1000, fov="3 deg", coordsys='icrs', projection='AIT', ) - fetch_opts = dict(fetch_package='urllib', timeout=30, n_parallel=10) + fetch_opts = dict(timeout=30, n_parallel=10) cls.painter = HipsPainter(cls.geometry, cls.hips_survey, 'fits', fetch_opts=fetch_opts) def test_draw_hips_order(self): @@ -44,7 +44,7 @@ def test_compute_matching_hips_order(self, pars): coordsys='icrs', projection='AIT', ) - fetch_opts = dict(fetch_package='urllib', timeout=30, n_parallel=10) + fetch_opts = dict(timeout=30, n_parallel=10) simple_tile_painter = HipsPainter(geometry, self.hips_survey, 'fits', fetch_opts=fetch_opts) assert simple_tile_painter.draw_hips_order == pars['order'] diff --git a/hips/draw/tests/test_ui.py b/hips/draw/tests/test_ui.py index 786f3d2..a7dd254 100644 --- a/hips/draw/tests/test_ui.py +++ b/hips/draw/tests/test_ui.py @@ -61,7 +61,7 @@ def test_make_sky_image(tmpdir, pars): hips_survey = HipsSurveyProperties.fetch(url=pars['url']) geometry = make_test_wcs_geometry() - fetch_opts = dict(fetch_package='urllib', timeout=30, n_parallel=10) + fetch_opts = dict(timeout=30, n_parallel=10) result = make_sky_image(geometry=geometry, 
From a1c7b95c884e77c699b44235927f1ab550eef83c Mon Sep 17 00:00:00 2001
From: Adeel Ahmad
Date: Sat, 6 Jul 2019 10:20:17 +0200
Subject: [PATCH 6/6] Add test case for missing HiPS tiles

---
 hips/tiles/tests/test_fetch.py | 20 +++++++++++++++++++-
 hips/tiles/tile.py             |  2 +-
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/hips/tiles/tests/test_fetch.py b/hips/tiles/tests/test_fetch.py
index aab47e4..a067e0f 100644
--- a/hips/tiles/tests/test_fetch.py
+++ b/hips/tiles/tests/test_fetch.py
@@ -1,7 +1,7 @@
 # Licensed under a 3-clause BSD style license - see LICENSE.rst
 import pytest
 from astropy.tests.helper import remote_data
-from numpy.testing import assert_allclose
+from numpy.testing import assert_allclose, assert_equal
 from ..fetch import fetch_tiles
 from ..survey import HipsSurveyProperties
 from ..tile import HipsTileMeta
@@ -9,12 +9,27 @@
 TILE_FETCH_TEST_CASES = [
     dict(
         tile_indices=[69623, 69627, 69628, 69629, 69630, 69631],
+        is_missing=[False, False, False, False, False, False],
         tile_format='fits',
         order=7,
         url='http://alasky.unistra.fr/DSS/DSS2Merged/properties',
         progress_bar=True,
         data=[2101, 1945, 1828, 1871, 2079, 2336],
     ),
+    dict(
+        tile_indices=[69623,
+                      9999999,  # missing
+                      69628,
+                      9999999,  # missing
+                      9999999,  # missing
+                      69631],
+        is_missing=[False, True, False, True, True, False],
+        tile_format='fits',
+        order=7,
+        url='http://alasky.unistra.fr/DSS/DSS2Merged/properties',
+        progress_bar=True,
+        data=[2101, 0, 1828, 0, 0, 2336],
+    ),
 ]
 
 
@@ -42,3 +57,6 @@ def test_fetch_tiles(pars):
 
     for idx, val in enumerate(pars['data']):
         assert_allclose(tiles[idx].data[0][5], val)
+
+    for idx, val in enumerate(pars['is_missing']):
+        assert_equal(tiles[idx].is_missing, val)
diff --git a/hips/tiles/tile.py b/hips/tiles/tile.py
index c29e4a5..cc31dbb 100644
--- a/hips/tiles/tile.py
+++ b/hips/tiles/tile.py
@@ -156,7 +156,7 @@ class HipsTile:
     raw_data : `bytes`
         Raw data (copy of bytes from file)
     is_missing : `bool`
-        To check whether the tile is missing or not
+        Specifies whether the tile is missing or not
 
     Examples
     --------