diff --git a/README.md b/README.md index f098e54..1ce1a1b 100644 --- a/README.md +++ b/README.md @@ -49,12 +49,22 @@ Where `-125 41 -124 42` is a desired bounding box in **integer** `minx, miny, ma For TMS generation ASF will be using 1x1 degree tiles. +The resulting products have the name format: +`METADATA_{orbit direction}_{upper left corner in lon/lat}.tif`. +For example: +`METADATA_ASCENDING_W125N42.tif` + ### Create a Short Wavelength Cumulative Displacement tile The `generate_sw_disp_tile` CLI command can be used to generate a cumulative displacement geotiff: ```bash -generate_sw_disp_tile METADATA_ASCENDING_W125N42.tif --begin-date 20170901 --end-date 20171231 +generate_sw_disp_tile METADATA_ASCENDING_W125N42.tif 20170901 20171231 ``` -Where `METADATA_ASCENDING_W125N42.tif` is the path to the frame metadata tile you want to generate a Short Wavelength Cumulative Displacement tile for, and `--begin-date` and `--end-date` specify the date range you want to generate the Short Wavelength Cumulative Displacement tile for. +Where `METADATA_ASCENDING_W125N42.tif` is the path to the frame metadata tile you want to generate a Short Wavelength Cumulative Displacement tile for, and `20170901`/`20171231` specify the start/end of the secondary date search range to generate a tile for in format `%Y%m%d`. 
+ +The resulting products have the name format: +`SW_CUMUL_DISP_{start date search range}_{stop date search range}_{orbit direction}_{upper left corner in lon/lat}.tif` +For example: +`SW_CUMUL_DISP_20170901_20171231_ASCENDING_W125N42.tif` ### Create a Tile Map The `create_tile_map` CLI command generates a directory with small .png tiles from a list of rasters in a common projection, following the OSGeo Tile Map Service Specification, using gdal2tiles: https://gdal.org/en/latest/programs/gdal2tiles.html diff --git a/src/opera_disp_tms/generate_coh_tiles.py b/src/opera_disp_tms/generate_coh_tiles.py deleted file mode 100644 index 8a5e546..0000000 --- a/src/opera_disp_tms/generate_coh_tiles.py +++ /dev/null @@ -1,180 +0,0 @@ -"""Generate a set tiles for the Global Seasonal Sentinel-1 Interferometric Coherence and Backscatter Data Set -See https://registry.opendata.aws/ebd-sentinel-1-global-coherence-backscatter/ for more details on the dataset -""" -from itertools import product -from pathlib import Path -from typing import Iterable - -import boto3 -from botocore import UNSIGNED -from botocore.client import Config -from botocore.exceptions import ClientError -from osgeo import gdal - - -gdal.UseExceptions() - - -S3 = boto3.client('s3', config=Config(signature_version=UNSIGNED)) -SOURCE_S3 = 's3://sentinel-1-global-coherence-earthbigdata/data/tiles' -UPLOAD_S3 = 's3://opera-disp-tms-dev' - - -def create_coh_s3_path(lon: int, lat: int, coh_prod: str = 'summer_vv_COH12') -> str: - """Create S3 path for a given lon, lat and coherence product - Args: - lon: longitude - lat: latitude - coh_prod: coherence product name - - Returns: - str: S3 path - """ - lon_prefix = 'E' if lon >= 0 else 'W' - lat_prefix = 'N' if lat >= 0 else 'S' - key = f'{lat_prefix}{abs(lat):02d}{lon_prefix}{abs(lon):03d}' - s3_path = f'{SOURCE_S3}/{key}/{key}_{coh_prod}.tif' - return s3_path - - -def download_coh(s3_path: str) -> str: - """Download coherence tile from S3 - Args: - s3_path: S3 path - - 
Returns: - s3_path if download was successfule - """ - if not Path(s3_path.split('/')[-1]).exists(): - bucket = s3_path.split('/')[2] - path = '/'.join(s3_path.split('/')[3:]) - try: - S3.download_file(bucket, path, s3_path.split('/')[-1]) - print(f'Downloaded {s3_path}') - except ClientError: - print(f'{s3_path} not found') - return - return s3_path - else: - return s3_path - - -def create_coh_tile(bbox: Iterable[int], coh_prod: str = 'summer_vv_COH12', gtiff: bool = True) -> str: - """Create a mosaic of coherence tiles for a given bounding box - - Args: - bbox: bounding box [min_lon, min_lat, max_lon, max_lat] - coh_prod: coherence product name - gtiff: save as GeoTIFF or COG - - Returns: - str: path to the mosaic - """ - min_lon, min_lat, max_lon, max_lat = bbox - s3_paths = [] - for lon in range(min_lon, max_lon): - for lat in range(min_lat, max_lat): - s3_paths.append(create_coh_s3_path(lon, lat)) - - s3_paths = [create_coh_s3_path(lon, lat) for lon, lat in product(range(min_lon, max_lon), range(min_lat, max_lat))] - - s3_paths = [download_coh(x) for x in s3_paths] - names = [x.split('/')[-1] for x in s3_paths if x is not None] - - def lon_string(lon): - return f'E{abs(lon):03d}' if lon >= 0 else f'W{abs(lon):03d}' - - def lat_string(lat): - return f'N{abs(lat):02d}' if lat >= 0 else f'S{abs(lat):02d}' - - bbox_str = f'{lat_string(min_lat)}{lon_string(min_lon)}_{lat_string(max_lat)}{lon_string(max_lon)}' - - product_name = f'{coh_prod}_{bbox_str}' - vrt_path = f'{product_name}.vrt' - output_path = f'{product_name}.tif' - - if len(names) > 0: - print('Building mosaic...') - vrt = gdal.BuildVRT(vrt_path, names) - vrt.FlushCache() - vrt = None - - if gtiff: - opts = ['TILED=YES', 'COMPRESS=LZW', 'NUM_THREADS=ALL_CPUS'] - warp_options = gdal.WarpOptions(format='GTiff', dstSRS='EPSG:3857', xRes=30, yRes=30, creationOptions=opts) - else: - warp_options = gdal.WarpOptions(format='COG', dstSRS='EPSG:3857', xRes=30, yRes=30) - gdal.Warp(output_path, vrt_path, 
options=warp_options) - - Path(vrt_path).unlink() - for name in names: - Path(name).unlink() - print('Mosaic done') - else: - print('No tiles found') - - return output_path - - -def split_range(start_value: int, end_value: int, n: int) -> list: - """Split a range into n parts - Args: - start_value: start of the range - end_value: end of the range - n: number of parts - - Returns: - list of tuples with the ranges - """ - step = (end_value - start_value) // n - offset = (end_value - start_value) % n - ranges = [ - (a, b) - for (a, b) in zip( - range(start_value, end_value, step), range(start_value + step, end_value - offset + step, step) - ) - ] - ranges[-1] = (ranges[-1][0], end_value) - return ranges - - -def create_coh_tile_set( - bbox: Iterable[int], n_parts_lon: int = 1, n_parts_lat: int = 1, coh_prod: str = 'summer_vv_COH12' -) -> None: - """Create a set of mosaics for a given bounding box - - Args: - bbox: bounding box [min_lon, min_lat, max_lon, max_lat] - n_parts_lon: number of parts to split the longitude range into - n_parts_lat: number of parts to split the latitude range into - coh_prod: coherence product name - """ - min_lon, min_lat, max_lon, max_lat = bbox - lon_ranges = split_range(min_lon, max_lon, n_parts_lon) - lat_ranges = split_range(min_lat, max_lat, n_parts_lat) - lon_lat_bboxes = [(lon[0], lat[0], lon[1], lat[1]) for lon, lat in product(lon_ranges, lat_ranges)] - output_paths = [] - for lon_lat_box in lon_lat_bboxes: - output_path = create_coh_tile(lon_lat_box, coh_prod=coh_prod) - output_paths.append(output_path) - print('All mosaics complete') - - -def upload_tileset_s3(prefix: str = 'summer_vv_COH12_v2', wildcard: str = 'summer_vv_COH12') -> None: - """Upload a set of files to S3 - Args: - prefix: S3 prefix to upload files to - wildcard: wildcard to filter files by - """ - s3 = boto3.client('s3') - files = list(Path.cwd().glob(f'{wildcard}*.tif')) - for file in files: - s3.upload_file(str(file), UPLOAD_S3.split('/')[-1], 
f'{prefix}/{file.name}') - - -if __name__ == '__main__': - # Full North America: 6,148 tiles, 34 GB uncompressed, 24 compressed - lon_lat_box = [-169, 14, -63, 72] - output_paths = create_coh_tile_set(lon_lat_box, n_parts_lon=3, n_parts_lat=3) - - upload_tileset_s3() diff --git a/src/opera_disp_tms/generate_frame_tile.py b/src/opera_disp_tms/generate_frame_tile.py index cf75e7d..29b5d5c 100644 --- a/src/opera_disp_tms/generate_frame_tile.py +++ b/src/opera_disp_tms/generate_frame_tile.py @@ -225,14 +225,17 @@ def create_granule_metadata_dict(granule: Granule) -> dict: return frame_metadata -def add_frames_to_tile(frames: Iterable[Frame], tile_path: Path) -> None: +def create_metadata_tile(bbox: Iterable[int], frames: Iterable[Frame], tile_path: Path) -> None: """Add frame information to a frame metadata tile Args: + bbox: The bounding box to create the frame for in the (minx, miny, maxx, maxy) in EPSG:4326, integers only. frames: The frames to add to the tile tile_path: The path to the frame metadata tile """ + validate_bbox(bbox) cal_data = find_california_dataset() + create_empty_frame_tile(bbox, tile_path) frame_metadata = {} for frame in frames: relevant_granules = [x for x in cal_data if x.frame_id == frame.frame_id] @@ -243,6 +246,11 @@ def add_frames_to_tile(frames: Iterable[Frame], tile_path: Path) -> None: frame_metadata[str(frame.frame_id)] = create_granule_metadata_dict(first_granule) burn_frame(frame, tile_path) + if frame_metadata == {}: + warnings.warn('No granules are available for this tile. 
The tile will not be created.') + tile_path.unlink() + return + tile_ds = gdal.Open(str(tile_path), gdal.GA_Update) # Not all frames will be in the final array, so we need to find the included frames band = tile_ds.GetRasterBand(1) @@ -267,7 +275,6 @@ def create_tile_for_bbox(bbox: Iterable[int], direction: str) -> Path: Returns: The path to the frame metadata tile """ - validate_bbox(bbox) direction = direction.upper() if direction not in ['ASCENDING', 'DESCENDING']: raise ValueError('Direction must be either "ASCENDING" or "DESCENDING"') @@ -275,13 +282,12 @@ def create_tile_for_bbox(bbox: Iterable[int], direction: str) -> Path: relevant_frames = intersect(bbox=bbox, orbit_pass=direction, is_north_america=True, is_land=True) updated_frames = [buffer_frame_geometry(x) for x in relevant_frames] ordered_frames = reorder_frames(updated_frames) - create_empty_frame_tile(bbox, out_path) - add_frames_to_tile(ordered_frames, out_path) + create_metadata_tile(bbox, ordered_frames, out_path) return out_path def main(): - """CLI entrypoint + """CLI entry point Example: generate_frame_tile -125 41 -124 42 ascending """ parser = argparse.ArgumentParser(description='Create a frame metadata tile for a given bounding box') diff --git a/src/opera_disp_tms/generate_sw_disp_tile.py b/src/opera_disp_tms/generate_sw_disp_tile.py index 5340cee..251e3c1 100644 --- a/src/opera_disp_tms/generate_sw_disp_tile.py +++ b/src/opera_disp_tms/generate_sw_disp_tile.py @@ -227,7 +227,7 @@ def create_sw_disp_tile(metadata_path: Path, begin_date: datetime, end_date: dat def main(): - """CLI entrpypoint + """CLI entry point Example: generate_sw_disp_tile METADATA_ASCENDING_N41W125_N42W124.tif 20170901 20171231 """ diff --git a/src/opera_disp_tms/tmp_s3_access.py b/src/opera_disp_tms/tmp_s3_access.py index 4ed7eaa..0e1316f 100644 --- a/src/opera_disp_tms/tmp_s3_access.py +++ b/src/opera_disp_tms/tmp_s3_access.py @@ -23,7 +23,7 @@ def get_temporary_aws_credentials(endpoint: str = 
'https://cumulus-test.asf.alas def main(): - """CLI entrypoint for getting temporary S3 credentials + """CLI entry point for getting temporary S3 credentials Example: get_tmp_s3_creds --endpoint https://cumulus-test.asf.alaska.edu/s3credentials """ parser = argparse.ArgumentParser(description='Get temporary S3 credentials')