diff --git a/pyproject.toml b/pyproject.toml
index da7ccc5..42a13fb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,11 +52,8 @@ flood_map = "asf_tools.hydrosar.flood_map:hyp3"

 [project.optional-dependencies]
 develop = [
-    "flake8",
-    "flake8-import-order",
-    "flake8-blind-except",
-    "flake8-builtins",
     "gdal-utils",
+    "ruff",
     "pytest",
     "pytest-cov",
     "pytest-console-scripts",
diff --git a/ruff.toml b/ruff.toml
index b742d9c..2aed2d6 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -1,2 +1,6 @@
-cache-dir = "~/.cache/ruff"
 exclude = ["prototype"]
+
+line-length = 120
+
+[format]
+quote-style = "single"
diff --git a/src/asf_tools/__init__.py b/src/asf_tools/__init__.py
index c6f84a5..23022be 100644
--- a/src/asf_tools/__init__.py
+++ b/src/asf_tools/__init__.py
@@ -5,5 +5,5 @@ __version__ = version(__name__)

 __all__ = [
-    "__version__",
+    '__version__',
 ]
diff --git a/src/asf_tools/__main__.py b/src/asf_tools/__main__.py
index 1f70f0c..83dc9ae 100644
--- a/src/asf_tools/__main__.py
+++ b/src/asf_tools/__main__.py
@@ -4,23 +4,21 @@ def main():
-    parser = argparse.ArgumentParser(
-        prefix_chars="+", formatter_class=argparse.ArgumentDefaultsHelpFormatter
-    )
+    parser = argparse.ArgumentParser(prefix_chars='+', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
     parser.add_argument(
-        "++process",
-        choices=["water_map", "flood_map"],
-        default="water_map",
-        help="Select the HyP3 entrypoint to use",  # HyP3 entrypoints are specified in `pyproject.toml`
+        '++process',
+        choices=['water_map', 'flood_map'],
+        default='water_map',
+        help='Select the HyP3 entrypoint to use',  # HyP3 entrypoints are specified in `pyproject.toml`
     )
     args, unknowns = parser.parse_known_args()

     # NOTE: Cast to set because of: https://github.com/pypa/setuptools/issues/3649
-    (process_entry_point,) = set(entry_points(group="hyp3", name=args.process))
+    (process_entry_point,) = set(entry_points(group='hyp3', name=args.process))

     sys.argv = [args.process, *unknowns]
     sys.exit(process_entry_point.load()())


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/src/asf_tools/aws.py b/src/asf_tools/aws.py
index 25cc539..a72ab52 100644
--- a/src/asf_tools/aws.py
+++ b/src/asf_tools/aws.py
@@ -5,28 +5,28 @@

 import boto3

-S3_CLIENT = boto3.client("s3")
+S3_CLIENT = boto3.client('s3')
 log = logging.getLogger(__name__)


 def get_tag_set() -> dict:
-    tag_set = {"TagSet": [{"Key": "file_type", "Value": "product"}]}
+    tag_set = {'TagSet': [{'Key': 'file_type', 'Value': 'product'}]}
     return tag_set


 def get_content_type(file_location: Union[Path, str]) -> str:
     content_type = guess_type(file_location)[0]
     if not content_type:
-        content_type = "application/octet-stream"
+        content_type = 'application/octet-stream'
     return content_type


-def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str = ""):
+def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str = ''):
     path_to_file = Path(path_to_file)
     key = str(Path(prefix) / path_to_file.name)
-    extra_args = {"ContentType": get_content_type(key)}
+    extra_args = {'ContentType': get_content_type(key)}

-    log.info(f"Uploading s3://{bucket}/{key}")
+    log.info(f'Uploading s3://{bucket}/{key}')
     S3_CLIENT.upload_file(str(path_to_file), bucket, key, extra_args)

     tag_set = get_tag_set()
@@ -36,7 +36,7 @@ def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str =

 def get_path_to_s3_file(bucket_name, bucket_prefix, file_type: str):
     result = S3_CLIENT.list_objects_v2(Bucket=bucket_name, Prefix=bucket_prefix)
-    for s3_object in result["Contents"]:
-        key = s3_object["Key"]
+    for s3_object in result['Contents']:
+        key = s3_object['Key']
         if key.endswith(file_type):
-            return f"/vsis3/{bucket_name}/{key}"
+            return f'/vsis3/{bucket_name}/{key}'
diff --git a/src/asf_tools/composite.py b/src/asf_tools/composite.py
index 1bb9794..682cf29 100755
--- a/src/asf_tools/composite.py
+++ b/src/asf_tools/composite.py
@@ -44,7 +44,7 @@ def get_target_epsg_code(codes: List[int]) -> int:
     # South: 327XX
     valid_codes = list(range(32601, 32661)) + list(range(32701, 32761))
     if bad_codes := set(codes) - set(valid_codes):
-        raise ValueError(f"Non UTM EPSG code encountered: {bad_codes}")
+        raise ValueError(f'Non-UTM EPSG code encountered: {bad_codes}')

     hemispheres = [c // 100 * 100 for c in codes]
     # if even modes, choose lowest (North)
@@ -67,7 +67,7 @@ def get_area_raster(raster: str) -> str:
     Returns:
         area_raster: path of the area raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif
     """
-    return "_".join(raster.split("_")[:-1] + ["area.tif"])
+    return '_'.join(raster.split('_')[:-1] + ['area.tif'])


 def get_full_extent(raster_info: dict):
@@ -81,24 +81,20 @@ def get_full_extent(raster_info: dict):
         upper_right: The lower right corner of the extent as a tuple
         geotransform: The geotransform of the extent as a list
     """
-    upper_left_corners = [
-        info["cornerCoordinates"]["upperLeft"] for info in raster_info.values()
-    ]
-    lower_right_corners = [
-        info["cornerCoordinates"]["lowerRight"] for info in raster_info.values()
-    ]
+    upper_left_corners = [info['cornerCoordinates']['upperLeft'] for info in raster_info.values()]
+    lower_right_corners = [info['cornerCoordinates']['lowerRight'] for info in raster_info.values()]

     ulx = min([ul[0] for ul in upper_left_corners])
     uly = max([ul[1] for ul in upper_left_corners])
     lrx = max([lr[0] for lr in lower_right_corners])
     lry = min([lr[1] for lr in lower_right_corners])

-    log.debug(f"Full extent raster upper left: ({ulx, uly}); lower right: ({lrx, lry})")
+    log.debug(f'Full extent raster upper left: ({ulx, uly}); lower right: ({lrx, lry})')

     trans = []
     for info in raster_info.values():
         # Only need info from any one raster
-        trans = info["geoTransform"]
+        trans = info['geoTransform']
         break

     trans[0] = ulx
@@ -107,9 +103,7 @@ def get_full_extent(raster_info: dict):
     return (ulx, uly), (lrx, lry), trans


-def reproject_to_target(
-    raster_info: dict, target_epsg_code: int, target_resolution: float, directory: str
-) -> dict:
+def reproject_to_target(raster_info: dict, target_epsg_code: int, target_resolution: float, directory: str) -> dict:
     """Reprojects a set of raster images to a common projection and resolution

     Args:
@@ -124,38 +118,34 @@
     target_raster_info = {}
     for raster, info in raster_info.items():
         epsg_code = get_epsg_code(info)
-        resolution = info["geoTransform"][1]
+        resolution = info['geoTransform'][1]

         if epsg_code != target_epsg_code or resolution != target_resolution:
-            log.info(f"Reprojecting {raster}")
+            log.info(f'Reprojecting {raster}')
             reprojected_raster = os.path.join(directory, os.path.basename(raster))

             gdal.Warp(
                 reprojected_raster,
                 raster,
-                dstSRS=f"EPSG:{target_epsg_code}",
+                dstSRS=f'EPSG:{target_epsg_code}',
                 xRes=target_resolution,
                 yRes=target_resolution,
                 targetAlignedPixels=True,
             )

             area_raster = get_area_raster(raster)
-            log.info(f"Reprojecting {area_raster}")
-            reprojected_area_raster = os.path.join(
-                directory, os.path.basename(area_raster)
-            )
+            log.info(f'Reprojecting {area_raster}')
+            reprojected_area_raster = os.path.join(directory, os.path.basename(area_raster))

             gdal.Warp(
                 reprojected_area_raster,
                 area_raster,
-                dstSRS=f"EPSG:{target_epsg_code}",
+                dstSRS=f'EPSG:{target_epsg_code}',
                 xRes=target_resolution,
                 yRes=target_resolution,
                 targetAlignedPixels=True,
             )

-            target_raster_info[reprojected_raster] = gdal.Info(
-                reprojected_raster, format="json"
-            )
+            target_raster_info[reprojected_raster] = gdal.Info(reprojected_raster, format='json')
         else:
-            log.info(f"No need to reproject {raster}")
+            log.info(f'No need to reproject {raster}')
             target_raster_info[raster] = info

     return target_raster_info
@@ -174,25 +164,23 @@ def make_composite(out_name: str, rasters: List[str], resolution: float = None):
         out_counts_raster: Path to the created GeoTIFF with counts of scenes contributing to each pixel
     """
     if not rasters:
-        raise ValueError("Must specify at least one raster to composite")
+        raise ValueError('Must specify at least one raster to composite')

     raster_info = {}
     for raster in rasters:
-        raster_info[raster] = gdal.Info(raster, format="json")
+        raster_info[raster] = gdal.Info(raster, format='json')
         # make sure gdal can read the area raster
         gdal.Info(get_area_raster(raster))

-    target_epsg_code = get_target_epsg_code(
-        [get_epsg_code(info) for info in raster_info.values()]
-    )
-    log.debug(f"Composite projection is EPSG:{target_epsg_code}")
+    target_epsg_code = get_target_epsg_code([get_epsg_code(info) for info in raster_info.values()])
+    log.debug(f'Composite projection is EPSG:{target_epsg_code}')

     if resolution is None:
-        resolution = max([info["geoTransform"][1] for info in raster_info.values()])
-    log.debug(f"Composite resolution is {resolution} meters")
+        resolution = max([info['geoTransform'][1] for info in raster_info.values()])
+    log.debug(f'Composite resolution is {resolution} meters')

     # resample rasters to maximum resolution & common UTM zone
-    with TemporaryDirectory(prefix="reprojected_") as temp_dir:
+    with TemporaryDirectory(prefix='reprojected_') as temp_dir:
         raster_info = reproject_to_target(
             raster_info,
             target_epsg_code=target_epsg_code,
@@ -211,7 +199,7 @@
     counts = np.zeros(outputs.shape, dtype=np.int8)

     for raster, info in raster_info.items():
-        log.info(f"Processing raster {raster}")
+        log.info(f'Processing raster {raster}')
         log.debug(
             f"Raster upper left: {info['cornerCoordinates']['upperLeft']}; "
             f"lower right: {info['cornerCoordinates']['lowerRight']}"
         )
@@ -222,7 +210,7 @@
         area_raster = get_area_raster(raster)
         areas = read_as_array(area_raster)

-        ulx, uly = info["cornerCoordinates"]["upperLeft"]
+        ulx, uly = info['cornerCoordinates']['upperLeft']
         y_index_start = int((full_ul[1] - uly) // resolution)
         y_index_end = y_index_start + values.shape[0]
@@ -230,19 +218,15 @@
         x_index_end = x_index_start + values.shape[1]

         log.debug(
-            f"Placing values in output grid at {y_index_start}:{y_index_end} and {x_index_start}:{x_index_end}"
+            f'Placing values in output grid at {y_index_start}:{y_index_end} and {x_index_start}:{x_index_end}'
         )

         mask = values == 0
         raster_weights = 1.0 / areas
         raster_weights[mask] = 0

-        outputs[y_index_start:y_index_end, x_index_start:x_index_end] += (
-            values * raster_weights
-        )
-        weights[y_index_start:y_index_end, x_index_start:x_index_end] += (
-            raster_weights
-        )
+        outputs[y_index_start:y_index_end, x_index_start:x_index_end] += values * raster_weights
+        weights[y_index_start:y_index_end, x_index_start:x_index_end] += raster_weights
         counts[y_index_start:y_index_end, x_index_start:x_index_end] += ~mask

         del values, areas, mask, raster_weights
@@ -251,13 +235,11 @@
     outputs /= weights
     del weights

-    out_raster = write_cog(
-        f"{out_name}.tif", outputs, full_trans, target_epsg_code, nodata_value=0
-    )
+    out_raster = write_cog(f'{out_name}.tif', outputs, full_trans, target_epsg_code, nodata_value=0)
     del outputs

     out_counts_raster = write_cog(
-        f"{out_name}_counts.tif",
+        f'{out_name}_counts.tif',
         counts,
         full_trans,
         target_epsg_code,
@@ -273,34 +255,27 @@ def main():
         description=__doc__,
         formatter_class=argparse.RawDescriptionHelpFormatter,
     )
+    parser.add_argument('out_name', help='Base name of output composite GeoTIFF (without extension)')
+    parser.add_argument('rasters', nargs='+', help='Sentinel-1 GeoTIFF rasters to composite')
     parser.add_argument(
-        "out_name", help="Base name of output composite GeoTIFF (without extension)"
-    )
-    parser.add_argument(
-        "rasters", nargs="+", help="Sentinel-1 GeoTIFF rasters to composite"
-    )
-    parser.add_argument(
-        "-r",
-        "--resolution",
+        '-r',
+        '--resolution',
         type=float,
-        help="Desired output resolution in meters "
-        "(default is the max resolution of all the input files)",
-    )
-    parser.add_argument(
-        "-v", "--verbose", action="store_true", help="Turn on verbose logging"
+        help='Desired output resolution in meters ' '(default is the max resolution of all the input files)',
     )
+    parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')

     args = parser.parse_args()

     level = logging.DEBUG if args.verbose else logging.INFO
     logging.basicConfig(
         stream=sys.stdout,
-        format="%(asctime)s - %(levelname)s - %(message)s",
+        format='%(asctime)s - %(levelname)s - %(message)s',
         level=level,
     )
-    log.debug(" ".join(sys.argv))
-    log.info(f"Creating a composite of {len(args.rasters)} rasters")
+    log.debug(' '.join(sys.argv))
+    log.info(f'Creating a composite of {len(args.rasters)} rasters')

     raster, counts = make_composite(args.out_name, args.rasters, args.resolution)
-    log.info(f"Composite created successfully: {raster}")
-    log.info(f"Number of rasters contributing to each pixel: {counts}")
+    log.info(f'Composite created successfully: {raster}')
+    log.info(f'Number of rasters contributing to each pixel: {counts}')
diff --git a/src/asf_tools/dem.py b/src/asf_tools/dem.py
index bd9e15e..4991fa9 100644
--- a/src/asf_tools/dem.py
+++ b/src/asf_tools/dem.py
@@ -9,7 +9,7 @@ from asf_tools import vector
 from asf_tools.util import GDALConfigManager

-DEM_GEOJSON = "/vsicurl/https://asf-dem-west.s3.amazonaws.com/v2/cop30-2021.geojson"
+DEM_GEOJSON = '/vsicurl/https://asf-dem-west.s3.amazonaws.com/v2/cop30-2021.geojson'

 gdal.UseExceptions()
 ogr.UseExceptions()
@@ -27,26 +27,18 @@ def prepare_dem_vrt(vrt: Union[str, Path], geometry: Union[ogr.Geometry, BaseGeo
         geometry: Geometry in EPSG:4326 (lon/lat) projection for which to prepare a DEM mosaic
     """
-    with GDALConfigManager(GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR"):
+    with GDALConfigManager(GDAL_DISABLE_READDIR_ON_OPEN='EMPTY_DIR'):
         if isinstance(geometry, BaseGeometry):
             geometry = ogr.CreateGeometryFromWkb(geometry.wkb)

         min_lon, max_lon, _, _ = geometry.GetEnvelope()
         if min_lon < -160.0 and max_lon > 160.0:
-            raise ValueError(
-                f"asf_tools does not currently support geometries that cross the antimeridian: {geometry}"
-            )
+            raise ValueError(f'asf_tools does not currently support geometries that cross the antimeridian: {geometry}')

         tile_features = vector.get_features(DEM_GEOJSON)
-        if not vector.get_property_values_for_intersecting_features(
-            geometry, tile_features
-        ):
-            raise ValueError(
-                f"Copernicus GLO-30 DEM does not intersect this geometry: {geometry}"
-            )
-
-        dem_file_paths = vector.intersecting_feature_properties(
-            geometry, tile_features, "file_path"
-        )
+        if not vector.get_property_values_for_intersecting_features(geometry, tile_features):
+            raise ValueError(f'Copernicus GLO-30 DEM does not intersect this geometry: {geometry}')
+
+        dem_file_paths = vector.intersecting_feature_properties(geometry, tile_features, 'file_path')

         gdal.BuildVRT(str(vrt), dem_file_paths)
diff --git a/src/asf_tools/hydrosar/flood_map.py b/src/asf_tools/hydrosar/flood_map.py
index fec8ab2..c514ca6 100644
--- a/src/asf_tools/hydrosar/flood_map.py
+++ b/src/asf_tools/hydrosar/flood_map.py
@@ -40,21 +40,21 @@ def get_waterbody(input_info: dict, threshold: Optional[float] = None) -> np.arr
     epsg = get_epsg_code(input_info)

     west, south, east, north = get_coordinates(input_info)
-    width, height = input_info["size"]
+    width, height = input_info['size']

-    data_dir = Path(__file__).parent / "data"
-    water_extent_vrt = data_dir / "water_extent.vrt"  # All Perennial Flood Data
+    data_dir = Path(__file__).parent / 'data'
+    water_extent_vrt = data_dir / 'water_extent.vrt'  # All Perennial Flood Data

     with tempfile.NamedTemporaryFile() as water_extent_file:
         gdal.Warp(
             water_extent_file.name,
             str(water_extent_vrt),
-            dstSRS=f"EPSG:{epsg}",
+            dstSRS=f'EPSG:{epsg}',
             outputBounds=[west, south, east, north],
             width=width,
             height=height,
-            resampleAlg="nearest",
-            format="GTiff",
+            resampleAlg='nearest',
+            format='GTiff',
         )
         water_array = gdal.Open(water_extent_file.name, gdal.GA_ReadOnly).ReadAsArray()
@@ -68,29 +68,19 @@ def iterative(
     hand: np.array,
     extent: np.array,
     water_levels: np.array = np.arange(15),
-    minimization_metric: str = "ts",
+    minimization_metric: str = 'ts',
 ):
     def get_confusion_matrix(w):
         iterative_flood_extent = hand < w  # w=water level
-        tp = np.nansum(
-            np.logical_and(iterative_flood_extent == 1, extent == 1)
-        )  # true positive
-        tn = np.nansum(
-            np.logical_and(iterative_flood_extent == 0, extent == 0)
-        )  # true negative
-        fp = np.nansum(
-            np.logical_and(iterative_flood_extent == 1, extent == 0)
-        )  # False positive
-        fn = np.nansum(
-            np.logical_and(iterative_flood_extent == 0, extent == 1)
-        )  # False negative
+        tp = np.nansum(np.logical_and(iterative_flood_extent == 1, extent == 1))  # true positive
+        tn = np.nansum(np.logical_and(iterative_flood_extent == 0, extent == 0))  # true negative
+        fp = np.nansum(np.logical_and(iterative_flood_extent == 1, extent == 0))  # False positive
+        fn = np.nansum(np.logical_and(iterative_flood_extent == 0, extent == 1))  # False negative
         return tp, tn, fp, fn

     def _goal_ts(w):
         tp, _, fp, fn = get_confusion_matrix(w)
-        return 1 - tp / (
-            tp + fp + fn
-        )  # threat score -- we will minimize goal func, hence `1 - threat_score`.
+        return 1 - tp / (tp + fp + fn)  # threat score -- we will minimize goal func, hence `1 - threat_score`.
     def _goal_fmi(w):
         tp, _, fp, fn = get_confusion_matrix(w)
@@ -102,15 +92,15 @@ def __init__(self, xmax=max(water_levels), xmin=min(water_levels)):
             self.xmin = np.array(xmin)

         def __call__(self, **kwargs):
-            x = kwargs["x_new"]
+            x = kwargs['x_new']
             tmax = bool(np.all(x <= self.xmax))
             tmin = bool(np.all(x >= self.xmin))
             return tmax and tmin

     bounds = MyBounds()
-    MINIMIZATION_FUNCTION = {"fmi": _goal_fmi, "ts": _goal_ts}
+    MINIMIZATION_FUNCTION = {'fmi': _goal_fmi, 'ts': _goal_ts}
     with warnings.catch_warnings():
-        warnings.filterwarnings("ignore", category=RuntimeWarning)
+        warnings.filterwarnings('ignore', category=RuntimeWarning)
         opt_res = optimize.basinhopping(
             MINIMIZATION_FUNCTION[minimization_metric],
             np.mean(water_levels),
@@ -121,9 +111,8 @@ def __call__(self, **kwargs):
         )

     if (
-        opt_res.message[0] == "success condition satisfied"
-        or opt_res.message[0]
-        == "requested number of basinhopping iterations completed successfully"
+        opt_res.message[0] == 'success condition satisfied'
+        or opt_res.message[0] == 'requested number of basinhopping iterations completed successfully'
     ):
         return opt_res.x[0]
     else:
@@ -150,16 +139,16 @@ def estimate_flood_depth(
     label: int,
     hand: np.ndarray,
     flood_labels: np.ndarray,
-    estimator: str = "iterative",
+    estimator: str = 'iterative',
     water_level_sigma: float = 3.0,
     iterative_bounds: Tuple[int, int] = (0, 15),
     iterative_min_size: int = 0,
-    minimization_metric: str = "ts",
+    minimization_metric: str = 'ts',
 ) -> float:
     with warnings.catch_warnings():
-        warnings.filterwarnings("ignore", r"Mean of empty slice")
+        warnings.filterwarnings('ignore', r'Mean of empty slice')

-        if estimator.lower() == "iterative":
+        if estimator.lower() == 'iterative':
             if (flood_labels == label).sum() < iterative_min_size:
                 return np.nan
@@ -171,22 +160,20 @@
                 minimization_metric=minimization_metric,
             )

-        if estimator.lower() == "nmad":
+        if estimator.lower() == 'nmad':
             hand_mean = np.nanmean(hand[flood_labels == label])
-            hand_std = stats.median_abs_deviation(
-                hand[flood_labels == label], scale="normal", nan_policy="omit"
-            )
+            hand_std = stats.median_abs_deviation(hand[flood_labels == label], scale='normal', nan_policy='omit')

-        if estimator.lower() == "numpy":
+        elif estimator.lower() == 'numpy':
             hand_mean = np.nanmean(hand[flood_labels == label])
             hand_std = np.nanstd(hand[flood_labels == label])

-        elif estimator.lower() == "logstat":
+        elif estimator.lower() == 'logstat':
             hand_mean = logstat(hand[flood_labels == label], func=np.nanmean)
             hand_std = logstat(hand[flood_labels == label])

         else:
-            raise ValueError(f"Unknown flood depth estimator {estimator}")
+            raise ValueError(f'Unknown flood depth estimator {estimator}')

         return hand_mean + water_level_sigma * hand_std
@@ -196,12 +183,12 @@ def make_flood_map(
     vv_raster: Union[str, Path],
     water_raster: Union[str, Path],
     hand_raster: Union[str, Path],
-    estimator: str = "iterative",
+    estimator: str = 'iterative',
     water_level_sigma: float = 3.0,
     known_water_threshold: Optional[float] = None,
     iterative_bounds: Tuple[int, int] = (0, 15),
     iterative_min_size: int = 0,
-    minimization_metric: str = "ts",
+    minimization_metric: str = 'ts',
 ):
     """Create a flood depth map from a surface water extent map.
@@ -247,15 +234,15 @@
             Jean-Francios Pekel, Andrew Cottam, Noel Gorelik, Alan S. Belward. 2016.
""" - info = gdal.Info(str(water_raster), format="json") + info = gdal.Info(str(water_raster), format='json') epsg = get_epsg_code(info) - geotransform = info["geoTransform"] + geotransform = info['geoTransform'] hand_array = gdal.Open(str(hand_raster), gdal.GA_ReadOnly).ReadAsArray() - log.info("Fetching perennial flood data.") + log.info('Fetching perennial flood data.') known_water_mask = get_waterbody(info, threshold=known_water_threshold) write_cog( - str(out_raster).replace(".tif", f"_{estimator}_PW.tif"), + str(out_raster).replace('.tif', f'_{estimator}_PW.tif'), known_water_mask, transform=geotransform, epsg_code=epsg, @@ -274,9 +261,9 @@ def make_flood_map( labeled_flood_mask, num_labels = ndimage.label(flood_mask) object_slices = ndimage.find_objects(labeled_flood_mask) - log.info(f"Detected {num_labels} waterbodies...") - if estimator.lower() == "iterative": - log.info(f"Skipping waterbodies less than {iterative_min_size} pixels.") + log.info(f'Detected {num_labels} waterbodies...') + if estimator.lower() == 'iterative': + log.info(f'Skipping waterbodies less than {iterative_min_size} pixels.') flood_depth = np.zeros(flood_mask.shape) @@ -300,9 +287,7 @@ def make_flood_map( ) flood_depth_window = flood_depth[min0:max0, min1:max1] - flood_depth_window[flood_window == ll] = ( - water_height - hand_window[flood_window == ll] - ) + flood_depth_window[flood_window == ll] = water_height - hand_window[flood_window == ll] flood_depth[flood_depth < 0] = 0 @@ -314,7 +299,7 @@ def make_flood_map( flood_mask_byte[padding_mask] = floodmask_nodata write_cog( - str(out_raster).replace(".tif", f"_{estimator}_WaterDepth.tif"), + str(out_raster).replace('.tif', f'_{estimator}_WaterDepth.tif'), flood_depth, transform=geotransform, epsg_code=epsg, @@ -322,7 +307,7 @@ def make_flood_map( nodata_value=nodata, ) write_cog( - str(out_raster).replace(".tif", f"_{estimator}_FloodMask.tif"), + str(out_raster).replace('.tif', f'_{estimator}_FloodMask.tif'), flood_mask_byte, transform=geotransform, epsg_code=epsg, @@ -334,7 +319,7 @@ def make_flood_map( flood_depth[np.logical_not(flood_mask)] = 0 flood_depth[padding_mask] = nodata write_cog( - str(out_raster).replace(".tif", f"_{estimator}_FloodDepth.tif"), + str(out_raster).replace('.tif', f'_{estimator}_FloodDepth.tif'), flood_depth, transform=geotransform, epsg_code=epsg, @@ -344,156 +329,142 @@ def make_flood_map( def optional_str(value: str) -> Optional[str]: - if value.lower() == "none": + if value.lower() == 'none': return None return value def optional_float(value: str) -> Optional[float]: - if value.lower() == "none": + if value.lower() == 'none': return None return float(value) -def _get_cli(interface: Literal["hyp3", "main"]) -> argparse.ArgumentParser: - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter - ) +def _get_cli(interface: Literal['hyp3', 'main']) -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) - available_estimators = ["iterative", "logstat", "nmad", "numpy"] - estimator_help = "Flood depth estimation approach." - if interface == "hyp3": - parser.add_argument("--bucket") - parser.add_argument("--bucket-prefix", default="") - parser.add_argument( - "--wm-raster", help="Water map GeoTIFF raster, with suffix `_WM.tif`." - ) + available_estimators = ['iterative', 'logstat', 'nmad', 'numpy'] + estimator_help = 'Flood depth estimation approach.' 
+    if interface == 'hyp3':
+        parser.add_argument('--bucket')
+        parser.add_argument('--bucket-prefix', default='')
+        parser.add_argument('--wm-raster', help='Water map GeoTIFF raster, with suffix `_WM.tif`.')
         available_estimators.append(None)
-        estimator_help += " If `None`, flood depth will not be calculated."
-    elif interface == "main":
-        parser.add_argument(
-            "out_raster", help="File to which flood depth map will be saved."
-        )
+        estimator_help += ' If `None`, flood depth will not be calculated.'
+    elif interface == 'main':
+        parser.add_argument('out_raster', help='File to which flood depth map will be saved.')
         parser.add_argument(
-            "vv_raster",
-            help="Sentinel-1 RTC GeoTIFF raster, in power scale, with VV polarization",
+            'vv_raster',
+            help='Sentinel-1 RTC GeoTIFF raster, in power scale, with VV polarization',
         )
+        parser.add_argument('water_extent_map', help='HyP3-generated water extent raster file.')
         parser.add_argument(
-            "water_extent_map", help="HyP3-generated water extent raster file."
-        )
-        parser.add_argument(
-            "hand_raster",
-            help="Height Above Nearest Drainage (HAND) GeoTIFF aligned to the RTC rasters. "
-            "If not specified, HAND data will be extracted from the GLO-30 HAND.",
+            'hand_raster',
+            help='Height Above Nearest Drainage (HAND) GeoTIFF aligned to the RTC rasters. '
+            'If not specified, HAND data will be extracted from the GLO-30 HAND.',
         )
     else:
-        raise NotImplementedError(f"Unknown interface: {interface}")
+        raise NotImplementedError(f'Unknown interface: {interface}')

     parser.add_argument(
-        "--estimator",
+        '--estimator',
         type=optional_str,
-        default="iterative",
+        default='iterative',
         choices=available_estimators,
         help=estimator_help,
     )
     parser.add_argument(
-        "--water-level-sigma",
+        '--water-level-sigma',
         type=float,
         default=3.0,
-        help="Estimate max water height for each object.",
+        help='Estimate max water height for each object.',
     )
     parser.add_argument(
-        "--known-water-threshold",
+        '--known-water-threshold',
         type=optional_float,
         default=None,
-        help="Threshold for extracting known water area in percent."
-        " If `None`, threshold will be calculated.",
+        help='Threshold for extracting known water area in percent.' ' If `None`, threshold will be calculated.',
     )
     parser.add_argument(
-        "--minimization-metric",
+        '--minimization-metric',
         type=str,
-        default="ts",
-        choices=["fmi", "ts"],
-        help="Evaluation method to minimize when using the iterative estimator. "
-        "Options include a Fowlkes-Mallows index (fmi) or a threat score (ts).",
+        default='ts',
+        choices=['fmi', 'ts'],
+        help='Evaluation method to minimize when using the iterative estimator. '
+        'Options include a Fowlkes-Mallows index (fmi) or a threat score (ts).',
     )
     parser.add_argument(
-        "--iterative-min-size",
+        '--iterative-min-size',
         type=int,
         default=0,
-        help="Minimum size of a connected waterbody in pixels for calculating flood depths with the "
-        "iterative estimator. Waterbodies smaller than this wll be skipped.",
+        help='Minimum size of a connected waterbody in pixels for calculating flood depths with the '
+        'iterative estimator. Waterbodies smaller than this will be skipped.',
     )

-    if interface == "hyp3":
+    if interface == 'hyp3':
         parser.add_argument(
-            "--iterative-min",
+            '--iterative-min',
             type=int,
             default=0,
-            help="Minimum bound on the flood depths calculated using the iterative estimator.",
+            help='Minimum bound on the flood depths calculated using the iterative estimator.',
         )
         parser.add_argument(
-            "--iterative-max",
+            '--iterative-max',
             type=int,
             default=15,
-            help="Maximum bound on the flood depths calculated using the iterative estimator.",
+            help='Maximum bound on the flood depths calculated using the iterative estimator.',
         )
-    elif interface == "main":
+    elif interface == 'main':
         parser.add_argument(
-            "--iterative-bounds",
+            '--iterative-bounds',
             type=int,
             nargs=2,
             default=[0, 15],
-            help="Minimum and maximum bound on the flood depths calculated using the iterative "
-            "estimator.",
+            help='Minimum and maximum bound on the flood depths calculated using the iterative ' 'estimator.',
         )
     else:
-        raise NotImplementedError(f"Unknown interface: {interface}")
+        raise NotImplementedError(f'Unknown interface: {interface}')

-    parser.add_argument(
-        "-v", "--verbose", action="store_true", help="Turn on verbose logging"
-    )
+    parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')

     return parser


 def hyp3():
-    parser = _get_cli(interface="hyp3")
+    parser = _get_cli(interface='hyp3')
     args = parser.parse_args()

     level = logging.DEBUG if args.verbose else logging.INFO
     logging.basicConfig(
         stream=sys.stdout,
-        format="%(asctime)s - %(levelname)s - %(message)s",
+        format='%(asctime)s - %(levelname)s - %(message)s',
         level=level,
     )
-    log.debug(" ".join(sys.argv))
+    log.debug(' '.join(sys.argv))

     if args.estimator is None:
         # NOTE: HyP3's current step function implementation does not have a good way of conditionally
         #   running processing steps. This allows HyP3 to always run this step but exit immediately
         #   and do nothing if flood depth maps are not requested.
- log.info(f"{args.estimator} estimator provided; nothing to do!") + log.info(f'{args.estimator} estimator provided; nothing to do!') sys.exit() if args.wm_raster: water_map_raster = args.wm_raster elif args.bucket: - water_map_raster = get_path_to_s3_file( - args.bucket, args.bucket_prefix, "_WM.tif" - ) - log.info(f"Found WM raster: {water_map_raster}") + water_map_raster = get_path_to_s3_file(args.bucket, args.bucket_prefix, '_WM.tif') + log.info(f'Found WM raster: {water_map_raster}') else: - raise ValueError("Arguments --wm-raster or --bucket must be provided.") + raise ValueError('Arguments --wm-raster or --bucket must be provided.') - vv_raster = water_map_raster.replace("_WM.tif", "_VV.tif") - hand_raster = water_map_raster.replace("_WM.tif", "_WM_HAND.tif") + vv_raster = water_map_raster.replace('_WM.tif', '_VV.tif') + hand_raster = water_map_raster.replace('_WM.tif', '_WM_HAND.tif') - product_name = Path(water_map_raster).name.replace("_WM.tif", "_FM") + product_name = Path(water_map_raster).name.replace('_WM.tif', '_FM') product_dir = Path.cwd() / product_name product_dir.mkdir(exist_ok=True) - flood_map_raster = product_dir / f"{product_name}.tif" + flood_map_raster = product_dir / f'{product_name}.tif' make_flood_map( out_raster=flood_map_raster, @@ -508,28 +479,26 @@ def hyp3(): minimization_metric=args.minimization_metric, ) - log.info(f"Flood depth map created successfully: {flood_map_raster}") + log.info(f'Flood depth map created successfully: {flood_map_raster}') if args.bucket: - output_zip = make_archive( - base_name=product_name, format="zip", base_dir=product_name - ) + output_zip = make_archive(base_name=product_name, format='zip', base_dir=product_name) upload_file_to_s3(Path(output_zip), args.bucket, args.bucket_prefix) for product_file in product_dir.iterdir(): upload_file_to_s3(product_file, args.bucket, args.bucket_prefix) def main(): - parser = _get_cli(interface="main") + parser = _get_cli(interface='main') args = parser.parse_args() level = logging.DEBUG if args.verbose else logging.INFO logging.basicConfig( stream=sys.stdout, - format="%(asctime)s - %(levelname)s - %(message)s", + format='%(asctime)s - %(levelname)s - %(message)s', level=level, ) - log.debug(" ".join(sys.argv)) + log.debug(' '.join(sys.argv)) make_flood_map( out_raster=args.out_raster, @@ -544,4 +513,4 @@ def main(): minimization_metric=args.minimization_metric, ) - log.info(f"Flood depth map created successfully: {args.out_raster}") + log.info(f'Flood depth map created successfully: {args.out_raster}') diff --git a/src/asf_tools/hydrosar/hand/__init__.py b/src/asf_tools/hydrosar/hand/__init__.py index 633884f..558982d 100644 --- a/src/asf_tools/hydrosar/hand/__init__.py +++ b/src/asf_tools/hydrosar/hand/__init__.py @@ -5,7 +5,7 @@ from asf_tools.hydrosar.hand.prepare import prepare_hand_vrt __all__ = [ - "calculate_hand_for_basins", - "make_copernicus_hand", - "prepare_hand_vrt", + 'calculate_hand_for_basins', + 'make_copernicus_hand', + 'prepare_hand_vrt', ] diff --git a/src/asf_tools/hydrosar/hand/calculate.py b/src/asf_tools/hydrosar/hand/calculate.py index bbfb0e4..015d95c 100644 --- a/src/asf_tools/hydrosar/hand/calculate.py +++ b/src/asf_tools/hydrosar/hand/calculate.py @@ -30,11 +30,9 @@ def fill_nan(array: np.ndarray) -> np.ndarray: """ kernel = astropy.convolution.Gaussian2DKernel(x_stddev=3) # kernel x_size=8*stddev with warnings.catch_warnings(): - warnings.simplefilter("ignore") + warnings.simplefilter('ignore') while np.any(np.isnan(array)): - array = 
astropy.convolution.interpolate_replace_nans( - array, kernel, convolve=astropy.convolution.convolve - ) + array = astropy.convolution.interpolate_replace_nans(array, kernel, convolve=astropy.convolution.convolve) return array @@ -107,31 +105,31 @@ def calculate_hand( grid = sGrid.from_raster(str(temp_file.name)) dem = grid.read_raster(str(temp_file.name)) - log.info("Fill pits in DEM") + log.info('Fill pits in DEM') pit_filled_dem = grid.fill_pits(dem) - log.info("Filling depressions") + log.info('Filling depressions') flooded_dem = grid.fill_depressions(pit_filled_dem) del pit_filled_dem - log.info("Resolving flats") + log.info('Resolving flats') inflated_dem = grid.resolve_flats(flooded_dem) del flooded_dem - log.info("Obtaining flow direction") + log.info('Obtaining flow direction') flow_dir = grid.flowdir(inflated_dem, apply_mask=True) - log.info("Calculating flow accumulation") + log.info('Calculating flow accumulation') acc = grid.accumulation(flow_dir) if acc_thresh is None: acc_thresh = acc.mean() - log.info(f"Calculating HAND using accumulation threshold of {acc_thresh}") + log.info(f'Calculating HAND using accumulation threshold of {acc_thresh}') hand = grid.compute_hand(flow_dir, inflated_dem, acc > acc_thresh, inplace=False) if np.isnan(hand).any(): - log.info("Filling NaNs in the HAND") + log.info('Filling NaNs in the HAND') # mask outside of basin with a not-NaN value to prevent NaN-filling outside of basin (optimization) hand[basin_mask] = nodata_fill_value hand = fill_hand(hand, dem_array) @@ -167,9 +165,7 @@ def calculate_hand_for_basins( ) basin_array = src.read(1, window=basin_window) - hand = calculate_hand( - basin_array, basin_affine_tf, src.crs, basin_mask, acc_thresh=acc_thresh - ) + hand = calculate_hand(basin_array, basin_affine_tf, src.crs, basin_mask, acc_thresh=acc_thresh) write_cog( out_raster, @@ -199,17 +195,15 @@ def make_copernicus_hand( If `None`, the mean accumulation value is used """ with fiona.open(vector_file) as vds: - geometries = GeometryCollection([shape(feature["geometry"]) for feature in vds]) + geometries = GeometryCollection([shape(feature['geometry']) for feature in vds]) - with NamedTemporaryFile(suffix=".vrt", delete=False) as dem_vrt: + with NamedTemporaryFile(suffix='.vrt', delete=False) as dem_vrt: prepare_dem_vrt(dem_vrt.name, geometries) - calculate_hand_for_basins( - out_raster, geometries, dem_vrt.name, acc_thresh=acc_thresh - ) + calculate_hand_for_basins(out_raster, geometries, dem_vrt.name, acc_thresh=acc_thresh) def none_or_int(value: str): - if value.lower == "none": + if value.lower == 'none': return None return int(value) @@ -217,41 +211,37 @@ def none_or_int(value: str): def main(): parser = argparse.ArgumentParser( description=__doc__, - epilog="For watershed boundaries, see: https://www.hydrosheds.org/page/hydrobasins", + epilog='For watershed boundaries, see: https://www.hydrosheds.org/page/hydrobasins', formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument("out_raster", help="HAND GeoTIFF to create") + parser.add_argument('out_raster', help='HAND GeoTIFF to create') parser.add_argument( - "vector_file", - help="Vector file of watershed boundary (hydrobasin) polygons to calculate HAND " - "over. Vector file Must be openable by GDAL, see: " - "https://gdal.org/drivers/vector/index.html", + 'vector_file', + help='Vector file of watershed boundary (hydrobasin) polygons to calculate HAND ' + 'over. 
Vector file Must be openable by GDAL, see: ' + 'https://gdal.org/drivers/vector/index.html', ) parser.add_argument( - "-a", - "--acc-threshold", + '-a', + '--acc-threshold', type=none_or_int, default=100, - help="Accumulation threshold for determining the drainage mask. " - "If `None`, the mean accumulation value is used", + help='Accumulation threshold for determining the drainage mask. ' + 'If `None`, the mean accumulation value is used', ) - parser.add_argument( - "-v", "--verbose", action="store_true", help="Turn on verbose logging" - ) + parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging') args = parser.parse_args() level = logging.DEBUG if args.verbose else logging.INFO logging.basicConfig( stream=sys.stdout, - format="%(asctime)s - %(levelname)s - %(message)s", + format='%(asctime)s - %(levelname)s - %(message)s', level=level, ) - log.debug(" ".join(sys.argv)) - log.info(f"Calculating HAND for {args.vector_file}") + log.debug(' '.join(sys.argv)) + log.info(f'Calculating HAND for {args.vector_file}') - make_copernicus_hand( - args.out_raster, args.vector_file, acc_thresh=args.acc_threshold - ) + make_copernicus_hand(args.out_raster, args.vector_file, acc_thresh=args.acc_threshold) - log.info(f"HAND GeoTIFF created successfully: {args.out_raster}") + log.info(f'HAND GeoTIFF created successfully: {args.out_raster}') diff --git a/src/asf_tools/hydrosar/hand/prepare.py b/src/asf_tools/hydrosar/hand/prepare.py index d85bfc4..8b99359 100644 --- a/src/asf_tools/hydrosar/hand/prepare.py +++ b/src/asf_tools/hydrosar/hand/prepare.py @@ -12,17 +12,13 @@ from asf_tools import vector from asf_tools.util import GDALConfigManager, get_epsg_code -HAND_GEOJSON = ( - "/vsicurl/https://glo-30-hand.s3.amazonaws.com/v1/2021/glo-30-hand.geojson" -) +HAND_GEOJSON = '/vsicurl/https://glo-30-hand.s3.amazonaws.com/v1/2021/glo-30-hand.geojson' gdal.UseExceptions() ogr.UseExceptions() -def prepare_hand_vrt( - vrt: Union[str, Path], geometry: Union[ogr.Geometry, BaseGeometry] -): +def prepare_hand_vrt(vrt: Union[str, Path], geometry: Union[ogr.Geometry, BaseGeometry]): """Prepare a HAND mosaic VRT covering a given geometry Prepare a Height Above Nearest Drainage (HAND) virtual raster (VRT) covering a given geometry. 
@@ -36,27 +32,19 @@ def prepare_hand_vrt(
         geometry: Geometry in EPSG:4326 (lon/lat) projection for which to prepare a HAND mosaic
     """
-    with GDALConfigManager(GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR"):
+    with GDALConfigManager(GDAL_DISABLE_READDIR_ON_OPEN='EMPTY_DIR'):
         if isinstance(geometry, BaseGeometry):
             geometry = ogr.CreateGeometryFromWkb(geometry.wkb)

         min_lon, max_lon, _, _ = geometry.GetEnvelope()
         if min_lon < -160.0 and max_lon > 160.0:
-            raise ValueError(
-                f"asf_tools does not currently support geometries that cross the antimeridian: {geometry}"
-            )
+            raise ValueError(f'asf_tools does not currently support geometries that cross the antimeridian: {geometry}')

         tile_features = vector.get_features(HAND_GEOJSON)
-        if not vector.get_property_values_for_intersecting_features(
-            geometry, tile_features
-        ):
-            raise ValueError(
-                f"Copernicus GLO-30 HAND does not intersect this geometry: {geometry}"
-            )
-
-        hand_file_paths = vector.intersecting_feature_properties(
-            geometry, tile_features, "file_path"
-        )
+        if not vector.get_property_values_for_intersecting_features(geometry, tile_features):
+            raise ValueError(f'Copernicus GLO-30 HAND does not intersect this geometry: {geometry}')
+
+        hand_file_paths = vector.intersecting_feature_properties(geometry, tile_features, 'file_path')

         gdal.BuildVRT(str(vrt), hand_file_paths)
@@ -64,7 +52,7 @@
 def prepare_hand_for_raster(
     hand_raster: Union[str, Path],
     source_raster: Union[str, Path],
-    resampling_method: str = "lanczos",
+    resampling_method: str = 'lanczos',
 ):
     """Create a HAND raster pixel-aligned to a source raster
@@ -74,24 +62,24 @@
         resampling_method: Name of the resampling method to use. For available methods, see:
             https://gdal.org/programs/gdalwarp.html#cmdoption-gdalwarp-r
     """
-    info = gdal.Info(str(source_raster), format="json")
+    info = gdal.Info(str(source_raster), format='json')

-    hand_geometry = shape(info["wgs84Extent"])
+    hand_geometry = shape(info['wgs84Extent'])

     hand_bounds = [
-        info["cornerCoordinates"]["upperLeft"][0],
-        info["cornerCoordinates"]["lowerRight"][1],
-        info["cornerCoordinates"]["lowerRight"][0],
-        info["cornerCoordinates"]["upperLeft"][1],
+        info['cornerCoordinates']['upperLeft'][0],
+        info['cornerCoordinates']['lowerRight'][1],
+        info['cornerCoordinates']['lowerRight'][0],
+        info['cornerCoordinates']['upperLeft'][1],
     ]

-    with NamedTemporaryFile(suffix=".vrt", delete=False) as hand_vrt:
+    with NamedTemporaryFile(suffix='.vrt', delete=False) as hand_vrt:
         prepare_hand_vrt(hand_vrt.name, hand_geometry)
         gdal.Warp(
             str(hand_raster),
             hand_vrt.name,
-            dstSRS=f"EPSG:{get_epsg_code(info)}",
+            dstSRS=f'EPSG:{get_epsg_code(info)}',
             outputBounds=hand_bounds,
-            width=info["size"][0],
-            height=info["size"][1],
+            width=info['size'][0],
+            height=info['size'][1],
             resampleAlg=Resampling[resampling_method].value,
         )
diff --git a/src/asf_tools/hydrosar/threshold.py b/src/asf_tools/hydrosar/threshold.py
index de66d2b..8e5767d 100644
--- a/src/asf_tools/hydrosar/threshold.py
+++ b/src/asf_tools/hydrosar/threshold.py
@@ -1,5 +1,3 @@
-# Turned off flake8 because we haven't refactored 3rd party provided functions
-# flake8: noqa
 import numpy as np
@@ -39,9 +37,7 @@ def _make_distribution(m, v, g, x):
     return y


-def expectation_maximization_threshold(
-    tile: np.ndarray, number_of_classes: int = 3
-) -> float:
+def expectation_maximization_threshold(tile: np.ndarray, number_of_classes: int = 3) -> float:
     """Water threshold Calculation using a multi-mode Expectation Maximization Approach

     Thresholding works best when backscatter tiles are provided on a decibel scale
@@ -60,9 +56,7 @@
     """
     image_copy = tile.copy()
-    image_copy2 = np.ma.filled(
-        tile.astype(float), np.nan
-    )  # needed for valid posterior_lookup keys
+    image_copy2 = np.ma.filled(tile.astype(float), np.nan)  # needed for valid posterior_lookup keys
     image = tile.flatten()
     minimum = np.amin(image)
     image = image - minimum + 1
@@ -78,36 +72,20 @@
     sml = np.mean(np.diff(nonzero_indices)) / 1000
     iteration = 0
     while True:
-        class_likelihood = _make_distribution(
-            class_means, class_variances, class_proportions, nonzero_indices
-        )
-        sum_likelihood = (
-            np.sum(class_likelihood, 1) + np.finfo(class_likelihood[0][0]).eps
-        )
+        class_likelihood = _make_distribution(class_means, class_variances, class_proportions, nonzero_indices)
+        sum_likelihood = np.sum(class_likelihood, 1) + np.finfo(class_likelihood[0][0]).eps
         log_likelihood = np.sum(histogram * np.log(sum_likelihood))
         for j in range(0, number_of_classes):
-            class_posterior_probability = (
-                histogram * class_likelihood[:, j] / sum_likelihood
-            )
+            class_posterior_probability = histogram * class_likelihood[:, j] / sum_likelihood
             class_proportions[j] = np.sum(class_posterior_probability)
-            class_means[j] = (
-                np.sum(nonzero_indices * class_posterior_probability)
-                / class_proportions[j]
-            )
+            class_means[j] = np.sum(nonzero_indices * class_posterior_probability) / class_proportions[j]
             vr = nonzero_indices - class_means[j]
-            class_variances[j] = (
-                np.sum(vr * vr * class_posterior_probability) / class_proportions[j]
-                + sml
-            )
+            class_variances[j] = np.sum(vr * vr * class_posterior_probability) / class_proportions[j] + sml
             del class_posterior_probability, vr
         class_proportions += 1e-3
         class_proportions /= np.sum(class_proportions)
-        class_likelihood = _make_distribution(
-            class_means, class_variances, class_proportions, nonzero_indices
-        )
-        sum_likelihood = (
-            np.sum(class_likelihood, 1) + np.finfo(class_likelihood[0, 0]).eps
-        )
+        class_likelihood = _make_distribution(class_means, class_variances, class_proportions, nonzero_indices)
+        sum_likelihood = np.sum(class_likelihood, 1) + np.finfo(class_likelihood[0, 0]).eps
         del class_likelihood
         new_log_likelihood = np.sum(histogram * np.log(sum_likelihood))
         del sum_likelihood
diff --git a/src/asf_tools/hydrosar/water_map.py b/src/asf_tools/hydrosar/water_map.py
index 9703d54..ab46497 100644
--- a/src/asf_tools/hydrosar/water_map.py
+++ b/src/asf_tools/hydrosar/water_map.py
@@ -46,23 +46,19 @@ def select_hand_tiles(
 ) -> np.ndarray:
     if np.allclose(tiles, 0.0):
         raise ValueError(
-            f"All pixels in scene have a HAND value of {0.0} (all water); "
-            f"scene is not a good candidate for water mapping."
+            f'All pixels in scene have a HAND value of {0.0} (all water); '
+            f'scene is not a good candidate for water mapping.'
         )

     tile_indexes = np.arange(tiles.shape[0])

     tiles = np.ma.masked_greater_equal(tiles, hand_threshold)
-    percent_valid_pixels = np.sum(~tiles.mask, axis=(1, 2)) / (
-        tiles.shape[1] * tiles.shape[2]
-    )
+    percent_valid_pixels = np.sum(~tiles.mask, axis=(1, 2)) / (tiles.shape[1] * tiles.shape[2])
     return tile_indexes[percent_valid_pixels > hand_fraction]


-def select_backscatter_tiles(
-    backscatter_tiles: np.ndarray, hand_candidates: np.ndarray
-) -> np.ndarray:
+def select_backscatter_tiles(backscatter_tiles: np.ndarray, hand_candidates: np.ndarray) -> np.ndarray:
     tile_indexes = np.arange(backscatter_tiles.shape[0])

     sub_tile_means = mean_of_subtiles(backscatter_tiles)
@@ -75,9 +71,7 @@
     potential_candidates = np.intersect1d(hand_candidates, low_mean_candidates)

-    for variance_threshold in np.nanpercentile(
-        tile_variance.filled(np.nan), np.arange(5, 96)[::-1]
-    ):
+    for variance_threshold in np.nanpercentile(tile_variance.filled(np.nan), np.arange(5, 96)[::-1]):
         variance_candidates = tile_indexes[tile_variance > variance_threshold]
         selected = np.intersect1d(variance_candidates, potential_candidates)
         sort_index = np.argsort(sub_tile_means_std[selected])[::-1]
@@ -106,26 +100,20 @@ def determine_membership_limits(
     array: np.ndarray, mask_percentile: float = 90.0, std_range: float = 3.0
 ) -> Tuple[float, float]:
     array = np.ma.masked_values(array, 0.0)
-    array = np.ma.masked_greater(
-        array, np.nanpercentile(array.filled(np.nan), mask_percentile)
-    )
+    array = np.ma.masked_greater(array, np.nanpercentile(array.filled(np.nan), mask_percentile))
     lower_limit = np.ma.median(array)
     upper_limit = lower_limit + std_range * array.std() + 5.0
     return lower_limit, upper_limit


-def min_max_membership(
-    array: np.ndarray, lower_limit: float, upper_limit: float, resolution: float
-) -> np.ndarray:
+def min_max_membership(array: np.ndarray, lower_limit: float, upper_limit: float, resolution: float) -> np.ndarray:
     possible_values = np.arange(array.min(), array.max(), resolution)
     activation = fuzz.zmf(possible_values, lower_limit, upper_limit)
     membership = fuzz.interp_membership(possible_values, activation, array)
     return membership


-def segment_area_membership(
-    segments: np.ndarray, min_area: int = 3, max_area: int = 10
-) -> np.ndarray:
+def segment_area_membership(segments: np.ndarray, min_area: int = 3, max_area: int = 10) -> np.ndarray:
     segment_areas = np.bincount(segments.ravel())
     possible_areas = np.arange(min_area, max_area + 1)
@@ -134,9 +122,7 @@
     segment_membership = np.zeros_like(segments)

     segments_above_threshold = np.squeeze((segment_areas > max_area).nonzero())
-    segments_above_threshold = np.delete(
-        segments_above_threshold, (segments_above_threshold == 0).nonzero()
-    )
+    segments_above_threshold = np.delete(segments_above_threshold, (segments_above_threshold == 0).nonzero())
     np.putmask(segment_membership, np.isin(segments, segments_above_threshold), 1)

     for area in possible_areas:
@@ -186,27 +172,18 @@
     water_segment_membership = segment_area_membership(water_segments)
     water_map &= ~np.isclose(water_segment_membership, 0.0)

-    gaussian_membership = min_max_membership(
-        gaussian_array, gaussian_thresholds[0], gaussian_thresholds[1], 0.005
-    )
+    gaussian_membership = min_max_membership(gaussian_array, gaussian_thresholds[0], gaussian_thresholds[1], 0.005)
     water_map &= ~np.isclose(gaussian_membership, 0.0)

     hand_lower_limit, hand_upper_limit = determine_membership_limits(hand_array)
-    hand_membership = min_max_membership(
-        hand_array, hand_lower_limit, hand_upper_limit, 0.1
-    )
+    hand_membership = min_max_membership(hand_array, hand_lower_limit, hand_upper_limit, 0.1)
     water_map &= ~np.isclose(hand_membership, 0.0)

     hand_slopes = calculate_slope_magnitude(hand_array, pixel_size)
     slope_membership = min_max_membership(hand_slopes, 0.0, 15.0, 0.1)
     water_map &= ~np.isclose(slope_membership, 0.0)

-    water_map_weights = (
-        gaussian_membership
-        + hand_membership
-        + slope_membership
-        + water_segment_membership
-    ) / 4.0
+    water_map_weights = (gaussian_membership + hand_membership + slope_membership + water_segment_membership) / 4.0
     water_map &= water_map_weights >= membership_threshold

     return water_map
@@ -280,33 +257,33 @@
         membership_threshold: The average membership to the fuzzy indicators required for a water pixel
     """
     if tile_shape[0] % 2 or tile_shape[1] % 2:
-        raise ValueError(f"tile_shape {tile_shape} requires even values.")
+        raise ValueError(f'tile_shape {tile_shape} requires even values.')

-    info = gdal.Info(str(vh_raster), format="json")
+    info = gdal.Info(str(vh_raster), format='json')

-    out_transform = info["geoTransform"]
+    out_transform = info['geoTransform']
     out_epsg = get_epsg_code(info)

     if hand_raster is None:
-        hand_raster = str(out_raster).replace(".tif", "_HAND.tif")
-        log.info(f"Extracting HAND data to: {hand_raster}")
+        hand_raster = str(out_raster).replace('.tif', '_HAND.tif')
+        log.info(f'Extracting HAND data to: {hand_raster}')
         prepare_hand_for_raster(hand_raster, vh_raster)

-    log.info(f"Determining HAND memberships from {hand_raster}")
+    log.info(f'Determining HAND memberships from {hand_raster}')
     hand_array = read_as_masked_array(hand_raster)
     hand_tiles = tile_array(hand_array, tile_shape=tile_shape, pad_value=np.nan)

     hand_candidates = select_hand_tiles(hand_tiles, hand_threshold, hand_fraction)
-    log.debug(f"Selected HAND tile candidates {hand_candidates}")
+    log.debug(f'Selected HAND tile candidates {hand_candidates}')

     selected_tiles = None
     nodata = np.iinfo(np.uint8).max
     water_extent_maps = []
     for max_db_threshold, raster, pol in (
-        (max_vh_threshold, vh_raster, "VH"),
-        (max_vv_threshold, vv_raster, "VV"),
+        (max_vh_threshold, vh_raster, 'VH'),
+        (max_vv_threshold, vv_raster, 'VV'),
     ):
-        log.info(f"Creating initial {pol} water extent map from {raster}")
+        log.info(f'Creating initial {pol} water extent map from {raster}')
         array = read_as_masked_array(raster)
         padding_mask = array.mask
         tiles = tile_array(array, tile_shape=tile_shape, pad_value=0.0)
@@ -314,34 +291,22 @@
         tiles = np.ma.masked_less_equal(tiles, 0.0)
         if selected_tiles is None:
             selected_tiles = select_backscatter_tiles(tiles, hand_candidates)
-            log.info(f"Selected tiles {selected_tiles} from {raster}")
+            log.info(f'Selected tiles {selected_tiles} from {raster}')

         with np.testing.suppress_warnings() as sup:
-            sup.filter(
-                RuntimeWarning
-            )  # invalid value and divide by zero encountered in log10
-            tiles = (
-                np.log10(tiles) + 30.0
-            )  # linear power scale -> Gaussian scale optimized for thresholding
-            max_gaussian_threshold = (
-                max_db_threshold / 10.0 + 30.0
-            )  # db -> Gaussian scale optimized for thresholding
+            sup.filter(RuntimeWarning)  # invalid value and divide by zero encountered in log10
+            tiles = np.log10(tiles) + 30.0  # linear power scale -> Gaussian scale optimized for thresholding
+            max_gaussian_threshold = max_db_threshold / 10.0 + 30.0  # db -> Gaussian scale optimized for thresholding
             if selected_tiles.size:
                 scaling = 256 / (np.mean(tiles) + 3 * np.std(tiles))
-                gaussian_threshold = determine_em_threshold(
-                    tiles[selected_tiles, :, :], scaling
-                )
+                gaussian_threshold = determine_em_threshold(tiles[selected_tiles, :, :], scaling)
                 threshold_db = 10.0 * (gaussian_threshold - 30.0)
-                log.info(f"Threshold determined to be {threshold_db} db")
+                log.info(f'Threshold determined to be {threshold_db} db')
                 if gaussian_threshold > max_gaussian_threshold:
-                    log.warning(
-                        f"Threshold too high! Using maximum threshold {max_db_threshold} db"
-                    )
+                    log.warning(f'Threshold too high! Using maximum threshold {max_db_threshold} db')
                     gaussian_threshold = max_gaussian_threshold
             else:
-                log.warning(
-                    f"Tile selection did not converge! using default threshold {max_db_threshold} db"
-                )
+                log.warning(f'Tile selection did not converge! Using default threshold {max_db_threshold} db')
                 gaussian_threshold = max_gaussian_threshold

         gaussian_array = untile_array(tiles, array.shape)
@@ -349,7 +314,7 @@
         water_map &= ~array.mask

         write_cog(
-            str(out_raster).replace(".tif", f"_{pol}_initial.tif"),
+            str(out_raster).replace('.tif', f'_{pol}_initial.tif'),
             format_raster_data(water_map, padding_mask, nodata),
             transform=out_transform,
             epsg_code=out_epsg,
@@ -357,7 +322,7 @@
             nodata_value=nodata,
         )

-        log.info(f"Refining initial {pol} water extent map using Fuzzy Logic")
+        log.info(f'Refining initial {pol} water extent map using Fuzzy Logic')
         array = np.ma.masked_where(~water_map, array)
         gaussian_lower_limit = np.log10(np.ma.median(array)) + 30.0
@@ -372,7 +337,7 @@
         water_map &= ~array.mask

         write_cog(
-            str(out_raster).replace(".tif", f"_{pol}_fuzzy.tif"),
+            str(out_raster).replace('.tif', f'_{pol}_fuzzy.tif'),
             format_raster_data(water_map, padding_mask, nodata),
             transform=out_transform,
             epsg_code=out_epsg,
@@ -382,7 +347,7 @@

         water_extent_maps.append(water_map)

-    log.info("Combining Fuzzy VH and VV extent map")
+    log.info('Combining Fuzzy VH and VV extent map')
     combined_water_map = np.logical_or(*water_extent_maps)

     combined_segments = measure.label(combined_water_map, connectivity=2)
@@ -398,110 +363,106 @@
     )


-def _get_cli(interface: Literal["hyp3", "main"]) -> argparse.ArgumentParser:
-    parser = argparse.ArgumentParser(
-        description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter
-    )
+def _get_cli(interface: Literal['hyp3', 'main']) -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)

-    if interface == "hyp3":
-        parser.add_argument("--bucket")
-        parser.add_argument("--bucket-prefix", default="")
+    if interface == 'hyp3':
+        parser.add_argument('--bucket')
+        parser.add_argument('--bucket-prefix', default='')
         parser.add_argument(
-            "--vv-raster",
-            help="Sentinel-1 RTC GeoTIFF raster, in power scale, with VV polarization.",
+            '--vv-raster',
+            help='Sentinel-1 RTC GeoTIFF raster, in power scale, with VV polarization.',
         )
-    elif interface == "main":
-        parser.add_argument("out_raster", help="Water map GeoTIFF to create")
+    elif interface == 'main':
+        parser.add_argument('out_raster', help='Water map GeoTIFF to create')
         # FIXME: Decibel RTCs would be real nice.
     parser.add_argument(
-        "vv_raster",
-        help="Sentinel-1 RTC GeoTIFF raster, in power scale, with VV polarization",
+        'vv_raster',
+        help='Sentinel-1 RTC GeoTIFF raster, in power scale, with VV polarization',
     )
     parser.add_argument(
-        "vh_raster",
-        help="Sentinel-1 RTC GeoTIFF raster, in power scale, with VH polarization",
+        'vh_raster',
+        help='Sentinel-1 RTC GeoTIFF raster, in power scale, with VH polarization',
     )
     parser.add_argument(
-        "--hand-raster",
-        help="Height Above Nearest Drainage (HAND) GeoTIFF aligned to the RTC rasters. "
-        "If not specified, HAND data will be extracted from the GLO-30 HAND.",
+        '--hand-raster',
+        help='Height Above Nearest Drainage (HAND) GeoTIFF aligned to the RTC rasters. '
+        'If not specified, HAND data will be extracted from the GLO-30 HAND.',
     )
     parser.add_argument(
-        "--tile-shape",
+        '--tile-shape',
         type=int,
         nargs=2,
         default=(100, 100),
-        help="image tiles will have this shape (height, width) in pixels",
+        help='image tiles will have this shape (height, width) in pixels',
     )
     else:
-        raise NotImplementedError(f"Unknown interface: {interface}")
+        raise NotImplementedError(f'Unknown interface: {interface}')

     parser.add_argument(
-        "--max-vv-threshold",
+        '--max-vv-threshold',
         type=float,
         default=-15.5,
-        help="Maximum threshold value to use for `vv_raster` in decibels (db)",
+        help='Maximum threshold value to use for `vv_raster` in decibels (db)',
     )
     parser.add_argument(
-        "--max-vh-threshold",
+        '--max-vh-threshold',
         type=float,
         default=-23.0,
-        help="Maximum threshold value to use for `vh_raster` in decibels (db)",
+        help='Maximum threshold value to use for `vh_raster` in decibels (db)',
     )
     parser.add_argument(
-        "--hand-threshold",
+        '--hand-threshold',
         type=float,
         default=15.0,
-        help="The maximum height above nearest drainage in meters to consider a pixel valid",
+        help='The maximum height above nearest drainage in meters to consider a pixel valid',
     )
     parser.add_argument(
-        "--hand-fraction",
+        '--hand-fraction',
         type=float,
         default=0.8,
-        help="The minimum fraction of valid HAND pixels required in a tile for thresholding",
+        help='The minimum fraction of valid HAND pixels required in a tile for thresholding',
     )
     parser.add_argument(
-        "--membership-threshold",
+        '--membership-threshold',
         type=float,
         default=0.45,
-        help="The average membership to the fuzzy indicators required for a water pixel",
+        help='The average membership to the fuzzy indicators required for a water pixel',
     )
-    parser.add_argument(
-        "-v", "--verbose", action="store_true", help="Turn on verbose logging"
-    )
+    parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')

     return parser


 def hyp3():
-    parser = _get_cli(interface="hyp3")
+    parser = _get_cli(interface='hyp3')
     args = parser.parse_args()

     level = logging.DEBUG if args.verbose else logging.INFO
     logging.basicConfig(
         stream=sys.stdout,
-        format="%(asctime)s - %(levelname)s - %(message)s",
+        format='%(asctime)s - %(levelname)s - %(message)s',
         level=level,
     )
-    log.debug(" ".join(sys.argv))
+    log.debug(' '.join(sys.argv))

     if args.vv_raster:
         vv_raster = args.vv_raster
     elif args.bucket:
-        vv_raster = get_path_to_s3_file(args.bucket, args.bucket_prefix, "_VV.tif")
-        log.info(f"Found VV raster: {vv_raster}")
+        vv_raster = get_path_to_s3_file(args.bucket, args.bucket_prefix, '_VV.tif')
+        log.info(f'Found VV raster: {vv_raster}')
     else:
-        raise ValueError("Arguments --vv-raster or --bucket must be provided.")
+        raise ValueError('Arguments --vv-raster or --bucket must be provided.')

-    vh_raster = vv_raster.replace("_VV.tif", "_VH.tif")
+    vh_raster = vv_raster.replace('_VV.tif', '_VH.tif')

-    product_name = Path(vv_raster).name.replace("_VV.tif", "_WM")
+    product_name = Path(vv_raster).name.replace('_VV.tif', '_WM')
     product_dir = Path.cwd() / product_name
     product_dir.mkdir(exist_ok=True)

-    water_map_raster = product_dir / f"{product_name}.tif"
+    water_map_raster = product_dir / f'{product_name}.tif'

     make_water_map(
         out_raster=water_map_raster,
@@ -514,28 +475,26 @@ def hyp3():
         membership_threshold=args.membership_threshold,
     )

-    log.info(f"Water map created successfully: {water_map_raster}")
+    log.info(f'Water map created successfully: {water_map_raster}')

     if args.bucket:
-        output_zip = make_archive(
-            base_name=product_name, format="zip", base_dir=product_name
-        )
+        output_zip = make_archive(base_name=product_name, format='zip', base_dir=product_name)
         upload_file_to_s3(Path(output_zip), args.bucket, args.bucket_prefix)

         for product_file in product_dir.iterdir():
             upload_file_to_s3(product_file, args.bucket, args.bucket_prefix)


 def main():
-    parser = _get_cli(interface="main")
+    parser = _get_cli(interface='main')
     args = parser.parse_args()

     level = logging.DEBUG if args.verbose else logging.INFO
     logging.basicConfig(
         stream=sys.stdout,
-        format="%(asctime)s - %(levelname)s - %(message)s",
+        format='%(asctime)s - %(levelname)s - %(message)s',
         level=level,
     )
-    log.debug(" ".join(sys.argv))
+    log.debug(' '.join(sys.argv))

     make_water_map(
         args.out_raster,
@@ -550,4 +509,4 @@ def main():
         args.membership_threshold,
     )

-    log.info(f"Water map created successfully: {args.out_raster}")
+    log.info(f'Water map created successfully: {args.out_raster}')
diff --git a/src/asf_tools/raster.py b/src/asf_tools/raster.py
index 6f906bc..f030857 100644
--- a/src/asf_tools/raster.py
+++ b/src/asf_tools/raster.py
@@ -15,35 +15,35 @@ def convert_scale(
     array: Union[np.ndarray, np.ma.MaskedArray],
-    in_scale: Literal["db", "amplitude", "power"],
-    out_scale: Literal["db", "amplitude", "power"],
+    in_scale: Literal['db', 'amplitude', 'power'],
+    out_scale: Literal['db', 'amplitude', 'power'],
 ) -> Union[np.ndarray, np.ma.MaskedArray]:
     """Convert calibrated raster scale between db, amplitude and power"""
     if in_scale == out_scale:
-        warnings.warn(f"Nothing to do! {in_scale} is same as {out_scale}.")
+        warnings.warn(f'Nothing to do! {in_scale} is same as {out_scale}.')
         return array

     log10 = np.ma.log10 if isinstance(array, np.ma.MaskedArray) else np.log10

-    if in_scale == "db":
-        if out_scale == "power":
+    if in_scale == 'db':
+        if out_scale == 'power':
             return 10 ** (array / 10)
-        if out_scale == "amplitude":
+        if out_scale == 'amplitude':
             return 10 ** (array / 20)

-    if in_scale == "amplitude":
-        if out_scale == "power":
+    if in_scale == 'amplitude':
+        if out_scale == 'power':
             return array**2
-        if out_scale == "db":
+        if out_scale == 'db':
             return 10 * log10(array**2)

-    if in_scale == "power":
-        if out_scale == "amplitude":
+    if in_scale == 'power':
+        if out_scale == 'amplitude':
             return np.sqrt(array)
-        if out_scale == "db":
+        if out_scale == 'db':
             return 10 * log10(array)

-    raise ValueError(f"Cannot convert raster of scale {in_scale} to {out_scale}")
+    raise ValueError(f'Cannot convert raster of scale {in_scale} to {out_scale}')


 def read_as_masked_array(raster: Union[str, Path], band: int = 1) -> np.ma.MaskedArray:
@@ -56,7 +56,7 @@ def read_as_masked_array(raster: Union[str, Path], band: int = 1) -> np.ma.Maske
     Returns:
         data: The raster pixel data as a numpy MaskedArray
     """
-    log.debug(f"Reading raster values from {raster}")
+    log.debug(f'Reading raster values from {raster}')
     ds = gdal.Open(str(raster))
     band = ds.GetRasterBand(band)
     data = np.ma.masked_invalid(band.ReadAsArray())
@@ -77,7 +77,7 @@ def read_as_array(raster: str, band: int = 1) -> np.array:
     Returns:
         data: The raster pixel data as a numpy array
     """
-    log.debug(f"Reading raster values from {raster}")
+    log.debug(f'Reading raster values from {raster}')
     ds = gdal.Open(raster)
     data = ds.GetRasterBand(band).ReadAsArray()
     del ds  # How to close w/ gdal
@@ -105,25 +105,23 @@ def write_cog(
     Returns:
         file_name: The output file name
     """
-    log.info(f"Creating {file_name}")
+    log.info(f'Creating {file_name}')

     with NamedTemporaryFile() as temp_file:
-        driver = gdal.GetDriverByName("GTiff")
-        temp_geotiff = driver.Create(
-            temp_file.name, data.shape[1], data.shape[0], 1, dtype
-        )
+        driver = gdal.GetDriverByName('GTiff')
+        temp_geotiff = driver.Create(temp_file.name, data.shape[1], data.shape[0], 1, dtype)
         temp_geotiff.GetRasterBand(1).WriteArray(data)
         if nodata_value is not None:
             temp_geotiff.GetRasterBand(1).SetNoDataValue(nodata_value)
         temp_geotiff.SetGeoTransform(transform)
         temp_geotiff.SetProjection(epsg_to_wkt(epsg_code))

-        driver = gdal.GetDriverByName("COG")
+        driver = gdal.GetDriverByName('COG')
         options = [
-            "COMPRESS=LZW",
-            "OVERVIEW_RESAMPLING=AVERAGE",
-            "NUM_THREADS=ALL_CPUS",
-            "BIGTIFF=YES",
+            'COMPRESS=LZW',
+            'OVERVIEW_RESAMPLING=AVERAGE',
+            'NUM_THREADS=ALL_CPUS',
+            'BIGTIFF=YES',
         ]
         driver.CreateCopy(str(file_name), temp_geotiff, options=options)
diff --git a/src/asf_tools/tile.py b/src/asf_tools/tile.py
index 3427ed9..c3e12fb 100644
--- a/src/asf_tools/tile.py
+++ b/src/asf_tools/tile.py
@@ -48,9 +48,7 @@ def tile_array(
     cpad = -array_columns % tile_columns

     if (rpad or cpad) and pad_value is None:
-        raise ValueError(
-            f"Cannot evenly tile a {array.shape} array into ({tile_rows},{tile_columns}) tiles"
-        )
+        raise ValueError(f'Cannot evenly tile a {array.shape} array into ({tile_rows},{tile_columns}) tiles')

     if rpad or cpad:
         padded_array = np.pad(array, ((0, rpad), (0, cpad)), constant_values=pad_value)
@@ -62,9 +60,7 @@

     tile_list = []
     for rows in np.vsplit(padded_array, range(tile_rows, array_rows, tile_rows)):
-        tile_list.extend(
-            np.hsplit(rows, range(tile_columns, array_columns, tile_columns))
-        )
+        tile_list.extend(np.hsplit(rows, range(tile_columns, array_columns, tile_columns)))

     dstack = np.ma.dstack if isinstance(array, np.ma.MaskedArray) else np.dstack
     tiled = np.moveaxis(dstack(tile_list), -1, 0)
@@ -121,8 +117,8 @@ def untile_array(

     if (array_size := array_rows * array_columns) > tiled_array.size:
         raise ValueError(
-            f"array_shape {array_shape} will result in an array bigger than the tiled array:"
-            f" {array_size} > {tiled_array.size}"
+            f'array_shape {array_shape} will result in an array bigger than the tiled array:'
+            f' {array_size} > {tiled_array.size}'
         )

     for ii in range(untiled_rows):
diff --git a/src/asf_tools/util.py b/src/asf_tools/util.py
index a6a6cc8..8af4f48 100644
--- a/src/asf_tools/util.py
+++ b/src/asf_tools/util.py
@@ -37,8 +37,8 @@ def get_epsg_code(info: dict) -> int:
     Returns:
         epsg_code: The integer EPSG code
     """
-    proj = osr.SpatialReference(info["coordinateSystem"]["wkt"])
-    epsg_code = int(proj.GetAttrValue("AUTHORITY", 1))
+    proj = osr.SpatialReference(info['coordinateSystem']['wkt'])
+    epsg_code = int(proj.GetAttrValue('AUTHORITY', 1))
     return epsg_code


@@ -51,8 +51,8 @@ def get_coordinates(info: dict) -> Tuple[int, int, int, int]:
     Returns:
         (west, south, east, north): the corner coordinates values
     """
-    west, south = info["cornerCoordinates"]["lowerLeft"]
-    east, north = info["cornerCoordinates"]["upperRight"]
+    west, south = info['cornerCoordinates']['lowerLeft']
+    east, north = info['cornerCoordinates']['upperRight']

     return west, south, east, north
diff --git a/src/asf_tools/vector.py b/src/asf_tools/vector.py
index 47b72a0..3f278f9 100644
--- a/src/asf_tools/vector.py
+++ b/src/asf_tools/vector.py
@@ -12,17 +12,13 @@ def get_features(vector_path: Union[str, Path]) -> List[ogr.Feature]:
     return [feature for feature in layer]


-def get_property_values_for_intersecting_features(
-    geometry: ogr.Geometry, features: Iterator
-) -> bool:
+def get_property_values_for_intersecting_features(geometry: ogr.Geometry, features: Iterator) -> bool:
     for feature in features:
         if feature.GetGeometryRef().Intersects(geometry):
             return True


-def intersecting_feature_properties(
-    geometry: ogr.Geometry, features: Iterator, feature_property: str
-) -> List[str]:
+def intersecting_feature_properties(geometry: ogr.Geometry, features: Iterator, feature_property: str) -> List[str]:
     property_values = []
     for feature in features:
         if feature.GetGeometryRef().Intersects(geometry):
diff --git a/src/asf_tools/watermasking/fill_missing_tiles.py b/src/asf_tools/watermasking/fill_missing_tiles.py
index a2923de..bddaa6d 100644
--- a/src/asf_tools/watermasking/fill_missing_tiles.py
+++ b/src/asf_tools/watermasking/fill_missing_tiles.py
@@ -13,39 +13,33 @@

 def main():
     parser = argparse.ArgumentParser(
-        prog="fill_missing_tiles.py",
-        description="Script for creating filled tifs in areas with missing tiles.",
+        prog='fill_missing_tiles.py',
+        description='Script for creating filled tifs in areas with missing tiles.',
     )

+    parser.add_argument('--fill-value', help='The value to fill the data array with.', default=0)
     parser.add_argument(
-        "--fill-value", help="The value to fill the data array with.", default=0
-    )
-    parser.add_argument(
-        "--lat-begin",
-        help="The minimum latitude of the dataset in EPSG:4326.",
+        '--lat-begin',
+        help='The minimum latitude of the dataset in EPSG:4326.',
         default=-85,
     )
     parser.add_argument(
-        "--lat-end",
-        help="The maximum latitude of the dataset in EPSG:4326.",
+        '--lat-end',
+        help='The maximum latitude of the dataset in EPSG:4326.',
         default=85,
     )
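# --- editor's note (illustrative sketch, not part of the patch): a round trip
# through the tile_array/untile_array helpers reformatted above, assuming
# asf_tools is installed. A 5x7 array does not tile evenly into (3, 3), so
# pad_value fills the ragged edge; untile_array is assumed to crop back to the
# requested shape, matching the `untile_array(tiles, array.shape)` call earlier
# in this diff.
import numpy as np

from asf_tools.tile import tile_array, untile_array

array = np.arange(35, dtype=float).reshape(5, 7)
tiles = tile_array(array, tile_shape=(3, 3), pad_value=0.0)
assert tiles.shape == (6, 3, 3)              # 2 tile rows x 3 tile columns
restored = untile_array(tiles, array.shape)  # padding is discarded
assert np.array_equal(restored, array)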
parser.add_argument( - "--lon-begin", - help="The minimum longitude of the dataset in EPSG:4326.", + '--lon-begin', + help='The minimum longitude of the dataset in EPSG:4326.', default=-180, ) parser.add_argument( - "--lon-end", - help="The maximum longitude of the dataset in EPSG:4326.", + '--lon-end', + help='The maximum longitude of the dataset in EPSG:4326.', default=180, ) - parser.add_argument( - "--tile-width", help="The desired width of the tile in degrees.", default=5 - ) - parser.add_argument( - "--tile-height", help="The desired height of the tile in degrees.", default=5 - ) + parser.add_argument('--tile-width', help='The desired width of the tile in degrees.', default=5) + parser.add_argument('--tile-height', help='The desired height of the tile in degrees.', default=5) args = parser.parse_args() @@ -62,11 +56,11 @@ def main(): for lat in lat_range: for lon in lon_range: - tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False, postfix="") - tile_tif = "tiles/" + tile + ".tif" - tile_cog = "tiles/cogs/" + tile + ".tif" + tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False, postfix='') + tile_tif = 'tiles/' + tile + '.tif' + tile_cog = 'tiles/cogs/' + tile + '.tif' - print(f"Processing: {tile}") + print(f'Processing: {tile}') xmin, ymin = lon, lat pixel_size_x = 0.00009009009 @@ -76,7 +70,7 @@ def main(): data = np.empty((55500, 55500)) data.fill(fill_value) - driver = gdal.GetDriverByName("GTiff") + driver = gdal.GetDriverByName('GTiff') dst_ds = driver.Create( tile_tif, xsize=data.shape[0], @@ -92,12 +86,10 @@ def main(): dst_band.WriteArray(data) del dst_ds - command = f"gdal_translate -of COG -co NUM_THREADS=all_cpus {tile_tif} {tile_cog}".split( - " " - ) + command = f'gdal_translate -of COG -co NUM_THREADS=all_cpus {tile_tif} {tile_cog}'.split(' ') subprocess.run(command) os.remove(tile_tif) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/src/asf_tools/watermasking/generate_osm_tiles.py b/src/asf_tools/watermasking/generate_osm_tiles.py index 51fac43..d42b681 100644 --- a/src/asf_tools/watermasking/generate_osm_tiles.py +++ b/src/asf_tools/watermasking/generate_osm_tiles.py @@ -15,10 +15,10 @@ gdal.UseExceptions() -INTERIOR_TILE_DIR = "interior_tiles/" -OCEAN_TILE_DIR = "ocean_tiles/" -FINISHED_TILE_DIR = "tiles/" -GDAL_OPTIONS = ["COMPRESS=LZW", "NUM_THREADS=all_cpus"] +INTERIOR_TILE_DIR = 'interior_tiles/' +OCEAN_TILE_DIR = 'ocean_tiles/' +FINISHED_TILE_DIR = 'tiles/' +GDAL_OPTIONS = ['COMPRESS=LZW', 'NUM_THREADS=all_cpus'] def process_pbf(planet_file: str, output_file: str): @@ -29,24 +29,14 @@ def process_pbf(planet_file: str, output_file: str): output_file: The desired path of the processed PBF file. 
""" - natural_file = "planet_natural.pbf" - waterways_file = "planet_waterways.pbf" - reservoirs_file = "planet_reservoirs.pbf" + natural_file = 'planet_natural.pbf' + waterways_file = 'planet_waterways.pbf' + reservoirs_file = 'planet_reservoirs.pbf' - natural_water_command = ( - f"osmium tags-filter -o {natural_file} {planet_file} wr/natural=water".split( - " " - ) - ) - waterways_command = ( - f'osmium tags-filter -o {waterways_file} {planet_file} waterway="*"'.split(" ") - ) - reservoirs_command = f"osmium tags-filter -o {reservoirs_file} {planet_file} landuse=reservoir".split( - " " - ) - merge_command = f"osmium merge {natural_file} {waterways_file} {reservoirs_file} -o {output_file}".split( - " " - ) + natural_water_command = f'osmium tags-filter -o {natural_file} {planet_file} wr/natural=water'.split(' ') + waterways_command = f'osmium tags-filter -o {waterways_file} {planet_file} waterway="*"'.split(' ') + reservoirs_command = f'osmium tags-filter -o {reservoirs_file} {planet_file} landuse=reservoir'.split(' ') + merge_command = f'osmium merge {natural_file} {waterways_file} {reservoirs_file} -o {output_file}'.split(' ') subprocess.run(natural_water_command) subprocess.run(waterways_command) @@ -54,9 +44,7 @@ def process_pbf(planet_file: str, output_file: str): subprocess.run(merge_command) -def process_ocean_tiles( - ocean_polygons_path, lat, lon, tile_width_deg, tile_height_deg, output_dir -): +def process_ocean_tiles(ocean_polygons_path, lat, lon, tile_width_deg, tile_height_deg, output_dir): """Process and crop OSM ocean polygons into a tif tile. Args: @@ -67,17 +55,15 @@ def process_ocean_tiles( tile_height_deg: The height of the tile in degrees. """ - tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False, postfix="") - tile_tif = output_dir + tile + ".tif" + tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False, postfix='') + tile_tif = output_dir + tile + '.tif' xmin, xmax, ymin, ymax = lon, lon + tile_width_deg, lat, lat + tile_height_deg pixel_size_x = 0.00009009009 pixel_size_y = 0.00009009009 - clipped_polygons_path = tile + ".shp" - command = f"ogr2ogr -clipsrc {xmin} {ymin} {xmax} {ymax} {clipped_polygons_path} {ocean_polygons_path}".split( - " " - ) + clipped_polygons_path = tile + '.shp' + command = f'ogr2ogr -clipsrc {xmin} {ymin} {xmax} {ymax} {clipped_polygons_path} {ocean_polygons_path}'.split(' ') subprocess.run(command) gdal.Rasterize( @@ -92,18 +78,16 @@ def process_ocean_tiles( ) temp_files = [ - tile + ".dbf", - tile + ".cpg", - tile + ".prj", - tile + ".shx", - tile + ".shp", + tile + '.dbf', + tile + '.cpg', + tile + '.prj', + tile + '.shx', + tile + '.shp', ] remove_temp_files(temp_files) -def extract_water( - water_file, lat, lon, tile_width_deg, tile_height_deg, interior_tile_dir -): +def extract_water(water_file, lat, lon, tile_width_deg, tile_height_deg, interior_tile_dir): """Rasterize a water tile from the processed global PBF file. Args: @@ -114,34 +98,28 @@ def extract_water( tile_height_deg: The desired height of the tile in degrees. """ - tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False, postfix="") - tile_pbf = tile + ".osm.pbf" - tile_tif = interior_tile_dir + tile + ".tif" - tile_shp = tile + ".shp" - tile_geojson = tile + ".geojson" + tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False, postfix='') + tile_pbf = tile + '.osm.pbf' + tile_tif = interior_tile_dir + tile + '.tif' + tile_shp = tile + '.shp' + tile_geojson = tile + '.geojson' # Extract tile from the main pbf, then convert it to a tif. 
-    bbox = f"--bbox {lon},{lat},{lon+tile_width_deg},{lat+tile_height_deg}"
-    extract_command = f"osmium extract -s smart -S tags=natural=water {bbox} {water_file} -o {tile_pbf}".split(
-        " "
-    )
-    export_command = (
-        f"osmium export --geometry-types=polygon {tile_pbf} -o {tile_geojson}".split(
-            " "
-        )
-    )
+    bbox = f'--bbox {lon},{lat},{lon+tile_width_deg},{lat+tile_height_deg}'
+    extract_command = f'osmium extract -s smart -S tags=natural=water {bbox} {water_file} -o {tile_pbf}'.split(' ')
+    export_command = f'osmium export --geometry-types=polygon {tile_pbf} -o {tile_geojson}'.split(' ')

     subprocess.run(extract_command)
     subprocess.run(export_command)

     # Islands and Islets can be members of the water features, so they must be removed.
-    water_gdf = gpd.read_file(tile_geojson, engine="pyogrio")
+    water_gdf = gpd.read_file(tile_geojson, engine='pyogrio')
     try:
-        water_gdf = water_gdf.drop(water_gdf[water_gdf["place"] == "island"].index)
-        water_gdf = water_gdf.drop(water_gdf[water_gdf["place"] == "islet"].index)
+        water_gdf = water_gdf.drop(water_gdf[water_gdf['place'] == 'island'].index)
+        water_gdf = water_gdf.drop(water_gdf[water_gdf['place'] == 'islet'].index)
     except KeyError:
         # When there are no islands or islets to drop, the 'place' lookup raises a KeyError, which we can safely ignore.
         pass
-    water_gdf.to_file(tile_shp, mode="w", engine="pyogrio")
+    water_gdf.to_file(tile_shp, mode='w', engine='pyogrio')
     water_gdf = None

     xmin, xmax, ymin, ymax = lon, lon + tile_width_deg, lat, lat + tile_height_deg
@@ -160,10 +138,10 @@ def extract_water(
     )

     temp_files = [
-        tile + ".dbf",
-        tile + ".cpg",
-        tile + ".prj",
-        tile + ".shx",
+        tile + '.dbf',
+        tile + '.cpg',
+        tile + '.prj',
+        tile + '.shx',
         tile_shp,
         tile_pbf,
         tile_geojson,
     ]
     remove_temp_files(temp_files)


-def merge_interior_and_ocean(
-    internal_tile_dir, ocean_tile_dir, finished_tile_dir, translate_to_cog: bool = False
-):
+def merge_interior_and_ocean(internal_tile_dir, ocean_tile_dir, finished_tile_dir, translate_to_cog: bool = False):
     """Merge the interior water tiles and ocean water tiles.

     Args:
@@ -182,99 +158,87 @@
         merged_tile_dir: The path to the directory containing the merged water tiles.
""" index = 0 - num_tiles = len([f for f in os.listdir(internal_tile_dir) if f.endswith("tif")]) - 1 + num_tiles = len([f for f in os.listdir(internal_tile_dir) if f.endswith('tif')]) - 1 for filename in os.listdir(internal_tile_dir): - if filename.endswith(".tif"): + if filename.endswith('.tif'): start_time = time.time() internal_tile = internal_tile_dir + filename external_tile = ocean_tile_dir + filename output_tile = finished_tile_dir + filename command = [ - "gdal_calc.py", - "-A", + 'gdal_calc.py', + '-A', internal_tile, - "-B", + '-B', external_tile, - "--format", - "GTiff", - "--outfile", + '--format', + 'GTiff', + '--outfile', output_tile, - "--calc", + '--calc', '"logical_or(A, B)"', ] subprocess.run(command) if translate_to_cog: - cogs_dir = finished_tile_dir + "cogs/" + cogs_dir = finished_tile_dir + 'cogs/' try: os.mkdir(cogs_dir) except FileExistsError: pass out_file = cogs_dir + filename - translate_string = ( - "gdal_translate -ot Byte -of COG -co NUM_THREADS=all_cpus" - ) - command = f"{translate_string} {output_tile} {out_file}".split(" ") + translate_string = 'gdal_translate -ot Byte -of COG -co NUM_THREADS=all_cpus' + command = f'{translate_string} {output_tile} {out_file}'.split(' ') subprocess.run(command) os.remove(output_tile) end_time = time.time() total_time = end_time - start_time - print(f"Elapsed Time: {total_time}(s)") - print(f"Completed {index} of {num_tiles}") + print(f'Elapsed Time: {total_time}(s)') + print(f'Completed {index} of {num_tiles}') index += 1 def main(): parser = argparse.ArgumentParser( - prog="generate_osm_tiles.py", - description="Main script for creating a tiled watermask dataset from OSM data.", + prog='generate_osm_tiles.py', + description='Main script for creating a tiled watermask dataset from OSM data.', ) + parser.add_argument('--planet-file-path', help='The path to the global planet.pbf file.') + parser.add_argument('--ocean-polygons-path', help='The path to the global OSM ocean polygons.') parser.add_argument( - "--planet-file-path", help="The path to the global planet.pbf file." - ) - parser.add_argument( - "--ocean-polygons-path", help="The path to the global OSM ocean polygons." 
-    )
-    parser.add_argument(
-        "--lat-begin",
-        help="The minimum latitude of the dataset in EPSG:4326.",
+        '--lat-begin',
+        help='The minimum latitude of the dataset in EPSG:4326.',
         default=-85,
     )
     parser.add_argument(
-        "--lat-end",
-        help="The maximum latitude of the dataset in EPSG:4326.",
+        '--lat-end',
+        help='The maximum latitude of the dataset in EPSG:4326.',
         default=85,
     )
     parser.add_argument(
-        "--lon-begin",
-        help="The minimum longitude of the dataset in EPSG:4326.",
+        '--lon-begin',
+        help='The minimum longitude of the dataset in EPSG:4326.',
         default=-180,
     )
     parser.add_argument(
-        "--lon-end",
-        help="The maximum longitude of the dataset in EPSG:4326.",
+        '--lon-end',
+        help='The maximum longitude of the dataset in EPSG:4326.',
         default=180,
     )
-    parser.add_argument(
-        "--tile-width", help="The desired width of the tile in degrees.", default=5
-    )
-    parser.add_argument(
-        "--tile-height", help="The desired height of the tile in degrees.", default=5
-    )
+    parser.add_argument('--tile-width', help='The desired width of the tile in degrees.', default=5)
+    parser.add_argument('--tile-height', help='The desired height of the tile in degrees.', default=5)

     args = parser.parse_args()

     try:
-        subprocess.run(["osmium", "--help"], check=True, stdout=subprocess.DEVNULL)
+        subprocess.run(['osmium', '--help'], check=True, stdout=subprocess.DEVNULL)
     except subprocess.CalledProcessError:
-        raise ImportError(
-            "osmium-tool must be installed to run this program: https://osmcode.org/osmium-tool/."
-        )
+        raise ImportError('osmium-tool must be installed to run this program: https://osmcode.org/osmium-tool/.')

     lat_begin = int(args.lat_begin)
     lat_end = int(args.lat_end)
@@ -285,11 +249,11 @@ def main():

     setup_directories([INTERIOR_TILE_DIR, OCEAN_TILE_DIR, FINISHED_TILE_DIR])

-    print("Extracting water from planet file...")
-    processed_pbf_path = "planet_processed.pbf"
+    print('Extracting water from planet file...')
+    processed_pbf_path = 'planet_processed.pbf'
     process_pbf(args.planet_file_path, processed_pbf_path)

-    print("Processing tiles...")
+    print('Processing tiles...')
     lat_range = range(lat_begin, lat_end, tile_height)
     lon_range = range(lon_begin, lon_end, tile_width)
     num_tiles = len(lat_range) * len(lon_range) - 1
@@ -316,16 +280,12 @@
         )
         end_time = time.time()
         total_time = end_time - start_time
-        print(
-            f"Finished initial creation of {tile_name} in {total_time}(s). {index} of {num_tiles}"
-        )
+        print(f'Finished initial creation of {tile_name} in {total_time}(s). {index} of {num_tiles}')
         index += 1

-    print("Merging processed tiles...")
-    merge_interior_and_ocean(
-        INTERIOR_TILE_DIR, OCEAN_TILE_DIR, FINISHED_TILE_DIR, translate_to_cog=True
-    )
+    print('Merging processed tiles...')
+    merge_interior_and_ocean(INTERIOR_TILE_DIR, OCEAN_TILE_DIR, FINISHED_TILE_DIR, translate_to_cog=True)


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/src/asf_tools/watermasking/generate_worldcover_tiles.py b/src/asf_tools/watermasking/generate_worldcover_tiles.py
index 2271d99..92fd90d 100644
--- a/src/asf_tools/watermasking/generate_worldcover_tiles.py
+++ b/src/asf_tools/watermasking/generate_worldcover_tiles.py
@@ -16,12 +16,12 @@
 gdal.UseExceptions()

-PREPROCESSED_TILE_DIR = "worldcover_tiles_preprocessed/"
-UNCROPPED_TILE_DIR = "worldcover_tiles_uncropped/"
-CROPPED_TILE_DIR = "worldcover_tiles/"
-FILENAME_POSTFIX = ".tif"
+PREPROCESSED_TILE_DIR = 'worldcover_tiles_preprocessed/'
+UNCROPPED_TILE_DIR = 'worldcover_tiles_uncropped/'
+CROPPED_TILE_DIR = 'worldcover_tiles/'
+FILENAME_POSTFIX = '.tif'
 WORLDCOVER_TILE_SIZE = 3
-GDAL_OPTIONS = ["COMPRESS=LZW", "TILED=YES", "NUM_THREADS=all_cpus"]
+GDAL_OPTIONS = ['COMPRESS=LZW', 'TILED=YES', 'NUM_THREADS=all_cpus']


 def tile_preprocessing(tile_dir, min_lat, max_lat, min_lon, max_lon):
@@ -32,12 +32,12 @@
         tile_dir: The directory containing all of the worldcover tiles.
     """

-    filenames = [f for f in os.listdir(tile_dir) if f.endswith(".tif")]
+    filenames = [f for f in os.listdir(tile_dir) if f.endswith('.tif')]

     def filename_filter(filename):
-        latitude = int(filename.split("_")[5][1:3])
-        longitude = int(filename.split("_")[5][4:7])
-        if filename.split("_")[5][3] == "W":
+        latitude = int(filename.split('_')[5][1:3])
+        longitude = int(filename.split('_')[5][4:7])
+        if filename.split('_')[5][3] == 'W':
             longitude = -longitude
         mnlat = min_lat - (min_lat % WORLDCOVER_TILE_SIZE)
         mnlon = min_lon - (min_lon % WORLDCOVER_TILE_SIZE)
@@ -54,11 +54,11 @@ def filename_filter(filename):

     for filename in filenames_filtered:
         start_time = time.time()

-        tile_name = filename.split("_")[5]
+        tile_name = filename.split('_')[5]
         filename = str(Path(tile_dir) / filename)
-        dst_filename = PREPROCESSED_TILE_DIR + tile_name + ".tif"
+        dst_filename = PREPROCESSED_TILE_DIR + tile_name + '.tif'

-        print(f"Processing: {filename} --- {dst_filename} -- {index} of {num_tiles}")
+        print(f'Processing: {filename} --- {dst_filename} -- {index} of {num_tiles}')

         src_ds = gdal.Open(filename)
         src_band = src_ds.GetRasterBand(1)
@@ -68,7 +68,7 @@
         water_arr = np.ones(src_arr.shape)
         water_arr[not_water] = 0

-        driver = gdal.GetDriverByName("GTiff")
+        driver = gdal.GetDriverByName('GTiff')
         dst_ds = driver.Create(
             dst_filename,
             water_arr.shape[0],
@@ -89,7 +89,7 @@

         end_time = time.time()
         total_time = end_time - start_time

-        print(f"Processing {dst_filename} took {total_time} seconds.")
+        print(f'Processing {dst_filename} took {total_time} seconds.')

         index += 1
@@ -103,15 +103,13 @@
     Returns:
         current_existing_tiles: The list of tiles that exist after the function has completed.
""" - current_existing_tiles = [ - f for f in os.listdir(tile_dir) if f.endswith(FILENAME_POSTFIX) - ] + current_existing_tiles = [f for f in os.listdir(tile_dir) if f.endswith(FILENAME_POSTFIX)] for lon in lon_range: for lat in lat_range: tile = lat_lon_to_tile_string(lat, lon, is_worldcover=True) - print(f"Checking {tile}") + print(f'Checking {tile}') if tile not in current_existing_tiles: - print(f"Could not find {tile}") + print(f'Could not find {tile}') filename = PREPROCESSED_TILE_DIR + tile x_size, y_size = 36000, 36000 @@ -120,7 +118,7 @@ def create_missing_tiles(tile_dir, lat_range, lon_range): ul_lat = lat + WORLDCOVER_TILE_SIZE geotransform = (ul_lon, x_res, 0, ul_lat, 0, y_res) - driver = gdal.GetDriverByName("GTiff") + driver = gdal.GetDriverByName('GTiff') ds = driver.Create( filename, xsize=x_size, @@ -129,18 +127,16 @@ def create_missing_tiles(tile_dir, lat_range, lon_range): eType=gdal.GDT_Byte, options=GDAL_OPTIONS, ) - ds.SetProjection("EPSG:4326") + ds.SetProjection('EPSG:4326') ds.SetGeoTransform(geotransform) - band = ds.GetRasterBand( - 1 - ) # Write ones, as tiles should only be missing over water. + band = ds.GetRasterBand(1) # Write ones, as tiles should only be missing over water. band.WriteArray(np.ones((x_size, y_size))) del ds del band current_existing_tiles.append(tile) - print(f"Added {tile}") + print(f'Added {tile}') return current_existing_tiles @@ -175,9 +171,7 @@ def get_tiles(osm_tile_coord: tuple, wc_tile_width: int, tile_width: int): return tiles -def lat_lon_to_filenames( - worldcover_tile_dir, osm_tile_coord: tuple, wc_tile_width: int, tile_width: int -): +def lat_lon_to_filenames(worldcover_tile_dir, osm_tile_coord: tuple, wc_tile_width: int, tile_width: int): """Get a list of the Worldcover tile filenames that are necessary to overlap an OSM tile. 
     Args:
@@ -191,10 +185,7 @@
     filenames = []
     tiles = get_tiles(osm_tile_coord, wc_tile_width, tile_width)
     for tile in tiles:
-        filenames.append(
-            worldcover_tile_dir
-            + lat_lon_to_tile_string(tile[0], tile[1], is_worldcover=True)
-        )
+        filenames.append(worldcover_tile_dir + lat_lon_to_tile_string(tile[0], tile[1], is_worldcover=True))

     return filenames
@@ -215,11 +206,11 @@
         projWin=[lon, lat + tile_height, lon + tile_width, lat],
         xRes=pixel_size_x,
         yRes=pixel_size_y,
-        outputSRS="EPSG:4326",
-        format="COG",
-        creationOptions=["NUM_THREADS=all_cpus"],
+        outputSRS='EPSG:4326',
+        format='COG',
+        creationOptions=['NUM_THREADS=all_cpus'],
     )
-    remove_temp_files(["tmp_px_size.tif", "tmp.shp"])
+    remove_temp_files(['tmp_px_size.tif', 'tmp.shp'])


 def build_dataset(worldcover_tile_dir, lat_range, lon_range, tile_width, tile_height):
@@ -236,54 +227,48 @@
             start_time = time.time()
             tile = lat_lon_to_tile_string(lat, lon, is_worldcover=False)
             tile_filename = UNCROPPED_TILE_DIR + tile
-            worldcover_tiles = lat_lon_to_filenames(
-                worldcover_tile_dir, (lat, lon), WORLDCOVER_TILE_SIZE, tile_width
-            )
-            print(f"Processing: {tile_filename} {worldcover_tiles}")
-            merge_tiles(worldcover_tiles, tile_filename, "GTiff", compress=True)
+            worldcover_tiles = lat_lon_to_filenames(worldcover_tile_dir, (lat, lon), WORLDCOVER_TILE_SIZE, tile_width)
+            print(f'Processing: {tile_filename} {worldcover_tiles}')
+            merge_tiles(worldcover_tiles, tile_filename, 'GTiff', compress=True)
             crop_tile(tile, lat, lon, tile_width, tile_height)
             end_time = time.time()
             total_time = end_time - start_time
-            print(f"Time Elapsed: {total_time}s")
+            print(f'Time Elapsed: {total_time}s')


 def main():
     parser = argparse.ArgumentParser(
-        prog="generate_worldcover_tiles.py",
-        description="Main script for creating a tiled watermask dataset from the ESA WorldCover dataset.",
+        prog='generate_worldcover_tiles.py',
+        description='Main script for creating a tiled watermask dataset from the ESA WorldCover dataset.',
     )
     parser.add_argument(
-        "--worldcover-tiles-dir",
-        help="The path to the directory containing the worldcover tifs.",
+        '--worldcover-tiles-dir',
+        help='The path to the directory containing the worldcover tifs.',
     )
     parser.add_argument(
-        "--lat-begin",
-        help="The minimum latitude of the dataset in EPSG:4326.",
+        '--lat-begin',
+        help='The minimum latitude of the dataset in EPSG:4326.',
         default=-85,
         required=True,
     )
     parser.add_argument(
-        "--lat-end",
-        help="The maximum latitude of the dataset in EPSG:4326.",
+        '--lat-end',
+        help='The maximum latitude of the dataset in EPSG:4326.',
         default=85,
     )
     parser.add_argument(
-        "--lon-begin",
-        help="The minimum longitude of the dataset in EPSG:4326.",
+        '--lon-begin',
+        help='The minimum longitude of the dataset in EPSG:4326.',
         default=-180,
     )
     parser.add_argument(
-        "--lon-end",
-        help="The maximum longitude of the dataset in EPSG:4326.",
+        '--lon-end',
+        help='The maximum longitude of the dataset in EPSG:4326.',
         default=180,
     )
-    parser.add_argument(
-        "--tile-width", help="The desired width of the tile in degrees.", default=5
-    )
-    parser.add_argument(
-        "--tile-height", help="The desired height of the tile in degrees.", default=5
-    )
+    parser.add_argument('--tile-width', help='The desired width of the tile in degrees.', default=5)
+    parser.add_argument('--tile-height', help='The desired height of the tile in degrees.', default=5)

     args = parser.parse_args()
@@ -299,9 +284,7 @@ def main():

     setup_directories([PREPROCESSED_TILE_DIR, UNCROPPED_TILE_DIR, CROPPED_TILE_DIR])

     # Process the multi-class masks into water/not-water masks.
-    tile_preprocessing(
-        args.worldcover_tiles_dir, lat_begin, lat_end, lon_begin, lon_end
-    )
+    tile_preprocessing(args.worldcover_tiles_dir, lat_begin, lat_end, lon_begin, lon_end)

     wc_lat_range = range(
         lat_begin - (lat_begin % WORLDCOVER_TILE_SIZE),
@@ -326,5 +309,5 @@ def main():
     )


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/src/asf_tools/watermasking/utils.py b/src/asf_tools/watermasking/utils.py
index da203f1..7f7449b 100644
--- a/src/asf_tools/watermasking/utils.py
+++ b/src/asf_tools/watermasking/utils.py
@@ -4,9 +4,7 @@
 import numpy as np


-def lat_lon_to_tile_string(
-    lat, lon, is_worldcover: bool = False, postfix: str = ".tif"
-):
+def lat_lon_to_tile_string(lat, lon, is_worldcover: bool = False, postfix: str = '.tif'):
     """Get the name of the tile with lower left corner (lat, lon).

     Args:
@@ -18,7 +16,7 @@ def lat_lon_to_tile_string(
     Returns:
         The name of the tile.
     """
-    prefixes = ["N", "S", "E", "W"] if is_worldcover else ["n", "s", "e", "w"]
+    prefixes = ['N', 'S', 'E', 'W'] if is_worldcover else ['n', 's', 'e', 'w']
     if lat >= 0:
         lat_part = prefixes[0] + str(int(lat)).zfill(2)
     else:
@@ -38,19 +36,19 @@ def merge_tiles(tiles, out_filename, out_format, compress=False):
         out_format: The format of the output image.
         out_filename: The name of the output COG.
     """
-    vrt = "merged.vrt"
-    build_vrt_command = ["gdalbuildvrt", vrt] + tiles
+    vrt = 'merged.vrt'
+    build_vrt_command = ['gdalbuildvrt', vrt] + tiles
     if not compress:
-        translate_command = ["gdal_translate", "-of", out_format, vrt, out_filename]
+        translate_command = ['gdal_translate', '-of', out_format, vrt, out_filename]
     else:
         translate_command = [
-            "gdal_translate",
-            "-of",
+            'gdal_translate',
+            '-of',
             out_format,
-            "-co",
-            "COMPRESS=LZW",
-            "-co",
-            "NUM_THREADS=all_cpus",
+            '-co',
+            'COMPRESS=LZW',
+            '-co',
+            'NUM_THREADS=all_cpus',
             vrt,
             out_filename,
         ]
@@ -69,7 +67,7 @@ def remove_temp_files(temp_files: list):
         try:
             os.remove(file)
         except FileNotFoundError:
-            print(f"Temp file {file} was not found, skipping removal...")
+            print(f'Temp file {file} was not found, skipping removal...')


 def setup_directories(dirs: list[str]):
diff --git a/tests/conftest.py b/tests/conftest.py
index 64a8157..7fa4613 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -2,24 +2,22 @@

 def pytest_configure(config):
-    config.addinivalue_line("markers", "integration: marks tests as integration")
+    config.addinivalue_line('markers', 'integration: marks tests as integration')


 def pytest_addoption(parser):
     parser.addoption(
-        "--integration",
-        action="store_true",
+        '--integration',
+        action='store_true',
         default=False,
-        dest="integration",
-        help="enable integration tests",
+        dest='integration',
+        help='enable integration tests',
     )


 def pytest_collection_modifyitems(config, items):
-    if not config.getoption("--integration"):
-        integration_skip = pytest.mark.skip(
-            reason="Integration tests not requested; skipping."
- ) + if not config.getoption('--integration'): + integration_skip = pytest.mark.skip(reason='Integration tests not requested; skipping.') for item in items: - if "integration" in item.keywords: + if 'integration' in item.keywords: item.add_marker(integration_skip) diff --git a/tests/hydrosar/conftest.py b/tests/hydrosar/conftest.py index d5d5b35..4f3fa2e 100644 --- a/tests/hydrosar/conftest.py +++ b/tests/hydrosar/conftest.py @@ -4,37 +4,37 @@ import pytest -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def raster_tiles(): - tiles_file = Path(__file__).parent / "data" / "em_tiles.npz" + tiles_file = Path(__file__).parent / 'data' / 'em_tiles.npz' tile_data = np.load(tiles_file) - tiles = np.ma.MaskedArray(tile_data["tiles"], mask=tile_data["mask"]) + tiles = np.ma.MaskedArray(tile_data['tiles'], mask=tile_data['mask']) return np.log10(tiles) + 30 -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def thresholds(): - thresholds_file = Path(__file__).parent / "data" / "em_thresholds.npz" + thresholds_file = Path(__file__).parent / 'data' / 'em_thresholds.npz' thresholds_data = np.load(thresholds_file) - return thresholds_data["thresholds"] + return thresholds_data['thresholds'] -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def hand_candidates(): - hand_file = Path(__file__).parent / "data" / "hand_candidates.npz" + hand_file = Path(__file__).parent / 'data' / 'hand_candidates.npz' hand_data = np.load(hand_file) - return hand_data["hand_candidates"] + return hand_data['hand_candidates'] -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def hand_window(): - hand_file = Path(__file__).parent / "data" / "hand_window.npz" + hand_file = Path(__file__).parent / 'data' / 'hand_window.npz' hand_data = np.load(hand_file) - return hand_data["hand_window"] + return hand_data['hand_window'] -@pytest.fixture(scope="session") +@pytest.fixture(scope='session') def flood_window(): - flood_file = Path(__file__).parent / "data" / "flood_window.npz" + flood_file = Path(__file__).parent / 'data' / 'flood_window.npz' flood_data = np.load(flood_file) - return flood_data["flood_window"] + return flood_data['flood_window'] diff --git a/tests/hydrosar/test_flood_map.py b/tests/hydrosar/test_flood_map.py index cc9f9f8..f92062e 100644 --- a/tests/hydrosar/test_flood_map.py +++ b/tests/hydrosar/test_flood_map.py @@ -10,16 +10,16 @@ @pytest.mark.integration def test_get_waterbody(): water_raster = ( - "/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap.tif" + '/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap.tif' ) - info = gdal.Info(water_raster, format="json") + info = gdal.Info(water_raster, format='json') known_water_mask = flood_map.get_waterbody(info, threshold=30) test_mask = ( - "/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/S1A_IW_20230228T120437_DVR_RTC30/" - "flood_map/known_water_mask.tif" + '/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/S1A_IW_20230228T120437_DVR_RTC30/' + 'flood_map/known_water_mask.tif' ) test_mask_array = gdal.Open(test_mask, gdal.GA_ReadOnly).ReadAsArray() @@ -50,7 +50,7 @@ def test_estimate_flood_depths_logstat(flood_window, hand_window): 1, hand_window, flood_window, - estimator="logstat", + estimator='logstat', water_level_sigma=3, iterative_bounds=(0, 15), ) @@ -63,7 +63,7 @@ def 
test_estimate_flood_depths_nmad(flood_window, hand_window): 1, hand_window, flood_window, - estimator="nmad", + estimator='nmad', water_level_sigma=3, iterative_bounds=(0, 15), ) @@ -77,7 +77,7 @@ def test_estimate_flood_depths_numpy(flood_window, hand_window): 1, hand_window, flood_window, - estimator="numpy", + estimator='numpy', water_level_sigma=3, iterative_bounds=(0, 15), ) @@ -87,30 +87,28 @@ def test_estimate_flood_depths_numpy(flood_window, hand_window): @pytest.mark.integration def test_make_flood_map(tmp_path): water_raster = ( - "/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap.tif" + '/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap.tif' ) vv_raster = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/flood_map/RTC_VV.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/flood_map/RTC_VV.tif' ) hand_raster = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap_HAND.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap_HAND.tif' ) - out_flood_map = tmp_path / "flood_map.tif" - flood_map.make_flood_map( - out_flood_map, vv_raster, water_raster, hand_raster, estimator="nmad" - ) - out_flood_map = out_flood_map.parent / f"{out_flood_map.stem}_nmad_FloodDepth.tif" + out_flood_map = tmp_path / 'flood_map.tif' + flood_map.make_flood_map(out_flood_map, vv_raster, water_raster, hand_raster, estimator='nmad') + out_flood_map = out_flood_map.parent / f'{out_flood_map.stem}_nmad_FloodDepth.tif' assert out_flood_map.exists() golden_flood_map = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/" - "asf-tools/S1A_IW_20230228T120437_DVR_RTC30/" - "flood_map/flood_map_nmad.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/' + 'asf-tools/S1A_IW_20230228T120437_DVR_RTC30/' + 'flood_map/flood_map_nmad.tif' ) diffs = find_diff(golden_flood_map, str(out_flood_map)) diff --git a/tests/hydrosar/test_hand.py b/tests/hydrosar/test_hand.py index 8eea108..5929126 100644 --- a/tests/hydrosar/test_hand.py +++ b/tests/hydrosar/test_hand.py @@ -9,12 +9,12 @@ from asf_tools.hydrosar import hand HAND_BASINS = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/" - "asf-tools/S1A_IW_20230228T120437_DVR_RTC30/hand/hybas_af_lev12_v1c_firstpoly.geojson" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/' + 'asf-tools/S1A_IW_20230228T120437_DVR_RTC30/hand/hybas_af_lev12_v1c_firstpoly.geojson' ) GOLDEN_HAND = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/" - "asf-tools/S1A_IW_20230228T120437_DVR_RTC30/hand/hybas_af_lev12_v1c_firstpoly.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/' + 'asf-tools/S1A_IW_20230228T120437_DVR_RTC30/hand/hybas_af_lev12_v1c_firstpoly.tif' ) gdal.UseExceptions() @@ -33,7 +33,7 @@ def nodata_equal_nan(golden_hand, out_hand): @pytest.mark.integration def test_make_copernicus_hand(tmp_path): - out_hand = tmp_path / "hand.tif" + out_hand = tmp_path / 'hand.tif' hand.make_copernicus_hand(out_hand, HAND_BASINS) assert out_hand.exists() @@ -44,19 +44,19 @@ def test_make_copernicus_hand(tmp_path): def test_prepare_hand_vrt_no_coverage(): geojson = { - "type": "Point", - "coordinates": [0, 0], + 'type': 
'Point', + 'coordinates': [0, 0], } geometry = ogr.CreateGeometryFromJson(json.dumps(geojson)) with pytest.raises(ValueError): - hand.prepare_hand_vrt("foo", geometry) + hand.prepare_hand_vrt('foo', geometry) def test_prepare_hand_vrt(tmp_path): - hand_vrt = tmp_path / "hand.tif" + hand_vrt = tmp_path / 'hand.tif' geojson = { - "type": "Polygon", - "coordinates": [ + 'type': 'Polygon', + 'coordinates': [ [ [0.4, 10.16], [0.4, 10.86], @@ -71,8 +71,8 @@ def test_prepare_hand_vrt(tmp_path): hand.prepare_hand_vrt(str(hand_vrt), geometry) assert hand_vrt.exists() - info = gdal.Info(str(hand_vrt), format="json") - assert info["geoTransform"] == [ + info = gdal.Info(str(hand_vrt), format='json') + assert info['geoTransform'] == [ -0.0001388888888889, 0.0002777777777778, 0.0, @@ -80,13 +80,13 @@ def test_prepare_hand_vrt(tmp_path): 0.0, -0.0002777777777778, ] - assert info["size"] == [3600, 3600] + assert info['size'] == [3600, 3600] def test_prepare_hand_vrt_antimeridian(): geojson = { - "type": "MultiPolygon", - "coordinates": [ + 'type': 'MultiPolygon', + 'coordinates': [ [ [ [179.5, 51.4], @@ -110,22 +110,22 @@ def test_prepare_hand_vrt_antimeridian(): geometry = ogr.CreateGeometryFromJson(json.dumps(geojson)) with pytest.raises(ValueError): - hand.prepare_hand_vrt("foo", geometry) + hand.prepare_hand_vrt('foo', geometry) def test_intersects_hand_feature(): features = vector.get_features(hand.prepare.HAND_GEOJSON) geojson = { - "type": "Point", - "coordinates": [169, -45], + 'type': 'Point', + 'coordinates': [169, -45], } geometry = ogr.CreateGeometryFromJson(json.dumps(geojson)) assert vector.get_property_values_for_intersecting_features(geometry, features) geojson = { - "type": "Point", - "coordinates": [0, 0], + 'type': 'Point', + 'coordinates': [0, 0], } geometry = ogr.CreateGeometryFromJson(json.dumps(geojson)) assert not vector.get_property_values_for_intersecting_features(geometry, features) diff --git a/tests/hydrosar/test_water_map.py b/tests/hydrosar/test_water_map.py index c758fd2..443786b 100644 --- a/tests/hydrosar/test_water_map.py +++ b/tests/hydrosar/test_water_map.py @@ -15,58 +15,52 @@ def test_determine_em_threshold(raster_tiles): @pytest.mark.integration def test_select_hand_tiles(hand_candidates): - hand_geotif = "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/water-map/20200603_HAND.tif" + hand_geotif = '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/water-map/20200603_HAND.tif' hand_array = read_as_array(str(hand_geotif)) - hand_tiles = np.ma.masked_invalid( - tile_array(hand_array, tile_shape=(100, 100), pad_value=np.nan) - ) + hand_tiles = np.ma.masked_invalid(tile_array(hand_array, tile_shape=(100, 100), pad_value=np.nan)) selected_tiles = water_map.select_hand_tiles(hand_tiles, 15.0, 0.8) assert np.all(selected_tiles == hand_candidates) with pytest.raises(ValueError): - _ = water_map.select_hand_tiles( - np.zeros(shape=(10, 10, 10), dtype=float), 15.0, 0.8 - ) + _ = water_map.select_hand_tiles(np.zeros(shape=(10, 10, 10), dtype=float), 15.0, 0.8) @pytest.mark.integration def test_select_backscatter_tiles(hand_candidates): - backscatter_geotif = "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/water-map/20200603_VH.tif" + backscatter_geotif = '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/water-map/20200603_VH.tif' backscatter_array = np.ma.masked_invalid(read_as_array(backscatter_geotif)) backscatter_tiles = np.ma.masked_less_equal( tile_array(backscatter_array, tile_shape=(100, 100), 
pad_value=0.0), 0.0 ) - tile_indexes = water_map.select_backscatter_tiles( - backscatter_tiles, hand_candidates - ) + tile_indexes = water_map.select_backscatter_tiles(backscatter_tiles, hand_candidates) assert np.all(tile_indexes == np.array([771, 1974, 2397, 1205, 2577])) @pytest.mark.integration def test_make_water_map(tmp_path): vv_geotif = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/water_map/RTC_VV.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/water_map/RTC_VV.tif' ) vh_geotif = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/water_map/RTC_VH.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/water_map/RTC_VH.tif' ) hand_geotif = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/water_map/HAND.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/water_map/HAND.tif' ) - out_water_map = tmp_path / "water_map.tif" + out_water_map = tmp_path / 'water_map.tif' water_map.make_water_map(out_water_map, vv_geotif, vh_geotif, hand_geotif) assert out_water_map.exists() golden_water_map = ( - "/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/water_map/fuzzy_water_map.tif" + '/vsicurl/https://hyp3-testing.s3-us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/water_map/fuzzy_water_map.tif' ) diffs = find_diff(golden_water_map, str(out_water_map)) assert diffs == 0 diff --git a/tests/test_aws.py b/tests/test_aws.py index 36ee459..ffe767d 100644 --- a/tests/test_aws.py +++ b/tests/test_aws.py @@ -12,101 +12,87 @@ def s3_stubber(): def test_get_tag_set(): - assert aws.get_tag_set() == {"TagSet": [{"Key": "file_type", "Value": "product"}]} + assert aws.get_tag_set() == {'TagSet': [{'Key': 'file_type', 'Value': 'product'}]} def test_get_content_type(): - assert aws.get_content_type("foo") == "application/octet-stream" - assert aws.get_content_type("foo.asfd") == "application/octet-stream" - assert aws.get_content_type("foo.txt") == "text/plain" - assert aws.get_content_type("foo.zip") == "application/zip" - assert aws.get_content_type("foo/bar.png") == "image/png" + assert aws.get_content_type('foo') == 'application/octet-stream' + assert aws.get_content_type('foo.asfd') == 'application/octet-stream' + assert aws.get_content_type('foo.txt') == 'text/plain' + assert aws.get_content_type('foo.zip') == 'application/zip' + assert aws.get_content_type('foo/bar.png') == 'image/png' def test_upload_file_to_s3(tmp_path, s3_stubber): expected_params = { - "Body": ANY, - "Bucket": "myBucket", - "Key": "myFile.zip", - "ContentType": "application/zip", + 'Body': ANY, + 'Bucket': 'myBucket', + 'Key': 'myFile.zip', + 'ContentType': 'application/zip', } tag_params = { - "Bucket": "myBucket", - "Key": "myFile.zip", - "Tagging": {"TagSet": [{"Key": "file_type", "Value": "product"}]}, + 'Bucket': 'myBucket', + 'Key': 'myFile.zip', + 'Tagging': {'TagSet': [{'Key': 'file_type', 'Value': 'product'}]}, } - s3_stubber.add_response( - method="put_object", expected_params=expected_params, service_response={} - ) - s3_stubber.add_response( - method="put_object_tagging", expected_params=tag_params, service_response={} - ) + s3_stubber.add_response(method='put_object', 
expected_params=expected_params, service_response={}) + s3_stubber.add_response(method='put_object_tagging', expected_params=tag_params, service_response={}) - file_to_upload = tmp_path / "myFile.zip" + file_to_upload = tmp_path / 'myFile.zip' file_to_upload.touch() - aws.upload_file_to_s3(file_to_upload, "myBucket") + aws.upload_file_to_s3(file_to_upload, 'myBucket') def test_upload_file_to_s3_with_prefix(tmp_path, s3_stubber): expected_params = { - "Body": ANY, - "Bucket": "myBucket", - "Key": "myPrefix/myFile.txt", - "ContentType": "text/plain", + 'Body': ANY, + 'Bucket': 'myBucket', + 'Key': 'myPrefix/myFile.txt', + 'ContentType': 'text/plain', } tag_params = { - "Bucket": "myBucket", - "Key": "myPrefix/myFile.txt", - "Tagging": {"TagSet": [{"Key": "file_type", "Value": "product"}]}, + 'Bucket': 'myBucket', + 'Key': 'myPrefix/myFile.txt', + 'Tagging': {'TagSet': [{'Key': 'file_type', 'Value': 'product'}]}, } - s3_stubber.add_response( - method="put_object", expected_params=expected_params, service_response={} - ) - s3_stubber.add_response( - method="put_object_tagging", expected_params=tag_params, service_response={} - ) - file_to_upload = tmp_path / "myFile.txt" + s3_stubber.add_response(method='put_object', expected_params=expected_params, service_response={}) + s3_stubber.add_response(method='put_object_tagging', expected_params=tag_params, service_response={}) + file_to_upload = tmp_path / 'myFile.txt' file_to_upload.touch() - aws.upload_file_to_s3(file_to_upload, "myBucket", "myPrefix") + aws.upload_file_to_s3(file_to_upload, 'myBucket', 'myPrefix') def test_get_path_to_s3_file(s3_stubber): expected_params = { - "Bucket": "myBucket", - "Prefix": "myPrefix", + 'Bucket': 'myBucket', + 'Prefix': 'myPrefix', } service_response = { - "Contents": [ - {"Key": "myPrefix/foo.txt"}, - {"Key": "myPrefix/foo.nc"}, - {"Key": "myPrefix/foo.txt"}, - {"Key": "myPrefix/bar.nc"}, + 'Contents': [ + {'Key': 'myPrefix/foo.txt'}, + {'Key': 'myPrefix/foo.nc'}, + {'Key': 'myPrefix/foo.txt'}, + {'Key': 'myPrefix/bar.nc'}, ], } s3_stubber.add_response( - method="list_objects_v2", + method='list_objects_v2', expected_params=expected_params, service_response=service_response, ) - assert ( - aws.get_path_to_s3_file("myBucket", "myPrefix", file_type=".nc") - == "/vsis3/myBucket/myPrefix/foo.nc" - ) + assert aws.get_path_to_s3_file('myBucket', 'myPrefix', file_type='.nc') == '/vsis3/myBucket/myPrefix/foo.nc' s3_stubber.add_response( - method="list_objects_v2", + method='list_objects_v2', expected_params=expected_params, service_response=service_response, ) - assert ( - aws.get_path_to_s3_file("myBucket", "myPrefix", file_type=".txt") - == "/vsis3/myBucket/myPrefix/foo.txt" - ) + assert aws.get_path_to_s3_file('myBucket', 'myPrefix', file_type='.txt') == '/vsis3/myBucket/myPrefix/foo.txt' s3_stubber.add_response( - method="list_objects_v2", + method='list_objects_v2', expected_params=expected_params, service_response=service_response, ) - assert aws.get_path_to_s3_file("myBucket", "myPrefix", file_type=".csv") is None + assert aws.get_path_to_s3_file('myBucket', 'myPrefix', file_type='.csv') is None diff --git a/tests/test_composite.py b/tests/test_composite.py index bba165c..d1e4ee0 100644 --- a/tests/test_composite.py +++ b/tests/test_composite.py @@ -25,12 +25,7 @@ def test_get_target_epsg_code(): assert composite.get_target_epsg_code([32701, 32760, 32701]) == 32701 assert composite.get_target_epsg_code([32701, 32760, 32760]) == 32760 - assert ( - composite.get_target_epsg_code( - [32731, 32631, 32731, 32631, 
32732, 32633, 32733, 32633, 32733] - ) - == 32732 - ) + assert composite.get_target_epsg_code([32731, 32631, 32731, 32631, 32732, 32633, 32733, 32633, 32733]) == 32732 # bounds with pytest.raises(ValueError): @@ -46,34 +41,25 @@ def test_get_target_epsg_code(): def test_get_area_raster(): - raster = "S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_VV.tif" - assert ( - composite.get_area_raster(raster) - == "S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif" - ) + raster = 'S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_VV.tif' + assert composite.get_area_raster(raster) == 'S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif' - raster = "./foo/S1B_IW_20181104T030247_DVP_RTC30_G_gpuned_9F91_VH.tif" - assert ( - composite.get_area_raster(raster) - == "./foo/S1B_IW_20181104T030247_DVP_RTC30_G_gpuned_9F91_area.tif" - ) + raster = './foo/S1B_IW_20181104T030247_DVP_RTC30_G_gpuned_9F91_VH.tif' + assert composite.get_area_raster(raster) == './foo/S1B_IW_20181104T030247_DVP_RTC30_G_gpuned_9F91_area.tif' - raster = "/tmp/bar/S1B_IW_20181102T031956_DVP_RTC30_G_gpuned_1259_HH.tif" - assert ( - composite.get_area_raster(raster) - == "/tmp/bar/S1B_IW_20181102T031956_DVP_RTC30_G_gpuned_1259_area.tif" - ) + raster = '/tmp/bar/S1B_IW_20181102T031956_DVP_RTC30_G_gpuned_1259_HH.tif' + assert composite.get_area_raster(raster) == '/tmp/bar/S1B_IW_20181102T031956_DVP_RTC30_G_gpuned_1259_area.tif' def test_get_full_extents(): data = {} - data["a"] = { - "cornerCoordinates": { - "upperLeft": [10.0, 130.0], - "lowerRight": [110.0, 30.0], + data['a'] = { + 'cornerCoordinates': { + 'upperLeft': [10.0, 130.0], + 'lowerRight': [110.0, 30.0], }, - "geoTransform": [10.0, 2.0, 0.0, 40.0, 0.0, -2.0], + 'geoTransform': [10.0, 2.0, 0.0, 40.0, 0.0, -2.0], } expected_upper_left = (10.0, 130.0) @@ -85,12 +71,12 @@ def test_get_full_extents(): expected_geotransform, ) - data["b"] = { - "cornerCoordinates": { - "upperLeft": [20.0, 140.0], - "lowerRight": [120.0, 40.0], + data['b'] = { + 'cornerCoordinates': { + 'upperLeft': [20.0, 140.0], + 'lowerRight': [120.0, 40.0], }, - "geoTransform": [20.0, 1.0, 12.0, 140.0, 13.0, -1.0], + 'geoTransform': [20.0, 1.0, 12.0, 140.0, 13.0, -1.0], } expected_upper_left = (10.0, 140.0) @@ -120,10 +106,8 @@ def test_make_composite(tmp_path): [1, 1, 1, 1], ] ) - asf_tools.raster.write_cog( - "first_data.tif", data, transform, epsg_code, nodata_value=0 - ) - asf_tools.raster.write_cog("first_area.tif", area, transform, epsg_code) + asf_tools.raster.write_cog('first_data.tif', data, transform, epsg_code, nodata_value=0) + asf_tools.raster.write_cog('first_area.tif', area, transform, epsg_code) transform = [30.0, 30.0, 0.0, 30.0, 0.0, -30.0] data = np.array( @@ -138,15 +122,13 @@ def test_make_composite(tmp_path): [1, 1, 2, 1], ] ) - asf_tools.raster.write_cog("second_data.tif", data, transform, epsg_code) - asf_tools.raster.write_cog("second_area.tif", area, transform, epsg_code) + asf_tools.raster.write_cog('second_data.tif', data, transform, epsg_code) + asf_tools.raster.write_cog('second_area.tif', area, transform, epsg_code) - out_file, count_file = composite.make_composite( - "out", ["first_data.tif", "second_data.tif"] - ) + out_file, count_file = composite.make_composite('out', ['first_data.tif', 'second_data.tif']) - assert out_file == "out.tif" - assert count_file == "out_counts.tif" + assert out_file == 'out.tif' + assert count_file == 'out_counts.tif' assert os.path.exists(out_file) assert os.path.exists(count_file) diff --git a/tests/test_dem.py b/tests/test_dem.py index 
27e5bd4..8d1f079 100644 --- a/tests/test_dem.py +++ b/tests/test_dem.py @@ -10,19 +10,19 @@ def test_prepare_dem_vrt_no_coverage(): geojson = { - "type": "Point", - "coordinates": [0, 0], + 'type': 'Point', + 'coordinates': [0, 0], } geometry = ogr.CreateGeometryFromJson(json.dumps(geojson)) with pytest.raises(ValueError): - dem.prepare_dem_vrt("foo", geometry) + dem.prepare_dem_vrt('foo', geometry) def test_prepare_dem_vrt(tmp_path): - dem_vrt = tmp_path / "dem.tif" + dem_vrt = tmp_path / 'dem.tif' geojson = { - "type": "Polygon", - "coordinates": [ + 'type': 'Polygon', + 'coordinates': [ [ [0.4, 10.16], [0.4, 10.86], @@ -37,8 +37,8 @@ def test_prepare_dem_vrt(tmp_path): dem.prepare_dem_vrt(str(dem_vrt), geometry) assert dem_vrt.exists() - info = gdal.Info(str(dem_vrt), format="json") - assert info["geoTransform"] == [ + info = gdal.Info(str(dem_vrt), format='json') + assert info['geoTransform'] == [ -0.0001388888888889, 0.0002777777777778, 0.0, @@ -46,14 +46,14 @@ def test_prepare_dem_vrt(tmp_path): 0.0, -0.0002777777777778, ] - assert info["size"] == [3600, 3600] + assert info['size'] == [3600, 3600] def test_prepare_dem_geotiff_antimeridian(tmp_path): - dem_vrt = tmp_path / "dem.vrt" + dem_vrt = tmp_path / 'dem.vrt' geojson = { - "type": "MultiPolygon", - "coordinates": [ + 'type': 'MultiPolygon', + 'coordinates': [ [ [ [179.5, 51.4], diff --git a/tests/test_entrypoints.py b/tests/test_entrypoints.py index 2e2293d..d8f1434 100644 --- a/tests/test_entrypoints.py +++ b/tests/test_entrypoints.py @@ -1,18 +1,18 @@ def test_make_composite(script_runner): - ret = script_runner.run(["make_composite", "-h"]) + ret = script_runner.run(['make_composite', '-h']) assert ret.success def test_water_map(script_runner): - ret = script_runner.run(["water_map", "-h"]) + ret = script_runner.run(['water_map', '-h']) assert ret.success def test_make_hand(script_runner): - ret = script_runner.run(["calculate_hand", "-h"]) + ret = script_runner.run(['calculate_hand', '-h']) assert ret.success def test_flood_map(script_runner): - ret = script_runner.run(["flood_map", "-h"]) + ret = script_runner.run(['flood_map', '-h']) assert ret.success diff --git a/tests/test_raster.py b/tests/test_raster.py index e6243ab..143bc12 100644 --- a/tests/test_raster.py +++ b/tests/test_raster.py @@ -7,60 +7,50 @@ def test_convert_scale(): - c = raster.convert_scale(np.array([-10, -5, 0, 5, 10]), "amplitude", "power") + c = raster.convert_scale(np.array([-10, -5, 0, 5, 10]), 'amplitude', 'power') assert np.allclose(c, np.array([100, 25, 0, 25, 100])) - c = raster.convert_scale(np.array([-10, -5, 0, 5, 10]), "amplitude", "db") + c = raster.convert_scale(np.array([-10, -5, 0, 5, 10]), 'amplitude', 'db') assert np.allclose(c, np.array([20.0, 13.97940009, -np.inf, 13.97940009, 20.0])) - c = raster.convert_scale(np.array([-1, 0, 1, 4, 9]), "power", "amplitude") + c = raster.convert_scale(np.array([-1, 0, 1, 4, 9]), 'power', 'amplitude') assert np.isnan(c[0]) assert np.allclose(c[1:], np.array([0, 1, 2, 3])) - c = raster.convert_scale(np.array([-1, 0, 1, 4, 9]), "power", "db") + c = raster.convert_scale(np.array([-1, 0, 1, 4, 9]), 'power', 'db') assert np.isnan(c[0]) assert np.allclose( c[1:], np.array([-np.inf, 0.0, 6.02059991, 9.54242509]), ) - c = raster.convert_scale( - np.array([np.nan, -np.inf, 0.0, 6.02059991, 9.54242509]), "db", "power" - ) + c = raster.convert_scale(np.array([np.nan, -np.inf, 0.0, 6.02059991, 9.54242509]), 'db', 'power') assert np.isnan(c[0]) assert np.allclose(c[1:], np.array([0, 1, 4, 9])) - c = 
-        np.array([-np.inf, -20.0, 0.0, 13.97940009, 20.0]), "db", "amplitude"
-    )
+    c = raster.convert_scale(np.array([-np.inf, -20.0, 0.0, 13.97940009, 20.0]), 'db', 'amplitude')
     assert np.allclose(c, np.array([0.0, 0.1, 1.0, 5.0, 10.0]))

     a = np.array([-10, -5, 0, 5, 10])
     with pytest.raises(ValueError):
-        _ = raster.convert_scale(a, "power", "foo")
+        _ = raster.convert_scale(a, 'power', 'foo')
     with pytest.raises(ValueError):
-        _ = raster.convert_scale(a, "bar", "amplitude")
+        _ = raster.convert_scale(a, 'bar', 'amplitude')

     with pytest.warns(UserWarning):
         assert np.allclose(
-            raster.convert_scale(a, "amplitude", "amplitude"),
+            raster.convert_scale(a, 'amplitude', 'amplitude'),
             np.array([-10, -5, 0, 5, 10]),
         )
     with pytest.warns(UserWarning):
-        assert np.allclose(
-            raster.convert_scale(a, "power", "power"), np.array([-10, -5, 0, 5, 10])
-        )
+        assert np.allclose(raster.convert_scale(a, 'power', 'power'), np.array([-10, -5, 0, 5, 10]))
     with pytest.warns(UserWarning):
-        assert np.allclose(
-            raster.convert_scale(a, "db", "db"), np.array([-10, -5, 0, 5, 10])
-        )
+        assert np.allclose(raster.convert_scale(a, 'db', 'db'), np.array([-10, -5, 0, 5, 10]))


 def test_convert_scale_masked_arrays():
-    masked_array = np.ma.MaskedArray(
-        [-1, 0, 1, 4, 9], mask=[False, False, False, False, False]
-    )
-    c = raster.convert_scale(masked_array, "power", "db")
+    masked_array = np.ma.MaskedArray([-1, 0, 1, 4, 9], mask=[False, False, False, False, False])
+    c = raster.convert_scale(masked_array, 'power', 'db')
     assert np.allclose(c.mask, [True, True, False, False, False])
     assert np.allclose(
         c,
@@ -70,15 +60,13 @@ def test_convert_scale_masked_arrays():
         ),
     )

-    a = raster.convert_scale(c, "db", "power")
+    a = raster.convert_scale(c, 'db', 'power')
     assert np.allclose(a.mask, [True, True, False, False, False])
-    assert np.allclose(
-        a, np.ma.MaskedArray([-1, 0, 1, 4, 9], mask=[True, True, False, False, False])
-    )
+    assert np.allclose(a, np.ma.MaskedArray([-1, 0, 1, 4, 9], mask=[True, True, False, False, False]))


 def test_write_cog(tmp_path):
-    outfile = tmp_path / "out.tif"
+    outfile = tmp_path / 'out.tif'
     data = np.ones((1024, 1024))
     transform = [10.0, 0.0, 1.0, 20.0, 0.0, -1.0]
     epsg_code = 4326
@@ -87,10 +75,10 @@ def test_write_cog(tmp_path):
     assert result == str(outfile)
     assert outfile.exists()

-    info = gdal.Info(result, format="json")
-    assert info["geoTransform"] == transform
-    assert info["driverShortName"] == "GTiff"
-    assert info["size"] == [1024, 1024]
-    assert "overviews" in info["bands"][0]
-    assert info["metadata"]["IMAGE_STRUCTURE"]["LAYOUT"] == "COG"
-    assert info["metadata"]["IMAGE_STRUCTURE"]["COMPRESSION"] == "LZW"
+    info = gdal.Info(result, format='json')
+    assert info['geoTransform'] == transform
+    assert info['driverShortName'] == 'GTiff'
+    assert info['size'] == [1024, 1024]
+    assert 'overviews' in info['bands'][0]
+    assert info['metadata']['IMAGE_STRUCTURE']['LAYOUT'] == 'COG'
+    assert info['metadata']['IMAGE_STRUCTURE']['COMPRESSION'] == 'LZW'
diff --git a/tests/test_tile.py b/tests/test_tile.py
index fe8219b..c888bda 100644
--- a/tests/test_tile.py
+++ b/tests/test_tile.py
@@ -66,22 +66,12 @@ def test_tile_masked_array():
     tiled = tile.tile_array(ma, tile_shape=(3, 3), pad_value=4)
     assert isinstance(tiled, np.ma.MaskedArray)
     assert tiled.shape == (4, 3, 3)
+    assert np.all(np.ma.getdata(tiled[0, :, :]) == np.array([[0, 0, 1], [0, 0, 1], [2, 2, 3]]))
     assert np.all(
-        np.ma.getdata(tiled[0, :, :]) == np.array([[0, 0, 1], [0, 0, 1], [2, 2, 3]])
-    )
-    assert np.all(
-        tiled[0, :, :].mask
-        == np.array(
-            [[False, False, False], [False, False, False], [False, False, False]]
-        )
-    )
-    assert np.all(
-        np.ma.getdata(tiled[-1, :, :]) == np.array([[3, 4, 4], [4, 4, 4], [4, 4, 4]])
-    )
-    assert np.all(
-        tiled[-1, :, :].mask
-        == np.array([[True, True, True], [True, True, True], [True, True, True]])
+        tiled[0, :, :].mask == np.array([[False, False, False], [False, False, False], [False, False, False]])
     )
+    assert np.all(np.ma.getdata(tiled[-1, :, :]) == np.array([[3, 4, 4], [4, 4, 4], [4, 4, 4]]))
+    assert np.all(tiled[-1, :, :].mask == np.array([[True, True, True], [True, True, True], [True, True, True]]))


 def test_untile_array():
@@ -96,43 +86,21 @@ def test_untile_array():
         ]
     )

-    assert np.all(
-        a
-        == tile.untile_array(tile.tile_array(a, tile_shape=(2, 2)), array_shape=a.shape)
-    )
-    assert np.all(
-        a
-        == tile.untile_array(
-            tile.tile_array(a, tile_shape=(4, 4), pad_value=9), array_shape=a.shape
-        )
-    )
-    assert np.all(
-        a
-        == tile.untile_array(
-            tile.tile_array(a, tile_shape=(2, 4), pad_value=9), array_shape=a.shape
-        )
-    )
-    assert np.all(
-        a
-        == tile.untile_array(
-            tile.tile_array(a, tile_shape=(4, 2), pad_value=9), array_shape=a.shape
-        )
-    )
+    assert np.all(a == tile.untile_array(tile.tile_array(a, tile_shape=(2, 2)), array_shape=a.shape))
+    assert np.all(a == tile.untile_array(tile.tile_array(a, tile_shape=(4, 4), pad_value=9), array_shape=a.shape))
+    assert np.all(a == tile.untile_array(tile.tile_array(a, tile_shape=(2, 4), pad_value=9), array_shape=a.shape))
+    assert np.all(a == tile.untile_array(tile.tile_array(a, tile_shape=(4, 2), pad_value=9), array_shape=a.shape))

     with pytest.raises(ValueError):
         tile.untile_array(tile.tile_array(a, tile_shape=(4, 4)), array_shape=(9, 9))

     with pytest.raises(ValueError):
-        tile.untile_array(
-            tile.tile_array(a, tile_shape=(2, 4), pad_value=9), array_shape=(6, 9)
-        )
+        tile.untile_array(tile.tile_array(a, tile_shape=(2, 4), pad_value=9), array_shape=(6, 9))

     # array shape will subset some of the padding that was required to tile `a` with `tile_shape`
     assert np.all(
         np.pad(a, ((0, 0), (0, 2)), constant_values=9)
-        == tile.untile_array(
-            tile.tile_array(a, tile_shape=(2, 4), pad_value=9), array_shape=(6, 8)
-        )
+        == tile.untile_array(tile.tile_array(a, tile_shape=(2, 4), pad_value=9), array_shape=(6, 8))
     )


@@ -140,9 +108,7 @@ def test_untile_masked_array():
     a = np.array([[0, 0, 1, 1], [0, 0, 1, 1], [2, 2, 3, 3], [2, 2, 3, 3]])

     with pytest.raises(AttributeError):
-        _ = tile.untile_array(
-            tile.tile_array(a, tile_shape=(2, 2)), array_shape=a.shape
-        ).mask
+        _ = tile.untile_array(tile.tile_array(a, tile_shape=(2, 2)), array_shape=a.shape).mask

     m = np.array(
         [
@@ -154,15 +120,11 @@ def test_untile_masked_array():
         ]
     )

     ma = np.ma.MaskedArray(a, mask=m)
-    untiled = tile.untile_array(
-        tile.tile_array(ma.copy(), tile_shape=(2, 2)), array_shape=a.shape
-    )
+    untiled = tile.untile_array(tile.tile_array(ma.copy(), tile_shape=(2, 2)), array_shape=a.shape)
     assert np.all(ma == untiled)
     assert np.all(ma.mask == untiled.mask)

-    untiled = tile.untile_array(
-        tile.tile_array(ma.copy(), tile_shape=(3, 3), pad_value=4), array_shape=a.shape
-    )
+    untiled = tile.untile_array(tile.tile_array(ma.copy(), tile_shape=(3, 3), pad_value=4), array_shape=a.shape)
     assert np.all(ma == untiled)
     assert np.all(ma.mask == untiled.mask)
diff --git a/tests/test_util.py b/tests/test_util.py
index 5178de9..59f3c16 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -5,28 +5,28 @@ def test_get_epsg_code():
54N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",141],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32654"]]' - info = {"coordinateSystem": {"wkt": wkt}} + info = {'coordinateSystem': {'wkt': wkt}} assert util.get_epsg_code(info) == 32654 wkt = 'PROJCS["WGS 84 / UTM zone 22N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-51],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32622"]]' - info = {"coordinateSystem": {"wkt": wkt}} + info = {'coordinateSystem': {'wkt': wkt}} assert util.get_epsg_code(info) == 32622 wkt = 'PROJCS["WGS 84 / UTM zone 33S",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",10000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32733"]]' - info = {"coordinateSystem": {"wkt": wkt}} + info = {'coordinateSystem': {'wkt': wkt}} assert util.get_epsg_code(info) == 32733 wkt = 'PROJCS["NAD83 / Alaska Albers",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Albers_Conic_Equal_Area"],PARAMETER["latitude_of_center",50],PARAMETER["longitude_of_center",-154],PARAMETER["standard_parallel_1",55],PARAMETER["standard_parallel_2",65],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","3338"]]' - info = {"coordinateSystem": {"wkt": wkt}} + info = {'coordinateSystem': {'wkt': wkt}} assert util.get_epsg_code(info) == 3338 def test_get_coordinates(): water_raster = ( - "/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/" - "S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap.tif" + '/vsicurl/https://hyp3-testing.s3.us-west-2.amazonaws.com/asf-tools/' + 'S1A_IW_20230228T120437_DVR_RTC30/flood_map/watermap.tif' ) - info = gdal.Info(water_raster, format="json") + info = gdal.Info(water_raster, format='json') west, south, east, north = util.get_coordinates(info) diff --git a/tests/test_vector.py b/tests/test_vector.py index dba75d4..24ddbf9 100644 --- a/tests/test_vector.py +++ 
+++ b/tests/test_vector.py
@@ -9,15 +9,15 @@ def test_intersects_feature():
     features = vector.get_features(dem.DEM_GEOJSON)

     geojson = {
-        "type": "Point",
-        "coordinates": [169, -45],
+        'type': 'Point',
+        'coordinates': [169, -45],
     }
     geometry = ogr.CreateGeometryFromJson(json.dumps(geojson))
     assert vector.get_property_values_for_intersecting_features(geometry, features)

     geojson = {
-        "type": "Point",
-        "coordinates": [0, 0],
+        'type': 'Point',
+        'coordinates': [0, 0],
     }
     geometry = ogr.CreateGeometryFromJson(json.dumps(geojson))
     assert not vector.get_property_values_for_intersecting_features(geometry, features)
@@ -27,39 +27,32 @@ def test_get_intersecting_feature_properties():
     dem_tile_features = vector.get_features(dem.DEM_GEOJSON)

     geojson = {
-        "type": "Point",
-        "coordinates": [0, 0],
+        'type': 'Point',
+        'coordinates': [0, 0],
     }
     geometry = ogr.CreateGeometryFromJson(json.dumps(geojson))
-    assert (
-        vector.intersecting_feature_properties(geometry, dem_tile_features, "file_path")
-        == []
-    )
+    assert vector.intersecting_feature_properties(geometry, dem_tile_features, 'file_path') == []

     geojson = {
-        "type": "Point",
-        "coordinates": [169, -45],
+        'type': 'Point',
+        'coordinates': [169, -45],
     }
     geometry = ogr.CreateGeometryFromJson(json.dumps(geojson))
-    assert vector.intersecting_feature_properties(
-        geometry, dem_tile_features, "file_path"
-    ) == [
-        "/vsicurl/https://copernicus-dem-30m.s3.amazonaws.com/"
-        "Copernicus_DSM_COG_10_S46_00_E169_00_DEM/Copernicus_DSM_COG_10_S46_00_E169_00_DEM.tif"
+    assert vector.intersecting_feature_properties(geometry, dem_tile_features, 'file_path') == [
+        '/vsicurl/https://copernicus-dem-30m.s3.amazonaws.com/'
+        'Copernicus_DSM_COG_10_S46_00_E169_00_DEM/Copernicus_DSM_COG_10_S46_00_E169_00_DEM.tif'
     ]

     geojson = {
-        "type": "MultiPoint",
-        "coordinates": [[0, 0], [169, -45], [-121.5, 73.5]],
+        'type': 'MultiPoint',
+        'coordinates': [[0, 0], [169, -45], [-121.5, 73.5]],
     }
     geometry = ogr.CreateGeometryFromJson(json.dumps(geojson))
-    assert vector.intersecting_feature_properties(
-        geometry, dem_tile_features, "file_path"
-    ) == [
-        "/vsicurl/https://copernicus-dem-30m.s3.amazonaws.com/"
-        "Copernicus_DSM_COG_10_N73_00_W122_00_DEM/Copernicus_DSM_COG_10_N73_00_W122_00_DEM.tif",
-        "/vsicurl/https://copernicus-dem-30m.s3.amazonaws.com/"
-        "Copernicus_DSM_COG_10_S46_00_E169_00_DEM/Copernicus_DSM_COG_10_S46_00_E169_00_DEM.tif",
+    assert vector.intersecting_feature_properties(geometry, dem_tile_features, 'file_path') == [
+        '/vsicurl/https://copernicus-dem-30m.s3.amazonaws.com/'
+        'Copernicus_DSM_COG_10_N73_00_W122_00_DEM/Copernicus_DSM_COG_10_N73_00_W122_00_DEM.tif',
+        '/vsicurl/https://copernicus-dem-30m.s3.amazonaws.com/'
+        'Copernicus_DSM_COG_10_S46_00_E169_00_DEM/Copernicus_DSM_COG_10_S46_00_E169_00_DEM.tif',
     ]