Skip to content

Commit

Permalink
add ruff config options and reformat
Browse files — browse the repository at this point in the history
  • Loading branch information
jtherrmann committed Dec 16, 2024
1 parent 5b3a2ca commit 79a91d7
Show file tree
Hide file tree
Showing 34 changed files with 719 additions and 1,045 deletions.
5 changes: 1 addition & 4 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,11 +52,8 @@ flood_map = "asf_tools.hydrosar.flood_map:hyp3"

[project.optional-dependencies]
develop = [
"flake8",
"flake8-import-order",
"flake8-blind-except",
"flake8-builtins",
"gdal-utils",
"ruff",
"pytest",
"pytest-cov",
"pytest-console-scripts",
Expand Down
6 changes: 5 additions & 1 deletion ruff.toml
Original file line number Diff line number Diff line change
@@ -1,2 +1,6 @@
cache-dir = "~/.cache/ruff"
exclude = ["prototype"]

line-length = 120

[format]
quote-style = "single"
2 changes: 1 addition & 1 deletion src/asf_tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,5 @@
__version__ = version(__name__)

__all__ = [
"__version__",
'__version__',
]
16 changes: 7 additions & 9 deletions src/asf_tools/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,23 +4,21 @@


def main():
parser = argparse.ArgumentParser(
prefix_chars="+", formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser = argparse.ArgumentParser(prefix_chars='+', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
"++process",
choices=["water_map", "flood_map"],
default="water_map",
help="Select the HyP3 entrypoint to use", # HyP3 entrypoints are specified in `pyproject.toml`
'++process',
choices=['water_map', 'flood_map'],
default='water_map',
help='Select the HyP3 entrypoint to use', # HyP3 entrypoints are specified in `pyproject.toml`
)

args, unknowns = parser.parse_known_args()
# NOTE: Cast to set because of: https://github.com/pypa/setuptools/issues/3649
(process_entry_point,) = set(entry_points(group="hyp3", name=args.process))
(process_entry_point,) = set(entry_points(group='hyp3', name=args.process))

sys.argv = [args.process, *unknowns]
sys.exit(process_entry_point.load()())


if __name__ == "__main__":
if __name__ == '__main__':
main()
18 changes: 9 additions & 9 deletions src/asf_tools/aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,28 +5,28 @@

import boto3

S3_CLIENT = boto3.client("s3")
S3_CLIENT = boto3.client('s3')
log = logging.getLogger(__name__)


def get_tag_set() -> dict:
tag_set = {"TagSet": [{"Key": "file_type", "Value": "product"}]}
tag_set = {'TagSet': [{'Key': 'file_type', 'Value': 'product'}]}
return tag_set


def get_content_type(file_location: Union[Path, str]) -> str:
content_type = guess_type(file_location)[0]
if not content_type:
content_type = "application/octet-stream"
content_type = 'application/octet-stream'
return content_type


def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str = ""):
def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str = ''):
path_to_file = Path(path_to_file)
key = str(Path(prefix) / path_to_file.name)
extra_args = {"ContentType": get_content_type(key)}
extra_args = {'ContentType': get_content_type(key)}

log.info(f"Uploading s3://{bucket}/{key}")
log.info(f'Uploading s3://{bucket}/{key}')
S3_CLIENT.upload_file(str(path_to_file), bucket, key, extra_args)

tag_set = get_tag_set()
Expand All @@ -36,7 +36,7 @@ def upload_file_to_s3(path_to_file: Union[str, Path], bucket: str, prefix: str =

def get_path_to_s3_file(bucket_name, bucket_prefix, file_type: str):
result = S3_CLIENT.list_objects_v2(Bucket=bucket_name, Prefix=bucket_prefix)
for s3_object in result["Contents"]:
key = s3_object["Key"]
for s3_object in result['Contents']:
key = s3_object['Key']
if key.endswith(file_type):
return f"/vsis3/{bucket_name}/{key}"
return f'/vsis3/{bucket_name}/{key}'
105 changes: 40 additions & 65 deletions src/asf_tools/composite.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def get_target_epsg_code(codes: List[int]) -> int:
# South: 327XX
valid_codes = list(range(32601, 32661)) + list(range(32701, 32761))
if bad_codes := set(codes) - set(valid_codes):
raise ValueError(f"Non UTM EPSG code encountered: {bad_codes}")
raise ValueError(f'Non UTM EPSG code encountered: {bad_codes}')

hemispheres = [c // 100 * 100 for c in codes]
# if even modes, choose lowest (North)
Expand All @@ -67,7 +67,7 @@ def get_area_raster(raster: str) -> str:
Returns:
area_raster: path of the area raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif
"""
return "_".join(raster.split("_")[:-1] + ["area.tif"])
return '_'.join(raster.split('_')[:-1] + ['area.tif'])


def get_full_extent(raster_info: dict):
Expand All @@ -81,24 +81,20 @@ def get_full_extent(raster_info: dict):
upper_right: The lower right corner of the extent as a tuple
geotransform: The geotransform of the extent as a list
"""
upper_left_corners = [
info["cornerCoordinates"]["upperLeft"] for info in raster_info.values()
]
lower_right_corners = [
info["cornerCoordinates"]["lowerRight"] for info in raster_info.values()
]
upper_left_corners = [info['cornerCoordinates']['upperLeft'] for info in raster_info.values()]
lower_right_corners = [info['cornerCoordinates']['lowerRight'] for info in raster_info.values()]

ulx = min([ul[0] for ul in upper_left_corners])
uly = max([ul[1] for ul in upper_left_corners])
lrx = max([lr[0] for lr in lower_right_corners])
lry = min([lr[1] for lr in lower_right_corners])

log.debug(f"Full extent raster upper left: ({ulx, uly}); lower right: ({lrx, lry})")
log.debug(f'Full extent raster upper left: ({ulx, uly}); lower right: ({lrx, lry})')

trans = []
for info in raster_info.values():
# Only need info from any one raster
trans = info["geoTransform"]
trans = info['geoTransform']
break

trans[0] = ulx
Expand All @@ -107,9 +103,7 @@ def get_full_extent(raster_info: dict):
return (ulx, uly), (lrx, lry), trans


def reproject_to_target(
raster_info: dict, target_epsg_code: int, target_resolution: float, directory: str
) -> dict:
def reproject_to_target(raster_info: dict, target_epsg_code: int, target_resolution: float, directory: str) -> dict:
"""Reprojects a set of raster images to a common projection and resolution
Args:
Expand All @@ -124,38 +118,34 @@ def reproject_to_target(
target_raster_info = {}
for raster, info in raster_info.items():
epsg_code = get_epsg_code(info)
resolution = info["geoTransform"][1]
resolution = info['geoTransform'][1]
if epsg_code != target_epsg_code or resolution != target_resolution:
log.info(f"Reprojecting {raster}")
log.info(f'Reprojecting {raster}')
reprojected_raster = os.path.join(directory, os.path.basename(raster))
gdal.Warp(
reprojected_raster,
raster,
dstSRS=f"EPSG:{target_epsg_code}",
dstSRS=f'EPSG:{target_epsg_code}',
xRes=target_resolution,
yRes=target_resolution,
targetAlignedPixels=True,
)

area_raster = get_area_raster(raster)
log.info(f"Reprojecting {area_raster}")
reprojected_area_raster = os.path.join(
directory, os.path.basename(area_raster)
)
log.info(f'Reprojecting {area_raster}')
reprojected_area_raster = os.path.join(directory, os.path.basename(area_raster))
gdal.Warp(
reprojected_area_raster,
area_raster,
dstSRS=f"EPSG:{target_epsg_code}",
dstSRS=f'EPSG:{target_epsg_code}',
xRes=target_resolution,
yRes=target_resolution,
targetAlignedPixels=True,
)

target_raster_info[reprojected_raster] = gdal.Info(
reprojected_raster, format="json"
)
target_raster_info[reprojected_raster] = gdal.Info(reprojected_raster, format='json')
else:
log.info(f"No need to reproject {raster}")
log.info(f'No need to reproject {raster}')
target_raster_info[raster] = info

return target_raster_info
Expand All @@ -174,25 +164,23 @@ def make_composite(out_name: str, rasters: List[str], resolution: float = None):
out_counts_raster: Path to the created GeoTIFF with counts of scenes contributing to each pixel
"""
if not rasters:
raise ValueError("Must specify at least one raster to composite")
raise ValueError('Must specify at least one raster to composite')

raster_info = {}
for raster in rasters:
raster_info[raster] = gdal.Info(raster, format="json")
raster_info[raster] = gdal.Info(raster, format='json')
# make sure gdal can read the area raster
gdal.Info(get_area_raster(raster))

target_epsg_code = get_target_epsg_code(
[get_epsg_code(info) for info in raster_info.values()]
)
log.debug(f"Composite projection is EPSG:{target_epsg_code}")
target_epsg_code = get_target_epsg_code([get_epsg_code(info) for info in raster_info.values()])
log.debug(f'Composite projection is EPSG:{target_epsg_code}')

if resolution is None:
resolution = max([info["geoTransform"][1] for info in raster_info.values()])
log.debug(f"Composite resolution is {resolution} meters")
resolution = max([info['geoTransform'][1] for info in raster_info.values()])
log.debug(f'Composite resolution is {resolution} meters')

# resample rasters to maximum resolution & common UTM zone
with TemporaryDirectory(prefix="reprojected_") as temp_dir:
with TemporaryDirectory(prefix='reprojected_') as temp_dir:
raster_info = reproject_to_target(
raster_info,
target_epsg_code=target_epsg_code,
Expand All @@ -211,7 +199,7 @@ def make_composite(out_name: str, rasters: List[str], resolution: float = None):
counts = np.zeros(outputs.shape, dtype=np.int8)

for raster, info in raster_info.items():
log.info(f"Processing raster {raster}")
log.info(f'Processing raster {raster}')
log.debug(
f"Raster upper left: {info['cornerCoordinates']['upperLeft']}; "
f"lower right: {info['cornerCoordinates']['lowerRight']}"
Expand All @@ -222,27 +210,23 @@ def make_composite(out_name: str, rasters: List[str], resolution: float = None):
area_raster = get_area_raster(raster)
areas = read_as_array(area_raster)

ulx, uly = info["cornerCoordinates"]["upperLeft"]
ulx, uly = info['cornerCoordinates']['upperLeft']
y_index_start = int((full_ul[1] - uly) // resolution)
y_index_end = y_index_start + values.shape[0]

x_index_start = int((ulx - full_ul[0]) // resolution)
x_index_end = x_index_start + values.shape[1]

log.debug(
f"Placing values in output grid at {y_index_start}:{y_index_end} and {x_index_start}:{x_index_end}"
f'Placing values in output grid at {y_index_start}:{y_index_end} and {x_index_start}:{x_index_end}'
)

mask = values == 0
raster_weights = 1.0 / areas
raster_weights[mask] = 0

outputs[y_index_start:y_index_end, x_index_start:x_index_end] += (
values * raster_weights
)
weights[y_index_start:y_index_end, x_index_start:x_index_end] += (
raster_weights
)
outputs[y_index_start:y_index_end, x_index_start:x_index_end] += values * raster_weights
weights[y_index_start:y_index_end, x_index_start:x_index_end] += raster_weights
counts[y_index_start:y_index_end, x_index_start:x_index_end] += ~mask

del values, areas, mask, raster_weights
Expand All @@ -251,13 +235,11 @@ def make_composite(out_name: str, rasters: List[str], resolution: float = None):
outputs /= weights
del weights

out_raster = write_cog(
f"{out_name}.tif", outputs, full_trans, target_epsg_code, nodata_value=0
)
out_raster = write_cog(f'{out_name}.tif', outputs, full_trans, target_epsg_code, nodata_value=0)
del outputs

out_counts_raster = write_cog(
f"{out_name}_counts.tif",
f'{out_name}_counts.tif',
counts,
full_trans,
target_epsg_code,
Expand All @@ -273,34 +255,27 @@ def main():
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('out_name', help='Base name of output composite GeoTIFF (without extension)')
parser.add_argument('rasters', nargs='+', help='Sentinel-1 GeoTIFF rasters to composite')
parser.add_argument(
"out_name", help="Base name of output composite GeoTIFF (without extension)"
)
parser.add_argument(
"rasters", nargs="+", help="Sentinel-1 GeoTIFF rasters to composite"
)
parser.add_argument(
"-r",
"--resolution",
'-r',
'--resolution',
type=float,
help="Desired output resolution in meters "
"(default is the max resolution of all the input files)",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Turn on verbose logging"
help='Desired output resolution in meters ' '(default is the max resolution of all the input files)',
)
parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')
args = parser.parse_args()

level = logging.DEBUG if args.verbose else logging.INFO
logging.basicConfig(
stream=sys.stdout,
format="%(asctime)s - %(levelname)s - %(message)s",
format='%(asctime)s - %(levelname)s - %(message)s',
level=level,
)
log.debug(" ".join(sys.argv))
log.info(f"Creating a composite of {len(args.rasters)} rasters")
log.debug(' '.join(sys.argv))
log.info(f'Creating a composite of {len(args.rasters)} rasters')

raster, counts = make_composite(args.out_name, args.rasters, args.resolution)

log.info(f"Composite created successfully: {raster}")
log.info(f"Number of rasters contributing to each pixel: {counts}")
log.info(f'Composite created successfully: {raster}')
log.info(f'Number of rasters contributing to each pixel: {counts}')
22 changes: 7 additions & 15 deletions src/asf_tools/dem.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from asf_tools import vector
from asf_tools.util import GDALConfigManager

DEM_GEOJSON = "/vsicurl/https://asf-dem-west.s3.amazonaws.com/v2/cop30-2021.geojson"
DEM_GEOJSON = '/vsicurl/https://asf-dem-west.s3.amazonaws.com/v2/cop30-2021.geojson'

gdal.UseExceptions()
ogr.UseExceptions()
Expand All @@ -27,26 +27,18 @@ def prepare_dem_vrt(vrt: Union[str, Path], geometry: Union[ogr.Geometry, BaseGeo
geometry: Geometry in EPSG:4326 (lon/lat) projection for which to prepare a DEM mosaic
"""
with GDALConfigManager(GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR"):
with GDALConfigManager(GDAL_DISABLE_READDIR_ON_OPEN='EMPTY_DIR'):
if isinstance(geometry, BaseGeometry):
geometry = ogr.CreateGeometryFromWkb(geometry.wkb)

min_lon, max_lon, _, _ = geometry.GetEnvelope()
if min_lon < -160.0 and max_lon > 160.0:
raise ValueError(
f"asf_tools does not currently support geometries that cross the antimeridian: {geometry}"
)
raise ValueError(f'asf_tools does not currently support geometries that cross the antimeridian: {geometry}')

tile_features = vector.get_features(DEM_GEOJSON)
if not vector.get_property_values_for_intersecting_features(
geometry, tile_features
):
raise ValueError(
f"Copernicus GLO-30 DEM does not intersect this geometry: {geometry}"
)

dem_file_paths = vector.intersecting_feature_properties(
geometry, tile_features, "file_path"
)
if not vector.get_property_values_for_intersecting_features(geometry, tile_features):
raise ValueError(f'Copernicus GLO-30 DEM does not intersect this geometry: {geometry}')

dem_file_paths = vector.intersecting_feature_properties(geometry, tile_features, 'file_path')

gdal.BuildVRT(str(vrt), dem_file_paths)
Loading

0 comments on commit 79a91d7

Please sign in to comment.