diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml
index 3ce6f429..7740de9e 100644
--- a/.github/workflows/changelog.yml
+++ b/.github/workflows/changelog.yml
@@ -13,6 +13,4 @@ on:
 
 jobs:
   call-changelog-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.11.0
-    secrets:
-      USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.11.2
diff --git a/.github/workflows/create-jira-issue.yml b/.github/workflows/create-jira-issue.yml
index 0b69efec..99489d50 100644
--- a/.github/workflows/create-jira-issue.yml
+++ b/.github/workflows/create-jira-issue.yml
@@ -6,7 +6,7 @@ on:
 
 jobs:
   call-create-jira-issue-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.11.2
     secrets:
       JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
       JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
diff --git a/.github/workflows/labeled-pr.yml b/.github/workflows/labeled-pr.yml
index 66ba502e..f89f3e3b 100644
--- a/.github/workflows/labeled-pr.yml
+++ b/.github/workflows/labeled-pr.yml
@@ -12,4 +12,4 @@ on:
 
 jobs:
   call-labeled-pr-check-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.11.2
diff --git a/.github/workflows/release-template-comment.yml b/.github/workflows/release-template-comment.yml
index cae89e6f..dd3650ce 100644
--- a/.github/workflows/release-template-comment.yml
+++ b/.github/workflows/release-template-comment.yml
@@ -7,6 +7,6 @@ on:
 
 jobs:
   call-release-checklist-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.11.2
     secrets:
       USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index c8fed248..784648ef 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,7 +7,7 @@ on:
 
 jobs:
   call-release-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.11.2
     with:
       release_prefix: HyP3 autoRIFT
     secrets:
diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml
index 3b07f93b..c7f96d1e 100644
--- a/.github/workflows/static-analysis.yml
+++ b/.github/workflows/static-analysis.yml
@@ -4,10 +4,10 @@ on: push
 
 jobs:
   call-flake8-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-flake8.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-flake8.yml@v0.11.2
     with:
       local_package_names: hyp3_autorift
       excludes: src/hyp3_autorift/vend
 
   call-secrets-analysis-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.11.2
diff --git a/.github/workflows/tag-version.yml b/.github/workflows/tag-version.yml
index 4714c1f1..3b94479b 100644
--- a/.github/workflows/tag-version.yml
+++ b/.github/workflows/tag-version.yml
@@ -7,6 +7,6 @@ on:
 
 jobs:
   call-bump-version-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.11.2
     secrets:
       USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
diff --git a/.github/workflows/test-and-build.yml b/.github/workflows/test-and-build.yml
index cee1a1e2..f23e4baf 100644
--- a/.github/workflows/test-and-build.yml
+++ b/.github/workflows/test-and-build.yml
@@ -12,18 +12,20 @@ on:
 
 jobs:
   call-pytest-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-pytest.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-pytest.yml@v0.11.2
     with:
       local_package_name: hyp3_autorift
       python_versions: >-
         ["3.9"]
 
   call-version-info-workflow:
-    uses: ASFHyP3/actions/.github/workflows/reusable-version-info.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-version-info.yml@v0.11.2
+    with:
+      python_version: '3.9'
 
   call-docker-ghcr-workflow:
     needs: call-version-info-workflow
-    uses: ASFHyP3/actions/.github/workflows/reusable-docker-ghcr.yml@v0.11.0
+    uses: ASFHyP3/actions/.github/workflows/reusable-docker-ghcr.yml@v0.11.2
     with:
       version_tag: ${{ needs.call-version-info-workflow.outputs.version_tag }}
     secrets:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9c60d4c4..1ad1b937 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,15 @@
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.18.0]
+### Added
+* The Sentinel-1 correction workflow will now calculate and write the M11/M12 conversion matrices to a netCDF file.
+
+### Fixed
+* `hyp3_autorift.crop` will now preserve the `add_offset` and `scale_factor` encoding attributes for all variables, and in particular, for the M11/M12 conversion matrices.
+
+### Removed
+* Support for Python 3.8 has been dropped.
 ## [0.17.0]
 ### Changed
diff --git a/environment.yml b/environment.yml
index 0afab498..3fb87871 100644
--- a/environment.yml
+++ b/environment.yml
@@ -6,7 +6,7 @@ channels:
 dependencies:
   - boto3
   - botocore
-  - python>=3.8,<3.10  # Top pin to fix ISCE2 incompatibility: https://github.com/isce-framework/isce2/issues/458
+  - python>=3.9,<3.10  # Top pin to fix ISCE2 incompatibility: https://github.com/isce-framework/isce2/issues/458
   - pip
   # For packaging, and testing
   - build
diff --git a/pyproject.toml b/pyproject.toml
index 2032aa16..878d283b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "hyp3_autorift"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 authors = [
     {name="ASF APD/Tools Team", email="uaf-asf-apd@alaska.edu"},
 ]
@@ -17,9 +17,7 @@ classifiers=[
     "Natural Language :: English",
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
 ]
 dependencies = [
     'boto3',
diff --git a/src/hyp3_autorift/crop.py b/src/hyp3_autorift/crop.py
index 4e14ab6a..58a725dc 100644
--- a/src/hyp3_autorift/crop.py
+++ b/src/hyp3_autorift/crop.py
@@ -36,22 +36,7 @@
 import pyproj
 import xarray as xr
 
-
-ENCODING_TEMPLATE = {
-    'interp_mask': {'_FillValue': 0.0, 'dtype': 'ubyte', "zlib": True, "complevel": 2, "shuffle": True},
-    'chip_size_height': {'_FillValue': 0.0, 'dtype': 'ushort', "zlib": True, "complevel": 2, "shuffle": True},
-    'chip_size_width': {'_FillValue': 0.0, 'dtype': 'ushort', "zlib": True, "complevel": 2, "shuffle": True},
-    'M11': {'_FillValue': -32767, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'M12': {'_FillValue': -32767, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'v': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'vx': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'vy': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'v_error': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'va': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'vr': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
-    'x': {'_FillValue': None},
-    'y': {'_FillValue': None}
-    }
+ENCODING_ATTRS = ['_FillValue', 'dtype', "zlib", "complevel", "shuffle", 'add_offset', 'scale_factor']
 
 
 def crop_netcdf_product(netcdf_file: Path) -> Path:
@@ -114,10 +99,12 @@ def crop_netcdf_product(netcdf_file: Path) -> Path:
     chunk_lines = np.min([np.ceil(8192 / dims['y']) * 128, dims['y']])
     two_dim_chunks_settings = (chunk_lines, dims['x'])
 
-    encoding = ENCODING_TEMPLATE.copy()
-    if not netcdf_file.name.startswith('S1'):
-        for radar_variable in ['M11', 'M12', 'va', 'vr']:
-            del encoding[radar_variable]
+    encoding = {}
+    for variable in ds.data_vars.keys():
+        if variable in ['img_pair_info', 'mapping']:
+            continue
+        attributes = {attr: ds[variable].encoding[attr] for attr in ENCODING_ATTRS if attr in ds[variable].encoding}
+        encoding[variable] = attributes
 
     for _, attributes in encoding.items():
         if attributes['_FillValue'] is not None:
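An aside on the crop.py change above (illustrative commentary, not part of the diff): xarray keeps each variable's on-disk packing recipe in its `.encoding` dict, and the new loop copies the allow-listed keys from the source dataset instead of applying a hard-coded template, which is how `add_offset` and `scale_factor` now survive for the packed M11/M12 variables. A minimal sketch of the idea, using a toy dataset with hypothetical values:

    import numpy as np
    import xarray as xr

    # Toy stand-in for a cropped product; the variable and values are made up.
    ds = xr.Dataset({'M11': ('x', np.array([1.2e-4, 3.4e-4, 5.6e-4]))})
    ds['M11'].encoding = {'dtype': 'int16', 'scale_factor': 4.4e-6, 'add_offset': 3.4e-4, '_FillValue': -32767}

    # Mirrors the new loop in crop.py: carry over only the allow-listed encoding keys.
    ENCODING_ATTRS = ['_FillValue', 'dtype', 'zlib', 'complevel', 'shuffle', 'add_offset', 'scale_factor']
    encoding = {
        name: {attr: ds[name].encoding[attr] for attr in ENCODING_ATTRS if attr in ds[name].encoding}
        for name in ds.data_vars
    }

    # The netCDF4 backend uses these keys to pack the floats back into int16;
    # an encoding dict without add_offset/scale_factor loses the packing recipe.
    ds.to_netcdf('cropped.nc', encoding=encoding)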
diff --git a/src/hyp3_autorift/process.py b/src/hyp3_autorift/process.py
index 38f5d9f8..0ff7b60d 100644
--- a/src/hyp3_autorift/process.py
+++ b/src/hyp3_autorift/process.py
@@ -242,7 +242,7 @@ def apply_wallis_nodata_fill_filter(array: np.ndarray, nodata: int) -> Tuple[np.
 
 
 def _apply_filter_function(image_path: str, filter_function: Callable) -> Tuple[str, Optional[str]]:
-    image_array, image_transform, image_projection, image_nodata = utils.load_geospatial(image_path)
+    image_array, image_transform, image_projection, _, image_nodata = utils.load_geospatial(image_path)
     image_array = image_array.astype(np.float32)
 
     image_filtered, zero_mask = filter_function(image_array, image_nodata)
diff --git a/src/hyp3_autorift/s1_correction.py b/src/hyp3_autorift/s1_correction.py
index dc5995a1..fd785566 100644
--- a/src/hyp3_autorift/s1_correction.py
+++ b/src/hyp3_autorift/s1_correction.py
@@ -16,8 +16,6 @@ def main():
     )
     parser.add_argument('--bucket', help='AWS bucket to upload product files to')
    parser.add_argument('--bucket-prefix', default='', help='AWS prefix (location in bucket) to add to product files')
-    parser.add_argument('--esa-username', default=None, help="Username for ESA's Copernicus Data Space Ecosystem")
-    parser.add_argument('--esa-password', default=None, help="Password for ESA's Copernicus Data Space Ecosystem")
     parser.add_argument('--buffer', type=int, default=0, help='Number of pixels to buffer each edge of the input scene')
     parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE,
                         help='Shapefile for determining the correct search parameters by geographic location. '
@@ -25,8 +23,9 @@ def main():
     parser.add_argument('granule', help='Reference granule to process')
     args = parser.parse_args()
 
-    _ = generate_correction_data(args.granule, buffer=args.buffer)
+    _, conversion_nc = generate_correction_data(args.granule, buffer=args.buffer)
 
     if args.bucket:
+        upload_file_to_s3(conversion_nc, args.bucket, args.bucket_prefix)
         for geotiff in Path.cwd().glob('*.tif'):
             upload_file_to_s3(geotiff, args.bucket, args.bucket_prefix)
diff --git a/src/hyp3_autorift/s1_isce2.py b/src/hyp3_autorift/s1_isce2.py
index c1b2ee97..58c873ac 100644
--- a/src/hyp3_autorift/s1_isce2.py
+++ b/src/hyp3_autorift/s1_isce2.py
@@ -3,15 +3,17 @@
 import os
 import sys
 import textwrap
-from datetime import timedelta
+from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Optional
+from typing import List
 
 import numpy as np
+from autoRIFT import __version__ as version
 from hyp3lib.fetch import download_file
 from hyp3lib.get_orb import downloadSentinelOrbitFile
 from hyp3lib.scene import get_download_url
-from osgeo import gdal
+from netCDF4 import Dataset
+from osgeo import gdal, osr
 
 from hyp3_autorift import geometry, utils
 from hyp3_autorift.process import DEFAULT_PARAMETER_FILE
@@ -89,13 +91,197 @@ def process_sentinel1_with_isce2(reference, secondary, parameter_file):
     return netcdf_file
 
 
+def write_conversion_file(
+    *,
+    file_name: str,
+    srs: osr.SpatialReference,
+    epsg: int,
+    tran: List[float],
+    x: np.ndarray,
+    y: np.ndarray,
+    M11: np.ndarray,
+    M12: np.ndarray,
+    dr_2_vr_factor: float,
+    ChunkSize: List[int],
+    NoDataValue: int = -32767,
+    noDataMask: np.ndarray,
+    parameter_file: str
+) -> str:
+
+    nc_outfile = Dataset(file_name, 'w', clobber=True, format='NETCDF4')
+
+    nc_outfile.setncattr('GDAL_AREA_OR_POINT', 'Area')
+    nc_outfile.setncattr('Conventions', 'CF-1.8')
+    nc_outfile.setncattr('date_created', datetime.now().strftime("%d-%b-%Y %H:%M:%S"))
+    nc_outfile.setncattr('title', 'autoRIFT S1 Corrections')
+    nc_outfile.setncattr('autoRIFT_software_version', version)
+    nc_outfile.setncattr('autoRIFT_parameter_file', parameter_file)
+
+    nc_outfile.createDimension('x', len(x))
+    nc_outfile.createDimension('y', len(y))
+
+    var = nc_outfile.createVariable('x', np.dtype('float64'), 'x', fill_value=None)
+    var.setncattr('standard_name', 'projection_x_coordinate')
+    var.setncattr('description', 'x coordinate of projection')
+    var.setncattr('units', 'm')
+    var[:] = x
+
+    var = nc_outfile.createVariable('y', np.dtype('float64'), 'y', fill_value=None)
+    var.setncattr('standard_name', 'projection_y_coordinate')
+    var.setncattr('description', 'y coordinate of projection')
+    var.setncattr('units', 'm')
+    var[:] = y
+
+    mapping_var_name = 'mapping'
+    var = nc_outfile.createVariable(mapping_var_name, 'U1', (), fill_value=None)
+    if srs.GetAttrValue('PROJECTION') == 'Polar_Stereographic':
+        var.setncattr('grid_mapping_name', 'polar_stereographic')
+        var.setncattr('straight_vertical_longitude_from_pole', srs.GetProjParm('central_meridian'))
+        var.setncattr('false_easting', srs.GetProjParm('false_easting'))
+        var.setncattr('false_northing', srs.GetProjParm('false_northing'))
+        var.setncattr('latitude_of_projection_origin', np.sign(srs.GetProjParm('latitude_of_origin')) * 90.0)
+        var.setncattr('latitude_of_origin', srs.GetProjParm('latitude_of_origin'))
+        var.setncattr('semi_major_axis', float(srs.GetAttrValue('GEOGCS|SPHEROID', 1)))
+        var.setncattr('scale_factor_at_projection_origin', 1)
+        var.setncattr('inverse_flattening', float(srs.GetAttrValue('GEOGCS|SPHEROID', 2)))
+        var.setncattr('spatial_ref', srs.ExportToWkt())
+        var.setncattr('crs_wkt', srs.ExportToWkt())
+        var.setncattr('proj4text', srs.ExportToProj4())
+        var.setncattr('spatial_epsg', epsg)
+        var.setncattr('GeoTransform', ' '.join(str(x) for x in tran))
+
+    elif srs.GetAttrValue('PROJECTION') == 'Transverse_Mercator':
+        var.setncattr('grid_mapping_name', 'universal_transverse_mercator')
+        zone = epsg - np.floor(epsg / 100) * 100
+        var.setncattr('utm_zone_number', zone)
+        var.setncattr('longitude_of_central_meridian', srs.GetProjParm('central_meridian'))
+        var.setncattr('false_easting', srs.GetProjParm('false_easting'))
+        var.setncattr('false_northing', srs.GetProjParm('false_northing'))
+        var.setncattr('latitude_of_projection_origin', srs.GetProjParm('latitude_of_origin'))
+        var.setncattr('semi_major_axis', float(srs.GetAttrValue('GEOGCS|SPHEROID', 1)))
+        var.setncattr('scale_factor_at_central_meridian', srs.GetProjParm('scale_factor'))
+        var.setncattr('inverse_flattening', float(srs.GetAttrValue('GEOGCS|SPHEROID', 2)))
+        var.setncattr('spatial_ref', srs.ExportToWkt())
+        var.setncattr('crs_wkt', srs.ExportToWkt())
+        var.setncattr('proj4text', srs.ExportToProj4())
+        var.setncattr('spatial_epsg', epsg)
+        var.setncattr('GeoTransform', ' '.join(str(x) for x in tran))
+    else:
+        raise Exception(f'Projection {srs.GetAttrValue("PROJECTION")} not recognized for this program')
+
+    var = nc_outfile.createVariable('M11', np.dtype('int16'), ('y', 'x'), fill_value=NoDataValue,
+                                    zlib=True, complevel=2, shuffle=True, chunksizes=ChunkSize)
+    var.setncattr('standard_name', 'conversion_matrix_element_11')
+    var.setncattr(
+        'description',
+        'conversion matrix element (1st row, 1st column) that can be multiplied with vx to give range pixel '
+        'displacement dr (see Eq. A18 in https://www.mdpi.com/2072-4292/13/4/749)'
+    )
+    var.setncattr('units', 'pixel/(meter/year)')
+    var.setncattr('grid_mapping', mapping_var_name)
+    var.setncattr('dr_to_vr_factor', dr_2_vr_factor)
+    var.setncattr('dr_to_vr_factor_description', 'multiplicative factor that converts slant range '
+                                                 'pixel displacement dr to slant range velocity vr')
+
+    x1 = np.nanmin(M11[:])
+    x2 = np.nanmax(M11[:])
+    y1 = -50
+    y2 = 50
+
+    C = [(y2 - y1) / (x2 - x1), y1 - x1 * (y2 - y1) / (x2 - x1)]
+    var.setncattr('scale_factor', np.float32(1 / C[0]))
+    var.setncattr('add_offset', np.float32(-C[1] / C[0]))
+
+    M11[noDataMask] = NoDataValue * np.float32(1 / C[0]) + np.float32(-C[1] / C[0])
+    var[:] = M11
+
+    var = nc_outfile.createVariable('M12', np.dtype('int16'), ('y', 'x'), fill_value=NoDataValue,
+                                    zlib=True, complevel=2, shuffle=True, chunksizes=ChunkSize)
+    var.setncattr('standard_name', 'conversion_matrix_element_12')
+    var.setncattr(
+        'description',
+        'conversion matrix element (1st row, 2nd column) that can be multiplied with vy to give range pixel '
+        'displacement dr (see Eq. A18 in https://www.mdpi.com/2072-4292/13/4/749)'
+    )
+    var.setncattr('units', 'pixel/(meter/year)')
+    var.setncattr('grid_mapping', mapping_var_name)
+    var.setncattr('dr_to_vr_factor', dr_2_vr_factor)
+    var.setncattr('dr_to_vr_factor_description',
+                  'multiplicative factor that converts slant range pixel displacement dr to slant range velocity vr')
+
+    x1 = np.nanmin(M12[:])
+    x2 = np.nanmax(M12[:])
+    y1 = -50
+    y2 = 50
+
+    C = [(y2 - y1) / (x2 - x1), y1 - x1 * (y2 - y1) / (x2 - x1)]
+    var.setncattr('scale_factor', np.float32(1 / C[0]))
+    var.setncattr('add_offset', np.float32(-C[1] / C[0]))
+
+    M12[noDataMask] = NoDataValue * np.float32(1 / C[0]) + np.float32(-C[1] / C[0])
+    var[:] = M12
+
+    return file_name
+
+
+def create_conversion_matrices(
+    *,
+    scene: str,
+    grid_location: str = 'window_location.tif',
+    offset2vx: str = 'window_rdr_off2vel_x_vec.tif',
+    offset2vy: str = 'window_rdr_off2vel_y_vec.tif',
+    scale_factor: str = 'window_scale_factor.tif',
+    epsg: int = 4326,
+    parameter_file: str = DEFAULT_PARAMETER_FILE,
+    **kwargs,
+) -> str:
+    xGrid, tran, _, srs, nodata = utils.load_geospatial(grid_location, band=1)
+
+    offset2vy_1, _, _, _, _ = utils.load_geospatial(offset2vy, band=1)
+    offset2vy_1[offset2vy_1 == nodata] = np.nan
+
+    offset2vy_2, _, _, _, _ = utils.load_geospatial(offset2vy, band=2)
+    offset2vy_2[offset2vy_2 == nodata] = np.nan
+
+    offset2vx_1, _, _, _, _ = utils.load_geospatial(offset2vx, band=1)
+    offset2vx_1[offset2vx_1 == nodata] = np.nan
+
+    offset2vx_2, _, _, _, _ = utils.load_geospatial(offset2vx, band=2)
+    offset2vx_2[offset2vx_2 == nodata] = np.nan
+
+    offset2vr, _, _, _, _ = utils.load_geospatial(offset2vx, band=3)
+    offset2vr[offset2vr == nodata] = np.nan
+
+    scale_factor_1, _, _, _, _ = utils.load_geospatial(scale_factor, band=1)
+    scale_factor_1[scale_factor_1 == nodata] = np.nan
+
+    dimidY, dimidX = xGrid.shape
+    noDataMask = xGrid == nodata
+
+    y = np.arange(tran[3], tran[3] + tran[5] * dimidY, tran[5])
+    x = np.arange(tran[0], tran[0] + tran[1] * dimidX, tran[1])
+
+    chunk_lines = np.min([np.ceil(8192 / dimidY) * 128, dimidY])
+    ChunkSize = [chunk_lines, dimidX]
+
+    M11 = offset2vy_2 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) / scale_factor_1
+    M12 = -offset2vx_2 / (offset2vx_1 * offset2vy_2 - offset2vx_2 * offset2vy_1) / scale_factor_1
+
+    dr_2_vr_factor = np.median(offset2vr[np.logical_not(np.isnan(offset2vr))])
+
+    conversion_nc = write_conversion_file(
+        file_name=f'{scene}_conversion_matrices.nc', srs=srs, epsg=epsg, tran=tran, x=x, y=y, M11=M11, M12=M12,
+        dr_2_vr_factor=dr_2_vr_factor, ChunkSize=ChunkSize, noDataMask=noDataMask, parameter_file=parameter_file,
+    )
+
+    return conversion_nc
+
+
 def generate_correction_data(
     scene: str,
     buffer: int = 0,
     parameter_file: str = DEFAULT_PARAMETER_FILE,
-    esa_username: Optional[str] = None,
-    esa_password: Optional[str] = None,
-):
+) -> (dict, str):
     from hyp3_autorift.vend.testGeogrid_ISCE import loadParsedata, runGeogrid
     scene_path = Path(f'{scene}.zip')
     if not scene_path.exists():
@@ -105,8 +291,7 @@ def generate_correction_data(
     orbits = Path('Orbits').resolve()
     orbits.mkdir(parents=True, exist_ok=True)
 
-    if (esa_username is None) or (esa_password is None):
-        esa_username, esa_password = get_esa_credentials()
+    esa_username, esa_password = get_esa_credentials()
 
     state_vec, oribit_provider = downloadSentinelOrbitFile(
         scene, directory=str(orbits), esa_credentials=(esa_username, esa_password)
@@ -131,7 +316,15 @@ def generate_correction_data(
     )
 
     geogrid_info = runGeogrid(reference_meta, secondary_meta, epsg=parameter_info['epsg'], **parameter_info['geogrid'])
 
-    return geogrid_info
+    # NOTE: After Geogrid is run, all drivers are no longer registered.
+    # I've got no idea why, or if there are other effects...
+    gdal.AllRegister()
+
+    conversion_nc = create_conversion_matrices(
+        scene=scene, epsg=parameter_info['epsg'], parameter_file=parameter_file, **parameter_info['autorift']
+    )
+
+    return geogrid_info, conversion_nc
 
 
 class SysArgvManager:
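Two details in the s1_isce2.py additions deserve a gloss (illustrative commentary, not part of the diff). First, the M11/M12 expressions in create_conversion_matrices are the first row of the inverse of the 2x2 matrix [[offset2vx_1, offset2vx_2], [offset2vy_1, offset2vy_2]], divided by scale_factor_1; the shared denominator in both expressions is that matrix's determinant. Second, write_conversion_file packs each float matrix into int16 by fitting the line that maps the data range [x1, x2] onto [-50, 50] and then inverting it into the CF scale_factor/add_offset convention (unpacked = stored * scale_factor + add_offset). A worked sketch of that packing, with a made-up array:

    import numpy as np

    def pack_params(data, y1=-50, y2=50):
        # Line y = C[0]*x + C[1] mapping [nanmin, nanmax] onto [y1, y2],
        # exactly as computed in write_conversion_file above.
        x1, x2 = np.nanmin(data), np.nanmax(data)
        C = [(y2 - y1) / (x2 - x1), y1 - x1 * (y2 - y1) / (x2 - x1)]
        # Invert the line for the CF convention: unpacked = stored * scale + offset
        return np.float32(1 / C[0]), np.float32(-C[1] / C[0])

    M11 = np.array([1.2e-4, 3.4e-4, 5.6e-4])  # hypothetical conversion-matrix values
    scale_factor, add_offset = pack_params(M11)

    stored = np.round((M11 - add_offset) / scale_factor).astype(np.int16)  # what netCDF4 writes
    unpacked = stored * scale_factor + add_offset                          # what a reader recovers
    assert np.allclose(unpacked, M11, atol=float(scale_factor) / 2)

The same algebra explains the NoDataValue lines: writing NoDataValue * scale_factor + add_offset into the masked cells means they pack back to the declared fill value, -32767.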
diff --git a/src/hyp3_autorift/utils.py b/src/hyp3_autorift/utils.py
index a36a80a4..f2c58e85 100644
--- a/src/hyp3_autorift/utils.py
+++ b/src/hyp3_autorift/utils.py
@@ -125,10 +125,13 @@ def load_geospatial(infile: str, band: int = 1):
 
     data = ds.GetRasterBand(band).ReadAsArray()
     nodata = ds.GetRasterBand(band).GetNoDataValue()
-    projection = ds.GetProjection()
+
     transform = ds.GetGeoTransform()
+    projection = ds.GetProjection()
+    srs = ds.GetSpatialRef()
+
     del ds
-    return data, transform, projection, nodata
+    return data, transform, projection, srs, nodata
 
 
 def write_geospatial(outfile: str, data, transform, projection, nodata,
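A final note (commentary, not part of the diff): load_geospatial now returns five values, with the new `srs` slot carrying an osgeo.osr.SpatialReference. That is why `_apply_filter_function` in process.py gained a throwaway `_` and why create_conversion_matrices unpacks five values per raster. A hypothetical call:

    from hyp3_autorift import utils

    # srs is an osgeo.osr.SpatialReference; the other four values are unchanged.
    data, transform, projection, srs, nodata = utils.load_geospatial('window_location.tif', band=1)
    print(srs.GetAttrValue('PROJECTION'))  # e.g. 'Polar_Stereographic' or 'Transverse_Mercator'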