Skip to content

Commit

Permalink
Merge pull request #278 from ASFHyP3/develop
Browse files Browse the repository at this point in the history
Release 0.18.0
  • Loading branch information
jhkennedy authored Jul 10, 2024
2 parents c0b7613 + 95a884d commit fe9f1f4
Show file tree
Hide file tree
Showing 16 changed files with 241 additions and 52 deletions.
4 changes: 1 addition & 3 deletions .github/workflows/changelog.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,4 @@ on:

jobs:
call-changelog-check-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected]
secrets:
USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
uses: ASFHyP3/actions/.github/workflows/[email protected]
2 changes: 1 addition & 1 deletion .github/workflows/create-jira-issue.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ on:

jobs:
call-create-jira-issue-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
secrets:
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/labeled-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,4 +12,4 @@ on:

jobs:
call-labeled-pr-check-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
2 changes: 1 addition & 1 deletion .github/workflows/release-template-comment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ on:

jobs:
call-release-checklist-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
secrets:
USER_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ on:

jobs:
call-release-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
with:
release_prefix: HyP3 autoRIFT
secrets:
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/static-analysis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@ on: push

jobs:
call-flake8-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
with:
local_package_names: hyp3_autorift
excludes: src/hyp3_autorift/vend

call-secrets-analysis-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
2 changes: 1 addition & 1 deletion .github/workflows/tag-version.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,6 @@ on:

jobs:
call-bump-version-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
secrets:
USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }}
8 changes: 5 additions & 3 deletions .github/workflows/test-and-build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,18 +12,20 @@ on:

jobs:
call-pytest-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
with:
local_package_name: hyp3_autorift
python_versions: >-
["3.9"]
call-version-info-workflow:
uses: ASFHyP3/actions/.github/workflows/[email protected]
uses: ASFHyP3/actions/.github/workflows/[email protected]
with:
python_version: '3.9'

call-docker-ghcr-workflow:
needs: call-version-info-workflow
uses: ASFHyP3/actions/.github/workflows/[email protected].0
uses: ASFHyP3/actions/.github/workflows/[email protected].2
with:
version_tag: ${{ needs.call-version-info-workflow.outputs.version_tag }}
secrets:
Expand Down
9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.18.0]
### Added
* The Sentinel-1 correction workflow will now calculate and write the M11/M12 conversion matrices to a netCDF file.

### Fixed
* `hyp3_autorift.crop` will now preserve the `add_offset` and `scale_factor` encoding attributes for all variables, and in particular, for the M11/M12 conversion matrices.

### Removed
* Support for Python 3.8 has been dropped.

## [0.17.0]
### Changed
Expand Down
2 changes: 1 addition & 1 deletion environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ channels:
dependencies:
- boto3
- botocore
- python>=3.8,<3.10 # Top pin to fix ISCE2 incompatibility: https://github.com/isce-framework/isce2/issues/458
- python>=3.9,<3.10 # Top pin to fix ISCE2 incompatibility: https://github.com/isce-framework/isce2/issues/458
- pip
# For packaging, and testing
- build
Expand Down
4 changes: 1 addition & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "hyp3_autorift"
requires-python = ">=3.8"
requires-python = ">=3.9"
authors = [
{name="ASF APD/Tools Team", email="[email protected]"},
]
Expand All @@ -17,9 +17,7 @@ classifiers=[
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
]
dependencies = [
'boto3',
Expand Down
27 changes: 7 additions & 20 deletions src/hyp3_autorift/crop.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,22 +36,7 @@
import pyproj
import xarray as xr


ENCODING_TEMPLATE = {
'interp_mask': {'_FillValue': 0.0, 'dtype': 'ubyte', "zlib": True, "complevel": 2, "shuffle": True},
'chip_size_height': {'_FillValue': 0.0, 'dtype': 'ushort', "zlib": True, "complevel": 2, "shuffle": True},
'chip_size_width': {'_FillValue': 0.0, 'dtype': 'ushort', "zlib": True, "complevel": 2, "shuffle": True},
'M11': {'_FillValue': -32767, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'M12': {'_FillValue': -32767, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'v': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'vx': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'vy': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'v_error': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'va': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'vr': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True},
'x': {'_FillValue': None},
'y': {'_FillValue': None}
}
ENCODING_ATTRS = ['_FillValue', 'dtype', "zlib", "complevel", "shuffle", 'add_offset', 'scale_factor']


def crop_netcdf_product(netcdf_file: Path) -> Path:
Expand Down Expand Up @@ -114,10 +99,12 @@ def crop_netcdf_product(netcdf_file: Path) -> Path:
chunk_lines = np.min([np.ceil(8192 / dims['y']) * 128, dims['y']])
two_dim_chunks_settings = (chunk_lines, dims['x'])

encoding = ENCODING_TEMPLATE.copy()
if not netcdf_file.name.startswith('S1'):
for radar_variable in ['M11', 'M12', 'va', 'vr']:
del encoding[radar_variable]
encoding = {}
for variable in ds.data_vars.keys():
if variable in ['img_pair_info', 'mapping']:
continue
attributes = {attr: ds[variable].encoding[attr] for attr in ENCODING_ATTRS if attr in ds[variable].encoding}
encoding[variable] = attributes

for _, attributes in encoding.items():
if attributes['_FillValue'] is not None:
Expand Down
2 changes: 1 addition & 1 deletion src/hyp3_autorift/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,7 +242,7 @@ def apply_wallis_nodata_fill_filter(array: np.ndarray, nodata: int) -> Tuple[np.


def _apply_filter_function(image_path: str, filter_function: Callable) -> Tuple[str, Optional[str]]:
image_array, image_transform, image_projection, image_nodata = utils.load_geospatial(image_path)
image_array, image_transform, image_projection, _, image_nodata = utils.load_geospatial(image_path)
image_array = image_array.astype(np.float32)

image_filtered, zero_mask = filter_function(image_array, image_nodata)
Expand Down
5 changes: 2 additions & 3 deletions src/hyp3_autorift/s1_correction.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,17 +16,16 @@ def main():
)
parser.add_argument('--bucket', help='AWS bucket to upload product files to')
parser.add_argument('--bucket-prefix', default='', help='AWS prefix (location in bucket) to add to product files')
parser.add_argument('--esa-username', default=None, help="Username for ESA's Copernicus Data Space Ecosystem")
parser.add_argument('--esa-password', default=None, help="Password for ESA's Copernicus Data Space Ecosystem")
parser.add_argument('--buffer', type=int, default=0, help='Number of pixels to buffer each edge of the input scene')
parser.add_argument('--parameter-file', default=DEFAULT_PARAMETER_FILE,
help='Shapefile for determining the correct search parameters by geographic location. '
'Path to shapefile must be understood by GDAL')
parser.add_argument('granule', help='Reference granule to process')
args = parser.parse_args()

_ = generate_correction_data(args.granule, buffer=args.buffer)
_, conversion_nc = generate_correction_data(args.granule, buffer=args.buffer)

if args.bucket:
upload_file_to_s3(conversion_nc, args.bucket, args.bucket_prefix)
for geotiff in Path.cwd().glob('*.tif'):
upload_file_to_s3(geotiff, args.bucket, args.bucket_prefix)
Loading

0 comments on commit fe9f1f4

Please sign in to comment.