Merge pull request #244 from ASFHyP3/develop
Release v4.0.0
forrestfwilliams authored Nov 21, 2023
2 parents 1870920 + 5ffa238 commit 485bac9
Showing 8 changed files with 136 additions and 14 deletions.
17 changes: 17 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,23 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [4.0.0]
### Added
* The HyP3 SDK now explicitly supports Python 3.9-3.12
* Added `HyP3.submit_insar_isce_burst_job` and `HyP3.prepare_insar_isce_burst_job` methods for submitting
InSAR ISCE burst jobs to HyP3.
* A `pending` method to the `Job` class.
* A `pending` argument to the `Batch.filter_jobs()` method.

### Changed
* The order of the arguments for `Batch.filter_jobs()`. The new order is `succeeded, pending, running, failed, include_expired`.

### Removed
* Support for Python 3.8 has been dropped.

### Fixed
* The `running` method of the `Job` class now returns `True` only if the job has status `RUNNING`. Jobs with status `PENDING` now return `True` from the new `pending` method of `Job` instead.

## [3.1.0]
### Added
* Added the `phase_filter_parameter` keyword argument for the `HyP3.submit_insar_job` and `HyP3.prepare_insar_job` methods.
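
The 4.0.0 entries above change both a signature and a pair of status checks; here is a minimal sketch of what that looks like in calling code, assuming `batch` is a `Batch` returned by an earlier `HyP3.find_jobs()` call or job submission:

```python
# Before 4.0.0 the positional order was (succeeded, running, failed, include_expired);
# 4.0.0 inserts `pending` in second position, so positional callers need updating.
filtered = batch.filter_jobs(True, True, True, False)  # succeeded, pending, running, failed

# Keyword arguments sidestep the ordering change entirely.
filtered = batch.filter_jobs(succeeded=True, pending=True, running=True, failed=False)

# `Job.pending()` and `Job.running()` are now distinct checks.
job = batch.jobs[0]
assert job.pending() == (job.status_code == 'PENDING')
assert job.running() == (job.status_code == 'RUNNING')
```
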
1 change: 1 addition & 0 deletions README.md
@@ -64,6 +64,7 @@ An instance of the `HyP3` class will be needed to interact with the external HyP
```python
rtc_job = hyp3.submit_rtc_job('granule_id', 'job_name')
insar_job = hyp3.submit_insar_job('reference_granule_id', 'secondary_granule_id', 'job_name')
insar_burst_job = hyp3.submit_insar_isce_burst_job('reference_granule_id', 'secondary_granule_id', 'job_name')
autorift_job = hyp3.submit_autorift_job('reference_granule_id', 'secondary_granule_id', 'job_name')
```
Each of these methods returns a `Batch` object containing the `Job` that represents the new HyP3 job request.
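
The new burst method accepts the optional parameters that appear in its signature in `src/hyp3_sdk/hyp3.py` below; for example (granule IDs and the job name are placeholders, and `hyp3` is the client instance from the README example above):

```python
insar_burst_job = hyp3.submit_insar_isce_burst_job(
    'reference_granule_id',
    'secondary_granule_id',
    name='my_burst_pair',      # optional job name
    apply_water_mask=True,     # mask coastal and large inland water before unwrapping
    looks='10x2',              # one of '20x4', '10x2', or '5x1'
)
```
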
2 changes: 1 addition & 1 deletion environment.yml
@@ -3,7 +3,7 @@ channels:
  - conda-forge
  - nodefaults
dependencies:
  - python>=3.8
  - python>=3.9
  - pip
  # For packaging, and testing
  - build
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "hyp3_sdk"
requires-python = ">=3.8"
requires-python = ">=3.9"
authors = [
    {name="ASF APD/Tools Team", email="[email protected]"},
]
@@ -17,9 +17,10 @@ classifiers=[
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
]
dependencies = [
"python-dateutil",
56 changes: 56 additions & 0 deletions src/hyp3_sdk/hyp3.py
@@ -422,6 +422,62 @@ def prepare_insar_job(cls,
        job_dict['name'] = name
        return job_dict

    def submit_insar_isce_burst_job(self,
                                    granule1: str,
                                    granule2: str,
                                    name: Optional[str] = None,
                                    apply_water_mask: bool = False,
                                    looks: Literal['20x4', '10x2', '5x1'] = '20x4') -> Batch:
        """Submit an InSAR ISCE burst job.
        Args:
            granule1: The first granule (scene) to use
            granule2: The second granule (scene) to use
            name: A name for the job
            apply_water_mask: Sets pixels over coastal waters and large inland waterbodies
                as invalid for phase unwrapping
            looks: Number of looks to take in range and azimuth
        Returns:
            A Batch object containing the InSAR ISCE burst job
        """
        arguments = locals().copy()
        arguments.pop('self')
        job_dict = self.prepare_insar_isce_burst_job(**arguments)
        return self.submit_prepared_jobs(prepared_jobs=job_dict)

    @classmethod
    def prepare_insar_isce_burst_job(cls,
                                     granule1: str,
                                     granule2: str,
                                     name: Optional[str] = None,
                                     apply_water_mask: bool = False,
                                     looks: Literal['20x4', '10x2', '5x1'] = '20x4') -> dict:
        """Prepare an InSAR ISCE burst job.
        Args:
            granule1: The first granule (scene) to use
            granule2: The second granule (scene) to use
            name: A name for the job
            apply_water_mask: Sets pixels over coastal waters and large inland waterbodies
                as invalid for phase unwrapping
            looks: Number of looks to take in range and azimuth
        Returns:
            A dictionary containing the prepared InSAR ISCE burst job
        """
        job_parameters = locals().copy()
        for key in ['cls', 'granule1', 'granule2', 'name']:
            job_parameters.pop(key)

        job_dict = {
            'job_parameters': {'granules': [granule1, granule2], **job_parameters},
            'job_type': 'INSAR_ISCE_BURST',
        }
        if name is not None:
            job_dict['name'] = name
        return job_dict

    def my_info(self) -> dict:
        """
        Returns:
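
As with the other job types, the two new methods split preparation from submission: `prepare_insar_isce_burst_job` builds a plain job dictionary (capturing its keyword arguments via `locals().copy()` so that new parameters only need to be added to the signature), and `submit_insar_isce_burst_job` hands that dictionary to `submit_prepared_jobs`. A sketch of using the prepare step directly to batch several pairs into one submission, assuming `submit_prepared_jobs` accepts a list of prepared job dictionaries as it does for the other `prepare_*` helpers (granule names and job names are placeholders):

```python
from hyp3_sdk import HyP3

hyp3 = HyP3()  # an authenticated HyP3 client

# Build the job dictionaries locally, without touching the API...
prepared = [
    HyP3.prepare_insar_isce_burst_job('burst_granule_a1', 'burst_granule_a2', name='pair-a'),
    HyP3.prepare_insar_isce_burst_job('burst_granule_b1', 'burst_granule_b2', name='pair-b', looks='5x1'),
]

# ...then submit them all in a single request.
batch = hyp3.submit_prepared_jobs(prepared)
```
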
21 changes: 16 additions & 5 deletions src/hyp3_sdk/jobs.py
@@ -100,10 +100,11 @@ def failed(self) -> bool:
    def complete(self) -> bool:
        return self.succeeded() or self.failed()

    # TODO may want to update this to check if status code is actually RUNNING, because currently this also returns
    # true if status is PENDING
    def pending(self) -> bool:
        return self.status_code == 'PENDING'

    def running(self) -> bool:
        return not self.complete()
        return self.status_code == 'RUNNING'

    def expired(self) -> bool:
        return self.expiration_time is not None and datetime.now(tz.UTC) >= self.expiration_time
@@ -249,12 +250,19 @@ def any_expired(self) -> bool:
            return False

    def filter_jobs(
            self, succeeded: bool = True, running: bool = True, failed: bool = False, include_expired: bool = True,
            self,
            succeeded: bool = True,
            pending: bool = True,
            running: bool = True,
            failed: bool = False,
            include_expired: bool = True,
    ) -> 'Batch':
        """Filter jobs by status. By default, only succeeded and still running jobs will be in the returned batch.
        """Filter jobs by status. By default, only succeeded, pending,
        and still running jobs will be in the returned batch.
        Args:
            succeeded: Include all succeeded jobs
            pending: Include all pending jobs
            running: Include all running jobs
            failed: Include all failed jobs
            include_expired: Include expired jobs in the result
@@ -273,6 +281,9 @@ def filter_jobs(
            elif job.running() and running:
                filtered_jobs.append(job)

            elif job.pending() and pending:
                filtered_jobs.append(job)

            elif job.failed() and failed:
                filtered_jobs.append(job)

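With `pending()` added and `running()` narrowed to the `RUNNING` status, `filter_jobs()` can distinguish queued work from work that is actively executing. A short sketch of the resulting semantics, with `batch` standing in for a previously retrieved `Batch`:

```python
# Default: succeeded, pending, and running jobs are kept; expired products are included.
watchlist = batch.filter_jobs()

# Jobs that have not finished yet (pending=True and running=True are the defaults).
in_flight = batch.filter_jobs(succeeded=False)

# Succeeded jobs whose products have not expired.
ready = batch.filter_jobs(pending=False, running=False, include_expired=False)

for job in in_flight.jobs:
    print(job.status_code)  # 'PENDING' or 'RUNNING'
```
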
36 changes: 36 additions & 0 deletions tests/test_hyp3.py
@@ -296,6 +296,27 @@ def test_prepare_insar_job():
    }


def test_prepare_insar_isce_burst_job():
    assert HyP3.prepare_insar_isce_burst_job(granule1='my_granule1', granule2='my_granule2') == {
        'job_type': 'INSAR_ISCE_BURST',
        'job_parameters': {
            'granules': ['my_granule1', 'my_granule2'],
            'apply_water_mask': False,
            'looks': '20x4',
        }
    }
    assert HyP3.prepare_insar_isce_burst_job(granule1='my_granule1', granule2='my_granule2', name='my_name',
                                             apply_water_mask=True, looks='10x2') == {
        'job_type': 'INSAR_ISCE_BURST',
        'name': 'my_name',
        'job_parameters': {
            'granules': ['my_granule1', 'my_granule2'],
            'apply_water_mask': True,
            'looks': '10x2',
        }
    }


def test_deprecated_warning():
    with warnings.catch_warnings(record=True) as w:
        HyP3.prepare_insar_job(granule1='my_granule1', granule2='my_granule2', include_los_displacement=False)
@@ -353,6 +374,21 @@ def test_submit_insar_job(get_mock_job):
    assert batch.jobs[0] == job


@responses.activate
def test_submit_insar_isce_burst_job(get_mock_job):
    job = get_mock_job('INSAR_ISCE_BURST', job_parameters={'granules': ['g1', 'g2']})
    api_response = {
        'jobs': [
            job.to_dict()
        ]
    }
    with patch('hyp3_sdk.util.get_authenticated_session', mock_get_authenticated_session):
        api = HyP3()
    responses.add(responses.POST, urljoin(api.url, '/jobs'), json=api_response)
    batch = api.submit_insar_isce_burst_job('g1', 'g2')
    assert batch.jobs[0] == job


@responses.activate
def test_resubmit_previous_job(get_mock_job):
    job = get_mock_job()
12 changes: 6 additions & 6 deletions tests/test_jobs.py
@@ -88,7 +88,7 @@ def test_job_complete_succeeded_failed_running():
    assert not job.complete()
    assert not job.succeeded()
    assert not job.failed()
    assert job.running()
    assert job.pending()

    job.status_code = 'RUNNING'
    assert not job.complete()
@@ -410,25 +410,25 @@ def test_batch_filter_jobs():
    assert not_failed.jobs[0].succeeded() and not not_failed.jobs[0].expired()
    assert not_failed.jobs[1].running()
    assert not_failed.jobs[2].succeeded() and not_failed.jobs[2].expired()
    assert not_failed.jobs[3].running()
    assert not_failed.jobs[3].pending()

    not_failed_or_expired = batch.filter_jobs(include_expired=False)
    assert len(not_failed_or_expired) == 3
    assert not_failed_or_expired.jobs[0].succeeded() and not not_failed_or_expired.jobs[0].expired()
    assert not_failed_or_expired.jobs[1].running()
    assert not_failed_or_expired.jobs[2].running()
    assert not_failed_or_expired.jobs[2].pending()

    succeeded = batch.filter_jobs(running=False)
    succeeded = batch.filter_jobs(pending=False, running=False)
    assert len(succeeded) == 2
    assert succeeded.jobs[0].succeeded() and not succeeded.jobs[0].expired()
    assert succeeded.jobs[1].succeeded() and succeeded.jobs[1].expired()

    running = batch.filter_jobs(succeeded=False)
    assert len(running) == 2
    assert running.jobs[0].running()
    assert running.jobs[1].running()
    assert running.jobs[1].pending()

    failed = batch.filter_jobs(succeeded=False, running=False, failed=True)
    failed = batch.filter_jobs(succeeded=False, pending=False, running=False, failed=True)
    assert len(failed) == 1
    assert failed.jobs[0].failed()

