diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0bfd7a6..ec32786 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,13 @@
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [6.2.0]
+
+### Added
+* New functionality in `stac.py` that allows users to create STAC collections from Batches of HyP3 Burst InSAR/RTC jobs
+* New functionality in `load.py` that allows users to load HyP3 STAC collections directly into Xarray objects
+* Ability to reformat a HyP3 InSAR Xarray dataset into MintPy-compatible HDF5 files
+
 ## [6.1.0]
 
 ### Added
diff --git a/environment.yml b/environment.yml
index 67f1fae..89f4e4a 100644
--- a/environment.yml
+++ b/environment.yml
@@ -22,3 +22,12 @@ dependencies:
   - requests
   - tqdm
   - urllib3
+  # For STAC creation
+  - pystac
+  - fsspec
+  - aiohttp
+  - tifffile
+  # For data loading
+  - h5py
+  - gdal
+  - odc-stac
diff --git a/pyproject.toml b/pyproject.toml
index 44f8003..43ab8da 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,7 +34,14 @@ dynamic = ["version"]
 develop = [
     "pytest",
     "pytest-cov",
-    "responses"
+    "responses",
+    "pystac",
+    "fsspec",
+    "aiohttp",
+    "tifffile",
+    "h5py",
+    "gdal",
+    "odc-stac",
 ]
 
 [project.urls]
diff --git a/src/hyp3_sdk/stac/__init__.py b/src/hyp3_sdk/stac/__init__.py
new file mode 100644
index 0000000..54290c5
--- /dev/null
+++ b/src/hyp3_sdk/stac/__init__.py
@@ -0,0 +1,13 @@
+from importlib import import_module
+
+
+missing_modules = []
+needed_modules = ['aiohttp', 'fsspec', 'h5py', 'pyproj', 'pystac', 'odc.stac', 'tifffile']
+for module in needed_modules:
+    try:
+        import_module(module)
+    except ImportError:
+        missing_modules.append(module)
+
+if missing_modules:
+    raise ImportError(f'Package(s) required for this submodule are missing: {", ".join(missing_modules)}')
diff --git a/src/hyp3_sdk/stac/load.py b/src/hyp3_sdk/stac/load.py
new file mode 100644
index 0000000..587f34f
--- /dev/null
+++ b/src/hyp3_sdk/stac/load.py
@@ -0,0 +1,370 @@
+import datetime as dt
+import time
+from pathlib import Path
+from typing import Iterable, Optional, Tuple
+
+import dask
+import h5py
+import numpy as np
+import odc.stac
+import pystac
+import xarray as xr
+from pyproj import CRS
+from pyproj.transformer import Transformer
+
+
+odc.stac.configure_rio(cloud_defaults=True)
+
+SPEED_OF_LIGHT = 299792458  # m/s
+SENTINEL1 = {
+    'carrier_frequency': 5.405e9,  # Hz
+    'azimuth_pixel_size': 14.1,  # m, this is the ground azimuth pixel spacing, NOT on orbits!
+    'range_pixel_size': 2.3,  # m
+}
+
+
+def incidence_angle2slant_range_distance(
+    spacecraft_height: float, earth_radius: float, inc_angle: np.ndarray
+) -> np.ndarray:
+    """Calculate the corresponding slant range distance given an incidence angle.
+    Originally created by Zhang Yunjun and the MintPy team.
+
+    Law of sines:
+               r + H                  r                range_dist
+        --------------------- = ----------------- = ------------------ = 2R
+         sin(pi - inc_angle)     sin(look_angle)     sin(range_angle)
+
+    where range_angle = inc_angle - look_angle
+    and R is the radius of the circumcircle.
+
+    Args:
+        spacecraft_height: height of the spacecraft
+        earth_radius: radius of the Earth
+        inc_angle: incidence angle in degrees
+
+    Returns:
+        slant range distance array
+    """
+    inc_angle = inc_angle / 180 * np.pi
+
+    # calculate 2R based on the law of sines
+    r2 = (earth_radius + spacecraft_height) / np.sin(np.pi - inc_angle)
+
+    look_angle = np.arcsin(earth_radius / r2)
+    range_angle = inc_angle - look_angle
+    range_dist = r2 * np.sin(range_angle)
+
+    return range_dist
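For readers skimming the diff, the docstring's ASCII sketch restated in LaTeX (same symbols: r is the Earth radius, H the spacecraft height, R the circumradius):

```latex
\frac{r + H}{\sin(\pi - \theta_\text{inc})}
  = \frac{r}{\sin(\theta_\text{look})}
  = \frac{d_\text{range}}{\sin(\theta_\text{range})}
  = 2R,
\qquad
\theta_\text{range} = \theta_\text{inc} - \theta_\text{look}
```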
+
+
+def utm2latlon(epsg_code: int, easting: float, northing: float) -> Tuple[float, float]:
+    """Convert UTM easting/northing in meters to lat/lon in degrees.
+    Originally created by Zhang Yunjun and the MintPy team.
+
+    Args:
+        epsg_code: EPSG code of the UTM coordinate system
+        easting: UTM easting in meters
+        northing: UTM northing in meters
+
+    Returns:
+        lat, lon: latitude and longitude in degrees
+    """
+    transformer = Transformer.from_crs(f'EPSG:{epsg_code}', 'EPSG:4326', always_xy=True)
+    lon, lat = transformer.transform(easting, northing)
+    return lat, lon
+
+
+def wrap(data: np.ndarray, wrap_range: Tuple = (-1.0 * np.pi, np.pi)) -> np.ndarray:
+    """Wrap data into a range.
+    Originally created by Zhang Yunjun and the MintPy team.
+
+    Args:
+        data: data to be wrapped
+        wrap_range: range to be wrapped into
+
+    Returns:
+        wrapped data
+    """
+    w0, w1 = wrap_range
+    data = w0 + np.mod(data - w0, w1 - w0)
+    return data
+
+
+def create_xarray_dataset(
+    stac_items: Iterable[pystac.Item],
+    select_bands: Optional[Iterable[str]] = None,
+    subset_geo: Optional[Iterable[float]] = None,
+    subset_xy: Optional[Iterable[int]] = None,
+    chunksize: dict = {'x': 1024, 'y': 1024},
+) -> xr.Dataset:
+    """Create an Xarray dataset from a list of STAC items.
+
+    Args:
+        stac_items: list of STAC items
+        select_bands: list of bands to select
+        subset_geo: geographic subset as [W, E, S, N], in the same units as the dataset's x/y coordinates
+        subset_xy: index subset as [x1, x2, y1, y2]
+        chunksize: chunk size for Dask
+
+    Returns:
+        Xarray dataset
+    """
+    # anchor='center' is used to ensure "pixel as point" convention
+    dataset = odc.stac.load(stac_items, chunks=chunksize, anchor='center')
+
+    if select_bands:
+        dataset = dataset.sel(band=select_bands)
+
+    if subset_geo and subset_xy:
+        print('Both geographic and index subsets were provided. Using geographic subset method.')
+
+    if subset_geo:
+        dataset = dataset.sel(x=slice(subset_geo[0], subset_geo[1]), y=slice(subset_geo[3], subset_geo[2]))
+    elif subset_xy:
+        dataset = dataset.isel(x=slice(subset_xy[0], subset_xy[1]), y=slice(subset_xy[2], subset_xy[3]))
+
+    dataset = dataset.fillna(0)
+    return dataset
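A minimal usage sketch of this loader, assuming a collection written by the `stac` submodule below; the collection path, band names, and subset bounds are hypothetical:

```python
# Lazily load two bands of a HyP3 STAC collection for a spatial subset.
import pystac
from hyp3_sdk.stac import load

collection = pystac.Collection.from_file('stac/collection.json')  # hypothetical path
items = list(collection.get_all_items())
dataset = load.create_xarray_dataset(
    items,
    select_bands=['unw_phase', 'corr'],
    subset_geo=[435000, 445000, 4060000, 4070000],  # [W, E, S, N] in the products' UTM CRS
)
```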
+
+
+def get_metadata(dataset: xr.Dataset, items: Iterable[pystac.Item]) -> Tuple[dict, list, np.ndarray]:
+    """Extract metadata from an Xarray dataset of HyP3 InSAR products and return MintPy-compatible metadata.
+
+    Args:
+        dataset: Xarray dataset of HyP3 InSAR products
+        items: list of STAC items the dataset was loaded from
+
+    Returns:
+        meta: dictionary with metadata
+        date12s: list of date pairs
+        perp_baseline: array of perpendicular baselines
+    """
+    # Add geospatial metadata
+    meta = {}
+    meta['X_UNIT'] = 'meters'
+    meta['Y_UNIT'] = 'meters'
+    meta['LENGTH'] = dataset.sizes['y']
+    meta['WIDTH'] = dataset.sizes['x']
+    transform = [float(x) for x in dataset.spatial_ref.attrs['GeoTransform'].split(' ')]
+    _, meta['X_STEP'], _, _, _, meta['Y_STEP'] = transform
+    meta['X_FIRST'] = dataset.coords['x'].to_numpy()[0]
+    meta['Y_FIRST'] = dataset.coords['y'].to_numpy()[0]
+    meta['DATA_TYPE'] = str(dataset['unw_phase'].dtype)
+    meta['EPSG'] = dataset.spatial_ref.values.item()
+    meta['NoDataValue'] = 0
+    # srs = osr.SpatialReference()
+    # srs.ImportFromEPSG(meta['EPSG'])
+    # meta['UTM_ZONE'] = srs.GetName().split(' ')[-1]
+    meta['UTM_ZONE'] = CRS.from_epsg(meta['EPSG']).utm_zone
+
+    hyp3_meta = {}
+    keys = list(items[0].properties.keys())
+    for item in items:
+        for key in keys:
+            if key.startswith('hyp3:'):
+                simple_key = key.split(':')[-1]
+                if simple_key in hyp3_meta:
+                    hyp3_meta[simple_key].append(item.properties[key])
+                else:
+                    hyp3_meta[simple_key] = [item.properties[key]]
+
+    # add universal hyp3 metadata
+    meta['PROCESSOR'] = 'hyp3'
+    meta['ALOOKS'] = hyp3_meta['azimuth_looks'][0]
+    meta['RLOOKS'] = hyp3_meta['range_looks'][0]
+    meta['EARTH_RADIUS'] = np.mean(hyp3_meta['earth_radius_at_nadir'])
+    meta['HEIGHT'] = np.mean(hyp3_meta['spacecraft_height'])
+    meta['STARTING_RANGE'] = np.mean(hyp3_meta['slant_range_near'])
+    meta['CENTER_LINE_UTC'] = np.mean(hyp3_meta['utc_time'])
+    meta['HEADING'] = np.mean(hyp3_meta['heading']) % 360.0 - 360.0  # ensure negative value for the heading angle
+
+    # add LAT/LON_REF1/2/3/4 based on whether the satellite is ascending or descending
+    N = float(meta['Y_FIRST'])
+    W = float(meta['X_FIRST'])
+    S = N + float(meta['Y_STEP']) * int(meta['LENGTH'])
+    E = W + float(meta['X_STEP']) * int(meta['WIDTH'])
+
+    # convert UTM to lat/lon
+    N, W = utm2latlon(meta['EPSG'], W, N)
+    S, E = utm2latlon(meta['EPSG'], E, S)
+
+    meta['ORBIT_DIRECTION'] = hyp3_meta['reference_orbit_direction'][0].upper()
+    if meta['ORBIT_DIRECTION'] == 'ASCENDING':
+        meta['LAT_REF1'] = str(S)
+        meta['LAT_REF2'] = str(S)
+        meta['LAT_REF3'] = str(N)
+        meta['LAT_REF4'] = str(N)
+        meta['LON_REF1'] = str(W)
+        meta['LON_REF2'] = str(E)
+        meta['LON_REF3'] = str(W)
+        meta['LON_REF4'] = str(E)
+    else:
+        meta['LAT_REF1'] = str(N)
+        meta['LAT_REF2'] = str(N)
+        meta['LAT_REF3'] = str(S)
+        meta['LAT_REF4'] = str(S)
+        meta['LON_REF1'] = str(E)
+        meta['LON_REF2'] = str(W)
+        meta['LON_REF3'] = str(E)
+        meta['LON_REF4'] = str(W)
+
+    if hyp3_meta['reference_granule'][0].startswith('S1'):
+        meta['PLATFORM'] = 'Sen'
+        meta['ANTENNA_SIDE'] = -1
+        meta['WAVELENGTH'] = SPEED_OF_LIGHT / SENTINEL1['carrier_frequency']
+        meta['RANGE_PIXEL_SIZE'] = SENTINEL1['range_pixel_size'] * int(meta['RLOOKS'])
+        meta['AZIMUTH_PIXEL_SIZE'] = SENTINEL1['azimuth_pixel_size'] * int(meta['ALOOKS'])
+    else:
+        raise NotImplementedError('Only Sentinel-1 data is currently supported')
+
+    date1s = [dt.datetime.fromisoformat(x) for x in hyp3_meta['start_datetime']]
+    date2s = [dt.datetime.fromisoformat(x) for x in hyp3_meta['end_datetime']]
+
+    date12s = [''] * len(date1s)
+    perp_baseline = np.zeros(len(date1s))
+    dataset_dates = dataset.time.to_numpy()
+    for date1, date2, baseline in zip(date1s, date2s, hyp3_meta['baseline']):
+        date_mid = date1 + ((date2 - date1) / 2)
+        date_mid = date_mid.replace(tzinfo=dt.timezone.utc)
+        np_date_mid = np.array(date_mid).astype('datetime64[ns]')
+        index = np.where(np_date_mid == dataset_dates)[0][0]
+
+        date12s[index] = f"{date1.strftime('%Y%m%d')}_{date2.strftime('%Y%m%d')}"
+        perp_baseline[index] = np.abs(baseline)
+
+    return meta, date12s, perp_baseline
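A quick numeric check of the HEADING convention used above: MintPy expects a negative heading angle, and `h % 360.0 - 360.0` maps any equivalent heading value to the same negative representative (values illustrative):

```python
# -12.5 and 347.5 describe the same heading; both normalize to -12.5.
for heading in (-12.5, 347.5):
    assert heading % 360.0 - 360.0 == -12.5
```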
+
+
+def write_mintpy_ifgram_stack(
+    outfile: str, dataset: xr.Dataset, metadata: dict, date12s: Iterable, perp_baselines: np.ndarray
+):
+    """Create a MintPy-compatible interferogram stack and write it to a file.
+
+    Args:
+        outfile: output file path
+        dataset: Xarray dataset of HyP3 InSAR products
+        metadata: metadata dictionary
+        date12s: list of date pairs
+        perp_baselines: array of perpendicular baselines
+    """
+    stack_dataset_names = {'unw_phase': 'unwrapPhase', 'corr': 'coherence'}
+    if 'conncomp' in list(dataset.data_vars):
+        stack_dataset_names['conncomp'] = 'connectComponent'
+
+    new_dataset = dataset.copy()
+    new_dataset.attrs = {}
+    to_drop = list(set(new_dataset.data_vars) - set(stack_dataset_names.keys()))
+    new_dataset = new_dataset.drop_vars(to_drop)
+    new_dataset = new_dataset.rename(stack_dataset_names)
+    new_dataset = new_dataset.drop_vars(list(new_dataset.coords))
+    new_dataset['bperp'] = perp_baselines
+
+    date1 = np.array([d1.split('_')[0].encode('utf-8') for d1 in date12s])
+    date2 = np.array([d2.split('_')[1].encode('utf-8') for d2 in date12s])
+    dates = np.array((date1, date2)).astype(np.str_).transpose()
+    new_dataset['date'] = (('date12', 'pair'), dates)
+
+    for key, value in metadata.items():
+        new_dataset.attrs[key] = str(value)
+    new_dataset.to_netcdf(outfile, format='NETCDF4', mode='w')
+
+    # FIXME this shouldn't be needed, but the call below results in dropIfgram
+    # being read back in as an int, not a bool.
+    # new_dataset['dropIfgram'] = np.ones(new_dataset['unwrapPhase'].shape[0], dtype=np.bool_)
+    shape = (new_dataset['unwrapPhase'].shape[0],)
+    with h5py.File(outfile, 'a') as f:
+        f.create_dataset('dropIfgram', shape=shape, dtype=np.bool_)
+        f['dropIfgram'][:] = True
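An optional sanity check for the HDF5 workaround above: after writing, `dropIfgram` should read back as booleans rather than ints (the file path here is an assumption):

```python
import h5py
import numpy as np

with h5py.File('inputs/ifgramStack.h5', 'r') as f:  # hypothetical output path
    assert f['dropIfgram'].dtype == np.bool_
    assert np.all(f['dropIfgram'][:])
```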
+
+
+def write_mintpy_geometry(outfile: str, dataset: xr.Dataset, metadata: dict) -> None:
+    """Create a MintPy-compatible geometry stack and write it to a file.
+
+    Args:
+        outfile: output file path
+        dataset: Xarray dataset of HyP3 InSAR products
+        metadata: metadata dictionary
+    """
+    first_product = dataset.isel(time=0).copy()
+    first_product.attrs = {}
+    first_product = first_product.drop_vars(list(first_product.coords))
+
+    # Convert from hyp3/gamma to mintpy/isce2 convention
+    incidence_angle = first_product['lv_theta']
+    incidence_angle = 90 - (incidence_angle * 180 / np.pi)
+
+    # Calculate slant range distance
+    slant_range_distance = incidence_angle2slant_range_distance(
+        metadata['HEIGHT'], metadata['EARTH_RADIUS'], incidence_angle
+    )
+
+    # Convert from hyp3/gamma to mintpy/isce2 convention
+    azimuth_angle = first_product['lv_phi']
+    azimuth_angle = azimuth_angle * 180 / np.pi - 90
+    azimuth_angle = wrap(azimuth_angle, wrap_range=[-180, 180])  # rewrap within -180 to 180
+
+    bands = {
+        'height': first_product['dem'].astype(np.float32),
+        'incidenceAngle': incidence_angle.astype(np.float32),
+        'slantRangeDistance': slant_range_distance.astype(np.float32),
+        'azimuthAngle': azimuth_angle.astype(np.float32),
+        'waterMask': first_product['water_mask'].astype(np.bool_),
+    }
+    new_dataset = xr.Dataset()
+    for name in bands:
+        new_dataset[name] = bands[name]
+        new_dataset[name].attrs['MODIFICATION_TIME'] = str(time.time())
+
+    for key, value in metadata.items():
+        new_dataset.attrs[key] = str(value)
+    new_dataset.to_netcdf(outfile, format='NETCDF4', mode='w')
+
+
+def create_mintpy_inputs(
+    stac_collection_file: str,
+    subset_geo: Optional[Iterable[float]] = None,
+    subset_xy: Optional[Iterable[int]] = None,
+    mintpy_dir: Optional[str] = None,
+    chunksize: dict = {'x': 512, 'y': 512},
+    n_threads: int = 20,
+):
+    """Create MintPy-compatible input files from a STAC collection.
+
+    Args:
+        stac_collection_file: path to the STAC collection file
+        subset_geo: geographic subset as [W, E, S, N]
+        subset_xy: index subset as [x1, x2, y1, y2]
+        mintpy_dir: directory in which to create the "inputs" dir where the MintPy files will be saved
+        chunksize: chunk size for Dask
+        n_threads: number of threads to use for Dask
+    """
+    if mintpy_dir is None:
+        mintpy_dir = Path.cwd()
+    mintpy_dir = Path(mintpy_dir)
+
+    collection = pystac.Collection.from_file(stac_collection_file)
+    items = list(collection.get_all_items())
+
+    dataset = create_xarray_dataset(items, subset_geo=subset_geo, subset_xy=subset_xy, chunksize=chunksize)
+
+    meta, date12s, perp_baselines = get_metadata(dataset, items)
+
+    input_dir = mintpy_dir / 'inputs'
+    input_dir.mkdir(exist_ok=True, parents=True)
+
+    msg = f'Downloading using chunks of {chunksize}'
+    if n_threads > 1:
+        msg += f' and {n_threads} threads'
+        dask.config.set(scheduler='threads', num_workers=n_threads)
+    print(msg)
+
+    meta['FILE_TYPE'] = 'ifgramStack'
+    ifg_outfile = input_dir / 'ifgramStack.h5'
+    print('Creating interferogram stack...')
+    write_mintpy_ifgram_stack(ifg_outfile, dataset, meta, date12s, perp_baselines)
+
+    meta['FILE_TYPE'] = 'geometry'
+    geom_outfile = input_dir / 'geometryGeo.h5'
+    print('Creating geometry dataset...')
+    write_mintpy_geometry(geom_outfile, dataset, meta)
+
+    print('Done!')
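A minimal end-to-end sketch of the loader entry point, assuming `stac/collection.json` was produced by `create_stac_collection` from the `stac` submodule below; paths and subset bounds are hypothetical:

```python
from hyp3_sdk.stac import load

load.create_mintpy_inputs(
    'stac/collection.json',  # hypothetical collection path
    subset_geo=[435000, 445000, 4060000, 4070000],  # optional [W, E, S, N] in the products' UTM CRS
    mintpy_dir='mintpy',
)
# Writes mintpy/inputs/ifgramStack.h5 and mintpy/inputs/geometryGeo.h5
```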
diff --git a/src/hyp3_sdk/stac/stac.py b/src/hyp3_sdk/stac/stac.py
new file mode 100644
index 0000000..914bc9e
--- /dev/null
+++ b/src/hyp3_sdk/stac/stac.py
@@ -0,0 +1,584 @@
+"""A module for creating STAC collections based on HyP3-SDK Batch/Job objects"""
+import json
+import warnings
+from concurrent.futures import ThreadPoolExecutor
+from dataclasses import dataclass
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Iterable, List, Optional, Tuple
+
+import fsspec
+import pystac
+import tifffile
+from pyproj import Transformer
+from pystac.extensions import projection, raster, sar, sat, view
+from tqdm import tqdm
+
+from hyp3_sdk import Batch, Job
+
+
+warnings.warn(
+    'This submodule is under rapid development. STAC item (not collection) creation will eventually move to hyp3-lib.',
+    FutureWarning,
+)
+
+
+SENTINEL_CONSTELLATION = 'sentinel-1'
+SENTINEL_PLATFORMS = ['sentinel-1a', 'sentinel-1b']
+SENTINEL_PROVIDER = pystac.Provider(
+    name='ESA',
+    roles=[pystac.ProviderRole.LICENSOR, pystac.ProviderRole.PRODUCER],
+    url='https://sentinel.esa.int/web/sentinel/missions/sentinel-1',
+)
+HYP3_PROVIDER = pystac.Provider(
+    name='ASF DAAC',
+    roles=[pystac.ProviderRole.LICENSOR, pystac.ProviderRole.PROCESSOR, pystac.ProviderRole.HOST],
+    url='https://hyp3-docs.asf.alaska.edu/',
+    extra_fields={'processing:level': 'L3', 'processing:lineage': 'ASF DAAC HyP3'},
+)
+SENTINEL_DATA_DESCRIPTION = (
+    'HyP3 generated Sentinel-1 SAR products and their associated files.'
+    ' The source data for these products are Sentinel-1 Single Look Complex (SLC) products processed by ESA'
+)
+
+RTC_PRODUCTS = ['rgb', 'area', 'dem', 'inc_map', 'ls_map']
+
+
+@dataclass
+class ParameterFile:
+    """Class representing the parameters of a HyP3 InSAR product"""
+
+    reference_granule: str
+    secondary_granule: str
+    reference_orbit_direction: str
+    reference_orbit_number: int
+    secondary_orbit_direction: str
+    secondary_orbit_number: int
+    baseline: float
+    utc_time: float
+    heading: float
+    spacecraft_height: float
+    earth_radius_at_nadir: float
+    slant_range_near: float
+    slant_range_center: float
+    slant_range_far: float
+    range_looks: int
+    azimuth_looks: int
+    insar_phase_filter: bool
+    phase_filter_parameter: float
+    range_bandpass_filter: bool
+    azimuth_bandpass_filter: bool
+    dem_source: str
+    dem_resolution: int
+    unwrapping_type: str
+    speckle_filter: bool
+    water_mask: bool
+
+    def __str__(self):
+        output_strings = [
+            f'Reference Granule: {self.reference_granule}\n',
+            f'Secondary Granule: {self.secondary_granule}\n',
+            f'Reference Pass Direction: {self.reference_orbit_direction}\n',
+            f'Reference Orbit Number: {self.reference_orbit_number}\n',
+            f'Secondary Pass Direction: {self.secondary_orbit_direction}\n',
+            f'Secondary Orbit Number: {self.secondary_orbit_number}\n',
+            f'Baseline: {self.baseline}\n',
+            f'UTC time: {self.utc_time}\n',
+            f'Heading: {self.heading}\n',
+            f'Spacecraft height: {self.spacecraft_height}\n',
+            f'Earth radius at nadir: {self.earth_radius_at_nadir}\n',
+            f'Slant range near: {self.slant_range_near}\n',
+            f'Slant range center: {self.slant_range_center}\n',
+            f'Slant range far: {self.slant_range_far}\n',
+            f'Range looks: {self.range_looks}\n',
+            f'Azimuth looks: {self.azimuth_looks}\n',
+            f'INSAR phase filter: {"yes" if self.insar_phase_filter else "no"}\n',
+            f'Phase filter parameter: {self.phase_filter_parameter}\n',
+            f'Range bandpass filter: {"yes" if self.range_bandpass_filter else "no"}\n',
+            f'Azimuth bandpass filter: {"yes" if self.azimuth_bandpass_filter else "no"}\n',
+            f'DEM source: {self.dem_source}\n',
+            f'DEM resolution (m): {self.dem_resolution}\n',
+            f'Unwrapping type: {self.unwrapping_type}\n',
+            f'Speckle filter: {"yes" if self.speckle_filter else "no"}\n',
+            f'Water mask: {"yes" if self.water_mask else "no"}\n',
+        ]
+
+        return ''.join(output_strings)
+
+    def __repr__(self):
+        return self.__str__()
+
+    def write(self, out_path: Path):
+        out_path.write_text(self.__str__())
+
+    @staticmethod
+    def read(file_path: Path, base_fs: Optional[fsspec.AbstractFileSystem] = None) -> 'ParameterFile':
+        """Read a HyP3 InSAR parameter file
+
+        Args:
+            file_path: Path to the parameter file
+            base_fs: fsspec filesystem to use for reading the file
+
+        Returns:
+            A parameter file object
+        """
+        file_path = str(file_path)
+        if base_fs is None:
+            if file_path.startswith('https'):
+                base_fs = fsspec.filesystem('https', block_size=int(0.1 * (2**20)))
+            else:
+                base_fs = fsspec.filesystem('file')
+
+        with base_fs.open(file_path, 'r') as file:
+            text = file.read().strip()
+
+        parameters = {}
+        for line in text.split('\n'):
+            key, *values = line.strip().split(':')
+            value = values[0].replace(' ', '')
+            parameters[key] = value
+
+        param_file = ParameterFile(
+            reference_granule=parameters['Reference Granule'],
+            secondary_granule=parameters['Secondary Granule'],
+            reference_orbit_direction=parameters['Reference Pass Direction'],
+            reference_orbit_number=int(parameters['Reference Orbit Number']),
+            secondary_orbit_direction=parameters['Secondary Pass Direction'],
+            secondary_orbit_number=int(parameters['Secondary Orbit Number']),
+            baseline=float(parameters['Baseline']),
+            utc_time=float(parameters['UTC time']),
+            heading=float(parameters['Heading']),
+            spacecraft_height=float(parameters['Spacecraft height']),
+            earth_radius_at_nadir=float(parameters['Earth radius at nadir']),
+            slant_range_near=float(parameters['Slant range near']),
+            slant_range_center=float(parameters['Slant range center']),
+            slant_range_far=float(parameters['Slant range far']),
+            range_looks=int(parameters['Range looks']),
+            azimuth_looks=int(parameters['Azimuth looks']),
+            insar_phase_filter=parameters['INSAR phase filter'] == 'yes',
+            phase_filter_parameter=float(parameters['Phase filter parameter']),
+            range_bandpass_filter=parameters['Range bandpass filter'] == 'yes',
+            azimuth_bandpass_filter=parameters['Azimuth bandpass filter'] == 'yes',
+            dem_source=parameters['DEM source'],
+            dem_resolution=int(parameters['DEM resolution (m)']),
+            unwrapping_type=parameters['Unwrapping type'],
+            speckle_filter=parameters['Speckle filter'] == 'yes',
+            water_mask=parameters['Water mask'] == 'yes',
+        )
+
+        return param_file
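A round-trip sketch: HyP3 InSAR products ship this parameter file as a `.txt` next to the product `.zip`, and `read` also accepts an `https://` URL (the URL below is hypothetical):

```python
from pathlib import Path
from hyp3_sdk.stac.stac import ParameterFile

params = ParameterFile.read('https://example.com/my_job_id.txt')  # hypothetical URL
print(params.baseline, params.reference_orbit_direction)
params.write(Path('my_job_id.txt'))  # serializes via __str__ in the same key: value format
```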
+
+
+@dataclass
+class GeoInfo:
+    """Class representing the geospatial information of a geotiff file"""
+
+    transform: Tuple[float]
+    shape: Tuple[int]  # y first
+    epsg: int
+
+    def __post_init__(self):
+        """Add bounding box, bounding box geojson, and proj_transform attributes"""
+        length, width = self.shape
+        min_x, size_x, _, max_y, _, size_y = self.transform
+        max_x = min_x + (size_x * width)
+        min_y = max_y + (size_y * length)
+
+        transformer = Transformer.from_crs(f'EPSG:{self.epsg}', 'EPSG:4326', always_xy=True)
+        (min_lon, max_lon), (max_lat, min_lat) = transformer.transform([min_x, max_x], [max_y, min_y])
+        self.bbox = [min_lon, min_lat, max_lon, max_lat]
+
+        bbox_geojson = {
+            'type': 'Polygon',
+            'coordinates': [
+                [
+                    [min_lon, min_lat],
+                    [max_lon, min_lat],
+                    [max_lon, max_lat],
+                    [min_lon, max_lat],
+                    [min_lon, min_lat],
+                ]
+            ],
+        }
+        self.bbox_geojson = bbox_geojson
+
+        proj_transform = [
+            self.transform[1],
+            self.transform[2],
+            self.transform[0],
+            self.transform[4],
+            self.transform[5],
+            self.transform[3],
+            0,
+            0,
+            1,
+        ]
+        self.proj_transform = proj_transform
+
+
+def _get_epsg(geo_key_list: Iterable[int]) -> int:
+    """Get the EPSG code from a GeoKeyDirectoryTag.
+    Will only work for projected coordinate systems.
+
+    Args:
+        geo_key_list: A list of GeoKeyDirectoryTag values
+
+    Returns:
+        The EPSG code for the projected coordinate system
+    """
+    projected_crs_key_id = 3072
+    geo_keys = [geo_key_list[i: i + 4] for i in range(0, len(geo_key_list), 4)]  # fmt: off
+    for key in geo_keys:
+        if key[0] == projected_crs_key_id:
+            return int(key[3])
+
+    raise ValueError('No projected EPSG code found in GeoKeyDirectoryTag')
+
+
+def _get_geotiff_info_nogdal(file_path: str, base_fs: Optional[fsspec.AbstractFileSystem] = None) -> GeoInfo:
+    """Get geotiff projection info without using GDAL.
+
+    Args:
+        file_path: Path to the geotiff file
+        base_fs: fsspec filesystem to use for reading the file
+
+    Returns:
+        A GeoInfo object containing the geospatial information
+    """
+    # Tag IDs from the TIFF and GeoTIFF specs
+    # https://www.itu.int/itudoc/itu-t/com16/tiff-fx/docs/tiff6.pdf
+    # https://docs.ogc.org/is/19-008r4/19-008r4.html
+    tag_ids = {
+        'ImageWidth': 256,
+        'ImageLength': 257,
+        'ModelPixelScaleTag': 33550,
+        'ModelTiepointTag': 33922,
+        'GeoKeyDirectoryTag': 34735,
+    }
+    if base_fs is None:
+        if file_path.startswith('https'):
+            base_fs = fsspec.filesystem('https', block_size=int(0.1 * (2**20)))
+        else:
+            base_fs = fsspec.filesystem('file')
+
+    meta_dict = {}
+    with base_fs.open(file_path, 'rb') as file:
+        with tifffile.TiffFile(file) as tif:
+            tags = tif.pages[0].tags
+            for key, value in tag_ids.items():
+                meta_dict[key] = tags[value].value
+
+    width = int(meta_dict['ImageWidth'])
+    length = int(meta_dict['ImageLength'])
+    pixel_x, pixel_y = meta_dict['ModelPixelScaleTag'][:2]
+    pixel_y *= -1
+    origin_x, origin_y = meta_dict['ModelTiepointTag'][3:5]
+    geotransform = [int(value) for value in [origin_x, pixel_x, 0, origin_y, 0, pixel_y]]
+    utm_epsg = _get_epsg(meta_dict['GeoKeyDirectoryTag'])
+    geo_info = GeoInfo(geotransform, (length, width), utm_epsg)
+    return geo_info
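For context on the GeoKeyDirectoryTag parsing above: entries come in groups of four shorts (KeyID, TIFFTagLocation, Count, Value), and key 3072 (ProjectedCSTypeGeoKey) carries the EPSG code in its Value slot. A sketch with hypothetical key values:

```python
from hyp3_sdk.stac.stac import _get_epsg

geo_key_list = [
    1, 1, 0, 2,         # directory header: version, revision, minor revision, key count
    1024, 0, 1, 1,      # GTModelTypeGeoKey -> projected CRS
    3072, 0, 1, 32610,  # ProjectedCSTypeGeoKey -> EPSG:32610 (UTM zone 10N)
]
assert _get_epsg(geo_key_list) == 32610
```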
+
+
+def get_overall_bbox(bboxes: Iterable[Iterable[float]]) -> List[float]:
+    """Get the overall bounding box for a list of bounding boxes
+
+    Args:
+        bboxes: A list of bounding boxes
+
+    Returns:
+        A list containing the overall bounding box coordinates (min_x, min_y, max_x, max_y)
+    """
+    min_x = min([bbox[0] for bbox in bboxes])
+    min_y = min([bbox[1] for bbox in bboxes])
+    max_x = max([bbox[2] for bbox in bboxes])
+    max_y = max([bbox[3] for bbox in bboxes])
+    return [min_x, min_y, max_x, max_y]
+
+
+def validate_stack(batch: Batch) -> None:
+    """Verifies that all jobs in the batch:
+    - Have the SUCCEEDED status
+    - Are not expired
+    - Have the same job type
+    - Have a job type that is one of INSAR_GAMMA, RTC_GAMMA, or INSAR_ISCE_BURST
+    - Have the same processing parameters
+    Notably DOES NOT check that all jobs are co-located.
+
+    Args:
+        batch: A HyP3 Batch object
+    """
+    job_dicts = [job.to_dict() for job in batch]
+    n_success = [job['status_code'] == 'SUCCEEDED' for job in job_dicts].count(True)
+    if n_success != len(batch):
+        raise ValueError('Not all jobs in the batch have succeeded yet')
+
+    # TODO add this later
+    # if batch.any_expired():
+    #     raise ValueError('Some of the jobs in the batch have expired')
+
+    job_types = list(set([job['job_type'] for job in job_dicts]))
+    if len(job_types) != 1:
+        raise ValueError(f'Not all jobs have the same job type. Included types: {" ".join(job_types)}')
+
+    supported_job_types = ['INSAR_GAMMA', 'RTC_GAMMA', 'INSAR_ISCE_BURST']
+    if job_types[0] not in supported_job_types:
+        msg = f'Job type {job_types[0]} is not supported. Only {", ".join(supported_job_types)} are supported'
+        raise NotImplementedError(msg)
+
+    job_params = [job['job_parameters'] for job in job_dicts]
+    for params in job_params:
+        params.pop('granules', None)
+    param_set = list(set([str(job) for job in job_params]))
+    if len(param_set) != 1:
+        raise ValueError('Not all jobs have the same processing parameters')
+
+
+def _write_item(item):
+    item_dict = item.to_dict(include_self_link=False, transform_hrefs=False)
+    with open(f'{item.id}.json', 'w') as f:
+        f.write(json.dumps(item_dict))
+
+
+def _create_insar_stac_item(job: Job, geo_info: GeoInfo, param_file: ParameterFile) -> pystac.Item:
+    """Create a STAC item from a HyP3 product.
+
+    Args:
+        job: A HyP3 Job object
+        geo_info: A GeoInfo object containing the geospatial information of the product
+        param_file: A ParameterFile object containing the processing parameters of the product
+
+    Returns:
+        A STAC item for the product
+    """
+    base_url = job.to_dict()['files'][0]['url']
+
+    insar_products = ['corr', 'unw_phase', 'lv_phi', 'lv_theta', 'dem', 'water_mask']
+    if job.to_dict()['job_type'] == 'INSAR_GAMMA':
+        date_loc = 5
+        reference_polarization = 'VV'
+        secondary_polarization = 'VV'
+        lineage = 'Created by ASF DAAC HyP3 using the hyp3_gamma plugin running GAMMA SAR processing software'
+    elif job.to_dict()['job_type'] == 'INSAR_ISCE_BURST':
+        date_loc = 3
+        reference_polarization = param_file.reference_granule.split('_')[4]
+        secondary_polarization = param_file.secondary_granule.split('_')[4]
+        insar_products += ['conncomp', 'wrapped_phase']
+        lineage = 'Created by ASF DAAC HyP3 using the hyp3_isce2 plugin running ISCE2 SAR processing software'
+
+    pattern = '%Y%m%dT%H%M%S'
+    start_time = datetime.strptime(param_file.reference_granule.split('_')[date_loc], pattern).replace(
+        tzinfo=timezone.utc
+    )
+    stop_time = datetime.strptime(param_file.secondary_granule.split('_')[date_loc], pattern).replace(
+        tzinfo=timezone.utc
+    )
+    mid_time = start_time + ((stop_time - start_time) / 2)
+    mid_time = mid_time.replace(tzinfo=timezone.utc)
+    polarizations = list(set([reference_polarization, secondary_polarization]))
+
+    extra_properties = {f'hyp3:{key}': value for key, value in param_file.__dict__.items()}
+    extra_properties.update(
+        {
+            'hyp3:start_datetime': start_time.isoformat(),
+            'hyp3:end_datetime': stop_time.isoformat(),
+            'sar:product_type': job.to_dict()['job_type'],
+            'sar:polarizations': polarizations,
+            'sar:looks_azimuth': extra_properties['hyp3:azimuth_looks'],
+            'sar:looks_range': extra_properties['hyp3:range_looks'],
+            'sat:orbit_state': extra_properties['hyp3:reference_orbit_direction'].lower(),
+            'sat:absolute_orbit': extra_properties['hyp3:reference_orbit_number'],
+            'view:azimuth': (360 + extra_properties['hyp3:heading']) % 360,  # change of convention
+            'processing:level': 'L3',
+            'processing:lineage': lineage,
+            'insar:reference_datetime': start_time.isoformat(),
+            'insar:secondary_datetime': stop_time.isoformat(),
+            'insar:processing_dem': 'COP-DEM_GLO30',
+            'insar:geocoding_dem': 'COP-DEM_GLO30',
+            'insar:perpendicular_baseline': extra_properties['hyp3:baseline'],
+            'insar:temporal_baseline': int((stop_time - start_time).days),
+        }
+    )
+
+    item = _create_item(base_url, mid_time, geo_info, insar_products, extra_properties)
+    thumbnail = base_url.replace('.zip', '_unw_phase.png')
+    item.add_asset(
+        key='thumbnail',
+        asset=pystac.Asset(href=thumbnail, media_type=pystac.MediaType.PNG, roles=['thumbnail']),
+    )
+    # TODO: the insar extension isn't available in pystac yet, so it can't be added here.
+    item.stac_extensions += [
+        sat.SatExtension.get_schema_uri(),
+        view.ViewExtension.get_schema_uri(),
+        # insar.InsarExtension.get_schema_uri(),
+    ]
+    item.validate()
+    return item
+
+
+def _create_rtc_stac_item(job: Job, geo_info: GeoInfo, available_polarizations: Iterable[str]) -> pystac.Item:
+    """Create a STAC item from a HyP3 RTC product.
+
+    Args:
+        job: A HyP3 Job object
+        geo_info: A GeoInfo object containing the geospatial information of the product
+        available_polarizations: A list of the available polarizations
+
+    Returns:
+        A STAC item for the product
+    """
+    base_url = job.to_dict()['files'][0]['url']
+    pattern = '%Y%m%dT%H%M%S'
+    date_string = base_url.split('/')[-1].split('_')[2].split('.')[0]
+    start_time = datetime.strptime(date_string, pattern).replace(tzinfo=timezone.utc)
+    lineage = 'Created by ASF DAAC HyP3 using the hyp3_gamma plugin running GAMMA SAR processing software'
+    extra_properties = {
+        'sar:product_type': job.to_dict()['job_type'],
+        'sar:polarizations': available_polarizations,
+        'processing:level': 'L3',
+        'processing:lineage': lineage,
+    }
+    item = _create_item(base_url, start_time, geo_info, available_polarizations + RTC_PRODUCTS, extra_properties)
+    thumbnail = base_url.replace('.zip', '_rgb_thumb.png')
+    item.add_asset(
+        key='thumbnail',
+        asset=pystac.Asset(href=thumbnail, media_type=pystac.MediaType.PNG, roles=['thumbnail']),
+    )
+    item.validate()
+    return item
+
+
+def _create_item(
+    base_url: str, start_time: datetime, geo_info: GeoInfo, product_types: Iterable[str], extra_properties: dict
+) -> pystac.Item:
+    """Create a STAC item from a HyP3 product.
+
+    Args:
+        base_url: The base url for the product
+        start_time: The start time of the product
+        geo_info: A GeoInfo object containing the geospatial information of the product
+        product_types: A list of the product types available
+        extra_properties: A dictionary of extra properties to add to the STAC item
+
+    Returns:
+        A STAC item for the product
+    """
+    properties = {
+        'data_type': 'float32',
+        'nodata': 0,
+        'proj:shape': geo_info.shape,
+        'proj:transform': geo_info.proj_transform,
+        'proj:epsg': geo_info.epsg,
+        'sar:instrument_mode': 'IW',
+        'sar:frequency_band': sar.FrequencyBand.C,
+        'sar:observation_direction': 'right',
+    }
+    properties.update(extra_properties)
+    item = pystac.Item(
+        id=base_url.split('/')[-1].replace('.zip', ''),
+        geometry=geo_info.bbox_geojson,
+        bbox=geo_info.bbox,
+        datetime=start_time,
+        properties=properties,
+        stac_extensions=[
+            projection.ProjectionExtension.get_schema_uri(),
+            raster.RasterExtension.get_schema_uri(),
+            sar.SarExtension.get_schema_uri(),
+        ],
+    )
+    for asset_type in product_types:
+        item.add_asset(
+            key=asset_type,
+            asset=pystac.Asset(
+                href=base_url.replace('.zip', f'_{asset_type}.tif'),
+                media_type=pystac.MediaType.GEOTIFF,
+                roles=['data'],
+            ),
+        )
+    return item
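A sketch of the href convention `_create_item` relies on: every asset lives next to the product `.zip` and differs only by suffix (the URL is hypothetical):

```python
base_url = 'https://example.com/my_job_id.zip'  # hypothetical product URL
for product in ('unw_phase', 'corr'):
    print(base_url.replace('.zip', f'_{product}.tif'))
# -> https://example.com/my_job_id_unw_phase.tif
# -> https://example.com/my_job_id_corr.tif
```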
+
+
+def _get_insar_info(job: Job) -> Tuple[GeoInfo, ParameterFile]:
+    """Get the geospatial and parameter information for an InSAR job.
+    Includes all HTTPS requests needed to get job info.
+
+    Args:
+        job: A HyP3 Job object
+
+    Returns:
+        A tuple containing the geospatial information and parameter file
+    """
+    base_url = job.to_dict()['files'][0]['url']
+    unw_file_url = base_url.replace('.zip', '_unw_phase.tif')
+    param_file_url = base_url.replace('.zip', '.txt')
+
+    base_fs = fsspec.filesystem('https', block_size=int(0.1 * (2**20)))
+    geo_info = _get_geotiff_info_nogdal(unw_file_url, base_fs)
+    param_file = ParameterFile.read(param_file_url, base_fs)
+    return geo_info, param_file
+
+
+def _get_rtc_info(job: Job) -> Tuple[GeoInfo, List[str]]:
+    """Get the geospatial and polarization information for an RTC job.
+    Includes all HTTPS requests needed to get job info.
+
+    Args:
+        job: A HyP3 Job object
+
+    Returns:
+        A tuple containing the geospatial information and available polarizations
+    """
+    base_url = job.to_dict()['files'][0]['url']
+    base_fs = fsspec.filesystem('https', block_size=int(0.1 * (2**20)))
+    available_pols = []
+    # FIXME: This would be more efficient if I can get an ls command to work
+    for pol in ['VV', 'VH', 'HH', 'HV']:
+        url = base_url.replace('.zip', f'_{pol}.tif')
+        if base_fs.exists(url):
+            available_pols.append(pol)
+
+    first_pol_url = base_url.replace('.zip', f'_{available_pols[0]}.tif')
+    geo_info = _get_geotiff_info_nogdal(first_pol_url, base_fs)
+    return geo_info, available_pols
+
+
+def create_stac_collection(batch: Batch, out_path: Path, collection_id: str = 'hyp3_jobs', workers: int = 10) -> None:
+    """Create a STAC collection from a HyP3 batch and save it to a directory.
+
+    Args:
+        batch: A HyP3 Batch object, or a list of jobs
+        out_path: Path to the directory where the STAC collection will be saved
+        collection_id: The ID of the STAC collection
+        workers: The number of concurrent requests to make when getting job info
+    """
+    validate_stack(batch)
+
+    job_type = batch[0].to_dict()['job_type']
+    if 'INSAR' in job_type:
+        with ThreadPoolExecutor(max_workers=workers) as executor:
+            results = list(tqdm(executor.map(_get_insar_info, batch), total=len(batch)))
+        items = [_create_insar_stac_item(job, geo, param) for job, (geo, param) in zip(batch, results)]
+    elif 'RTC' in job_type:
+        with ThreadPoolExecutor(max_workers=workers) as executor:
+            results = list(tqdm(executor.map(_get_rtc_info, batch), total=len(batch)))
+        items = [_create_rtc_stac_item(job, geo, pols) for job, (geo, pols) in zip(batch, results)]
+
+    bboxes = [item.bbox for item in items]
+    dates = [item.datetime for item in items]
+
+    extent = pystac.Extent(
+        pystac.SpatialExtent(get_overall_bbox(bboxes)), pystac.TemporalExtent([[min(dates), max(dates)]])
+    )
+    summary_dict = {'constellation': [SENTINEL_CONSTELLATION], 'platform': SENTINEL_PLATFORMS}
+
+    collection = pystac.Collection(
+        id=collection_id,
+        description=SENTINEL_DATA_DESCRIPTION,
+        extent=extent,
+        keywords=['sentinel', 'copernicus', 'esa', 'sar'],
+        providers=[SENTINEL_PROVIDER, HYP3_PROVIDER],
+        summaries=pystac.Summaries(summary_dict),
+        title='ASF HyP3 Products',
+    )
+    for item in items:
+        collection.add_item(item)
+    collection.normalize_hrefs(str(out_path))
+    collection.validate()
+    collection.save(catalog_type=pystac.CatalogType.SELF_CONTAINED)
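A hypothetical workflow sketch tying the entry point together: turn a batch of succeeded InSAR jobs into a self-contained STAC collection on disk (the project name is an assumption):

```python
import hyp3_sdk
from hyp3_sdk.stac import stac

hyp3 = hyp3_sdk.HyP3()
batch = hyp3.find_jobs(name='my-insar-stack')  # hypothetical job name
stac.create_stac_collection(batch, out_path='stac', collection_id='my-insar-stack')
```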
diff --git a/tests/test_load.py b/tests/test_load.py
new file mode 100644
index 0000000..9f4ea91
--- /dev/null
+++ b/tests/test_load.py
@@ -0,0 +1,26 @@
+import numpy as np
+
+from hyp3_sdk.stac import load
+
+
+def test_incidence_angle2slant_range_distance():
+    incidence_angles = np.array([1, 10, 30, 45])
+    earth_radius = 10_000
+    orbit_altitude = 500
+    slant_range_distances = load.incidence_angle2slant_range_distance(orbit_altitude, earth_radius, incidence_angles)
+    assert np.allclose(slant_range_distances, np.array([500.07253639, 507.33801148, 572.83861847, 691.01953626]))
+
+
+def test_utm2latlon():
+    # Coordinates near Ridgecrest, CA
+    utm_x = 677962
+    utm_y = 4096742
+    epsg = 32610
+    lat, lon = load.utm2latlon(epsg, utm_x, utm_y)
+    assert np.allclose([lat, lon], [37, -121])
+
+
+def test_wrap():
+    unwrapped = np.array([0, np.pi * 0.5, np.pi, np.pi * 1.5, np.pi * 2])
+    wrapped = load.wrap(unwrapped)
+    assert np.allclose(wrapped, [0, np.pi * 0.5, -np.pi, -np.pi * 0.5, 0])
diff --git a/tests/test_stac.py b/tests/test_stac.py
new file mode 100644
index 0000000..1162dac
--- /dev/null
+++ b/tests/test_stac.py
@@ -0,0 +1,232 @@
+"""Tests for the stac module"""
+from copy import deepcopy
+from datetime import datetime, timezone
+
+import numpy as np
+import pytest
+import tifffile
+
+from hyp3_sdk import Job
+from hyp3_sdk.stac import stac
+
+
+@pytest.fixture
+def param_file():
+    param_file = stac.ParameterFile(
+        reference_granule='foo',
+        secondary_granule='bar',
+        reference_orbit_direction='ASCENDING',
+        reference_orbit_number=123,
+        secondary_orbit_direction='DESCENDING',
+        secondary_orbit_number=124,
+        baseline=100.0,
+        utc_time=1546344000.0,
+        heading=-10.0,
+        spacecraft_height=700000.0,
+        earth_radius_at_nadir=6371000.0,
+        slant_range_near=800000.0,
+        slant_range_center=850000.0,
+        slant_range_far=900000.0,
+        range_looks=5,
+        azimuth_looks=5,
+        insar_phase_filter=True,
+        phase_filter_parameter=0.2,
+        range_bandpass_filter=True,
+        azimuth_bandpass_filter=True,
+        dem_source='baz',
+        dem_resolution=30,
+        unwrapping_type='SNAPHU',
+        speckle_filter=True,
+        water_mask=True,
+    )
+    return param_file
+
+
+@pytest.fixture
+def geo_info():
+    return stac.GeoInfo(transform=[677962, 1, 0, 4096742, 0, -1], shape=[108630, 92459], epsg=32610)
+
+
+def test_parameter_file(tmp_path, param_file):
+    """Test the ParameterFile class"""
+    assert str(param_file).startswith('Reference Granule: foo\n')
+    assert str(param_file).endswith('Water mask: yes\n')
+
+    param_file.write(tmp_path / 'test.txt')
+    assert (tmp_path / 'test.txt').read_text() == str(param_file)
+    loaded_param_file = stac.ParameterFile.read(tmp_path / 'test.txt')
+    assert loaded_param_file == param_file
+
+
+def test_geoinfo(geo_info):
+    assert np.all(np.round(geo_info.bbox, 4) == np.round([-121, 36, -120, 37], 4))
+    assert geo_info.bbox_geojson['type'] == 'Polygon'
+    assert len(geo_info.bbox_geojson['coordinates'][0]) == 5
+    assert geo_info.proj_transform == [1, 0, 677962, 0, -1, 4096742, 0, 0, 1]
+
+
+def test__get_epsg():
+    test_geo_key_list = [9999, 0, 1, 5555, 3072, 0, 1, 4326]
+    assert stac._get_epsg(test_geo_key_list) == 4326
+
+    test_geo_key_list = [9999, 0, 1, 4326, 3071, 0, 1, 4326]
+    with pytest.raises(ValueError, match='No .* EPSG .*'):
+        stac._get_epsg(test_geo_key_list)
+
+
+def create_temp_geotiff(file_path):
+    width, length = 100, 200
+    pixel_x, pixel_y = 1, 2
+    origin_x, origin_y = 10, 20
+    image_data = np.zeros((length, width), dtype=np.uint16)
+
+    tags = {
+        'ModelPixelScaleTag': [33550, 'i', 3, (pixel_x, pixel_y, 0)],
+        'ModelTiepointTag': [33922, 'i', 6, (0, 0, 0, origin_x, origin_y, 0)],
+        'GeoKeyDirectoryTag': [34735, 'i', 4, (3072, 0, 1, 4326)],
+    }
+    extra_tags = [tags[key] for key in tags]
+    tifffile.imwrite(
+        file_path,
+        image_data,
+        extratags=extra_tags,
+    )
+    return
+
+
+def test__get_geotiff_info_nogdal(tmp_path):
+    tmp_file = tmp_path / 'test.tif'
+    create_temp_geotiff(tmp_file)
+    geo_info = stac._get_geotiff_info_nogdal(str(tmp_file))
+    assert geo_info.transform == [10, 1, 0, 20, 0, -2]
+    assert geo_info.shape == (200, 100)
+    assert geo_info.epsg == 4326
+
+
+def test_get_overall_bbox():
+    boxes = [[0, 2, 3, 4], [5, 0, 10, 8], [1, 6, 7, 11]]
+    assert stac.get_overall_bbox(boxes) == [0, 0, 10, 11]
+
+
+def test_validate_stack():
+    job1 = Job(
+        job_type='INSAR_GAMMA',
+        job_id='foo',
+        request_time=datetime.now(),
+        status_code='SUCCEEDED',
+        user_id='me',
+        job_parameters={'reference_granule': 'foo', 'secondary_granule': 'bar'},
+    )
+    job2 = Job(
+        job_type='INSAR_GAMMA',
+        job_id='foo',
+        request_time=datetime.now(),
+        status_code='SUCCEEDED',
+        user_id='me',
+        job_parameters={'reference_granule': 'foo', 'secondary_granule': 'baz'},
+    )
+    job3 = Job(job_type='INSAR_GAMMA', job_id='foo', request_time=datetime.now(), status_code='PENDING', user_id='me')
+    job4 = Job(
+        job_type='INSAR_ISCE_BURST', job_id='foo', request_time=datetime.now(), status_code='SUCCEEDED', user_id='me'
+    )
+    job5 = Job(job_type='AUTORIFT', job_id='foo', request_time=datetime.now(), status_code='SUCCEEDED', user_id='me')
+
+    with pytest.raises(ValueError, match='Not all .* succeeded .*'):
+        stac.validate_stack([job1, job3])
+
+    with pytest.raises(ValueError, match='Not all.*job type.*'):
+        stac.validate_stack([job1, job4])
+
+    with pytest.raises(ValueError, match='Not all .* parameters'):
+        stac.validate_stack([job1, job2])
+
+    with pytest.raises(NotImplementedError, match='Job type .* not supported.*'):
+        stac.validate_stack([job5])
+
+    stac.validate_stack([job1, job1])
+
+
+def test__create_insar_stac_item(param_file, geo_info):
+    job_gamma = Job(
+        job_type='INSAR_GAMMA',
+        job_id='my_job_id',
+        request_time=datetime.now(),
+        status_code='SUCCEEDED',
+        user_id='me',
+        files=[{'url': 'https://example.com/my_job_id.zip'}],
+    )
+    gamma_param_file = deepcopy(param_file)
+    gamma_param_file.reference_granule = '0_1_2_3_4_20190101T000000'
+    gamma_param_file.secondary_granule = '0_1_2_3_4_20190102T000000'
+    item = stac._create_insar_stac_item(job_gamma, geo_info, gamma_param_file)
+    item.validate()
+    assert item.id == 'my_job_id'
+    assert item.datetime == datetime(2019, 1, 1, 12, 0, 0).replace(tzinfo=timezone.utc)
+    assert np.all(np.round(item.bbox, 4) == np.round([-121, 36, -120, 37], 4))
+    assert item.properties['sar:product_type'] == 'INSAR_GAMMA'
+    assert item.properties['sar:polarizations'] == ['VV']
+    assert item.properties['hyp3:start_datetime'] == '2019-01-01T00:00:00+00:00'
+    assert item.properties['hyp3:end_datetime'] == '2019-01-02T00:00:00+00:00'
+    assert item.properties['hyp3:reference_granule'] == '0_1_2_3_4_20190101T000000'
+    assert item.properties['hyp3:secondary_granule'] == '0_1_2_3_4_20190102T000000'
+    assert item.properties['view:azimuth'] == 350.0
+    assert item.properties['insar:perpendicular_baseline'] == 100.0
+    assert item.properties['insar:temporal_baseline'] == 1
+    assert item.properties['insar:processing_dem'] == 'COP-DEM_GLO30'
+    assert item.properties['insar:geocoding_dem'] == 'COP-DEM_GLO30'
+    assert item.assets['unw_phase'].href == 'https://example.com/my_job_id_unw_phase.tif'
+
+    job_isce = Job(
+        job_type='INSAR_ISCE_BURST',
+        job_id='my_job_id',
+        request_time=datetime.now(),
+        status_code='SUCCEEDED',
+        user_id='me',
+        files=[{'url': 'https://example.com/my_job_id.zip'}],
+    )
+    isce_param_file = deepcopy(param_file)
+    isce_param_file.reference_granule = '0_1_2_20190101T000000_VH'
+    isce_param_file.secondary_granule = '0_1_2_20190102T000000_VH'
+    item = stac._create_insar_stac_item(job_isce, geo_info, isce_param_file)
+    item.validate()
+    # only testing the new properties
+    assert item.properties['sar:product_type'] == 'INSAR_ISCE_BURST'
+    assert item.properties['sar:polarizations'] == ['VH']
+    assert item.properties['hyp3:start_datetime'] == '2019-01-01T00:00:00+00:00'
+    assert item.properties['hyp3:end_datetime'] == '2019-01-02T00:00:00+00:00'
+
+
+def test__create_rtc_stac_item(geo_info):
+    job = Job(
+        job_type='RTC_GAMMA',
+        job_id='my_job_20190101T000000',
+        request_time=datetime.now(),
+        status_code='SUCCEEDED',
+        user_id='me',
+        files=[{'url': 'https://example.com/my_job_20190101T000000.zip'}],
+    )
+    available_polarizations = ['VV', 'VH']
+    item = stac._create_rtc_stac_item(job, geo_info, available_polarizations)
+    item.validate()
+    assert item.id == 'my_job_20190101T000000'
+    assert item.datetime == datetime(2019, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc)
+    assert np.all(np.round(item.bbox, 4) == np.round([-121, 36, -120, 37], 4))
+    assert item.properties['sar:product_type'] == 'RTC_GAMMA'
+    assert item.properties['sar:polarizations'] == ['VV', 'VH']
+    assert item.assets['VV'].href == 'https://example.com/my_job_20190101T000000_VV.tif'
+
+
+def test__create_item(geo_info):
+    base_url = 'https://example.com/my_job_id.zip'
+    start_time = datetime(2019, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc)
+    product_types = ['unw_phase', 'corr']
+    extra_properties = {'foo': 'bar'}
+    item = stac._create_item(base_url, start_time, geo_info, product_types, extra_properties)
+    assert item.id == 'my_job_id'
+    assert item.datetime == start_time
+    assert np.all(np.round(item.bbox, 4) == np.round([-121, 36, -120, 37], 4))
+    assert item.properties['foo'] == 'bar'
+    assert item.properties['sar:instrument_mode'] == 'IW'
+    assert item.properties['sar:frequency_band'] == 'C'
+    assert item.assets['unw_phase'].href == 'https://example.com/my_job_id_unw_phase.tif'
+    assert item.assets['corr'].href == 'https://example.com/my_job_id_corr.tif'