diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index b6afc5d8..7fd9c57c 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -39,14 +39,15 @@ jobs:
       - name: Setup miniconda
         uses: conda-incubator/setup-miniconda@v2
         with:
-          auto-update-conda: true
-          miniforge-variant: Mambaforge
+          auto-update-conda: true # false
+          miniforge-version: latest
           channels: conda-forge # defaults automatically added
           python-version: ${{ matrix.python-version }}
           activate-environment: geofabrics_CI
           environment-file: environment_CI.yml
-          use-mamba: true
           auto-activate-base: false
+          # use-only-tar-bz2: true
+
       - name: Conda list
         shell: pwsh
diff --git a/pyproject.toml b/pyproject.toml
index fc080aed..c7440234 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "geofabrics"
-version = "1.1.23"
+version = "1.1.24"
 description = "A package for creating geofabrics for flood modelling."
 readme = "README.md"
 authors = [{ name = "Rose pearson", email = "rose.pearson@niwa.co.nz" }]
diff --git a/src/geofabrics/bathymetry_estimation.py b/src/geofabrics/bathymetry_estimation.py
index 8d8f4d3c..c4920b53 100644
--- a/src/geofabrics/bathymetry_estimation.py
+++ b/src/geofabrics/bathymetry_estimation.py
@@ -1925,6 +1925,7 @@ def _create_flat_water_polygon(self, cross_sections: geopandas.GeoDataFrame):
         start_xy = geopandas.GeoDataFrame(
             geometry=[shapely.geometry.LineString(start_xy)], crs=cross_sections.crs
         )
+        start_xy = Channel(start_xy, resolution=self.cross_section_spacing)
         start_xy_spline = start_xy.get_parametric_spline_fit_points()

@@ -2296,7 +2297,6 @@ def estimate_width_and_slope(
             maximum_threshold=max_threshold,
             min_channel_width=min_channel_width,
         )
-
         # generate a flat water polygon
         river_polygon = self._create_flat_water_polygon(
             cross_sections=cross_sections,
diff --git a/src/geofabrics/dem.py b/src/geofabrics/dem.py
index 8b90e147..b3504ba1 100644
--- a/src/geofabrics/dem.py
+++ b/src/geofabrics/dem.py
@@ -300,6 +300,7 @@ class DemBase(abc.ABC):
         "patch": 6,
         "stopbanks": 7,
         "masked feature": 8,
+        "lakes": 9,
         "interpolated": 0,
         "no data": -1,
     }
@@ -1106,9 +1107,9 @@ def clip_within_polygon(self, polygon_paths: list, label: str):
                 f"No clipping. Polygons {polygon_paths} do not overlap DEM."
             )

-    def interpolate_elevations_within_polygon(
+    def add_points_within_polygon_chunked(
         self,
-        elevations: geometry.EstimatedElevationPoints,
+        elevations: geometry.ElevationPoints,
         method: str,
         cache_path: pathlib.Path,
         label: str,
@@ -1171,7 +1172,7 @@ def interpolate_elevations_within_polygon(
             point_cloud = numpy.concatenate([edge_points, point_cloud])

         # Save river points in a temporary laz file
-        lidar_file = cache_path / "waterways_points.laz"
+        lidar_file = cache_path / f"{label}_points.laz"
         pdal_pipeline_instructions = [
             {
                 "type": "writers.las",
@@ -1196,7 +1197,7 @@ def interpolate_elevations_within_polygon(
         self.logger.info(f"Preparing {[len(chunked_dim_x), len(chunked_dim_y)]} chunks")

         # cycle through index chunks - and collect in a delayed array
-        self.logger.info("Running over ocean chunked")
+        self.logger.info(f"Running over {label} chunked")
         delayed_chunked_matrix = []
         for i, dim_y in enumerate(chunked_dim_y):
             delayed_chunked_x = []
@@ -1246,11 +1247,12 @@ def interpolate_elevations_within_polygon(
         )
         self._write_netcdf_conventions_in_place(self._dem, self.catchment_geometry.crs)

-    def interpolate_rivers(
+    def add_points_within_polygon_nearest_chunked(
         self,
-        elevations: geometry.EstimatedElevationPoints,
+        elevations: geometry.ElevationPoints,
         method: str,
         cache_path: pathlib.Path,
+        label: str,
         k_nearest_neighbours: int = 100,
     ) -> xarray.Dataset:
         """Performs interpolation from estimated bathymetry points within a polygon
@@ -1273,23 +1275,21 @@ def interpolate_rivers(
         # Define the region to rasterise
         region_to_rasterise = elevations.polygons

-        # Extract and saveriver elevations
-        river_points = elevations.points_array
-        river_points_file = cache_path / "river_points.laz"
+        # Temporarily save the points to add
+        points = elevations.points_array
+        points_file = cache_path / f"{label}_points.laz"
         pdal_pipeline_instructions = [
             {
                 "type": "writers.las",
                 "a_srs": f"EPSG:" f"{crs['horizontal']}+" f"{crs['vertical']}",
-                "filename": str(river_points_file),
+                "filename": str(points_file),
                 "compression": "laszip",
             }
         ]
-        pdal_pipeline = pdal.Pipeline(
-            json.dumps(pdal_pipeline_instructions), [river_points]
-        )
+        pdal_pipeline = pdal.Pipeline(json.dumps(pdal_pipeline_instructions), [points])
         pdal_pipeline.execute()

-        # Extract and save river/fan adjacent elevations from DEM
+        # Temporarily save the adjacent points from the DEM
         edge_dem = self._dem.rio.clip(
             region_to_rasterise.dissolve().buffer(self.catchment_geometry.resolution),
             drop=True,
@@ -1306,19 +1306,19 @@ def interpolate_rivers(
             drop=True,
         )

-        # Define the river and mouth edge points
+        # Save provided points
         grid_x, grid_y = numpy.meshgrid(edge_dem.x, edge_dem.y)
         flat_x = grid_x.flatten()
         flat_y = grid_y.flatten()
         flat_z = edge_dem.z.values.flatten()
         mask_z = ~numpy.isnan(flat_z)

-        # Interpolate the estimated river bank heights along only the river
+        # Interpolate the estimated bank heights around the polygon if they exist
         if elevations.bank_heights_exist():
-            # Get the estimated river bank heights and define a mask where nan
-            river_bank_points = elevations.bank_height_points()
-            river_bank_nan_mask = numpy.logical_not(numpy.isnan(river_bank_points["Z"]))
-            # Interpolate from the estimated river bank heights
+            # Get the estimated bank heights and define a mask where nan
+            bank_points = elevations.bank_height_points()
+            bank_nan_mask = numpy.logical_not(numpy.isnan(bank_points["Z"]))
+            # Interpolate from the estimated bank heights
             xy_out = numpy.concatenate(
                 [[flat_x[mask_z]], [flat_y[mask_z]]], axis=0
             ).transpose()
@@ -1328,19 +1328,17 @@ def interpolate_rivers(
                 "method": "linear",
                 "strict": False,
             }
-            estimated_river_edge_z = elevation_from_points(
-                point_cloud=river_bank_points[river_bank_nan_mask],
+            estimated_edge_z = elevation_from_points(
+                point_cloud=bank_points[bank_nan_mask],
                 xy_out=xy_out,
                 options=options,
             )
             # Use the estimated bank heights where lower than the DEM edge values
-            mask_z_river_edge = mask_z.copy()
-            mask_z_river_edge[:] = False
-            mask_z_river_edge[mask_z] = flat_z[mask_z] > estimated_river_edge_z
-            flat_z[mask_z_river_edge] = estimated_river_edge_z[
-                flat_z[mask_z] > estimated_river_edge_z
-            ]
+            mask_z_edge = mask_z.copy()
+            mask_z_edge[:] = False
+            mask_z_edge[mask_z] = flat_z[mask_z] > estimated_edge_z
+            flat_z[mask_z_edge] = estimated_edge_z[flat_z[mask_z] > estimated_edge_z]

         # Use the flat_x/y/z to define edge points and heights
         edge_points = numpy.empty(
@@ -1355,12 +1353,12 @@ def interpolate_rivers(
         edge_points["Y"] = flat_y[mask_z]
         edge_points["Z"] = flat_z[mask_z]

-        river_edge_file = cache_path / "river_edge_points.laz"
+        edge_file = cache_path / f"{label}_edge_points.laz"
         pdal_pipeline_instructions = [
             {
                 "type": "writers.las",
                 "a_srs": f"EPSG:" f"{crs['horizontal']}+" f"{crs['vertical']}",
-                "filename": str(river_edge_file),
+                "filename": str(edge_file),
                 "compression": "laszip",
             }
         ]
@@ -1370,18 +1368,18 @@ def interpolate_rivers(
         pdal_pipeline.execute()

         if (
-            len(river_points) < k_nearest_neighbours
-            or len(edge_points) < k_nearest_neighbours
+            len(points) < raster_options["k_nearest_neighbours"]
+            or len(edge_points) < raster_options["k_nearest_neighbours"]
         ):
             logging.info(
-                f"Fewer river or edge points than the default expected {k_nearest_neighbours}. "
-                f"Updating k_nearest_neighbours to {min(len(river_points), len(edge_points))}."
+                f"Fewer points or edge points than the default expected {raster_options['k_nearest_neighbours']}. "
+                f"Updating k_nearest_neighbours to {min(len(points), len(edge_points))}."
             )
-            k_nearest_neighbours = min(len(river_points), len(edge_points))
-        if k_nearest_neighbours < 3:
+            raster_options["k_nearest_neighbours"] = min(len(points), len(edge_points))
+        if raster_options["k_nearest_neighbours"] < 3:
             logging.warning(
-                f"Not enough river or edge points to meaningfully include {k_nearest_neighbours}. "
-                f"Exiting without including the river and edge points."
+                f"Not enough points or edge points to meaningfully include {raster_options['k_nearest_neighbours']}. "
+                f"Exiting without including the points and edge points."
             )
             return

@@ -1396,22 +1394,24 @@ def interpolate_rivers(
         self.logger.info(f"Preparing {[len(chunked_dim_x), len(chunked_dim_y)]} chunks")

         # cycle through index chunks - and collect in a delayed array
-        self.logger.info("Running over ocean chunked")
+        self.logger.info(
+            "Running over points chunked - nearest of points & edge points"
+        )
         delayed_chunked_matrix = []
         for i, dim_y in enumerate(chunked_dim_y):
             delayed_chunked_x = []
             for j, dim_x in enumerate(chunked_dim_x):
                 self.logger.debug(f"\tLiDAR chunk {[i, j]}")
                 # Load in points
-                river_points = delayed_load_tiles_in_chunk(
-                    lidar_files=[river_points_file],
+                points = delayed_load_tiles_in_chunk(
+                    lidar_files=[points_file],
                     source_crs=raster_options["crs"],
                     chunk_region_to_tile=None,
                     crs=raster_options["crs"],
                 )
-                river_edge_points = delayed_load_tiles_in_chunk(
-                    lidar_files=[river_edge_file],
+                edge_points = delayed_load_tiles_in_chunk(
+                    lidar_files=[edge_file],
                     source_crs=raster_options["crs"],
                     chunk_region_to_tile=None,
                     crs=raster_options["crs"],
@@ -1423,8 +1423,8 @@ def interpolate_rivers(
                     delayed_elevation_over_chunk_from_nearest(
                         dim_x=dim_x,
                         dim_y=dim_y,
-                        points=river_points,
-                        edge_points=river_edge_points,
+                        points=points,
+                        edge_points=edge_points,
                         options=raster_options,
                     ),
                     shape=(len(dim_y), len(dim_x)),
@@ -1437,17 +1437,17 @@ def interpolate_rivers(
         elevations = dask.array.block(delayed_chunked_matrix)

         # Update DEM layers - copy everyhere within the region to rasterise
-        rivers_mask = clip_mask(
+        polygon_mask = clip_mask(
             self._dem.z,
             region_to_rasterise.geometry,
             self.chunk_size,
         )
-        rivers_mask.load()
-        self._dem["z"] = self._dem.z.where(~rivers_mask, elevations)
-        mask = ~(rivers_mask & self._dem.z.notnull())
+        polygon_mask.load()
+        self._dem["z"] = self._dem.z.where(~polygon_mask, elevations)
+        mask = ~(polygon_mask & self._dem.z.notnull())
         self._dem["data_source"] = self._dem.data_source.where(
             mask,
-            self.SOURCE_CLASSIFICATION["rivers and fans"],
+            self.SOURCE_CLASSIFICATION[label],
         )
         self._dem["lidar_source"] = self._dem.lidar_source.where(
             mask, self.SOURCE_CLASSIFICATION["no data"]
diff --git a/src/geofabrics/geometry.py b/src/geofabrics/geometry.py
index c44aabcb..005f870f 100644
--- a/src/geofabrics/geometry.py
+++ b/src/geofabrics/geometry.py
@@ -553,7 +553,7 @@ def sample(self) -> numpy.ndarray:
         return points


-class EstimatedElevationPoints:
+class ElevationPoints:
     """A class for accessing estimated or measured river, mouth and waterway
     elevations as points. Paired elevation and polygon files are expected.
     The elevations are used to interpolate elevations within the polygons.
@@ -630,7 +630,7 @@ def _set_up(
             polygon_list.append(polygon_i)
         # Set CRS, clip to size and reset index
         if len(points_list) == 0:
-            self.logger.warning("No waterways elevations. Ignoring.")
+            self.logger.warning("No elevations. Ignoring.")
Ignoring.") self._points = [] self._polygon = [] return @@ -758,6 +758,43 @@ def z(self): return self._z +class ElevationContours(ElevationPoints): + """Resample at spatial resolution at points""" + + def __init__( + self, + points_files: list, + polygon_files: list, + catchment_geometry: CatchmentGeometry, + z_labels: list = None, + ): + super(ElevationContours, self).__init__( + points_files=points_files, + polygon_files=polygon_files, + catchment_geometry=catchment_geometry, + filter_osm_ids=[], + z_labels=z_labels, + ) + self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}") + + # convert contoursto samples points at resolution + self.sample_contours(self.catchment_geometry.resolution) + + def sample_contours(self, resolution: float) -> numpy.ndarray: + """Sample the contours at the specified resolution.""" + + # convert contours to multipoints + self._points.loc[:, "geometry"] = self._points.geometry.apply( + lambda row: shapely.geometry.MultiPoint( + [ + row.interpolate(i * resolution) + for i in range(int(numpy.ceil(row.length / resolution))) + ] + ) + ) + self._points = self._points.explode(index_parts=True, ignore_index=True) + + class TileInfo: """A class for working with tiling information.""" @@ -854,7 +891,7 @@ def _get_mouth_alignment(self): # Get the river alignment and clip to the river polygon aligned_channel = geopandas.read_file(self.aligned_channel_file) - river_polygon = geopandas.read_file(self.river_polygon_file) + river_polygon = geopandas.read_file(self.river_polygon_file).make_valid() aligned_channel = aligned_channel.clip(river_polygon) # Explode incase the aligned channel is clipped into a MultiPolyLine (x, y) = aligned_channel.explode().iloc[0].geometry.xy diff --git a/src/geofabrics/processor.py b/src/geofabrics/processor.py index 2023bae5..84bb73c3 100644 --- a/src/geofabrics/processor.py +++ b/src/geofabrics/processor.py @@ -290,6 +290,7 @@ def get_instruction_general(self, key: str, subkey: str = None): "interpolation": { "rivers": "rbf", "waterways": "cubic", + "lakes": "linear", "stopbanks": "nearest", "ocean": "rbf", "lidar": "idw", @@ -297,6 +298,7 @@ def get_instruction_general(self, key: str, subkey: str = None): }, "z_labels": { "waterways": "z", + "lakes": "z", "stopbanks": "z", "rivers": "z", "ocean": None, @@ -1233,7 +1235,7 @@ def add_hydrological_features( self.logger.info(f"Incorporating waterways: {elevations}") # Load in bathymetry - estimated_elevations = geometry.EstimatedElevationPoints( + estimated_elevations = geometry.ElevationPoints( points_files=elevations, polygon_files=polygons, filter_osm_ids=self.get_instruction_general( @@ -1248,7 +1250,7 @@ def add_hydrological_features( # Call interpolate river on the DEM - the class checks to see if any pixels # actually fall inside the polygon if len(estimated_elevations.polygons) > 0: # Skip if no waterways - hydrologic_dem.interpolate_elevations_within_polygon( + hydrologic_dem.add_points_within_polygon_chunked( elevations=estimated_elevations, method=self.get_instruction_general( key="interpolation", subkey="waterways" @@ -1265,6 +1267,65 @@ def add_hydrological_features( if cached_file is not None: self.clean_cached_file(cached_file) cached_file = temp_file + # Check for lakes + if "lakes" in self.instructions["data_paths"]: + # Loop through each lake in turn adding individually + subfolder = self.get_instruction_path(key="subfolder") + z_labels = self.get_instruction_general(key="z_labels", subkey="lakes") + lakes = self.instructions["data_paths"]["lakes"] + if 
+            if isinstance(z_labels, str):
+                z_labels = [z_labels for i in range(len(lakes))]
+            elif not isinstance(z_labels, list) or len(z_labels) != len(lakes):
+                raise ValueError(
+                    "There is a mismatch in length between the provided z_labels "
+                    f"and the lakes: {z_labels} {lakes}"
+                )
+            for index, lake_dict in enumerate(lakes):
+                elevation = pathlib.Path(lake_dict["elevations"])
+                polygon = pathlib.Path(lake_dict["extents"])
+                if not elevation.is_absolute():
+                    elevation = subfolder / elevation
+                if not polygon.is_absolute():
+                    polygon = subfolder / polygon
+
+                self.logger.info(f"Incorporating lake: {elevation}")
+                # Load in elevations
+                elevations = geometry.ElevationContours(
+                    points_files=[elevation],
+                    polygon_files=[polygon],
+                    catchment_geometry=self.catchment_geometry,
+                    z_labels=z_labels[index],
+                )
+
+                if (
+                    len(elevations.points_array) == 0
+                    or elevations.polygons.area.sum()
+                    < self.catchment_geometry.resolution**2
+                ):
+                    self.logger.warning(
+                        "No points or an area less than one grid cell in "
+                        f"lake {elevation}. Ignoring."
+                    )
+                    continue
+
+                # Add lake to DEM
+                hydrologic_dem.add_points_within_polygon_nearest_chunked(
+                    elevations=elevations,
+                    method=self.get_instruction_general(
+                        key="interpolation", subkey="lakes"
+                    ),
+                    cache_path=temp_folder,
+                    label="lakes",
+                )
+                temp_file = temp_folder / f"dem_added_{index + 1}_lake.nc"
+                self.logger.info(
+                    f"Save temp DEM with lake {index + 1} added to netCDF: {temp_file}"
+                )
+                hydrologic_dem.save_and_load_dem(temp_file)
+                # Remove previous cached file and replace with new one
+                if cached_file is not None:
+                    self.clean_cached_file(cached_file)
+                cached_file = temp_file
         # Load in river bathymetry and incorporate where discernable at the resolution
         if "rivers" in self.instructions["data_paths"]:
             # Loop through each river in turn adding individually
@@ -1289,7 +1350,7 @@ def add_hydrological_features(
                 self.logger.info(f"Incorporating river: {elevation}")

                 # Load in bathymetry
-                estimated_elevations = geometry.EstimatedElevationPoints(
+                estimated_elevations = geometry.ElevationPoints(
                     points_files=[elevation],
                     polygon_files=[polygon],
                     catchment_geometry=self.catchment_geometry,
@@ -1309,12 +1370,13 @@ def add_hydrological_features(
                 # Call interpolate river on the DEM - the class checks to see if any pixels
                 # actually fall inside the polygon
-                hydrologic_dem.interpolate_rivers(
+                hydrologic_dem.add_points_within_polygon_nearest_chunked(
                     elevations=estimated_elevations,
                     method=self.get_instruction_general(
                         key="interpolation", subkey="rivers"
                     ),
                     cache_path=temp_folder,
+                    label="rivers and fans",
                 )
                 temp_file = temp_folder / f"dem_added_{index + 1}_rivers.nc"
                 self.logger.info(
@@ -1346,7 +1408,7 @@ def add_hydrological_features(
             self.logger.info(f"Incorporating stopbanks: {elevations}")

             # Load in bathymetry
-            estimated_elevations = geometry.EstimatedElevationPoints(
+            estimated_elevations = geometry.ElevationPoints(
                 points_files=elevations,
                 polygon_files=polygons,
                 catchment_geometry=self.catchment_geometry,
@@ -1358,7 +1420,7 @@ def add_hydrological_features(
             # Call interpolate river on the DEM - the class checks to see if any pixels
             # actually fall inside the polygon
             if len(estimated_elevations.polygons) > 0:  # Skip if no stopbanks
-                hydrologic_dem.interpolate_elevations_within_polygon(
+                hydrologic_dem.add_points_within_polygon_chunked(
                     elevations=estimated_elevations,
                     method=self.get_instruction_general(
                         key="interpolation", subkey="stopbanks"
@@ -1570,7 +1632,7 @@ def run(self):
                 elevation_range=None,
             )
             patch_paths = self.get_vector_or_raster_paths(
-                key="patchs", data_type="raster"
+                key="patches", data_type="raster"
             )
             if self.get_patch_instruction("patch_on_top"):
                 patch_paths = patch_paths[::-1]  # Reverse so first ends up on top
@@ -2641,8 +2703,11 @@ def align_channel_from_osm(
         # Note projection function is limited between [0, osm_channel.length]
         end_split_length = float(osm_channel.project(network_end))
         start_split_length = float(osm_channel.project(network_start))
-        # Ensure the OSM line is defined upstream
-        if start_split_length > end_split_length:
+        # Ensure the OSM line is defined mouth to upstream
+        if (
+            start_split_length > end_split_length
+            or start_split_length >= float(osm_channel.length) / 2
+        ):
             # Reverse direction of the geometry
             osm_channel.loc[0, "geometry"] = shapely.geometry.LineString(
                 list(osm_channel.iloc[0].geometry.coords)[::-1]
@@ -2658,20 +2723,17 @@
                 osm_channel.loc[0].geometry, split_point.loc[0], tolerance=0.1
             )
             osm_channel = geopandas.GeoDataFrame(
-                {
-                    "geometry": [
-                        list(shapely.ops.split(osm_channel, split_point.loc[0]).geoms)[
-                            1
-                        ]
-                    ]
-                },
+                geometry=[
+                    list(shapely.ops.split(osm_channel, split_point.loc[0]).geoms)[1]
+                ],
                 crs=crs,
             )
-        else:
+        elif start_split_length == 0 and not self.get_bathymetry_instruction(
+            "keep_downstream_osm"
+        ):
             self.logger.warning(
-                "The OSM reference line starts upstream of the"
-                "network line. The bottom of the network will be"
-                "ignored over a stright line distance of "
+                "The OSM reference line starts upstream of the network line. The bottom "
+                "of the network will be ignored over a straight line distance of "
                 f"{osm_channel.distance(network_start)}"
             )
         # Clip end if needed - recacluate clip position incase front clipped.
@@ -2682,22 +2744,34 @@
                 osm_channel.loc[0].geometry, split_point.loc[0], tolerance=0.1
             )
             osm_channel = geopandas.GeoDataFrame(
-                {
-                    "geometry": [
-                        list(shapely.ops.split(osm_channel, split_point.loc[0]).geoms)[
-                            0
-                        ]
-                    ]
-                },
+                geometry=[
+                    list(shapely.ops.split(osm_channel, split_point.loc[0]).geoms)[0]
+                ],
                 crs=crs,
             )
         else:
             self.logger.warning(
-                "The OSM reference line ends downstream of the"
-                "network line. The top of the network will be"
-                "ignored over a stright line distance of "
+                "The OSM reference line ends upstream of the network line. The top of "
+                "the network will be ignored over a straight line distance of "
                 f"{osm_channel.distance(network_end)}"
             )
+        # In case of both network points at far end ensure only short end is returned
+        if start_split_length == 0 and end_split_length == 0:
+            split_point = osm_channel.interpolate(channel.length)
+            osm_channel = shapely.ops.snap(
+                osm_channel.loc[0].geometry, split_point.loc[0], tolerance=0.1
+            )
+            osm_channel = geopandas.GeoDataFrame(
+                geometry=[
+                    list(shapely.ops.split(osm_channel, split_point.loc[0]).geoms)[0]
+                ],
+                crs=crs,
+            )
+            self.logger.warning(
+                "The OSM reference line ends upstream of both ends of the network line. It "
+                "will be clipped to the total length of the network line "
+                f"{channel.length}. Please review if unexpected."
+            )

         if self.debug:
             osm_channel.to_file(
@@ -3789,7 +3863,7 @@ def maximum_elevation_in_polygon(
             if dem.x[-1] - dem.x[0] > 0
             else slice(bbox[2], bbox[0])
         )
-        # breakpoint()
+
         small_z = dem.z.sel(x=x_slice, y=y_slice)

         # clip to polygon and return minimum elevation
@@ -3847,7 +3921,7 @@ def sample(geometry):
         for index, rows in points.groupby(level=0):
             dem_file = self.get_result_file_path(key="raw_dem", index=index)
             dem = self.load_dem(filename=dem_file)
-            # breakpoint()
+
             zs = rows["polygons"].apply(
                 lambda geometry: self.maximum_elevation_in_polygon(
                     geometry=geometry, dem=dem
diff --git a/src/geofabrics/version.py b/src/geofabrics/version.py
index fda6eb8c..9a1caf91 100644
--- a/src/geofabrics/version.py
+++ b/src/geofabrics/version.py
@@ -3,4 +3,4 @@
 Contains the package version information
 """

-__version__ = "1.1.23"
+__version__ = "1.1.24"
diff --git a/tests/test_add_patches_ngaruroro/instruction.json b/tests/test_add_patches_ngaruroro/instruction.json
index 37019306..b940053c 100644
--- a/tests/test_add_patches_ngaruroro/instruction.json
+++ b/tests/test_add_patches_ngaruroro/instruction.json
@@ -17,7 +17,7 @@
         "local_cache": "tests/test_add_patches_ngaruroro/data",
         "subfolder": "results",
         "extents": "catchment_boundary.geojson",
-        "patchs": ["../patch_1.nc", "../patch_2.tif"],
+        "patches": ["../patch_1.nc", "../patch_2.tif"],
         "raw_dem": "../initial_dem.nc",
         "result_dem": "test_dem.nc",
         "benchmark_dem": "benchmark_dem.nc"
diff --git a/tests/test_dem_generation_westport_1/test_case.py b/tests/test_dem_generation_westport_1/test_case.py
index 110cb866..a8e9c36f 100644
--- a/tests/test_dem_generation_westport_1/test_case.py
+++ b/tests/test_dem_generation_westport_1/test_case.py
@@ -55,7 +55,7 @@ class Test(base_test.Test):
         "CL2_BR20_2020_1000_4212.laz": 8340310,
         "CL2_BR20_2020_1000_4213.laz": 6094309,
         "CL2_BR20_2020_1000_4214.laz": 8492543,
-        DATASET + "_TileIndex.zip": 1125874,
+        DATASET + "_TileIndex.zip": 1848391,
     }

     @classmethod
diff --git a/tests/test_dem_generation_westport_2/test_case.py b/tests/test_dem_generation_westport_2/test_case.py
index e2dbf82c..c404ca80 100644
--- a/tests/test_dem_generation_westport_2/test_case.py
+++ b/tests/test_dem_generation_westport_2/test_case.py
@@ -41,7 +41,7 @@ class Test(base_test.Test):
     FILE_SIZES = {
         "CL2_BR20_2020_1000_4012.laz": 2636961,
         "CL2_BR20_2020_1000_4013.laz": 3653378,
-        DATASET + "_TileIndex.zip": 1125874,
+        DATASET + "_TileIndex.zip": 1848391,
     }

     @classmethod
diff --git a/tests/test_dem_generation_westport_3/test_case.py b/tests/test_dem_generation_westport_3/test_case.py
index b43389d5..ff40928f 100644
--- a/tests/test_dem_generation_westport_3/test_case.py
+++ b/tests/test_dem_generation_westport_3/test_case.py
@@ -45,7 +45,7 @@ class Test(base_test.Test):
     FILE_SIZES = {
         "CL2_BR20_2020_1000_4012.laz": 2636961,
         "CL2_BR20_2020_1000_4112.laz": 9036407,
-        DATASET + "_TileIndex.zip": 1125874,
+        DATASET + "_TileIndex.zip": 1848391,
     }

     @classmethod
diff --git a/tests/test_dem_generation_westport_4/test_case.py b/tests/test_dem_generation_westport_4/test_case.py
index 2f0f1810..53f20c71 100644
--- a/tests/test_dem_generation_westport_4/test_case.py
+++ b/tests/test_dem_generation_westport_4/test_case.py
@@ -47,7 +47,7 @@ class Test(base_test.Test):
         "CL2_BR21_2020_1000_4704.laz": 20851153,
         "CL2_BR21_2020_1000_4705.laz": 19749374,
         "CL2_BR21_2020_1000_4804.laz": 18379794,
-        DATASET + "_TileIndex.zip": 1125874,
+        DATASET + "_TileIndex.zip": 1848391,
     }

     @classmethod
diff --git a/tests/test_many_stages_waikanae/data/benchmark.nc b/tests/test_many_stages_waikanae/data/benchmark.nc
index 5e08ed96..b91a5d04 100644
--- a/tests/test_many_stages_waikanae/data/benchmark.nc
+++ b/tests/test_many_stages_waikanae/data/benchmark.nc
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:23a43a4b0ea54fc75fb3eb326b67011ae9b7318cc635b229fcc54cac5fc74cc2
-size 110896
+oid sha256:65f580ba43b43b9712dfbe2747c3ab04ef8eb7d9c1a3251bd4ad4d38b93424d7
+size 111104
diff --git a/tests/test_many_stages_waikanae/data/lake_contours.gpkg b/tests/test_many_stages_waikanae/data/lake_contours.gpkg
new file mode 100755
index 00000000..75d697b0
Binary files /dev/null and b/tests/test_many_stages_waikanae/data/lake_contours.gpkg differ
diff --git a/tests/test_many_stages_waikanae/data/lake_outline.gpkg b/tests/test_many_stages_waikanae/data/lake_outline.gpkg
new file mode 100755
index 00000000..604a5c62
Binary files /dev/null and b/tests/test_many_stages_waikanae/data/lake_outline.gpkg differ
diff --git a/tests/test_many_stages_waikanae/instruction.json b/tests/test_many_stages_waikanae/instruction.json
index 0fd48855..b5d7cc75 100644
--- a/tests/test_many_stages_waikanae/instruction.json
+++ b/tests/test_many_stages_waikanae/instruction.json
@@ -135,6 +135,7 @@
         "rivers": [{"extents": "rivers/river_polygon.geojson", "elevations": "rivers/river_bathymetry.geojson"}],
         "waterways": [{"extents": "waterways/closed_waterways_polygon.geojson", "elevations": "waterways/closed_waterways_elevation.geojson"}, {"extents": "waterways/open_waterways_polygon.geojson", "elevations": "waterways/open_waterways_elevation.geojson"}],
+        "lakes": [{"extents": "../lake_outline.gpkg", "elevations": "../lake_contours.gpkg"}],
         "result_dem": "test_dem.nc"
     },
     "datasets": {
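For context, the lakes pathway added in this diff rests on one small geometric step: geometry.ElevationContours densifies each contour line into evenly spaced points before add_points_within_polygon_nearest_chunked grids them into the DEM. The standalone sketch below is not part of the patch; the toy geometry, z value, and EPSG:2193 CRS are illustrative assumptions, and it only mirrors the resampling logic added in geometry.py.

import geopandas
import numpy
import shapely.geometry

def sample_contours(
    contours: geopandas.GeoDataFrame, resolution: float
) -> geopandas.GeoDataFrame:
    # Replace each contour LineString with points spaced `resolution` apart,
    # walking from the line's start; the far endpoint itself is not emitted.
    contours = contours.copy()
    contours.loc[:, "geometry"] = contours.geometry.apply(
        lambda row: shapely.geometry.MultiPoint(
            [
                row.interpolate(i * resolution)
                for i in range(int(numpy.ceil(row.length / resolution)))
            ]
        )
    )
    # One sample point per row; each row keeps its contour's z attribute
    return contours.explode(index_parts=True, ignore_index=True)

# Toy example: a 30 m contour at z = 10 m sampled every 10 m -> points at x = 0, 10, 20
contours = geopandas.GeoDataFrame(
    {"z": [10.0]},
    geometry=[shapely.geometry.LineString([(0, 0), (30, 0)])],
    crs="EPSG:2193",  # assumed CRS, for illustration only
)
print(sample_contours(contours, resolution=10.0))

Each resulting point carries its contour's z value into the temporary LAZ file that the chunked nearest-neighbour interpolation then consumes.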