diff --git a/.github/workflows/build_main.yml b/.github/workflows/build_main.yml index 4f0c676cd..f1088a5b6 100644 --- a/.github/workflows/build_main.yml +++ b/.github/workflows/build_main.yml @@ -28,7 +28,7 @@ jobs: uses: ./.github/actions/scala_build - name: build python uses: ./.github/actions/python_build - # - name: build R - # uses: ./.github/actions/r_build + - name: build R + uses: ./.github/actions/r_build - name: upload artefacts uses: ./.github/actions/upload_artefacts diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a7ca1a226..5519ecec0 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,8 +26,28 @@ jobs: with: documentation_path: docs/source requirements_path: docs/docs-requirements.txt + - name: checkout v0.3.x archive + # Please do not change any step in here, even though it may look hacky + # This is the only way to emulate git archive --remote with actions/checkout + # git checkout gh-pages-v0.3.x is required to have a local branch for archiving + # git pull is optional, but it's a good practice to have the latest version + # git checkout gh-pages right after is required to go back to the working branch + # mkdir ./v0.3.x is required to create a directory for the archive + # git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x is required to extract the archive + # in the right place + # git add --all is required to add the new files to the working branch + # git commit -am "Adding v0.3.x docs" is required to commit the changes + run: | + git checkout gh-pages-v0.3.x + git pull + git checkout gh-pages + mkdir ./v0.3.x + git archive gh-pages-v0.3.x | tar -x -C ./v0.3.x + git add --all + git commit -am "Adding v0.3.x docs" - name: Push changes uses: ad-m/github-push-action@master with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages + diff --git a/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION b/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION index 351d48fe9..7bc9e3f62 100644 --- a/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION +++ b/R/sparkR-mosaic/sparkrMosaic/DESCRIPTION @@ -1,6 +1,6 @@ Package: sparkrMosaic Title: SparkR bindings for Databricks Mosaic -Version: 0.3.14 +Version: 0.4.0 Authors@R: person("Robert", "Whiffin", , "robert.whiffin@databricks.com", role = c("aut", "cre") ) diff --git a/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION b/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION index dc3fd0904..4dbd7b03d 100644 --- a/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION +++ b/R/sparklyr-mosaic/sparklyrMosaic/DESCRIPTION @@ -1,6 +1,6 @@ Package: sparklyrMosaic Title: sparklyr bindings for Databricks Mosaic -Version: 0.3.14 +Version: 0.4.0 Authors@R: person("Robert", "Whiffin", , "robert.whiffin@databricks.com", role = c("aut", "cre") ) diff --git a/R/sparklyr-mosaic/tests.R b/R/sparklyr-mosaic/tests.R index 18d441864..17bdd882a 100644 --- a/R/sparklyr-mosaic/tests.R +++ b/R/sparklyr-mosaic/tests.R @@ -11,7 +11,7 @@ library(sparklyr) spark_home <- Sys.getenv("SPARK_HOME") spark_home_set(spark_home) -install.packages("sparklyrMosaic_0.3.14.tar.gz", repos = NULL) +install.packages("sparklyrMosaic_0.4.0.tar.gz", repos = NULL) library(sparklyrMosaic) # find the mosaic jar in staging diff --git a/docs/code-example-notebooks/predicates.scala b/docs/code-example-notebooks/predicates.scala index 9374d4f67..df51a94fb 100644 --- a/docs/code-example-notebooks/predicates.scala +++ b/docs/code-example-notebooks/predicates.scala @@ -87,3 +87,36 @@ df.select(st_intersects($"p1", $"p2")).show(false) // MAGIC %r // MAGIC df <- 
createDataFrame(data.frame(p1 = "POLYGON ((0 0, 0 3, 3 3, 3 0))", p2 = "POLYGON ((2 2, 2 4, 4 4, 4 2))")) // MAGIC showDF(select(df, st_intersects(column("p1"), column("p2"))), truncate=F) + +// MAGIC %md +// MAGIC ### st_within + +// COMMAND ---------- + +// MAGIC %python +// MAGIC help(st_within) + +// COMMAND ---------- + +// MAGIC %python +// MAGIC df = spark.createDataFrame([{'point': 'POINT (25 15)', 'poly': 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'}]) +// MAGIC df.select(st_within('point', 'poly')).show() + +// COMMAND ---------- + +val df = List(("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")).toDF("point", "poly") +df.select(st_within($"point", $"poly")).show() + +// COMMAND ---------- + +// MAGIC %sql +// MAGIC SELECT st_within("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") + +// COMMAND ---------- + +// MAGIC %r +// MAGIC df <- createDataFrame(data.frame(point = c( "POINT (25 15)"), poly = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")) +// MAGIC showDF(select(df, st_within(column("point"), column("poly")))) + +// COMMAND ---------- + diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt index a448f7f7d..969601087 100644 --- a/docs/docs-requirements.txt +++ b/docs/docs-requirements.txt @@ -1,10 +1,9 @@ setuptools==68.1.2 -Sphinx==4.4.0 -sphinx-material==0.0.35 -nbsphinx==0.8.8 +Sphinx==6.1.3 +sphinx-material==0.0.36 +nbsphinx>=0.8.8 ipython>=8.10.1 sphinxcontrib-fulltoc==1.2.0 livereload==2.6.3 -autodocsumm==0.2.7 -sphinx-tabs==3.2.0 -renku-sphinx-theme==0.2.3 \ No newline at end of file +sphinx-tabs==3.4.4 +renku-sphinx-theme==0.3.0 \ No newline at end of file diff --git a/docs/source/api/raster-format-readers.rst b/docs/source/api/raster-format-readers.rst index dabcc821e..3e0c6443e 100644 --- a/docs/source/api/raster-format-readers.rst +++ b/docs/source/api/raster-format-readers.rst @@ -4,8 +4,9 @@ Raster Format Readers Intro -################ +##### Mosaic provides spark readers for the following raster formats: + * GTiff (GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/gtiff.html * COG (Cloud Optimized GeoTiff) using .tif file extension - https://gdal.org/drivers/raster/cog.html * HDF4 using .hdf file extension - https://gdal.org/drivers/raster/hdf4.html @@ -20,6 +21,7 @@ Mosaic provides spark readers for the following raster formats: * XPM using .xpm file extension - https://gdal.org/drivers/raster/xpm.html * GRIB using .grb file extension - https://gdal.org/drivers/raster/grib.html * Zarr using .zarr file extension - https://gdal.org/drivers/raster/zarr.html + Other formats are supported if supported by GDAL available drivers. Mosaic provides two flavors of the readers: @@ -32,6 +34,7 @@ spark.read.format("gdal") A base Spark SQL data source for reading GDAL raster data sources. It reads metadata of the raster and exposes the direct paths for the raster files. The output of the reader is a DataFrame with the following columns: + * tile - loaded raster tile (RasterTileType) * ySize - height of the raster in pixels (IntegerType) * xSize - width of the raster in pixels (IntegerType) @@ -94,6 +97,7 @@ If the raster pixels are larger than the grid cells, the cell values can be calc The interpolation method used is Inverse Distance Weighting (IDW) where the distance function is a k_ring distance of the grid. 
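A minimal sketch of driving this grid reader from Python, assuming the format name raster_to_grid (the section header is not shown in this hunk) and illustrative paths and option values; the option names are described in the list below:

    import mosaic as mos

    grid_df = (
        mos.read()
        .format("raster_to_grid")          # assumed reader name for this section
        .option("fileExtension", "*.tif")  # only pick up GeoTiff files
        .option("resolution", "8")         # resolution of the output grid
        .load("dbfs:/FileStore/rasters/")  # illustrative path
    )
    grid_df.printSchema()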
The reader supports the following options: + * fileExtension - file extension of the raster file (StringType) - default is *.* * vsizip - if the rasters are zipped files, set this to true (BooleanType) * resolution - resolution of the output grid (IntegerType) diff --git a/docs/source/api/spatial-predicates.rst b/docs/source/api/spatial-predicates.rst index 1a45dc745..09fc6fa31 100644 --- a/docs/source/api/spatial-predicates.rst +++ b/docs/source/api/spatial-predicates.rst @@ -58,6 +58,8 @@ st_contains | true| +------------------------+ +.. note:: ST_Within is the inverse of ST_Contains, where ST_Contains(a, b) == ST_Within(b, a). + st_intersects ************* @@ -114,4 +116,62 @@ st_intersects | true| +---------------------+ -.. note:: Intersection logic will be dependent on the chosen geometry API (ESRI or JTS). ESRI is only available for mosaic < 0.4.x series, in mosaic >= 0.4.0 JTS is the only geometry API. \ No newline at end of file +.. note:: Intersection logic depends on the chosen geometry API (ESRI or JTS). ESRI is only available in the mosaic < 0.4.x series; as of mosaic 0.4.0, JTS is the only geometry API. + +st_within +********* + +.. function:: st_within(geom1, geom2) + + Returns `true` if `geom1` is spatially within `geom2`. + + :param geom1: Geometry + :type geom1: Column + :param geom2: Geometry + :type geom2: Column + :rtype: Column: BooleanType + + :example: + +.. tabs:: + .. code-tab:: py + + df = spark.createDataFrame([{'point': 'POINT (25 15)', 'poly': 'POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))'}]) + df.select(st_within('point', 'poly')).show() + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + + .. code-tab:: scala + + val df = List(("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")).toDF("point", "poly") + df.select(st_within($"point", $"poly")).show() + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + + .. code-tab:: sql + + SELECT st_within("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + + .. code-tab:: r R + + df <- createDataFrame(data.frame(point = c( "POINT (25 15)"), poly = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")) + showDF(select(df, st_within(column("point"), column("poly")))) + +----------------------+ + |st_within(point, poly)| + +----------------------+ + | true| + +----------------------+ + +.. note:: ST_Within is the inverse of ST_Contains, where ST_Contains(a, b) == ST_Within(b, a). + diff --git a/docs/source/api/vector-format-readers.rst b/docs/source/api/vector-format-readers.rst index 8825803d5..8d9b420e2 100644 --- a/docs/source/api/vector-format-readers.rst +++ b/docs/source/api/vector-format-readers.rst @@ -8,36 +8,26 @@ Intro Mosaic provides spark readers for vector files supported by GDAL OGR drivers. Only the drivers that are built by default are supported.
Here are some common useful file formats: - * GeoJSON (also ESRIJSON, TopoJSON) - https://gdal.org/drivers/vector/geojson.html - * ESRI File Geodatabase (FileGDB) and ESRI File Geodatabase vector (OpenFileGDB) - Mosaic implements named reader geo_db (described in this doc) - https://gdal.org/drivers/vector/filegdb.html - * ESRI Shapefile / DBF (ESRI Shapefile) - Mosaic implements named reader shapefile (described in this doc) - https://gdal.org/drivers/vector/shapefile.html - * Network Common Data Form (netCDF) - Mosaic implements raster reader also - https://gdal.org/drivers/raster/netcdf.html - * (Geo)Parquet (Parquet) - Mosaic will be implementing a custom reader soon - https://gdal.org/drivers/vector/parquet.html - * Spreadsheets (XLSX, XLS, ODS) - https://gdal.org/drivers/vector/xls.html - * U.S. Census TIGER/Line (TIGER) - https://gdal.org/drivers/vector/tiger.html - * PostgreSQL Dump (PGDump) - https://gdal.org/drivers/vector/pgdump.html - * Keyhole Markup Language (KML) - https://gdal.org/drivers/vector/kml.html - * Geography Markup Language (GML) - https://gdal.org/drivers/vector/gml.html - * GRASS - option for Linear Referencing Systems (LRS) - https://gdal.org/drivers/vector/grass.html + + * GeoJSON (also ESRIJSON, TopoJSON) https://gdal.org/drivers/vector/geojson.html + * ESRI File Geodatabase (FileGDB) and ESRI File Geodatabase vector (OpenFileGDB). Mosaic implements named reader geo_db (described in this doc). https://gdal.org/drivers/vector/filegdb.html + * ESRI Shapefile / DBF (ESRI Shapefile). Mosaic implements named reader shapefile (described in this doc). https://gdal.org/drivers/vector/shapefile.html + * Network Common Data Form (netCDF). Mosaic also implements a raster reader. https://gdal.org/drivers/raster/netcdf.html + * (Geo)Parquet (Parquet). Mosaic will be implementing a custom reader soon. https://gdal.org/drivers/vector/parquet.html + * Spreadsheets (XLSX, XLS, ODS) https://gdal.org/drivers/vector/xls.html + * U.S. Census TIGER/Line (TIGER) https://gdal.org/drivers/vector/tiger.html + * PostgreSQL Dump (PGDump) https://gdal.org/drivers/vector/pgdump.html + * Keyhole Markup Language (KML) https://gdal.org/drivers/vector/kml.html + * Geography Markup Language (GML) https://gdal.org/drivers/vector/gml.html + * GRASS - option for Linear Referencing Systems (LRS) https://gdal.org/drivers/vector/grass.html + For more information please refer to gdal documentation: https://gdal.org/drivers/vector/index.html Mosaic provides two flavors of the readers: - * spark.read.format("ogr") for reading 1 file per spark task - * mos.read().format("multi_read_ogr") for reading file in parallel with multiple spark tasks +* spark.read.format("ogr") for reading 1 file per spark task +* mos.read().format("multi_read_ogr") for reading a file in parallel with multiple spark tasks spark.read.format("ogr") @@ -46,12 +36,13 @@ A base Spark SQL data source for reading GDAL vector data sources. The output of the reader is a DataFrame with inferred schema. The schema is inferred from both features and fields in the vector file. Each feature will be provided as 2 columns: - * geometry - geometry of the feature (GeometryType) - * srid - spatial reference system identifier of the feature (StringType) +* geometry - geometry of the feature (GeometryType) +* srid - spatial reference system identifier of the feature (StringType) The fields of the feature will be provided as columns in the DataFrame. The types of the fields are coerced to most concrete type that can hold all the values.
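A minimal sketch of the single-task ogr reader in Python; the GPKG driver name and the input path are illustrative assumptions, and the option names come from the list that follows:

    df = (
        spark.read.format("ogr")
        .option("driverName", "GPKG")  # assumed GDAL driver for a GeoPackage source
        .option("asWKB", "false")      # keep geometries as GeometryType rather than WKB
        .load("dbfs:/FileStore/vectors/buildings.gpkg")  # illustrative path
    )
    df.select("geometry", "srid").show(5)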
The reader supports the following options: + * driverName - GDAL driver name (StringType) * vsizip - if the vector files are zipped files, set this to true (BooleanType) * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false @@ -109,12 +100,13 @@ Chunk size is the number of file rows that will be read per single task. The output of the reader is a DataFrame with inferred schema. The schema is inferred from both features and fields in the vector file. Each feature will be provided as 2 columns: - * geometry - geometry of the feature (GeometryType) - * srid - spatial reference system identifier of the feature (StringType) +* geometry - geometry of the feature (GeometryType) +* srid - spatial reference system identifier of the feature (StringType) The fields of the feature will be provided as columns in the DataFrame. The types of the fields are coerced to most concrete type that can hold all the values. The reader supports the following options: + * driverName - GDAL driver name (StringType) * vsizip - if the vector files are zipped files, set this to true (BooleanType) * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false @@ -171,6 +163,7 @@ Mosaic provides a reader for GeoDB files natively in Spark. The output of the reader is a DataFrame with inferred schema. Only 1 file per task is read. For parallel reading of large files use the multi_read_ogr reader. The reader supports the following options: + * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false * layerName - name of the layer to read (StringType) * layerNumber - number of the layer to read (IntegerType) @@ -223,6 +216,7 @@ Mosaic provides a reader for Shapefiles natively in Spark. The output of the reader is a DataFrame with inferred schema. Only 1 file per task is read. For parallel reading of large files use the multi_read_ogr reader. The reader supports the following options: + * asWKB - if the geometry should be returned as WKB (BooleanType) - default is false * layerName - name of the layer to read (StringType) * layerNumber - number of the layer to read (IntegerType) diff --git a/docs/source/conf.py b/docs/source/conf.py index c2772c7a0..b8e4b5c3d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -22,7 +22,7 @@ author = 'Stuart Lynn, Milos Colic, Erni Durdevic, Robert Whiffin, Timo Roest' # The full version, including alpha/beta/rc tags -release = "v0.3.14" +release = "v0.4.0" # -- General configuration --------------------------------------------------- diff --git a/docs/source/index.rst b/docs/source/index.rst index 2aa34a1cd..5c446b10e 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -68,6 +68,54 @@ Mosaic provides: * optimisations for performing point-in-polygon joins using an approach we co-developed with Ordnance Survey (`blog post `_); and * the choice of a Scala, SQL and Python API. +.. note:: + For Mosaic versions < 0.4.0, please use the `0.3.x docs `_. + + +Version 0.4.0 +============= + +We recommend using Databricks Runtime version 13.3 LTS with Photon enabled. + +.. warning:: + Mosaic 0.4.x series only supports DBR 13.x. + If running on a different DBR, it will throw an exception: + + **DEPRECATION ERROR: Mosaic v0.4.x series only supports Databricks Runtime 13.
You can specify `%pip install 'databricks-mosaic<0.4,>=0.3'` for DBR < 13.** +As of the 0.4.0 release, Mosaic issues the following ERROR when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ : + +**DEPRECATION ERROR: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic 0.4.x series restricts executing this cluster.** + +As of Mosaic 0.4.0 (subject to change in follow-on releases): + * Mosaic SQL expressions cannot yet be registered with `Unity Catalog `_ due to API changes affecting DBRs >= 13. + * `Assigned Clusters `_ : Mosaic Python, R, and Scala APIs. + * `Shared Access Clusters `_ : Mosaic Scala API (JVM) with Admin `allowlisting `_ ; Python bindings to Mosaic Scala APIs are blocked by Py4J Security on Shared Access Clusters. + +.. note:: + As of Mosaic 0.4.0 (subject to change in follow-on releases): + + * `Unity Catalog `_ : Enforces process isolation which is difficult to accomplish with custom JVM libraries; as such only built-in (aka platform provided) JVM APIs can be invoked from other supported languages in Shared Access Clusters. + * `Volumes `_ : Along the same principle of isolation, clusters (both assigned and shared access) can read Volumes via relevant built-in readers and writers or via custom Python calls which do not involve any custom JVM code. + + + +Version 0.3.x Series +==================== + +We recommend using Databricks Runtime version 12.2 LTS with Photon enabled. +For Mosaic versions < 0.4.0, please use the `0.3.x docs `_. + +.. warning:: + Mosaic 0.3.x series does not support DBR 13.x. + +As of the 0.3.11 release, Mosaic issues the following WARNING when initialized on a cluster that is neither Photon Runtime nor Databricks Runtime ML `ADB `_ | `AWS `_ | `GCP `_ : + +**DEPRECATION WARNING: Please use a Databricks Photon-enabled Runtime for performance benefits or Runtime ML for spatial AI benefits; Mosaic will stop working on this cluster after v0.3.x.** +If you are receiving this warning in v0.3.11+, you will want to begin planning for a supported runtime. The reason we are making this change is that we are streamlining Mosaic internals to be more aligned with future product APIs which are powered by Photon. Along this direction of change, Mosaic has standardized to JTS as its default and supported Vector Geometry Provider. + + + Documentation @@ -83,6 +131,7 @@ Documentation usage/usage models/models literature/videos + v0.3.x/index Indices and tables diff --git a/pom.xml b/pom.xml index 8be751ab0..cdb4c8d2c 100644 --- a/pom.xml +++ b/pom.xml @@ -149,7 +149,7 @@ org.scoverage scoverage-maven-plugin - 2.0.0 + 2.0.1 scoverage-report @@ -191,7 +191,7 @@ org.apache.maven.plugins maven-surefire-plugin - 3.2.3 + 3.2.5 true diff --git a/python/mosaic/api/functions.py b/python/mosaic/api/functions.py index fff13d493..9819caa5f 100644 --- a/python/mosaic/api/functions.py +++ b/python/mosaic/api/functions.py @@ -16,6 +16,7 @@ "st_length", "st_perimeter", "st_convexhull", + "st_concavehull", "st_buffer", "st_bufferloop", "st_dimension", @@ -154,6 +155,45 @@ def st_convexhull(geom: ColumnOrName) -> Column: ) + +def st_concavehull(geom: ColumnOrName, concavity: ColumnOrName, has_holes: Any = False) -> Column: + """ + Compute the concave hull of a geometry or multi-geometry object. + It uses lengthRatio (the `concavity` argument) and + allowHoles (the `has_holes` argument) to determine the concave hull.
lengthRatio is the fraction of the + difference between the longest and shortest edge lengths in the Delaunay + Triangulation. If set to 1, this is the same as the convex hull. If set to + 0, it produces maximum concaveness. allowHoles is a boolean that + determines whether the concave hull can have holes. If set to true, the + concave hull can have holes. If set to false, the concave hull will not have + holes. (For PostGIS, the default is false.) + + Parameters + ---------- + geom : Column + The input geometry + concavity : Column + The concavity (lengthRatio) of the hull + has_holes : Column or bool + Whether the hull is allowed to contain holes (default False) + + Returns + ------- + Column + A polygon + + """ + + if isinstance(has_holes, bool): + has_holes = lit(has_holes) + + return config.mosaic_context.invoke_function( + "st_concavehull", + pyspark_to_java_column(geom), + pyspark_to_java_column(concavity), + pyspark_to_java_column(has_holes) + ) + + def st_buffer(geom: ColumnOrName, radius: ColumnOrName) -> Column: """ Compute the buffered geometry based on geom and radius. @@ -177,7 +217,7 @@ def st_buffer(geom: ColumnOrName, radius: ColumnOrName) -> Column: def st_bufferloop( - geom: ColumnOrName, inner_radius: ColumnOrName, outer_radius: ColumnOrName + geom: ColumnOrName, inner_radius: ColumnOrName, outer_radius: ColumnOrName ) -> Column: """ Compute the buffered geometry loop (hollow ring) based on geom and provided radius-es. @@ -323,7 +363,7 @@ def st_transform(geom: ColumnOrName, srid: ColumnOrName) -> Column: def st_hasvalidcoordinates( - geom: ColumnOrName, crs: ColumnOrName, which: ColumnOrName + geom: ColumnOrName, crs: ColumnOrName, which: ColumnOrName ) -> Column: """ Checks if all points in geometry are valid with respect to crs bounds. @@ -530,7 +570,7 @@ def st_distance(geom1: ColumnOrName, geom2: ColumnOrName) -> Column: def st_haversine( - lat1: ColumnOrName, lng1: ColumnOrName, lat2: ColumnOrName, lng2: ColumnOrName + lat1: ColumnOrName, lng1: ColumnOrName, lat2: ColumnOrName, lng2: ColumnOrName ) -> Column: """ Compute the haversine distance in kilometers between two latitude/longitude pairs. @@ -682,7 +722,7 @@ def st_unaryunion(geom: ColumnOrName) -> Column: def st_updatesrid( - geom: ColumnOrName, srcSRID: ColumnOrName, destSRID: ColumnOrName + geom: ColumnOrName, srcSRID: ColumnOrName, destSRID: ColumnOrName ) -> Column: """ Updates the SRID of the input geometry `geom` from `srcSRID` to `destSRID`. @@ -951,7 +991,7 @@ def grid_boundary(index_id: ColumnOrName, format_name: ColumnOrName) -> Column: def grid_longlatascellid( - lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName + lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName ) -> Column: """ Returns the grid's cell ID associated with the input `lng` and `lat` coordinates at a given grid `resolution`.
@@ -1019,7 +1059,7 @@ def grid_polyfill(geom: ColumnOrName, resolution: ColumnOrName) -> Column: def grid_tessellate( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ Generates: @@ -1054,7 +1094,7 @@ def grid_tessellate( def grid_tessellateexplode( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ Generates: @@ -1214,7 +1254,7 @@ def grid_cellkloopexplode(cellid: ColumnOrName, k: ColumnOrName) -> Column: def grid_geometrykring( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the k-ring of cells around the input geometry. @@ -1239,7 +1279,7 @@ def grid_geometrykring( def grid_geometrykloop( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the k loop (hollow ring) of cells around the input geometry. @@ -1264,7 +1304,7 @@ def grid_geometrykloop( def grid_geometrykringexplode( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the exploded k-ring of cells around the input geometry. @@ -1289,7 +1329,7 @@ def grid_geometrykringexplode( def grid_geometrykloopexplode( - geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName + geom: ColumnOrName, resolution: ColumnOrName, k: ColumnOrName ) -> Column: """ Returns the exploded k loop (hollow ring) of cells around the input geometry. @@ -1336,7 +1376,7 @@ def point_index_geom(geom: ColumnOrName, resolution: ColumnOrName) -> Column: def point_index_lonlat( - lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName + lon: ColumnOrName, lat: ColumnOrName, resolution: ColumnOrName ) -> Column: """ [Deprecated] alias for `grid_longlatascellid` @@ -1393,7 +1433,7 @@ def polyfill(geom: ColumnOrName, resolution: ColumnOrName) -> Column: def mosaic_explode( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ [Deprecated] alias for `grid_tessellateexplode` @@ -1428,7 +1468,7 @@ def mosaic_explode( def mosaicfill( - geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True + geom: ColumnOrName, resolution: ColumnOrName, keep_core_geometries: Any = True ) -> Column: """ [Deprecated] alias for `grid_tessellate` diff --git a/python/mosaic/api/predicates.py b/python/mosaic/api/predicates.py index 0b7d01815..39f856597 100644 --- a/python/mosaic/api/predicates.py +++ b/python/mosaic/api/predicates.py @@ -52,3 +52,24 @@ def st_contains(geom1: ColumnOrName, geom2: ColumnOrName) -> Column: pyspark_to_java_column(geom1), pyspark_to_java_column(geom2), ) + + +def st_within(geom1: ColumnOrName, geom2: ColumnOrName) -> Column: + """ + Returns `true` if geom1 is spatially within geom2.
+ + Parameters + ---------- + geom1 : Column + geom2 : Column + + Returns + ------- + Column (BooleanType) + + """ + return config.mosaic_context.invoke_function( + "st_within", + pyspark_to_java_column(geom1), + pyspark_to_java_column(geom2), + ) diff --git a/python/mosaic/api/raster.py b/python/mosaic/api/raster.py index d27f669bc..b191ba8d5 100644 --- a/python/mosaic/api/raster.py +++ b/python/mosaic/api/raster.py @@ -927,7 +927,9 @@ def rst_fromcontent(raster: ColumnOrName, driver: ColumnOrName, sizeInMB: Column """ return config.mosaic_context.invoke_function( - "rst_fromcontent", pyspark_to_java_column(raster), pyspark_to_java_column(driver), + "rst_fromcontent", + pyspark_to_java_column(raster), + pyspark_to_java_column(driver), pyspark_to_java_column(sizeInMB) ) diff --git a/python/test/test_vector_functions.py b/python/test/test_vector_functions.py index 67cfc3cf2..3dfa1640f 100644 --- a/python/test/test_vector_functions.py +++ b/python/test/test_vector_functions.py @@ -45,6 +45,7 @@ def test_st_bindings_happy_flow(self): .withColumn("st_buffer", api.st_bufferloop("wkt", lit(1.1), lit(1.2))) .withColumn("st_perimeter", api.st_perimeter("wkt")) .withColumn("st_convexhull", api.st_convexhull("wkt")) + .withColumn("st_concavehull", api.st_concavehull("wkt", lit(0.5))) .withColumn("st_dump", api.st_dump("wkt")) .withColumn("st_translate", api.st_translate("wkt", lit(1), lit(1))) .withColumn("st_scale", api.st_scale("wkt", lit(1), lit(1))) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala index 8af8c9996..0093aa7c5 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometry.scala @@ -83,6 +83,8 @@ trait MosaicGeometry extends GeometryWriter with Serializable { def contains(other: MosaicGeometry): Boolean + def within(other: MosaicGeometry): Boolean + def flatten: Seq[MosaicGeometry] def equals(other: MosaicGeometry): Boolean @@ -95,6 +97,9 @@ trait MosaicGeometry extends GeometryWriter with Serializable { def convexHull: MosaicGeometry + // Allow holes is set to false by default to match the behavior of the PostGIS implementation + def concaveHull(lengthRatio: Double, allow_holes: Boolean = false): MosaicGeometry + def minMaxCoord(dimension: String, func: String): Double = { val coordArray = this.getShellPoints.map(shell => { val unitArray = dimension.toUpperCase(Locale.ROOT) match { diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryESRI.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryESRI.scala deleted file mode 100644 index e69de29bb..000000000 diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala index 17960d423..d509bc9ec 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/MosaicGeometryJTS.scala @@ -11,6 +11,7 @@ import com.databricks.labs.mosaic.core.types.model.GeometryTypeEnum import com.databricks.labs.mosaic.core.types.model.GeometryTypeEnum._ import com.esotericsoftware.kryo.Kryo import org.apache.spark.sql.catalyst.InternalRow +import org.locationtech.jts.algorithm.hull.ConcaveHull import org.locationtech.jts.geom.{Geometry, GeometryCollection, GeometryFactory} import
org.locationtech.jts.geom.util.AffineTransformation import org.locationtech.jts.io._ @@ -148,6 +149,8 @@ abstract class MosaicGeometryJTS(geom: Geometry) extends MosaicGeometry { override def contains(geom2: MosaicGeometry): Boolean = geom.contains(geom2.asInstanceOf[MosaicGeometryJTS].getGeom) + override def within(geom2: MosaicGeometry): Boolean = geom.within(geom2.asInstanceOf[MosaicGeometryJTS].getGeom) + def getGeom: Geometry = geom override def isValid: Boolean = geom.isValid @@ -180,6 +183,12 @@ abstract class MosaicGeometryJTS(geom: Geometry) extends MosaicGeometry { MosaicGeometryJTS(convexHull) } + override def concaveHull(lengthRatio: Double, allow_holes: Boolean = false): MosaicGeometryJTS = { + val concaveHull = ConcaveHull.concaveHullByLengthRatio(geom, lengthRatio, allow_holes) + concaveHull.setSRID(geom.getSRID) + MosaicGeometryJTS(concaveHull) + } + override def unaryUnion: MosaicGeometryJTS = { val unaryUnion = geom.union() unaryUnion.setSRID(geom.getSRID) diff --git a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala index 6487f2f66..33605eaa8 100644 --- a/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala +++ b/src/main/scala/com/databricks/labs/mosaic/core/geometry/polygon/MosaicPolygonJTS.scala @@ -84,14 +84,29 @@ object MosaicPolygonJTS extends GeometryReader { val newGeom = GeometryTypeEnum.fromString(geomSeq.head.getGeometryType) match { case POINT => val extractedPoints = geomSeq.map(_.asInstanceOf[MosaicPointJTS]) - val exteriorRing = extractedPoints.map(_.coord).toArray ++ Array(extractedPoints.head.coord) + val exteriorRing = + if (extractedPoints.head.coord == extractedPoints.last.coord) { + extractedPoints.map(_.coord).toArray + } else { + extractedPoints.map(_.coord).toArray ++ Array(extractedPoints.head.coord) + } gf.createPolygon(exteriorRing) case LINESTRING => val extractedLines = geomSeq.map(_.asInstanceOf[MosaicLineStringJTS]) val exteriorRing = - gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray ++ Array(extractedLines.head.asSeq.head.coord)) + if (extractedLines.head.asSeq.head.coord == extractedLines.head.asSeq.last.coord) { + gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray) + } else { + gf.createLinearRing(extractedLines.head.asSeq.map(_.coord).toArray ++ Array(extractedLines.head.asSeq.head.coord)) + } val holes = extractedLines.tail - .map({ h: MosaicLineStringJTS => h.asSeq.map(_.coord).toArray ++ Array(h.asSeq.head.coord) }) + .map({ h: MosaicLineStringJTS => + if (h.asSeq.head.coord == h.asSeq.last.coord) { + h.asSeq.map(_.coord).toArray + } else { + h.asSeq.map(_.coord).toArray ++ Array(h.asSeq.head.coord) + } + }) .map(gf.createLinearRing) .toArray gf.createPolygon(exteriorRing, holes) diff --git a/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala new file mode 100644 index 000000000..0a4cc88d5 --- /dev/null +++ b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHull.scala @@ -0,0 +1,77 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.MosaicGeometry +import com.databricks.labs.mosaic.expressions.base.{GenericExpressionFactory, WithExpressionInfo} +import com.databricks.labs.mosaic.expressions.geometry.base.UnaryVector2ArgExpression +import 
com.databricks.labs.mosaic.functions.MosaicExpressionConfig +import org.apache.spark.sql.adapters.Column +import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext +import org.apache.spark.sql.types.DataType + +/** + * Returns the concave hull for a given geometry. It uses lengthRatio and + * allowHoles to determine the concave hull. lengthRatio is the fraction of the + * difference between the longest and shortest edge lengths in the Delaunay + * Triangulation. If set to 1, this is the same as the convex hull. If set to + * 0, it produces maximum concaveness. allowHoles is a boolean that + * determines whether the concave hull can have holes. If set to true, the + * concave hull can have holes. If set to false, the concave hull will not have + * holes. (For PostGIS, the default is false.) + * @param inputGeom + * The input geometry. + * @param lengthRatio + * The length ratio (concavity) parameter. + * @param allowHoles + * Whether the hull may contain holes. + * @param expressionConfig + * Additional arguments for the expression (expressionConfigs). + */ +case class ST_ConcaveHull( + inputGeom: Expression, + lengthRatio: Expression, + allowHoles: Expression, + expressionConfig: MosaicExpressionConfig +) extends UnaryVector2ArgExpression[ST_ConcaveHull]( + inputGeom, + lengthRatio, + allowHoles, + returnsGeometry = true, + expressionConfig + ) { + + override def dataType: DataType = inputGeom.dataType + + override def geometryTransform(geometry: MosaicGeometry, arg1: Any, arg2: Any): Any = { + val lenRatio = arg1.asInstanceOf[Double] + val allowHoles = arg2.asInstanceOf[Boolean] + geometry.concaveHull(lenRatio, allowHoles) + } + + override def geometryCodeGen(geometryRef: String, arg1Ref: String, arg2Ref: String, ctx: CodegenContext): (String, String) = { + val concaveHull = ctx.freshName("concaveHull") + val code = s"""$mosaicGeomClass $concaveHull = $geometryRef.concaveHull($arg1Ref, $arg2Ref);""" + (code, concaveHull) + } + +} + +/** Expression info required for the expression registration for spark SQL. */ +object ST_ConcaveHull extends WithExpressionInfo { + + override def name: String = "st_concavehull" + + override def usage: String = "_FUNC_(expr1, expr2, expr3) - Returns the concave hull for a given geometry with or without holes."
+ override def example: String = + """ + | Examples: + | > SELECT _FUNC_(a, 0.1, false); + | {"POLYGON ((0 0, 1 0, 1 1, 0 1, 0 0))"} + | """.stripMargin + + override def builder(expressionConfig: MosaicExpressionConfig): FunctionBuilder = { (children: Seq[Expression]) => + GenericExpressionFactory.construct[ST_ConcaveHull]( + Array(children.head, Column(children(1)).cast("double").expr, children(2)), + expressionConfig + ) + } + +} diff --git a/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala new file mode 100644 index 000000000..c5cfd5a8f --- /dev/null +++ b/src/main/scala/com/databricks/labs/mosaic/expressions/geometry/ST_Within.scala @@ -0,0 +1,64 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.MosaicGeometry +import com.databricks.labs.mosaic.expressions.base.{GenericExpressionFactory, WithExpressionInfo} +import com.databricks.labs.mosaic.expressions.geometry.base.BinaryVectorExpression +import com.databricks.labs.mosaic.functions.MosaicExpressionConfig +import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext +import org.apache.spark.sql.types.{BooleanType, DataType} + +/** + * Returns true if leftGeom is within rightGeom. + * @param leftGeom + * The left geometry. + * @param rightGeom + * The right geometry. + * @param expressionConfig + * Additional arguments for the expression (expressionConfigs). + */ +case class ST_Within( + leftGeom: Expression, + rightGeom: Expression, + expressionConfig: MosaicExpressionConfig +) extends BinaryVectorExpression[ST_Within]( + leftGeom, + rightGeom, + returnsGeometry = false, + expressionConfig + ) { + + override def dataType: DataType = BooleanType + + override def geometryTransform(leftGeometry: MosaicGeometry, rightGeometry: MosaicGeometry): Any = { + leftGeometry.within(rightGeometry) + } + + override def geometryCodeGen(leftGeometryRef: String, rightGeometryRef: String, ctx: CodegenContext): (String, String) = { + val within = ctx.freshName("within") + val code = s"""boolean $within = $leftGeometryRef.within($rightGeometryRef);""" + (code, within) + } + +} + +/** Expression info required for the expression registration for spark SQL. */ +object ST_Within extends WithExpressionInfo { + + override def name: String = "st_within" + + override def usage: String = "_FUNC_(expr1, expr2) - Returns true if expr1 is within expr2."
+ + override def example: String = + """ + | Examples: + | > SELECT _FUNC_(A, B); + | true + | """.stripMargin + + override def builder(expressionConfig: MosaicExpressionConfig): FunctionBuilder = { + GenericExpressionFactory.getBaseBuilder[ST_Within](2, expressionConfig) + } + +} diff --git a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala index dc061d597..cd516ecc5 100644 --- a/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala +++ b/src/main/scala/com/databricks/labs/mosaic/functions/MosaicContext.scala @@ -149,6 +149,7 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends mosaicRegistry.registerExpression[ST_Centroid](expressionConfig) mosaicRegistry.registerExpression[ST_Contains](expressionConfig) mosaicRegistry.registerExpression[ST_ConvexHull](expressionConfig) + mosaicRegistry.registerExpression[ST_ConcaveHull](expressionConfig) mosaicRegistry.registerExpression[ST_Distance](expressionConfig) mosaicRegistry.registerExpression[ST_Difference](expressionConfig) mosaicRegistry.registerExpression[ST_Dimension](expressionConfig) @@ -177,6 +178,7 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends mosaicRegistry.registerExpression[ST_UnaryUnion](expressionConfig) mosaicRegistry.registerExpression[ST_Union](expressionConfig) mosaicRegistry.registerExpression[ST_UpdateSRID](expressionConfig) + mosaicRegistry.registerExpression[ST_Within](expressionConfig) mosaicRegistry.registerExpression[ST_X](expressionConfig) mosaicRegistry.registerExpression[ST_Y](expressionConfig) mosaicRegistry.registerExpression[ST_Haversine](expressionConfig) @@ -559,6 +561,10 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends ColumnAdapter(ST_BufferCapStyle(geom.expr, lit(radius).cast("double").expr, lit(capStyle).expr, expressionConfig)) def st_centroid(geom: Column): Column = ColumnAdapter(ST_Centroid(geom.expr, expressionConfig)) def st_convexhull(geom: Column): Column = ColumnAdapter(ST_ConvexHull(geom.expr, expressionConfig)) + def st_concavehull(geom: Column, concavity: Column, allowHoles: Column): Column = + ColumnAdapter(ST_ConcaveHull(geom.expr, concavity.cast("double").expr, allowHoles.expr, expressionConfig)) + def st_concavehull(geom: Column, concavity: Double, allowHoles: Boolean = false): Column = + ColumnAdapter(ST_ConcaveHull(geom.expr, lit(concavity).cast("double").expr, lit(allowHoles).expr, expressionConfig)) def st_difference(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Difference(geom1.expr, geom2.expr, expressionConfig)) def st_distance(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Distance(geom1.expr, geom2.expr, expressionConfig)) def st_dimension(geom: Column): Column = ColumnAdapter(ST_Dimension(geom.expr, expressionConfig)) @@ -630,6 +636,7 @@ class MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends /** Spatial predicates */ def st_contains(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Contains(geom1.expr, geom2.expr, expressionConfig)) def st_intersects(left: Column, right: Column): Column = ColumnAdapter(ST_Intersects(left.expr, right.expr, expressionConfig)) + def st_within(geom1: Column, geom2: Column): Column = ColumnAdapter(ST_Within(geom1.expr, geom2.expr, expressionConfig)) /** RasterAPI dependent functions */ def rst_bandmetadata(raster: Column, band: Column): Column = @@ -704,16 +711,15 @@ class 
MosaicContext(indexSystem: IndexSystem, geometryAPI: GeometryAPI) extends ColumnAdapter(RST_Tessellate(raster.expr, resolution.expr, expressionConfig)) def rst_tessellate(raster: Column, resolution: Int): Column = ColumnAdapter(RST_Tessellate(raster.expr, lit(resolution).expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:Column): Column = + def rst_fromcontent(raster: Column, driver: Column): Column = ColumnAdapter(RST_FromContent(raster.expr, driver.expr, lit(-1).expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:Column, sizeInMB:Column): Column = + def rst_fromcontent(raster: Column, driver: Column, sizeInMB: Column): Column = ColumnAdapter(RST_FromContent(raster.expr, driver.expr, sizeInMB.expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:String): Column = + def rst_fromcontent(raster: Column, driver: String): Column = ColumnAdapter(RST_FromContent(raster.expr, lit(driver).expr, lit(-1).expr, expressionConfig)) - def rst_fromcontent(raster: Column, driver:String, sizeInMB:Int): Column = + def rst_fromcontent(raster: Column, driver: String, sizeInMB: Int): Column = ColumnAdapter(RST_FromContent(raster.expr, lit(driver).expr, lit(sizeInMB).expr, expressionConfig)) - def rst_fromfile(raster: Column): Column = - ColumnAdapter(RST_FromFile(raster.expr, lit(-1).expr, expressionConfig)) + def rst_fromfile(raster: Column): Column = ColumnAdapter(RST_FromFile(raster.expr, lit(-1).expr, expressionConfig)) def rst_fromfile(raster: Column, sizeInMB: Column): Column = ColumnAdapter(RST_FromFile(raster.expr, sizeInMB.expr, expressionConfig)) def rst_fromfile(raster: Column, sizeInMB: Int): Column = @@ -1008,11 +1014,12 @@ object MosaicContext extends Logging { val isML = sparkVersion.contains("-ml-") val isPhoton = sparkVersion.contains("-photon-") - val isTest = ( - dbrMajor == 0 - && !spark.conf.getAll.exists(_._1.startsWith("spark.databricks.clusterUsageTags.")) - ) - + val isTest = + ( + dbrMajor == 0 + && !spark.conf.getAll.exists(_._1.startsWith("spark.databricks.clusterUsageTags.")) + ) + if (dbrMajor != 13 && !isTest) { val msg = """|DEPRECATION ERROR: | Mosaic v0.4.x series only supports Databricks Runtime 13. 
@@ -1032,7 +1039,7 @@ object MosaicContext extends Logging { logError(msg) println(msg) throw new Exception(msg) - } + } true } diff --git a/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala b/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala index 498e2f84f..f63fbc9e2 100644 --- a/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala +++ b/src/test/scala/com/databricks/labs/mosaic/core/geometry/polygon/TestPolygonJTS.scala @@ -42,11 +42,16 @@ class TestPolygonJTS extends AnyFlatSpec { "MosaicPolygonJTS" should "be instantiable from a Seq of MosaicPointJTS" in { val polygonReference = MosaicPolygonJTS.fromWKT("POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))") - val pointSeq = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)") + val pointSeq_open = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)") .map(MosaicPointJTS.fromWKT) .map(_.asInstanceOf[MosaicPointJTS]) - val polygonTest = MosaicPolygonJTS.fromSeq(pointSeq) - polygonReference.equals(polygonTest) shouldBe true + val pointSeq_closed = Seq("POINT (30 10)", "POINT (40 40)", "POINT (20 40)", "POINT (10 20)", "POINT (30 10)") + .map(MosaicPointJTS.fromWKT) + .map(_.asInstanceOf[MosaicPointJTS]) + val polygonTest_open = MosaicPolygonJTS.fromSeq(pointSeq_open) + val polygonTest_closed = MosaicPolygonJTS.fromSeq(pointSeq_closed) + polygonReference.equals(polygonTest_open) shouldBe true + polygonReference.equals(polygonTest_closed) shouldBe true } "MosaicPolygonJTS" should "not fail for empty Seq" in { @@ -59,11 +64,16 @@ class TestPolygonJTS extends AnyFlatSpec { "MosaicPolygonJTS" should "be instantiable from a Seq of MosaicLineStringJTS" in { val polygonReference = MosaicPolygonJTS.fromWKT("POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))") - val linesSeq = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20)", "LINESTRING (20 30, 35 35, 30 20)") + val linesSeq_open = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20)", "LINESTRING (20 30, 35 35, 30 20)") + .map(MosaicLineStringJTS.fromWKT) + .map(_.asInstanceOf[MosaicLineStringJTS]) + val linesSeq_closed = Seq("LINESTRING (35 10, 45 45, 15 40, 10 20, 35 10)", "LINESTRING (20 30, 35 35, 30 20, 20 30)") .map(MosaicLineStringJTS.fromWKT) .map(_.asInstanceOf[MosaicLineStringJTS]) - val polygonTest = MosaicPolygonJTS.fromSeq(linesSeq) - polygonReference.equals(polygonTest) shouldBe true + val polygonTest_open = MosaicPolygonJTS.fromSeq(linesSeq_open) + val polygonTest_closed = MosaicPolygonJTS.fromSeq(linesSeq_closed) + polygonReference.equals(polygonTest_open) shouldBe true + polygonReference.equals(polygonTest_closed) shouldBe true } "MosaicPolygonJTS" should "return a Seq of MosaicLineStringJTS object when calling asSeq" in { diff --git a/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala b/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala index b20a19c91..022de18a4 100644 --- a/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala +++ b/src/test/scala/com/databricks/labs/mosaic/core/index/TestCustomIndexSystem.scala @@ -118,16 +118,16 @@ class TestCustomIndexSystem extends AnyFunSuite { // First quadrant val wkt0 = grid.indexToGeometry(0 | resolutionMask, JTS).toWKT - wkt0 shouldBe "POLYGON ((0 0, 50 0, 50 50, 0 50, 0 0, 0 0))" + wkt0 shouldBe "POLYGON ((0 0, 50 0, 50 50, 0 50, 0 0))" val wkt1 = grid.indexToGeometry(1 | resolutionMask, JTS).toWKT - 
wkt1 shouldBe "POLYGON ((50 0, 100 0, 100 50, 50 50, 50 0, 50 0))" + wkt1 shouldBe "POLYGON ((50 0, 100 0, 100 50, 50 50, 50 0))" val wkt2 = grid.indexToGeometry(2 | resolutionMask, JTS).toWKT - wkt2 shouldBe "POLYGON ((0 50, 50 50, 50 100, 0 100, 0 50, 0 50))" + wkt2 shouldBe "POLYGON ((0 50, 50 50, 50 100, 0 100, 0 50))" val wkt3 = grid.indexToGeometry(3 | resolutionMask, JTS).toWKT - wkt3 shouldBe "POLYGON ((50 50, 100 50, 100 100, 50 100, 50 50, 50 50))" + wkt3 shouldBe "POLYGON ((50 50, 100 50, 100 100, 50 100, 50 50))" } test("polyfill single cell") { diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala new file mode 100644 index 000000000..1d75bc146 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullBehaviors.scala @@ -0,0 +1,99 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.api.GeometryAPI +import com.databricks.labs.mosaic.core.index._ +import com.databricks.labs.mosaic.functions.MosaicContext +import org.apache.spark.sql.QueryTest +import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenerator, CodegenContext} +import org.apache.spark.sql.execution.WholeStageCodegenExec +import org.apache.spark.sql.functions.lit +import org.scalatest.matchers.must.Matchers.noException +import org.scalatest.matchers.should.Matchers.{an, be, convertToAnyShouldWrapper} + +trait ST_ConcaveHullBehaviors extends QueryTest { + + def concaveHullBehavior(indexSystem: IndexSystem, geometryAPI: GeometryAPI): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = MosaicContext.build(indexSystem, geometryAPI) + import mc.functions._ + val sc = spark + import sc.implicits._ + mc.register(spark) + + val multiPoint = List("MULTIPOINT (-70 35, -72 40, -78 40, -80 45, -70 45, -80 35)") + val expected = List("POLYGON ((-78 40, -80 45, -72 40, -70 45, -70 35, -80 35, -78 40))") + .map(mc.getGeometryAPI.geometry(_, "WKT")) + + val results = multiPoint + .toDF("multiPoint") + .crossJoin(multiPoint.toDF("other")) + .withColumn("result", st_concavehull($"multiPoint", 0.1)) + .select($"result") + .as[String] + .collect() + .map(mc.getGeometryAPI.geometry(_, "WKT")) + + results.zip(expected).foreach { case (l, r) => l.equals(r) shouldEqual true } + + noException should be thrownBy multiPoint.toDF("multiPoint") + .withColumn("result", st_concavehull($"multiPoint", 0.01, allowHoles = true)) + .select($"result") + .as[String] + .collect() + + multiPoint.toDF("multiPoint").createOrReplaceTempView("multiPoint") + + spark.sql("SELECT ST_ConcaveHull(multiPoint, 0.01, true) FROM multiPoint") + .as[String] + .collect() + + } + + def concaveHullCodegen(indexSystem: IndexSystem, geometryAPI: GeometryAPI): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = MosaicContext.build(indexSystem, geometryAPI) + val sc = spark + import mc.functions._ + import sc.implicits._ + mc.register(spark) + + val multiPoint = List("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").toDF("multiPoint") + + val result = multiPoint + .withColumn("result", st_concavehull($"multiPoint", 0.01)) + .select(st_asbinary($"result")) + + val queryExecution = result.queryExecution + val plan = queryExecution.executedPlan + + val wholeStageCodegenExec = plan.find(_.isInstanceOf[WholeStageCodegenExec]) + + wholeStageCodegenExec.isDefined shouldBe true + + val codeGenStage = 
wholeStageCodegenExec.get.asInstanceOf[WholeStageCodegenExec] + val (_, code) = codeGenStage.doCodeGen() + + noException should be thrownBy CodeGenerator.compile(code) + + val stConcaveHull = ST_ConcaveHull(lit(1).expr, lit(0.1).expr, lit(false).expr, mc.expressionConfig) + val ctx = new CodegenContext + an[Error] should be thrownBy stConcaveHull.genCode(ctx) + } + + def auxiliaryMethods(indexSystem: IndexSystem, geometryAPI: GeometryAPI): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = MosaicContext.build(indexSystem, geometryAPI) + mc.register(spark) + + val stConcaveHull = ST_ConcaveHull(lit("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").expr, lit(0.01).expr, lit(true).expr, mc.expressionConfig) + + stConcaveHull.children.length shouldEqual 3 + stConcaveHull.first shouldEqual lit("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").expr + stConcaveHull.second shouldEqual lit(0.01).expr + stConcaveHull.third shouldEqual lit(true).expr + + stConcaveHull.makeCopy(Array(lit("MULTIPOINT (-70 35, -80 45, -70 45, -80 35)").expr, lit(0.01).expr, lit(true).expr)) shouldEqual stConcaveHull + + } + +} diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala new file mode 100644 index 000000000..36e5a3023 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_ConcaveHullTest.scala @@ -0,0 +1,34 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.core.geometry.api.JTS +import com.databricks.labs.mosaic.core.index.{BNGIndexSystem, H3IndexSystem} +import org.apache.spark.sql.QueryTest +import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode +import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.test.SharedSparkSession + +class ST_ConcaveHullTest extends QueryTest with SharedSparkSession with ST_ConcaveHullBehaviors { + + private val noCodegen = + withSQLConf( + SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false", + SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.NO_CODEGEN.toString + ) _ + + private val codegenOnly = + withSQLConf( + SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false", + SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true", + SQLConf.CODEGEN_FACTORY_MODE.key -> CodegenObjectFactoryMode.CODEGEN_ONLY.toString + ) _ + + test("Testing ST_ConcaveHull (H3, JTS) NO_CODEGEN") { noCodegen { concaveHullBehavior(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull (BNG, JTS) NO_CODEGEN") { noCodegen { concaveHullBehavior(BNGIndexSystem, JTS) } } + test("Testing ST_ConcaveHull (H3, JTS) CODEGEN compilation") { codegenOnly { concaveHullCodegen(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull (BNG, JTS) CODEGEN compilation") { codegenOnly { concaveHullCodegen(BNGIndexSystem, JTS) } } + test("Testing ST_ConcaveHull (H3, JTS) CODEGEN_ONLY") { codegenOnly { concaveHullBehavior(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull (BNG, JTS) CODEGEN_ONLY") { codegenOnly { concaveHullBehavior(BNGIndexSystem, JTS) } } + test("Testing ST_ConcaveHull auxiliaryMethods (H3, JTS)") { noCodegen { auxiliaryMethods(H3IndexSystem, JTS) } } + test("Testing ST_ConcaveHull auxiliaryMethods (BNG, JTS)") { noCodegen { auxiliaryMethods(BNGIndexSystem, JTS) } } + +} diff --git a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala new file mode 100644 index
000000000..fba4805b5 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinBehaviors.scala @@ -0,0 +1,104 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.functions.MosaicContext +import com.databricks.labs.mosaic.test.MosaicSpatialQueryTest +import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator} +import org.apache.spark.sql.execution.WholeStageCodegenExec +import org.apache.spark.sql.functions.lit +import org.apache.spark.sql.types._ +import org.scalatest.matchers.must.Matchers.noException +import org.scalatest.matchers.should.Matchers.{an, be, convertToAnyShouldWrapper} + +trait ST_WithinBehaviors extends MosaicSpatialQueryTest { + + def withinBehavior(mosaicContext: MosaicContext): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = mosaicContext + import mc.functions._ + val sc = spark + import sc.implicits._ + mc.register(spark) + + val poly = """POLYGON ((10 10, 110 10, 110 110, 10 110, 10 10), + | (20 20, 20 30, 30 30, 30 20, 20 20), + | (40 20, 40 30, 50 30, 50 20, 40 20))""".stripMargin.filter(_ >= ' ') + + val rows = List( + ("POINT (35 25)", poly, true), + ("POINT (25 25)", poly, false) + ) + + val results = rows + .toDF("leftGeom", "rightGeom", "expected") + .withColumn("result", st_within($"leftGeom", $"rightGeom")) + .where($"expected" === $"result") + + results.count shouldBe 2 + } + + def withinCodegen(mosaicContext: MosaicContext): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = mosaicContext + val sc = spark + import mc.functions._ + import sc.implicits._ + mc.register(spark) + + val poly = """POLYGON ((10 10, 110 10, 110 110, 10 110, 10 10), + | (20 20, 20 30, 30 30, 30 20, 20 20), + | (40 20, 40 30, 50 30, 50 20, 40 20))""".stripMargin.filter(_ >= ' ') + + val rows = List( + ("POINT (35 25)", true), + ("POINT (25 25)", false) + ) + + val polygons = List(poly).toDF("rightGeom") + val points = rows.toDF("leftGeom", "expected") + + val result = polygons + .crossJoin(points) + .withColumn("result", st_within($"leftGeom", $"rightGeom")) + .where($"expected" === $"result") + + val queryExecution = result.queryExecution + val plan = queryExecution.executedPlan + + val wholeStageCodegenExec = plan.find(_.isInstanceOf[WholeStageCodegenExec]) + + wholeStageCodegenExec.isDefined shouldBe true + + val codeGenStage = wholeStageCodegenExec.get.asInstanceOf[WholeStageCodegenExec] + val (_, code) = codeGenStage.doCodeGen() + + noException should be thrownBy CodeGenerator.compile(code) + + val stWithin = ST_Within(lit(rows.head._1).expr, lit(1).expr, mc.expressionConfig) + val ctx = new CodegenContext + an[Error] should be thrownBy stWithin.genCode(ctx) + } + + def auxiliaryMethods(mosaicContext: MosaicContext): Unit = { + spark.sparkContext.setLogLevel("FATAL") + val mc = mosaicContext + mc.register(spark) + + val poly = """POLYGON ((10 10, 110 10, 110 110, 10 110, 10 10), + | (20 20, 20 30, 30 30, 30 20, 20 20), + | (40 20, 40 30, 50 30, 50 20, 40 20))""".stripMargin.filter(_ >= ' ') + + val rows = List( + ("POINT (35 25)", true), + ("POINT (25 25)", false) + ) + + val stWithin = ST_Within(lit(rows.head._1).expr, lit(poly).expr, mc.expressionConfig) + + stWithin.left shouldEqual lit(rows.head._1).expr + stWithin.right shouldEqual lit(poly).expr + stWithin.dataType shouldEqual BooleanType + noException should be thrownBy stWithin.makeCopy(Array(stWithin.left, stWithin.right)) + + } + +} diff --git 
a/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala new file mode 100644 index 000000000..963843c27 --- /dev/null +++ b/src/test/scala/com/databricks/labs/mosaic/expressions/geometry/ST_WithinTest.scala @@ -0,0 +1,13 @@ +package com.databricks.labs.mosaic.expressions.geometry + +import com.databricks.labs.mosaic.test.MosaicSpatialQueryTest +import org.apache.spark.sql.test.SharedSparkSession + +class ST_WithinTest extends MosaicSpatialQueryTest with SharedSparkSession with ST_WithinBehaviors { + + testAllGeometriesNoCodegen("ST_Within behavior") { withinBehavior } + testAllGeometriesCodegen("ST_Within codegen compilation") { withinCodegen } + testAllGeometriesCodegen("ST_Within codegen behavior") { withinBehavior } + testAllGeometriesNoCodegen("ST_Within auxiliary methods") { auxiliaryMethods } + +}
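Taken together, the new bindings in this diff can be exercised from PySpark roughly as below. This is a sketch, assuming a cluster with Mosaic 0.4.0 installed, that the package re-exports the API functions at top level, and that enable_mosaic has been called (dbutils exists only on Databricks):

    from pyspark.sql.functions import lit
    import mosaic as mos

    mos.enable_mosaic(spark, dbutils)

    # st_concavehull: concavity is the lengthRatio; has_holes defaults to False
    df = spark.createDataFrame(
        [("MULTIPOINT (-70 35, -72 40, -78 40, -80 45, -70 45, -80 35)",)], ["geom"]
    )
    df.select(
        mos.st_concavehull("geom", lit(0.1)).alias("hull"),
        mos.st_concavehull("geom", lit(0.1), has_holes=True).alias("hull_with_holes"),
    ).show(truncate=False)

    # st_within: true when the first geometry lies inside the second
    pts = spark.createDataFrame(
        [("POINT (25 15)", "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))")],
        ["point", "poly"],
    )
    pts.select(mos.st_within("point", "poly")).show()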