Skip to content

Commit

Permalink
Merge pull request #484 from databrickslabs/feature/dbr_warning
Browse files Browse the repository at this point in the history
Fix issues with warning and error messages in the enable_mosaic stage.
  • Loading branch information
Milos Colic authored Dec 14, 2023
2 parents aad7db4 + d8dc64a commit 2154ab8
Show file tree
Hide file tree
Showing 3 changed files with 31 additions and 15 deletions.
13 changes: 6 additions & 7 deletions python/mosaic/api/enable.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,12 @@ def enable_mosaic(spark: SparkSession, dbutils=None) -> None:
isSupported = config.mosaic_context._context.checkDBR(spark._jsparkSession)
if not isSupported:
print(
"DEPRECATION WARNING: Mosaic is not supported on the selected Databricks Runtime. \n"
)
print(
"DEPRECATION WARNING: Mosaic will stop working on this cluster from version v0.4.0+. \n"
)
print(
"Please use a Databricks Photon-enabled Runtime (for performance benefits) or Runtime ML (for spatial AI benefits). \n"
"""
DEPRECATION WARNING:
Please use a Databricks:
- Photon-enabled Runtime for performance benefits
- Runtime ML for spatial AI benefits
Mosaic will stop working on this cluster after v0.3.x."""
)

# Not yet added to the pyspark API
Expand Down
25 changes: 20 additions & 5 deletions python/mosaic/core/library_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,26 @@ def auto_attach(self):
converters = self.sc._jvm.scala.collection.JavaConverters

JarURI = JavaURI.create("file:" + self._jar_path)
lib = JavaJarId(
JarURI,
ManagedLibraryId.defaultOrganization(),
NoVersionModule.simpleString(),
)
dbr_version = self.spark.conf.get("spark.databricks.clusterUsageTags.sparkVersion").split("-")[0]

try:
# This will fix the exception when running on Databricks Runtime 13.x+
optionClass = getattr(self.sc._jvm.scala, "Option$")
optionModule = getattr(optionClass, "MODULE$")
lib = JavaJarId(
JarURI,
ManagedLibraryId.defaultOrganization(),
NoVersionModule.simpleString(),
optionModule.apply(None),
optionModule.apply(None),
)
except:
lib = JavaJarId(
JarURI,
ManagedLibraryId.defaultOrganization(),
NoVersionModule.simpleString()
)

libSeq = converters.asScalaBufferConverter((lib,)).asScala().toSeq()

context = DatabricksILoop.getSharedDriverContextIfExists().get()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -990,11 +990,11 @@ object MosaicContext extends Logging {
def reset(): Unit = instance = None

// noinspection ScalaStyle,ScalaWeakerAccess
def checkDBR(spark: SparkSession): Unit = {
def checkDBR(spark: SparkSession): Boolean = {
val sparkVersion = spark.conf.get("spark.databricks.clusterUsageTags.sparkVersion", "0")
val isML = sparkVersion.contains("-ml-")
val isPhoton = spark.conf.getOption("spark.databricks.photon.enabled").getOrElse("false").toBoolean
val isTest = spark.conf.getOption("spark.databricks.clusterUsageTags.clusterType").isEmpty
val isTest = !spark.conf.getAll.exists(_._1.startsWith("spark.databricks.clusterUsageTags."))

val dbrMajor = sparkVersion.split("-").head.split("\\.").head.toInt
if (
Expand All @@ -1018,7 +1018,9 @@ object MosaicContext extends Logging {
| Mosaic will stop working on this cluster after v0.3.x.""".stripMargin
logWarning(msg)
println(msg)

false
} else {
true
}
}

Expand Down

0 comments on commit 2154ab8

Please sign in to comment.