Cleanup build.sbt to be more succinct for x-compile
scottsand-db committed Mar 28, 2024
1 parent a010007 commit d75b102
Showing 1 changed file with 50 additions and 67 deletions.
117 changes: 50 additions & 67 deletions build.sbt
@@ -34,10 +34,11 @@ val all_scala_versions = Seq(scala212, scala213)
val default_scala_version = settingKey[String]("Default Scala version")
Global / default_scala_version := scala212

val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
val cross_spark_version = settingKey[String]("Spark version")
spark / cross_spark_version := {
/**
* Refactor this logic out so that callers that are not inside tasks or setting macros can still
* access the value of `spark / cross_spark_version` (i.e. just call this method instead).
*/
def getCrossSparkVersion(): String = {
// e.g. build/sbt -Dcross_spark_version=4.0.0-SNAPSHOT
val input = sys.props.getOrElse("cross_spark_version", LATEST_RELEASED_SPARK_VERSION)
input match {
@@ -47,6 +48,11 @@ spark / cross_spark_version := {
}
}

val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"
val cross_spark_version = settingKey[String]("Spark version")
spark / cross_spark_version := getCrossSparkVersion()

// Dependent library versions
val sparkVersion = LATEST_RELEASED_SPARK_VERSION
val flinkVersion = "1.16.1"
@@ -103,6 +109,45 @@ lazy val commonSettings = Seq(
unidocSourceFilePatterns := Nil,
)

/**
* Note: we cannot access cross_spark_version.value here, since that can only be used within a task
* or setting macro, such as :=, +=, ++=, Def.task, or Def.setting.
*/
def crossSparkSettings(): Seq[Setting[_]] = getCrossSparkVersion() match {
case LATEST_RELEASED_SPARK_VERSION => Seq(
scalaVersion := default_scala_version.value,
crossScalaVersions := all_scala_versions,
targetJvm := "1.8",
// For adding staged Spark RC versions, e.g.:
// resolvers += "Apache Spark 3.5.0 (RC1) Staging" at "https://repository.apache.org/content/repositories/orgapachespark-1444/",
Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "shims" / "spark-3.5",
Antlr4 / antlr4Version := "4.9.3"
)

case SPARK_MASTER_VERSION => Seq(
scalaVersion := scala213,
crossScalaVersions := Seq(scala213),
targetJvm := "17",
resolvers += "Spark master staging" at "https://repository.apache.org/content/groups/snapshots/",
Compile / unmanagedSourceDirectories += (Compile / baseDirectory).value / "src" / "shims" / "spark-4.0",
Antlr4 / antlr4Version := "4.13.1",
Test / javaOptions ++= Seq(
// Copied from SparkBuild.scala to support Java 17 for unit tests (see apache/spark#34153)
"--add-opens=java.base/java.lang=ALL-UNNAMED",
"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
"--add-opens=java.base/java.io=ALL-UNNAMED",
"--add-opens=java.base/java.net=ALL-UNNAMED",
"--add-opens=java.base/java.nio=ALL-UNNAMED",
"--add-opens=java.base/java.util=ALL-UNNAMED",
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
)
)
}

lazy val spark = (project in file("spark"))
.dependsOn(storage)
.enablePlugins(Antlr4Plugin)
@@ -112,24 +157,7 @@ lazy val spark = (project in file("spark"))
scalaStyleSettings,
sparkMimaSettings,
releaseSettings,
scalaVersion := {
cross_spark_version.value match {
case LATEST_RELEASED_SPARK_VERSION => default_scala_version.value
case SPARK_MASTER_VERSION => scala213
}
},
crossScalaVersions := {
cross_spark_version.value match {
case LATEST_RELEASED_SPARK_VERSION => all_scala_versions
case SPARK_MASTER_VERSION => Seq(scala213)
}
},
targetJvm := {
cross_spark_version.value match {
case LATEST_RELEASED_SPARK_VERSION => "1.8"
case SPARK_MASTER_VERSION => "17"
}
},
crossSparkSettings(),
libraryDependencies ++= Seq(
// Adding test classifier seems to break transitive resolution of the core dependencies
"org.apache.spark" %% "spark-hive" % cross_spark_version.value % "provided",
@@ -147,59 +175,14 @@ lazy val spark = (project in file("spark"))
"org.apache.spark" %% "spark-sql" % cross_spark_version.value % "test" classifier "tests",
"org.apache.spark" %% "spark-hive" % cross_spark_version.value % "test" classifier "tests",
),
resolvers ++= {
// For adding staged Spark RC versions, Ex:
// "Apche Spark 3.5.0 (RC1) Staging" at "https://repository.apache.org/content/repositories/orgapachespark-1444/",
cross_spark_version.value match {
case LATEST_RELEASED_SPARK_VERSION => Nil
case SPARK_MASTER_VERSION => Seq(
"Spark master staging" at "https://repository.apache.org/content/groups/snapshots/"
)
}
},
Compile / unmanagedSourceDirectories ++= {
cross_spark_version.value match {
// (baseDirectory in Compile).value is `<delta>/spark`
case LATEST_RELEASED_SPARK_VERSION =>
Seq((Compile / baseDirectory).value / "src" / "shims" / "spark-3.5")
case SPARK_MASTER_VERSION =>
Seq((Compile / baseDirectory).value / "src" / "shims" / "spark-4.0")
}
},
Compile / packageBin / mappings := (Compile / packageBin / mappings).value ++
listPythonFiles(baseDirectory.value.getParentFile / "python"),

Antlr4 / antlr4Version := {
cross_spark_version.value match {
case LATEST_RELEASED_SPARK_VERSION => "4.9.3"
case SPARK_MASTER_VERSION => "4.13.1"
}
},
Antlr4 / antlr4PackageName := Some("io.delta.sql.parser"),
Antlr4 / antlr4GenListener := true,
Antlr4 / antlr4GenVisitor := true,

Test / testOptions += Tests.Argument("-oDF"),
Test / testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
Test / javaOptions ++= {
cross_spark_version.value match {
case LATEST_RELEASED_SPARK_VERSION => Nil
case SPARK_MASTER_VERSION => Seq(
// Copied from SparkBuild.scala to support Java 17 for unit tests (see apache/spark#34153)
"--add-opens=java.base/java.lang=ALL-UNNAMED",
"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
"--add-opens=java.base/java.io=ALL-UNNAMED",
"--add-opens=java.base/java.net=ALL-UNNAMED",
"--add-opens=java.base/java.nio=ALL-UNNAMED",
"--add-opens=java.base/java.util=ALL-UNNAMED",
"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"
)
}
},

// Don't execute in parallel since we can't have multiple Sparks in the same JVM
Test / parallelExecution := false,
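
A minimal, self-contained build.sbt sketch of the pattern this commit introduces (illustration only, not part of the diff; the Scala version strings and the example project name are placeholders): the Spark-version selection is pulled into plain defs so it can be reused both from ordinary methods and from setting macros, replacing the per-setting `cross_spark_version.value match { ... }` blocks the commit deletes.

// ---- illustrative sketch, not part of the commit ----
val scala212 = "2.12.18"   // placeholder Scala version strings
val scala213 = "2.13.13"

val LATEST_RELEASED_SPARK_VERSION = "3.5.0"
val SPARK_MASTER_VERSION = "4.0.0-SNAPSHOT"

// Plain def: callable from anywhere, not only inside := / Def.setting macros.
def getCrossSparkVersion(): String =
  sys.props.getOrElse("cross_spark_version", LATEST_RELEASED_SPARK_VERSION)

// All Spark-version-dependent settings are selected once, in one place.
def crossSparkSettings(): Seq[Setting[_]] = getCrossSparkVersion() match {
  case LATEST_RELEASED_SPARK_VERSION => Seq(
    scalaVersion := scala212,
    crossScalaVersions := Seq(scala212, scala213)
  )
  case SPARK_MASTER_VERSION => Seq(
    scalaVersion := scala213,
    crossScalaVersions := Seq(scala213),
    resolvers += "Spark master staging" at "https://repository.apache.org/content/groups/snapshots/"
  )
}

lazy val example = (project in file("example"))   // hypothetical module for illustration
  .settings(
    crossSparkSettings(),  // one call instead of a match block per setting
    libraryDependencies +=
      "org.apache.spark" %% "spark-sql" % getCrossSparkVersion() % "provided"
  )

// The Spark version is then chosen at invocation time, e.g.:
//   build/sbt -Dcross_spark_version=4.0.0-SNAPSHOT compile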