Skip to content

Commit

Permalink
[Spark] Change Scala version to match Spark 3.4
Browse files Browse the repository at this point in the history
- [X] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

Matches Scala version of Spark 3.4: https://github.com/apache/spark/blob/59fcecb5a59df54ecb3c675d4f3722fc72c1466e/pom.xml#L171

https://github.com/scala/scala/releases/tag/v2.12.16
https://github.com/scala/scala/releases/tag/v2.12.17

https://github.com/scala/scala/releases/tag/v2.13.6
https://github.com/scala/scala/releases/tag/v2.13.7
https://github.com/scala/scala/releases/tag/v2.13.8

Fix delta-io#1909

Changes Scala version, which should be compatible:
"As usual for our minor releases, Scala 2.12.17 is binary-compatible with the whole Scala 2.12 series."
"As usual for our minor releases, Scala 2.13.8 is binary-compatible with the whole Scala 2.13 series."

Closes delta-io#1936

Signed-off-by: Allison Portis <allison.portis@databricks.com>
GitOrigin-RevId: 232abd3a2f7f8d7395e1cdeb21baecea096f15a6
  • Loading branch information
felipepessoto authored and scottsand-db committed Oct 13, 2023
1 parent 41c5179 commit 962495e
Show file tree
Hide file tree
Showing 11 changed files with 29 additions and 18 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/connectors_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ jobs:
strategy:
matrix:
# These Scala versions must match those in the build.sbt
scala: [2.13.5, 2.12.15]
scala: [2.13.8, 2.12.17]
steps:
- uses: actions/checkout@v2
- name: install java
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/kernel_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ jobs:
test:
runs-on: ubuntu-20.04
env:
SCALA_VERSION: 2.12.15
SCALA_VERSION: 2.12.17
steps:
- uses: actions/checkout@v3
- name: install java
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/spark_test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ jobs:
strategy:
matrix:
# These Scala versions must match those in the build.sbt
scala: [2.12.15, 2.13.5]
scala: [2.12.17, 2.13.8]
env:
SCALA_VERSION: ${{ matrix.scala }}
steps:
Expand Down
2 changes: 1 addition & 1 deletion benchmarks/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
*/

name := "benchmarks"
scalaVersion := "2.12.15"
scalaVersion := "2.12.17"

lazy val root = (project in file("."))
.settings(
Expand Down
6 changes: 3 additions & 3 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,15 @@ import Mima._
import Unidoc._

// Scala versions
val scala212 = "2.12.15"
val scala213 = "2.13.5"
val scala212 = "2.12.17"
val scala213 = "2.13.8"
val all_scala_versions = Seq(scala212, scala213)

// Due to how publishArtifact is determined for javaOnlyReleaseSettings, incl. storage
// It was necessary to change default_scala_version to scala213 in build.sbt
// to build the project with Scala 2.13 only
// As a setting, it's possible to set it on command line easily
// sbt 'set default_scala_version := 2.13.5' [commands]
// sbt 'set default_scala_version := 2.13.8' [commands]
// FIXME Why not use scalaVersion?
val default_scala_version = settingKey[String]("Default Scala version")
Global / default_scala_version := scala212
Expand Down
2 changes: 1 addition & 1 deletion connectors/.github/workflows/test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ jobs:
runs-on: ubuntu-20.04
strategy:
matrix:
scala: [2.13.8, 2.12.8, 2.11.12]
scala: [2.13.8, 2.12.17, 2.11.12]
steps:
- uses: actions/checkout@v2
- name: install java
Expand Down
10 changes: 5 additions & 5 deletions connectors/examples/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,11 @@ name := "examples"
organization := "com.examples"
organizationName := "examples"

scalaVersion := "2.12.8"
scalaVersion := "2.12.17"
version := "0.1.0"

lazy val commonSettings = Seq(
crossScalaVersions := Seq("2.13.8", "2.12.8", "2.11.12"),
crossScalaVersions := Seq("2.13.8", "2.12.17", "2.11.12"),
resolvers += Resolver.mavenLocal,
libraryDependencies ++= Seq(
"io.delta" %% "delta-standalone" % getStandaloneVersion(),
Expand All @@ -47,14 +47,14 @@ lazy val extraMavenRepo = sys.env.get("EXTRA_MAVEN_REPO").toSeq.map { repo =>

lazy val convertToDelta = (project in file("convert-to-delta")) settings (
name := "convert",
scalaVersion := "2.12.8",
scalaVersion := "2.12.17",
commonSettings,
extraMavenRepo
)

lazy val helloWorld = (project in file("hello-world")) settings (
name := "hello",
scalaVersion := "2.12.8",
scalaVersion := "2.12.17",
commonSettings,
extraMavenRepo
)
Expand All @@ -63,7 +63,7 @@ val flinkVersion = "1.16.1"
val flinkHadoopVersion = "3.1.0"
lazy val flinkExample = (project in file("flink-example")) settings (
name := "flink",
scalaVersion := "2.12.8",
scalaVersion := "2.12.17",
commonSettings,
extraMavenRepo,
resolvers += Resolver.mavenLocal,
Expand Down
4 changes: 2 additions & 2 deletions connectors/examples/run_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ def __exit__(self, tpe, value, traceback):
by running the following commands in the root connectors folder.
build/sbt '++2.11.12 publishM2'
build/sbt '++2.12.8 publishM2'
build/sbt '++2.12.17 publishM2'
build/sbt '++2.13.8 publishM2'
"""

Expand Down Expand Up @@ -146,5 +146,5 @@ def __exit__(self, tpe, value, traceback):
run_maven_proj(path.join(root_dir, dir), className, args.version, args.maven_repo, "2.13")

run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.11.12")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.12.8")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.12.17")
run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.13.8")
2 changes: 1 addition & 1 deletion examples/scala/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ name := "example"
organization := "com.example"
organizationName := "example"

val scala212 = "2.12.15"
val scala212 = "2.12.17"
val scala213 = "2.13.8"
val deltaVersion = "2.1.0"
val icebergVersion = "1.4.0"
Expand Down
13 changes: 12 additions & 1 deletion project/plugins.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,18 @@ addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3")

addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.15")

addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.6")
//Upgrade sbt-scoverage to 2.0.3+ because 2.0.0 is not compatible to Scala 2.12.17:
//sbt.librarymanagement.ResolveException: Error downloading org.scoverage:scalac-scoverage-plugin_2.12.17:2.0.0

//It caused a conflict issue:
//[error] java.lang.RuntimeException: found version conflict(s) in library dependencies; some are suspected to be binary incompatible:
//[error]
//[error] * org.scala-lang.modules:scala-xml_2.12:2.1.0 (early-semver) is selected over 1.0.6
//[error] +- org.scoverage:scalac-scoverage-reporter_2.12:2.0.7 (depends on 2.1.0)
//[error] +- org.scalariform:scalariform_2.12:0.2.0 (depends on 1.0.6)
//The following fix the conflict:
libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always % "test"

addSbtPlugin("net.aichler" % "sbt-jupiter-interface" % "0.9.1")

Expand Down
2 changes: 1 addition & 1 deletion run-tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def run_sbt_tests(root_dir, test_group, coverage, scala_version=None):
cmd += ["+ %s" % test_cmd] # build/sbt ... "+ project/test" ...
else:
# when no scala version is specified, run test with only the specified scala version
cmd += ["++ %s" % scala_version, test_cmd] # build/sbt ... "++ 2.13.5" "project/test" ...
cmd += ["++ %s" % scala_version, test_cmd] # build/sbt ... "++ 2.13.8" "project/test" ...

if is_running_spark_tests:
cmd += ["unidoc"]
Expand Down

0 comments on commit 962495e

Please sign in to comment.