Commit

Merge pull request #1562 from apache/master
GulajavaMinistudio authored Sep 22, 2023
2 parents f093827 + 498b7e1 commit 9e55bcf
Showing 160 changed files with 909 additions and 2,300 deletions.
7 changes: 2 additions & 5 deletions .github/workflows/benchmark.yml
@@ -31,9 +31,9 @@ on:
         required: true
         default: '8'
       scala:
-        description: 'Scala version: 2.12 or 2.13'
+        description: 'Scala version: 2.13'
         required: true
-        default: '2.12'
+        default: '2.13'
       failfast:
         description: 'Failfast: true or false'
         required: true
@@ -170,7 +170,6 @@ jobs:
           key: tpcds-${{ hashFiles('.github/workflows/benchmark.yml', 'sql/core/src/test/scala/org/apache/spark/sql/TPCDSSchema.scala') }}
       - name: Run benchmarks
         run: |
-          dev/change-scala-version.sh ${{ github.event.inputs.scala }}
           ./build/sbt -Pscala-${{ github.event.inputs.scala }} -Pyarn -Pmesos -Pkubernetes -Phive -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Pspark-ganglia-lgpl Test/package
           # Make less noisy
           cp conf/log4j2.properties.template conf/log4j2.properties
@@ -181,8 +180,6 @@ jobs:
             --jars "`find . -name '*-SNAPSHOT-tests.jar' -o -name '*avro*-SNAPSHOT.jar' | paste -sd ',' -`" \
             "`find . -name 'spark-core*-SNAPSHOT-tests.jar'`" \
             "${{ github.event.inputs.class }}"
-          # Revert to default Scala version to clean up unnecessary git diff
-          dev/change-scala-version.sh 2.12
           # To keep the directory structure and file permissions, tar them
           # See also https://github.com/actions/upload-artifact#maintaining-file-permissions-and-case-sensitive-files
           echo "Preparing the benchmark results:"
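
With Scala 2.13 as the only supported version, the benchmark workflow no longer flips the build between Scala versions before and after the run, so both dev/change-scala-version.sh invocations disappear. For context, a version-flip script of this kind reduces to rewriting the Scala binary-version suffix in every Maven module descriptor. A minimal sketch, assuming GNU sed and a conventional pom layout (the sed expression and file filter are illustrative, not the actual contents of dev/change-scala-version.sh):

    #!/usr/bin/env bash
    # Sketch: rewrite artifactId suffixes _2.12 -> _2.13 in every pom.xml.
    FROM_VERSION=2.12
    TO_VERSION=2.13
    find . -name pom.xml -not -path '*/target/*' -print0 |
      xargs -0 sed -i -e "s/\(artifactId.*\)_${FROM_VERSION}/\1_${TO_VERSION}/g"

Once the checked-in poms are permanently on _2.13, running such a rewrite in CI is a no-op, which is why the steps were dropped rather than updated.
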
52 changes: 2 additions & 50 deletions .github/workflows/build_and_test.yml
@@ -86,7 +86,7 @@ jobs:
           sparkr=`./dev/is-changed.py -m sparkr`
           tpcds=`./dev/is-changed.py -m sql`
           docker=`./dev/is-changed.py -m docker-integration-tests`
-          # 'build', 'scala-213', and 'java-other-versions' are always true for now.
+          # 'build' and 'java-other-versions' are always true for now.
           # It does not save significant time and most of PRs trigger the build.
           precondition="
             {
@@ -95,7 +95,6 @@ jobs:
               \"sparkr\": \"$sparkr\",
               \"tpcds-1g\": \"$tpcds\",
               \"docker-integration-tests\": \"$docker\",
-              \"scala-213\": \"true\",
               \"java-other-versions\": \"true\",
               \"lint\" : \"true\",
               \"k8s-integration-tests\" : \"true\",
@@ -828,53 +827,6 @@ jobs:
           ./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Pvolcano -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=${JAVA_VERSION/-ea} install
           rm -rf ~/.m2/repository/org/apache/spark
 
-  scala-213:
-    needs: precondition
-    if: fromJson(needs.precondition.outputs.required).scala-213 == 'true'
-    name: Scala 2.13 build with SBT
-    runs-on: ubuntu-22.04
-    timeout-minutes: 300
-    steps:
-    - name: Checkout Spark repository
-      uses: actions/checkout@v3
-      with:
-        fetch-depth: 0
-        repository: apache/spark
-        ref: ${{ inputs.branch }}
-    - name: Sync the current branch with the latest in Apache Spark
-      if: github.repository != 'apache/spark'
-      run: |
-        git fetch https://github.com/$GITHUB_REPOSITORY.git ${GITHUB_REF#refs/heads/}
-        git -c user.name='Apache Spark Test Account' -c user.email='sparktestacc@gmail.com' merge --no-commit --progress --squash FETCH_HEAD
-        git -c user.name='Apache Spark Test Account' -c user.email='sparktestacc@gmail.com' commit -m "Merged commit" --allow-empty
-    - name: Cache Scala, SBT and Maven
-      uses: actions/cache@v3
-      with:
-        path: |
-          build/apache-maven-*
-          build/scala-*
-          build/*.jar
-          ~/.sbt
-        key: build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
-        restore-keys: |
-          build-
-    - name: Cache Coursier local repository
-      uses: actions/cache@v3
-      with:
-        path: ~/.cache/coursier
-        key: scala-213-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
-        restore-keys: |
-          scala-213-coursier-
-    - name: Install Java 8
-      uses: actions/setup-java@v3
-      with:
-        distribution: zulu
-        java-version: 8
-    - name: Build with SBT
-      run: |
-        ./dev/change-scala-version.sh 2.13
-        ./build/sbt -Pyarn -Pmesos -Pkubernetes -Pvolcano -Phive -Phive-thriftserver -Phadoop-cloud -Pkinesis-asl -Pdocker-integration-tests -Pkubernetes-integration-tests -Pspark-ganglia-lgpl -Pscala-2.13 compile Test/compile
-
   # Any TPC-DS related updates on this job need to be applied to tpcds-1g-gen job of benchmark.yml as well
   tpcds-1g:
     needs: precondition
@@ -1108,7 +1060,7 @@ jobs:
           export PVC_TESTS_VM_PATH=$PVC_TMP_DIR
           minikube mount ${PVC_TESTS_HOST_PATH}:${PVC_TESTS_VM_PATH} --gid=0 --uid=185 &
           kubectl create clusterrolebinding serviceaccounts-cluster-admin --clusterrole=cluster-admin --group=system:serviceaccounts || true
-          kubectl apply -f https://raw.githubusercontent.com/volcano-sh/volcano/v1.7.0/installer/volcano-development.yaml || true
+          kubectl apply -f https://raw.githubusercontent.com/volcano-sh/volcano/v1.8.0/installer/volcano-development.yaml || true
          eval $(minikube docker-env)
           build/sbt -Psparkr -Pkubernetes -Pvolcano -Pkubernetes-integration-tests -Dspark.kubernetes.test.driverRequestCores=0.5 -Dspark.kubernetes.test.executorRequestCores=0.2 -Dspark.kubernetes.test.volcanoMaxConcurrencyJobNum=1 -Dtest.exclude.tags=local "kubernetes-integration-tests/test"
       - name: Upload Spark on K8S integration tests log files
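
The deleted scala-213 job is the substantive change here: a standalone Scala 2.13 compile pass is redundant once the default build itself targets 2.13, and its precondition key goes away with it. The surviving jobs are still gated by the JSON map that the precondition job assembles in shell. A minimal sketch of that mechanism, assuming jq is available locally (the jq call is for illustration and is not part of the workflow):

    # Each module flag comes from the changed-files heuristic...
    tpcds=`./dev/is-changed.py -m sql`
    precondition="{\"tpcds-1g\": \"$tpcds\"}"
    # ...and a downstream job runs only when its key is the string "true",
    # mirroring: if: fromJson(needs.precondition.outputs.required).tpcds-1g == 'true'
    echo "$precondition" | jq -r '."tpcds-1g"'

The v1.7.0 to v1.8.0 bump of the Volcano installer manifest in the same file is unrelated to Scala; it keeps the K8s integration tests on a current scheduler release.
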
2 changes: 1 addition & 1 deletion .github/workflows/build_ansi.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build / ANSI (master, Hadoop 3, JDK 8, Scala 2.12)"
+name: "Build / ANSI (master, Hadoop 3, JDK 8, Scala 2.13)"
 
 on:
   schedule:
2 changes: 1 addition & 1 deletion .github/workflows/build_coverage.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build / Coverage (master, Scala 2.12, Hadoop 3, JDK 8)"
+name: "Build / Coverage (master, Scala 2.13, Hadoop 3, JDK 8)"
 
 on:
   schedule:
2 changes: 1 addition & 1 deletion .github/workflows/build_java11.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build (master, Scala 2.12, Hadoop 3, JDK 11)"
+name: "Build (master, Scala 2.13, Hadoop 3, JDK 11)"
 
 on:
   schedule:
2 changes: 1 addition & 1 deletion .github/workflows/build_java17.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build (master, Scala 2.12, Hadoop 3, JDK 17)"
+name: "Build (master, Scala 2.13, Hadoop 3, JDK 17)"
 
 on:
   schedule:
2 changes: 1 addition & 1 deletion .github/workflows/build_java21.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build (master, Scala 2.12, Hadoop 3, JDK 21)"
+name: "Build (master, Scala 2.13, Hadoop 3, JDK 21)"
 
 on:
   schedule:
2 changes: 1 addition & 1 deletion .github/workflows/build_maven.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build using Maven (master, Scala 2.12, Hadoop 3, JDK 8)"
+name: "Build using Maven (master, Scala 2.13, Hadoop 3, JDK 8)"
 
 on:
   schedule:
2 changes: 1 addition & 1 deletion .github/workflows/build_rockdb_as_ui_backend.yml
@@ -17,7 +17,7 @@
 # under the License.
 #
 
-name: "Build / RocksDB as UI Backend (master, Hadoop 3, JDK 8, Scala 2.12)"
+name: "Build / RocksDB as UI Backend (master, Hadoop 3, JDK 8, Scala 2.13)"
 
 on:
   schedule:
49 changes: 0 additions & 49 deletions .github/workflows/build_scala213.yml

This file was deleted.

58 changes: 29 additions & 29 deletions LICENSE-binary
@@ -209,34 +209,34 @@ org.apache.zookeeper:zookeeper
 oro:oro
 commons-configuration:commons-configuration
 commons-digester:commons-digester
-com.chuusai:shapeless_2.12
+com.chuusai:shapeless_2.13
 com.googlecode.javaewah:JavaEWAH
 com.twitter:chill-java
-com.twitter:chill_2.12
+com.twitter:chill_2.13
 com.univocity:univocity-parsers
 javax.jdo:jdo-api
 joda-time:joda-time
 net.sf.opencsv:opencsv
 org.apache.derby:derby
 org.objenesis:objenesis
 org.roaringbitmap:RoaringBitmap
-org.scalanlp:breeze-macros_2.12
-org.scalanlp:breeze_2.12
-org.typelevel:macro-compat_2.12
+org.scalanlp:breeze-macros_2.13
+org.scalanlp:breeze_2.13
+org.typelevel:macro-compat_2.13
 org.yaml:snakeyaml
 org.apache.xbean:xbean-asm7-shaded
 com.squareup.okhttp3:logging-interceptor
 com.squareup.okhttp3:okhttp
 com.squareup.okio:okio
-org.apache.spark:spark-catalyst_2.12
-org.apache.spark:spark-kvstore_2.12
-org.apache.spark:spark-launcher_2.12
-org.apache.spark:spark-mllib-local_2.12
-org.apache.spark:spark-network-common_2.12
-org.apache.spark:spark-network-shuffle_2.12
-org.apache.spark:spark-sketch_2.12
-org.apache.spark:spark-tags_2.12
-org.apache.spark:spark-unsafe_2.12
+org.apache.spark:spark-catalyst_2.13
+org.apache.spark:spark-kvstore_2.13
+org.apache.spark:spark-launcher_2.13
+org.apache.spark:spark-mllib-local_2.13
+org.apache.spark:spark-network-common_2.13
+org.apache.spark:spark-network-shuffle_2.13
+org.apache.spark:spark-sketch_2.13
+org.apache.spark:spark-tags_2.13
+org.apache.spark:spark-unsafe_2.13
 commons-httpclient:commons-httpclient
 com.vlkan:flatbuffers
 com.ning:compress-lzf
@@ -299,10 +299,10 @@ org.apache.orc:orc-mapreduce
 org.mortbay.jetty:jetty
 org.mortbay.jetty:jetty-util
 com.jolbox:bonecp
-org.json4s:json4s-ast_2.12
-org.json4s:json4s-core_2.12
-org.json4s:json4s-jackson_2.12
-org.json4s:json4s-scalap_2.12
+org.json4s:json4s-ast_2.13
+org.json4s:json4s-core_2.13
+org.json4s:json4s-jackson_2.13
+org.json4s:json4s-scalap_2.13
 com.carrotsearch:hppc
 com.fasterxml.jackson.core:jackson-annotations
 com.fasterxml.jackson.core:jackson-core
@@ -312,7 +312,7 @@ com.fasterxml.jackson.jaxrs:jackson-jaxrs-base
 com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider
 com.fasterxml.jackson.module:jackson-module-jaxb-annotations
 com.fasterxml.jackson.module:jackson-module-paranamer
-com.fasterxml.jackson.module:jackson-module-scala_2.12
+com.fasterxml.jackson.module:jackson-module-scala_2.13
 com.github.mifmif:generex
 com.google.code.findbugs:jsr305
 com.google.code.gson:gson
@@ -385,8 +385,8 @@ org.eclipse.jetty:jetty-xml
 org.scala-lang:scala-compiler
 org.scala-lang:scala-library
 org.scala-lang:scala-reflect
-org.scala-lang.modules:scala-parser-combinators_2.12
-org.scala-lang.modules:scala-xml_2.12
+org.scala-lang.modules:scala-parser-combinators_2.13
+org.scala-lang.modules:scala-xml_2.13
 com.github.joshelser:dropwizard-metrics-hadoop-metrics2-reporter
 com.zaxxer.HikariCP
 org.apache.hive:hive-beeline
@@ -471,19 +471,19 @@ MIT License
 -----------
 
 com.microsoft.sqlserver:mssql-jdbc
-org.typelevel:spire_2.12
-org.typelevel:spire-macros_2.12
-org.typelevel:spire-platform_2.12
-org.typelevel:spire-util_2.12
-org.typelevel:algebra_2.12:jar
-org.typelevel:cats-kernel_2.12
-org.typelevel:machinist_2.12
+org.typelevel:spire_2.13
+org.typelevel:spire-macros_2.13
+org.typelevel:spire-platform_2.13
+org.typelevel:spire-util_2.13
+org.typelevel:algebra_2.13:jar
+org.typelevel:cats-kernel_2.13
+org.typelevel:machinist_2.13
 net.razorvine:pickle
 org.slf4j:jcl-over-slf4j
 org.slf4j:jul-to-slf4j
 org.slf4j:slf4j-api
 org.slf4j:slf4j-log4j12
-com.github.scopt:scopt_2.12
+com.github.scopt:scopt_2.13
 dev.ludovic.netlib:blas
 dev.ludovic.netlib:arpack
 dev.ludovic.netlib:lapack
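
Every coordinate above carries the Scala binary version as an artifactId suffix because these are cross-built libraries, so a default-version change has to sweep the whole notice file. A quick local check that no 2.12 coordinate survived, offered as an illustration rather than as part of the commit:

    grep -n '_2\.12' LICENSE-binary || echo 'no Scala 2.12 artifacts remain'
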
2 changes: 1 addition & 1 deletion R/pkg/tests/fulltests/test_sparkSQL.R
@@ -4199,7 +4199,7 @@ test_that("catalog APIs, listTables, getTable, listColumns, listFunctions, funct
 
   # recoverPartitions does not work with temporary view
   expect_error(recoverPartitions("cars"),
-               "[UNSUPPORTED_VIEW_OPERATION.WITH_SUGGESTION]*`cars`*")
+               "[EXPECT_TABLE_NOT_VIEW.NO_ALTERNATIVE]*`cars`*")
   expect_error(refreshTable("cars"), NA)
   expect_error(refreshByPath("/"), NA)
 
4 changes: 2 additions & 2 deletions assembly/pom.xml
@@ -20,12 +20,12 @@
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.12</artifactId>
+    <artifactId>spark-parent_2.13</artifactId>
     <version>4.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <artifactId>spark-assembly_2.12</artifactId>
+  <artifactId>spark-assembly_2.13</artifactId>
   <name>Spark Project Assembly</name>
   <url>https://spark.apache.org/</url>
   <packaging>pom</packaging>
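
The parent reference and the module's own artifactId move together, so downstream consumers resolve spark-assembly_2.13 against spark-parent_2.13 with no other coordinate changes. One way to confirm the module now reports the new suffix, using the standard maven-help-plugin evaluate goal (the exact invocation is a conventional sketch, not taken from this commit):

    ./build/mvn -q -pl assembly help:evaluate -Dexpression=project.artifactId -DforceStdout
    # expected output: spark-assembly_2.13
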
30 changes: 15 additions & 15 deletions bin/load-spark-env.cmd
@@ -39,21 +39,21 @@ set SCALA_VERSION_2=2.12
 set ASSEMBLY_DIR1="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_1%"
 set ASSEMBLY_DIR2="%SPARK_HOME%\assembly\target\scala-%SCALA_VERSION_2%"
 set ENV_VARIABLE_DOC=https://spark.apache.org/docs/latest/configuration.html#environment-variables
 
-if not defined SPARK_SCALA_VERSION (
-  if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
-    echo Presence of build for multiple Scala versions detected ^(%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%^).
-    echo Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in spark-env.cmd.
-    echo Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd.
-    echo Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
-    exit 1
-  )
-  if exist %ASSEMBLY_DIR1% (
-    set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
-  ) else (
-    set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
-  )
-)
+set SPARK_SCALA_VERSION=2.13
+rem if not defined SPARK_SCALA_VERSION (
+rem   if exist %ASSEMBLY_DIR2% if exist %ASSEMBLY_DIR1% (
+rem     echo Presence of build for multiple Scala versions detected ^(%ASSEMBLY_DIR1% and %ASSEMBLY_DIR2%^).
+rem     echo Remove one of them or, set SPARK_SCALA_VERSION=%SCALA_VERSION_1% in spark-env.cmd.
+rem     echo Visit %ENV_VARIABLE_DOC% for more details about setting environment variables in spark-env.cmd.
+rem     echo Either clean one of them or, set SPARK_SCALA_VERSION in spark-env.cmd.
+rem     exit 1
+rem   )
+rem   if exist %ASSEMBLY_DIR1% (
+rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_1%
+rem   ) else (
+rem     set SPARK_SCALA_VERSION=%SCALA_VERSION_2%
+rem   )
+rem )
 exit /b 0
 
 :LoadSparkEnv
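
Instead of probing the assembly directories to infer which Scala build is present, the script now pins SPARK_SCALA_VERSION outright and keeps the old detection logic behind rem comments. The POSIX counterpart, bin/load-spark-env.sh, presumably receives the same treatment elsewhere in this commit. If a deployment ever needs to force the value explicitly, the conventional hook is the user environment file; this is an assumption about typical usage, not a requirement of this change:

    # conf/spark-env.sh
    export SPARK_SCALA_VERSION=2.13
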