Disable failed tests for triaging #157
name: Java CI
on:
  push:
    branches:
      - master
      - 'release-*'
      - branch-0.x
  pull_request:
    paths-ignore:
      - '**.bmp'
      - '**.gif'
      - '**.jpg'
      - '**.jpeg'
      - '**.md'
      - '**.pdf'
      - '**.png'
      - '**.svg'
      - '**.yaml'
      - '.gitignore'
    branches:
      - master
      - 'release-*'
      - branch-0.x
concurrency:
  group: ${{ github.ref }}
  cancel-in-progress: ${{ !contains(github.ref, 'master') && !contains(github.ref, 'branch-0.x') && !contains(github.ref, 'release-') }}
env:
  MVN_ARGS: -e -ntp -B -V -Dgpg.skip -Djacoco.skip -Pwarn-log -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.shade=warn -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugins.dependency=warn -Dmaven.wagon.httpconnectionManager.ttlSeconds=25 -Dmaven.wagon.http.retryHandler.count=5
  SPARK_COMMON_MODULES: hudi-spark-datasource/hudi-spark,hudi-spark-datasource/hudi-spark-common
jobs:
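  # Validate the source release: check for stray binary files, verify copyright headers, and run the Apache RAT license check.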
  validate-source:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Check Binary Files
        run: ./scripts/release/validate_source_binary_files.sh
      - name: Check Copyright
        run: |
          ./scripts/release/create_source_directory.sh hudi-tmp-repo
          cd hudi-tmp-repo
          ./scripts/release/validate_source_copyright.sh
      - name: RAT check
        run: ./scripts/release/validate_source_rat.sh
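  # Java-side unit/functional tests for hudi-common and the Spark datasource modules across Spark/Scala combinations.
  # -DwildcardSuites=skipScalaTests below is a placeholder that should match no ScalaTest suite, so only the
  # JUnit (surefire) tests run; -DfailIfNoTests=false keeps modules with no matching tests from failing the build.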
  test-spark-java-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.11"
            sparkProfile: "spark2.4"
            sparkModules: "hudi-spark-datasource/hudi-spark2"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.0"
            sparkModules: "hudi-spark-datasource/hudi-spark3.0.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.1"
            sparkModules: "hudi-spark-datasource/hudi-spark3.1.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.2"
            sparkModules: "hudi-spark-datasource/hudi-spark3.2.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.3"
            sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.4"
            sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
          - scalaProfile: "scala-2.13"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES"
      - name: Quickstart Test
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
        run:
          mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl hudi-examples/hudi-examples-spark $MVN_ARGS
      - name: Java UT - Common & Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        if: ${{ !(endsWith(env.SPARK_PROFILE, '3.5') && endsWith(env.SCALA_PROFILE, '2.12')) }} # skip Spark 3.5 with Scala 2.12 tests, as that combination is covered by Azure CI
        run:
          mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
      - name: Java FT - Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        if: ${{ !(endsWith(env.SPARK_PROFILE, '3.5') && endsWith(env.SCALA_PROFILE, '2.12')) }} # skip Spark 3.5 with Scala 2.12 tests, as that combination is covered by Azure CI
        run:
          mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
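  # Scala counterpart of the job above: -Dtest=skipJavaTests is a placeholder that should match no JUnit test,
  # so only the ScalaTest suites run for the same Spark/Scala matrix.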
  test-spark-scala-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.11"
            sparkProfile: "spark2.4"
            sparkModules: "hudi-spark-datasource/hudi-spark2"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.0"
            sparkModules: "hudi-spark-datasource/hudi-spark3.0.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.1"
            sparkModules: "hudi-spark-datasource/hudi-spark3.1.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.2"
            sparkModules: "hudi-spark-datasource/hudi-spark3.2.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.3"
            sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.4"
            sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
          - scalaProfile: "scala-2.13"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,$SPARK_COMMON_MODULES,$SPARK_MODULES"
      - name: Scala UT - Common & Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        if: ${{ !(endsWith(env.SPARK_PROFILE, '3.5') && endsWith(env.SCALA_PROFILE, '2.12')) }} # skip Spark 3.5 with Scala 2.12 tests, as that combination is covered by Azure CI
        run:
          mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
      - name: Scala FT - Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        if: ${{ !(endsWith(env.SPARK_PROFILE, '3.5') && endsWith(env.SCALA_PROFILE, '2.12')) }} # skip Spark 3.5 with Scala 2.12 tests, as that combination is covered by Azure CI
        run:
          mvn test -Pfunctional-tests -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
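  # Unit tests for hudi-hadoop-mr and hudi-java-client, built and run through a freshly generated Maven wrapper.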
  test-hudi-hadoop-mr-and-hudi-java-client:
    runs-on: ubuntu-latest
    timeout-minutes: 40
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.5"
            flinkProfile: "flink1.18"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Generate Maven Wrapper
        run:
          mvn -N io.takari:maven:wrapper
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
        run:
          ./mvnw clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -D"$FLINK_PROFILE" -DskipTests=true -Phudi-platform-service $MVN_ARGS -am -pl hudi-hadoop-mr,hudi-client/hudi-java-client
      - name: UT - hudi-hadoop-mr and hudi-client/hudi-java-client
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
        run:
          ./mvnw test -Punit-tests -fae -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -D"$FLINK_PROFILE" -pl hudi-hadoop-mr,hudi-client/hudi-java-client $MVN_ARGS
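  # Java tests on JDK 17: the project is compiled with JDK 8, then the test steps switch to JDK 17 and enable the java17 profile.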
  test-spark-java17-java-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.3"
            sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.4"
            sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Quickstart Test
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
        run:
          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl hudi-examples/hudi-examples-spark $MVN_ARGS
      - name: Java UT - Common & Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
      - name: Java FT - Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
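  # Scala tests on JDK 17, mirroring the Java job above.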
  test-spark-java17-scala-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.3"
            sparkModules: "hudi-spark-datasource/hudi-spark3.3.x"
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.4"
            sparkModules: "hudi-spark-datasource/hudi-spark3.4.x"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Scala UT - Common & Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
      - name: Scala FT - Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
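  # Java tests built on JDK 11 and run on JDK 17, covering the Spark 3.5 profiles only.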
  test-spark-java11-17-java-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
          - scalaProfile: "scala-2.13"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Quickstart Test
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
        run:
          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl hudi-examples/hudi-examples-spark $MVN_ARGS
      - name: Java UT - Common & Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DwildcardSuites=skipScalaTests -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
      - name: Java FT - Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
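  # Scala tests built on JDK 11 and run on JDK 17, covering the Spark 3.5 profiles only.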
  test-spark-java11-17-scala-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: "scala-2.12"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
          - scalaProfile: "scala-2.13"
            sparkProfile: "spark3.5"
            sparkModules: "hudi-spark-datasource/hudi-spark3.5.x"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DskipTests=true $MVN_ARGS -am -pl "hudi-examples/hudi-examples-spark,hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES"
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          java-version: '17'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Scala UT - Common & Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Punit-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl "hudi-common,$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
      - name: Scala FT - Spark
        env:
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_MODULES: ${{ matrix.sparkModules }}
        run:
          mvn test -Pfunctional-tests -Pjava17 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -Dtest=skipJavaTests -DfailIfNoTests=false -pl "$SPARK_COMMON_MODULES,$SPARK_MODULES" $MVN_ARGS
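  # Flink quickstart tests across Flink 1.14 through 1.18; integration tests run only on the newest profile.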
  test-flink:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - flinkProfile: "flink1.14"
          - flinkProfile: "flink1.15"
          - flinkProfile: "flink1.16"
          - flinkProfile: "flink1.17"
          - flinkProfile: "flink1.18"
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SCALA_PROFILE: 'scala-2.12'
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
        run:
          mvn clean install -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink -am -Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
      - name: Quickstart Test
        env:
          SCALA_PROFILE: 'scala-2.12'
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
        run:
          mvn test -Punit-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-examples/hudi-examples-flink $MVN_ARGS
      - name: Integration Test
        env:
          SCALA_PROFILE: 'scala-2.12'
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
        if: ${{ endsWith(env.FLINK_PROFILE, '1.18') }}
        run: |
          mvn clean install -T 2 -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink -am -Davro.version=1.10.0 -DskipTests=true $MVN_ARGS
          mvn verify -Pintegration-tests -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -pl hudi-flink-datasource/hudi-flink $MVN_ARGS
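  # Dockerized UT/FT under OpenJDK 17 for the newer Spark runtimes.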
  docker-java17-test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: 'scala-2.13'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.0'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.0'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.4'
            sparkRuntime: 'spark3.4.0'
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: UT/FT - Docker Test - OpenJDK 17
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        if: ${{ env.SPARK_PROFILE >= 'spark3.4' }} # Only Spark 3.4 and above are supported for now
        run: |
          HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          ./packaging/bundle-validation/run_docker_java17.sh
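  # Build the release bundles on JDK 8 and validate them in Docker on OpenJDK 8, 11, and 17.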
  validate-bundles:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: 'scala-2.13'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.0'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.0'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.4'
            sparkRuntime: 'spark3.4.0'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.17'
            sparkProfile: 'spark3.3'
            sparkRuntime: 'spark3.3.2'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.16'
            sparkProfile: 'spark3.3'
            sparkRuntime: 'spark3.3.1'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.15'
            sparkProfile: 'spark3.2'
            sparkRuntime: 'spark3.2.3'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.14'
            sparkProfile: 'spark3.1'
            sparkRuntime: 'spark3.1.3'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.14'
            sparkProfile: 'spark3.0'
            sparkRuntime: 'spark3.0.2'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.14'
            sparkProfile: 'spark2.4'
            sparkRuntime: 'spark2.4.8'
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        run: |
          if [ "$SCALA_PROFILE" == "scala-2.13" ]; then
            mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -pl packaging/hudi-hadoop-mr-bundle,packaging/hudi-spark-bundle,packaging/hudi-utilities-bundle,packaging/hudi-utilities-slim-bundle -am
          else
            mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS
            # TODO remove the sudo below. It's a needed workaround as detailed in HUDI-5708.
            sudo chown -R "$USER:$(id -g -n)" hudi-platform-service/hudi-metaserver/target/generated-sources
            mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -pl packaging/hudi-flink-bundle -am -Davro.version=1.10.0
          fi
      - name: IT - Bundle Validation - OpenJDK 8
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        if: ${{ env.SPARK_PROFILE >= 'spark3' }} # Only run validation on Spark 3
        run: |
          HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java8 $HUDI_VERSION openjdk8
      - name: IT - Bundle Validation - OpenJDK 11
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        if: ${{ env.SPARK_PROFILE >= 'spark3' }} # Only run validation on Spark 3
        run: |
          HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java11 $HUDI_VERSION openjdk11
      - name: IT - Bundle Validation - OpenJDK 17
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        if: ${{ env.SPARK_PROFILE >= 'spark3.3' }} # Only Spark 3.3 and above support Java 17
        run: |
          HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17
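  # Same bundle validation, but with the bundles built on JDK 11 and validated on OpenJDK 11 and 17.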
  validate-bundles-java11:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - scalaProfile: 'scala-2.13'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.0'
          - scalaProfile: 'scala-2.12'
            flinkProfile: 'flink1.18'
            sparkProfile: 'spark3.5'
            sparkRuntime: 'spark3.5.0'
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          java-version: '11'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        run: |
          if [ "$SCALA_PROFILE" == "scala-2.13" ]; then
            mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Dmaven.javadoc.skip=true -pl packaging/hudi-hadoop-mr-bundle,packaging/hudi-spark-bundle,packaging/hudi-utilities-bundle,packaging/hudi-utilities-slim-bundle -am
          else
            mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$SPARK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Dmaven.javadoc.skip=true
            # TODO remove the sudo below. It's a needed workaround as detailed in HUDI-5708.
            sudo chown -R "$USER:$(id -g -n)" hudi-platform-service/hudi-metaserver/target/generated-sources
            mvn clean package -T 2 -D"$SCALA_PROFILE" -D"$FLINK_PROFILE" -DdeployArtifacts=true -DskipTests=true $MVN_ARGS -Dmaven.javadoc.skip=true -pl packaging/hudi-flink-bundle -am -Davro.version=1.10.0
          fi
      - name: IT - Bundle Validation - OpenJDK 11
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        run: |
          HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java11 $HUDI_VERSION openjdk11
      - name: IT - Bundle Validation - OpenJDK 17
        env:
          FLINK_PROFILE: ${{ matrix.flinkProfile }}
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_RUNTIME: ${{ matrix.sparkRuntime }}
          SCALA_PROFILE: ${{ matrix.scalaProfile }}
        run: |
          HUDI_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
          ./packaging/bundle-validation/ci_run.sh hudi_docker_java17 $HUDI_VERSION openjdk17
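  # End-to-end integration tests on Spark 2.4 against a downloaded Spark distribution.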
  integration-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - sparkProfile: 'spark2.4'
            sparkArchive: 'spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz'
    steps:
      - uses: actions/checkout@v3
      - name: Set up JDK 8
        uses: actions/setup-java@v3
        with:
          java-version: '8'
          distribution: 'temurin'
          architecture: x64
          cache: maven
      - name: Build Project
        env:
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
        run:
          mvn clean install -T 2 $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -DskipTests=true $MVN_ARGS
      - name: 'UT integ-test'
        env:
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
        run:
          mvn test $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -DskipUTs=false -DskipITs=true -pl hudi-integ-test $MVN_ARGS
      - name: 'IT'
        env:
          SPARK_PROFILE: ${{ matrix.sparkProfile }}
          SPARK_ARCHIVE: ${{ matrix.sparkArchive }}
          SCALA_PROFILE: '-Dscala-2.11 -Dscala.binary.version=2.11'
        run: |
          echo "Downloading $SPARK_ARCHIVE"
          curl https://archive.apache.org/dist/spark/$SPARK_ARCHIVE --create-dirs -o $GITHUB_WORKSPACE/$SPARK_ARCHIVE
          tar -xvf $GITHUB_WORKSPACE/$SPARK_ARCHIVE -C $GITHUB_WORKSPACE/
          mkdir /tmp/spark-events/
          SPARK_ARCHIVE_BASENAME=$(basename $SPARK_ARCHIVE)
          export SPARK_HOME=$GITHUB_WORKSPACE/${SPARK_ARCHIVE_BASENAME%.*}
          rm -f $GITHUB_WORKSPACE/$SPARK_ARCHIVE
          docker system prune --all --force
          mvn verify $SCALA_PROFILE -D"$SPARK_PROFILE" -Pintegration-tests -pl !hudi-flink-datasource/hudi-flink $MVN_ARGS