Commit

done
scottsand-db committed Sep 3, 2024
1 parent cdd39dd commit 54ed81c
Showing 8 changed files with 36 additions and 36 deletions.
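
The change is mechanical: every step name in these eight workflows gains a bracketed prefix identifying its workflow, so runs are easier to tell apart when GitHub lists checks from several workflows side by side. One YAML detail matters here: a value that starts with [ is parsed as a flow sequence, so the bracketed names must be quoted to stay valid. A minimal sketch of the pattern, using one step from the connectors workflow:

    steps:
      - name: "[Connectors] Install java"  # quotes required: a bare [Connectors] would parse as a YAML list
        uses: actions/setup-java@v2
        with:
          distribution: 'zulu'
          java-version: '8'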
18 changes: 9 additions & 9 deletions .github/workflows/connectors_test.yaml
@@ -10,12 +10,12 @@ jobs:
scala: [2.13.13, 2.12.18]
steps:
- uses: actions/checkout@v2
- name: install java
- name: "[Connectors] Install java"
uses: actions/setup-java@v2
with:
distribution: 'zulu'
java-version: '8'
- name: Cache Scala, SBT
- name: "[Connectors] Cache Scala, SBT"
uses: actions/cache@v2
with:
path: |
@@ -24,22 +24,22 @@ jobs:
~/.cache/coursier
~/.m2
key: build-cache-3-with-scala_${{ matrix.scala }}
- name: Run Scala Style tests on test sources (Scala 2.12 only)
- name: "[Connectors] Run Scala Style tests on test sources (Scala 2.12 only)"
run: build/sbt "++ ${{ matrix.scala }}" testScalastyle
if: startsWith(matrix.scala, '2.12.')
- name: Run sqlDeltaImport tests (Scala 2.12 and 2.13 only)
- name: "[Connectors] Run sqlDeltaImport tests (Scala 2.12 and 2.13 only)"
run: build/sbt "++ ${{ matrix.scala }}" sqlDeltaImport/test
if: ${{ !startsWith(matrix.scala, '2.11.') }}
# These tests are not working yet
# - name: Run Delta Standalone Compatibility tests (Scala 2.12 only)
# - name: "[Connectors] Run Delta Standalone Compatibility tests (Scala 2.12 only)"
# run: build/sbt "++ ${{ matrix.scala }}" compatibility/test
# if: startsWith(matrix.scala, '2.12.')
- name: Run Delta Standalone tests
- name: "[Connectors] Run Delta Standalone tests"
run: build/sbt "++ ${{ matrix.scala }}" standalone/test testStandaloneCosmetic/test standaloneParquet/test testParquetUtilsWithStandaloneCosmetic/test
- name: Run Hive 3 tests
- name: "[Connectors] Run Hive 3 tests"
run: build/sbt "++ ${{ matrix.scala }}" hiveMR/test hiveTez/test
- name: Run Hive 2 tests
- name: "[Connectors] Run Hive 2 tests"
run: build/sbt "++ ${{ matrix.scala }}" hive2MR/test hive2Tez/test
- name: Run Flink tests (Scala 2.12 only)
- name: "[Connectors] Run Flink tests (Scala 2.12 only)"
run: build/sbt -mem 3000 "++ ${{ matrix.scala }}" flink/test
if: ${{ startsWith(matrix.scala, '2.12.') }}
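
The connectors workflow gates several of the steps above on the Scala version. GitHub Actions accepts both expression styles seen here: a bare expression (if: startsWith(matrix.scala, '2.12.')) and the wrapped form (if: ${{ !startsWith(matrix.scala, '2.11.') }}). A short sketch of the idiom:

    strategy:
      matrix:
        scala: [2.13.13, 2.12.18]
    steps:
      - name: "[Connectors] Run Scala Style tests on test sources (Scala 2.12 only)"
        run: build/sbt "++ ${{ matrix.scala }}" testScalastyle
        # The ${{ }} wrapper is optional in if: conditions; both spellings are equivalent.
        if: startsWith(matrix.scala, '2.12.')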
12 changes: 6 additions & 6 deletions .github/workflows/kernel_docs.yaml
@@ -25,27 +25,27 @@ jobs:
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
- name: "[Kernel Docs] Checkout"
uses: actions/checkout@v3
- name: install java
- name: "[Kernel Docs] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "8"
- name: Generate docs
- name: "[Kernel Docs] Generate docs"
run: |
build/sbt kernelGroup/unidoc
mkdir -p kernel/docs/snapshot/kernel-api/java
mkdir -p kernel/docs/snapshot/kernel-defaults/java
cp -r kernel/kernel-api/target/javaunidoc/. kernel/docs/snapshot/kernel-api/java/
cp -r kernel/kernel-defaults/target/javaunidoc/. kernel/docs/snapshot/kernel-defaults/java/
- name: Setup Pages
- name: "[Kernel Docs] Setup Pages"
uses: actions/configure-pages@v3
- name: Upload artifact
- name: "[Kernel Docs] Upload artifact"
uses: actions/upload-pages-artifact@v1
with:
# Upload kernel docs
path: kernel/docs
- name: Deploy to GitHub Pages
- name: "[Kernel Docs] Deploy to GitHub Pages"
id: deployment
uses: actions/deploy-pages@v2
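
The kernel docs job follows the standard three-step GitHub Pages flow: configure-pages, then upload-pages-artifact, then deploy-pages, with the published URL exposed through the deployment step's page_url output. For deploy-pages to work, the job also needs Pages permissions and a github-pages environment; those blocks are not visible in this diff, so the sketch below fills them in as assumptions:

    permissions:
      pages: write      # assumption: lets actions/deploy-pages publish the site
      id-token: write   # assumption: OIDC token used by the Pages deployment
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - name: "[Kernel Docs] Upload artifact"
        uses: actions/upload-pages-artifact@v1
        with:
          path: kernel/docs   # everything under this directory becomes the site artifact
      - name: "[Kernel Docs] Deploy to GitHub Pages"
        id: deployment
        uses: actions/deploy-pages@v2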
6 changes: 3 additions & 3 deletions .github/workflows/kernel_test.yaml
@@ -7,14 +7,14 @@ jobs:
SCALA_VERSION: 2.12.18
steps:
- uses: actions/checkout@v3
- name: install java
- name: "[Kernel] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "8"
- name: Run tests
- name: "[Kernel] Run tests"
run: |
python run-tests.py --group kernel --coverage
- name: Run integration tests
- name: "[Kernel] Run integration tests"
run: |
cd kernel/examples && python run-kernel-examples.py --use-local
8 changes: 4 additions & 4 deletions .github/workflows/spark_examples_test.yaml
@@ -19,12 +19,12 @@ jobs:
.github/workflows/**
!kernel/**
!connectors/**
- name: install java
- name: "[Spark Examples] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "8"
- name: Cache Scala, SBT
- name: "[Spark Examples] Cache Scala, SBT"
uses: actions/cache@v3
with:
path: |
@@ -36,13 +36,13 @@ jobs:
# just use the cache. The cache is immutable so we need to use a new key when trying to
# cache new stuff.
key: delta-sbt-cache-spark-examples-scala${{ matrix.scala }}
- name: Install Job dependencies
- name: "[Spark Examples] Install Job dependencies"
run: |
sudo apt-get update
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
sudo apt install libedit-dev
if: steps.git-diff.outputs.diff
- name: Run Delta Spark Local Publishing and Examples Compilation
- name: "[Spark Examples] Run Delta Spark Local Publishing and Examples Compilation"
# examples/scala/build.sbt will compile against the local Delta release version (e.g. 3.2.0-SNAPSHOT).
# Thus, we need to publishM2 first so those jars are locally accessible.
# We publish storage explicitly so that it is available for the Scala 2.13 build. As a java project
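
Two conventions here recur across the Spark workflows. An actions/cache entry is immutable once created, so, as the in-file comment says, caching anything new means changing the key itself, typically by bumping a counter in it. And because examples/scala/build.sbt resolves the local snapshot release, the jars have to be published to the local Maven repository before the examples compile. A sketch under those assumptions; the cached path list and the exact sbt command are collapsed in this diff, so they are illustrative:

    - name: "[Spark Examples] Cache Scala, SBT"
      uses: actions/cache@v3
      with:
        path: |
          ~/.ivy2    # illustrative: the real path list is collapsed above
          ~/.m2
        # Immutable cache: change the key (e.g. bump a counter) to start caching new state.
        key: delta-sbt-cache-spark-examples-scala${{ matrix.scala }}
    - name: "[Spark Examples] Run Delta Spark Local Publishing and Examples Compilation"
      run: |
        # publishM2 writes the snapshot jars to ~/.m2 so examples/scala/build.sbt can resolve them.
        build/sbt "++ ${{ matrix.scala }}" publishM2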
8 changes: 4 additions & 4 deletions .github/workflows/spark_master_test.yaml
@@ -23,12 +23,12 @@ jobs:
.github/workflows/**
!kernel/**
!connectors/**
- name: install java
- name: "[Spark Master] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "17"
- name: Cache Scala, SBT
- name: "[Spark Master] Cache Scala, SBT"
uses: actions/cache@v3
with:
path: |
@@ -41,13 +41,13 @@ jobs:
# just use the cache. The cache is immutable so we need to use a new key when trying to
# cache new stuff.
key: delta-sbt-cache-spark-master-scala${{ matrix.scala }}
- name: Install Job dependencies
- name: "[Spark Master] Install Job dependencies"
run: |
sudo apt-get update
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
sudo apt install libedit-dev
if: steps.git-diff.outputs.diff
- name: Run Spark Master tests
- name: "[Spark Master] Run Spark Master tests"
# when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_test.yaml
run: |
TEST_PARALLELISM_COUNT=4 SHARD_ID=${{matrix.shard}} build/sbt -DsparkVersion=master "++ ${{ matrix.scala }}" clean spark/test
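
The Spark master suite fans out over shards: each matrix entry sets SHARD_ID to run one slice of the tests, while TEST_PARALLELISM_COUNT controls parallelism within a shard and, per the comment above, must stay in sync with spark_test.yaml. A sketch of the fan-out with a hypothetical shard list (the real matrix is collapsed in this diff):

    strategy:
      matrix:
        shard: [0, 1, 2]   # hypothetical values; the actual list is not shown here
    steps:
      - name: "[Spark Master] Run Spark Master tests"
        run: |
          # One shard per job; keep TEST_PARALLELISM_COUNT in sync with spark_test.yaml.
          TEST_PARALLELISM_COUNT=4 SHARD_ID=${{ matrix.shard }} build/sbt -DsparkVersion=master "++ ${{ matrix.scala }}" clean spark/test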
8 changes: 4 additions & 4 deletions .github/workflows/spark_python_test.yaml
@@ -19,12 +19,12 @@ jobs:
.github/workflows/**
!kernel/**
!connectors/**
- name: install java
- name: "[Spark Latest - Python] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "8"
- name: Cache Scala, SBT
- name: "[Spark Latest - Python] Cache Scala, SBT"
uses: actions/cache@v3
with:
path: |
@@ -36,7 +36,7 @@ jobs:
# just use the cache. The cache is immutable so we need to use a new key when trying to
# cache new stuff.
key: delta-sbt-cache-spark3.2-scala${{ matrix.scala }}
- name: Install Job dependencies
- name: "[Spark Latest - Python] Install Job dependencies"
run: |
sudo apt-get update
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
@@ -75,7 +75,7 @@ jobs:
pipenv run pip install pyarrow==8.0.0
pipenv run pip install numpy==1.20.3
if: steps.git-diff.outputs.diff
- name: Run Python tests
- name: "[Spark Latest - Python] Run Python tests"
# when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_master_test.yaml
run: |
TEST_PARALLELISM_COUNT=4 pipenv run python run-tests.py --group spark-python
8 changes: 4 additions & 4 deletions .github/workflows/spark_test.yaml
@@ -23,12 +23,12 @@ jobs:
.github/workflows/**
!kernel/**
!connectors/**
- name: install java
- name: "[Spark Latest] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "8"
- name: Cache Scala, SBT
- name: "[Spark Latest] Cache Scala, SBT"
uses: actions/cache@v3
with:
path: |
@@ -40,7 +40,7 @@ jobs:
# just use the cache. The cache is immutable so we need to use a new key when trying to
# cache new stuff.
key: delta-sbt-cache-spark3.2-scala${{ matrix.scala }}
- name: Install Job dependencies
- name: "[Spark Latest] Install Job dependencies"
run: |
sudo apt-get update
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python-openssl git
@@ -79,7 +79,7 @@ jobs:
pipenv run pip install pyarrow==8.0.0
pipenv run pip install numpy==1.20.3
if: steps.git-diff.outputs.diff
- name: Run Scala/Java and Python tests
- name: "[Spark Latest] Run Scala/Java and Python tests"
# when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_master_test.yaml
run: |
TEST_PARALLELISM_COUNT=4 pipenv run python run-tests.py --group spark --shard ${{ matrix.shard }}
4 changes: 2 additions & 2 deletions .github/workflows/unidoc.yaml
@@ -9,11 +9,11 @@
# These Scala versions must match those in the build.sbt
scala: [2.13.13, 2.12.18]
steps:
- name: install java
- name: "[Unidoc] Install java"
uses: actions/setup-java@v3
with:
distribution: "zulu"
java-version: "8"
- uses: actions/checkout@v3
- name: generate unidoc
- name: "[Unidoc] Generate unidoc"
run: build/sbt "++ ${{ matrix.scala }}" unidoc
