Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[BP-1.20][FLINK-34194] Update CI to Ubuntu 22.04 (Jammy) #25827

Merged
merged 2 commits into from
Jan 3, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ jobs:
fi
- name: Build documentation
run: |
docker run --rm --volume "$PWD:/root/flink" chesnay/flink-ci:java_8_11_17_21_maven_386 bash -c "cd /root/flink && ./.github/workflows/docs.sh"
docker run --rm --volume "$PWD:/root/flink" chesnay/flink-ci:java_8_11_17_21_maven_386_jammy bash -c "cd /root/flink && ./.github/workflows/docs.sh"
- name: Upload documentation
uses: burnett01/rsync-deployments@5.2
with:
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/template.flink-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ jobs:
name: "Compile"
runs-on: ubuntu-22.04
container:
image: mapohl/flink-ci:FLINK-34194
image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy
# --init starts the container's main process under an init process, which will clean up any daemon processes during shutdown
# --privileged allows writing coredumps in docker (FLINK-16973)
options: --init --privileged
Expand Down Expand Up @@ -133,7 +133,7 @@ jobs:
needs: compile
runs-on: ubuntu-22.04
container:
image: mapohl/flink-ci:FLINK-34194
image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy
# --init starts the container's main process under an init process, which will clean up any daemon processes during shutdown
# --privileged allows writing coredumps in docker (FLINK-16973)
options: --init --privileged
Expand Down Expand Up @@ -173,7 +173,7 @@ jobs:
needs: compile
runs-on: ubuntu-22.04
container:
image: mapohl/flink-ci:FLINK-34194
image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy
# --init starts the container's main process under an init process, which will clean up any daemon processes during shutdown
# --privileged allows writing coredumps in docker (FLINK-16973)
options: --init --privileged
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/template.pre-compile-checks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ jobs:
name: "Basic QA"
runs-on: ubuntu-22.04
container:
image: mapohl/flink-ci:FLINK-34194
image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy
# --init starts the container's main process under an init process, which will clean up any daemon processes during shutdown
# --privileged allows writing coredumps in docker (FLINK-16973)
options: --init --privileged
Expand Down
8 changes: 4 additions & 4 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ resources:
# Container with SSL to have the same environment everywhere.
# see https://github.com/apache/flink-connector-shared-utils/tree/ci_utils
- container: flink-build-container
image: chesnay/flink-ci:java_8_11_17_21_maven_386
image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy
# On AZP provided machines, set this flag to allow writing coredumps in docker
options: --privileged

Expand Down Expand Up @@ -73,16 +73,16 @@ stages:
parameters: # see template file for a definition of the parameters.
stage_name: ci_build
test_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2"
run_end_to_end: false
container: flink-build-container
jdk: 8
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
steps:
- task: GoTool@0
inputs:
Expand Down
22 changes: 11 additions & 11 deletions tools/azure-pipelines/build-apache-repo.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ resources:
# Container with SSL to have the same environment everywhere.
# see https://github.com/apache/flink-connector-shared-utils/tree/ci_utils
- container: flink-build-container
image: chesnay/flink-ci:java_8_11_17_21_maven_386
image: chesnay/flink-ci:java_8_11_17_21_maven_386_jammy

variables:
MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
Expand Down Expand Up @@ -68,14 +68,14 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2"
run_end_to_end: false
container: flink-build-container
jdk: 8
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
steps:
# Skip docs check if this is a pull request that doesn't contain a documentation change
- task: GoTool@0
Expand Down Expand Up @@ -110,9 +110,9 @@ stages:
parameters:
stage_name: cron_azure
test_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2"
run_end_to_end: true
container: flink-build-container
Expand All @@ -123,7 +123,7 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3"
run_end_to_end: true
container: flink-build-container
Expand All @@ -134,7 +134,7 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
run_end_to_end: true
container: flink-build-container
Expand All @@ -145,7 +145,7 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
Expand All @@ -156,7 +156,7 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"
run_end_to_end: true
container: flink-build-container
Expand All @@ -167,14 +167,14 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler"
run_end_to_end: true
container: flink-build-container
jdk: 8
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
steps:
- task: GoTool@0
inputs:
Expand Down
4 changes: 2 additions & 2 deletions tools/azure-pipelines/build-nightly-dist.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ parameters:
jobs:
- job: ${{parameters.stage_name}}_binary
pool:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
container: flink-build-container
workspace:
clean: all
Expand Down Expand Up @@ -69,7 +69,7 @@ jobs:
# artifact: nightly-release
- job: ${{parameters.stage_name}}_maven
pool:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
container: flink-build-container
timeoutInMinutes: 240
workspace:
Expand Down
2 changes: 1 addition & 1 deletion tools/azure-pipelines/build-python-wheels.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
jobs:
- job: build_wheels_on_Linux
pool:
vmImage: 'ubuntu-20.04'
vmImage: 'ubuntu-22.04'
steps:
- script: |
cd flink-python
Expand Down