# Copyright (c) 2021-2024 TiaC Systems
# Copyright (c) 2021 Li-Pro.Net
# SPDX-License-Identifier: Apache-2.0
name: QA Integration Test
on:
schedule:
- cron: "0 2 * * *" # run at 2 AM UTC
workflow_dispatch: # and manually via button click
pull_request:
types: [opened, synchronize, reopened]
paths:
- 'applications/**'
- 'arch/**'
- 'boards/**'
- 'cmake/**'
- 'drivers/**'
- 'dts/**'
- 'include/**'
- 'lib/**'
- 'modules/**'
- 'samples/**'
- 'tests/**'
- 'soc/**'
- 'subsys/**'
- '**/CMakeLists.txt'
- '**/Kconfig*'
- '**.conf'
- '**.defconfig'
- '**.overlay'
- '**.yaml'
- 'scripts/requirements-*'
- 'scripts/requirements.txt'
- 'west.yml'
- '.github/workflows/qa-integration.yml'
jobs:
qa-samples-integration:
name: Run integration tests for samples
if: github.repository_owner == 'tiacsys'
runs-on: [self-hosted, ci-32g-x8, linux, x64, container]
container:
image: ghcr.io/zephyrproject-rtos/ci:v0.27.4
options: '--cpus 4 --memory 8g --entrypoint /bin/bash'
env:
ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.17.0
steps:
- name: Apply container owner mismatch workaround
run: |
# FIXME: The owner UID of the GITHUB_WORKSPACE directory may not
# match the container user UID because of the way the GitHub
# Actions runner is implemented. Remove this workaround once
# GitHub provides a proper fix for this problem.
git config --global --add safe.directory ${GITHUB_WORKSPACE}
- name: Apply container HTTP/2 framing layer workaround
run: |
# FIXME: For unknown reasons, both the local development host and CI
# intermittently run into "Error in the HTTP2 framing layer".
# Forcing the older but still supported HTTP/1.1 layer
# seems to be a stable workaround (first observed in Oct. 2023).
git config --global --add http.version HTTP/1.1
- name: Update GitHub PATH for west
run: |
echo "$HOME/.local/bin" >> $GITHUB_PATH
# It is important that this runs before any caching tasks, because cleanups
# run in reverse order (and you do not want to clean up before the
# cache is saved).
- name: Clean working directory
uses: tiacsys/clean-after-action@v3
#
# Could also be implemented with the new pre- and post-job hook scripts:
#
# - https://stackoverflow.com/questions/70483902
# - https://devopsjournal.io/blog/2023/06/21/GitHub-container-based-Action-cleanup
# - https://docs.github.com/actions/hosting-your-own-runners/running-scripts-before-or-after-a-job
#
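# A minimal sketch of that hook-based alternative, assuming a self-hosted
# runner host where such scripts can be wired up (all paths and the cleanup
# command below are illustrative, not part of this repository):
#
#   # /opt/actions-runner/.env
#   ACTIONS_RUNNER_HOOK_JOB_STARTED=/opt/actions-runner/hooks/cleanup.sh
#   ACTIONS_RUNNER_HOOK_JOB_COMPLETED=/opt/actions-runner/hooks/cleanup.sh
#
#   # /opt/actions-runner/hooks/cleanup.sh
#   #!/bin/sh
#   # Drop leftovers from the previous job, assuming GITHUB_WORKSPACE is
#   # exported to the hook environment.
#   rm -rf -- "${GITHUB_WORKSPACE:?}"/* || true
#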
with:
keep-git: false
- name: Checkout the code
uses: actions/checkout@v4
with:
fetch-depth: 100
show-progress: true
path: workspace/bridle
ref: ${{ github.ref }}
- name: Restore PIP Cache
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-qa-pip
- name: Install base dependencies
working-directory: workspace
run: |
pip3 install --upgrade pip
pip3 install --upgrade setuptools
pip3 install --upgrade --requirement bridle/scripts/requirements-base.txt
- name: West init and update
working-directory: workspace
run: |
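# 'west init --local' registers the checked-out bridle repository as the
# workspace's manifest repository; 'west update' then fetches Zephyr and the
# other projects pinned in its west.yml (shallow and narrow to keep it fast).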
west init --local bridle
west update --fetch-opt=--depth=100 --fetch=always --narrow --stats
west zephyr-export
west bridle-export
- name: Install build and test dependencies
working-directory: workspace
run: |
pip3 install --upgrade --requirement zephyr/scripts/requirements-base.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-build-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-run-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-extras.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-compliance.txt
pip3 install --upgrade --requirement bridle/scripts/requirements-build.txt
- name: Build samples
working-directory: workspace
run: |
west twister --verbose --jobs 6 \
--retry-failed 5 --retry-interval 60 \
--outdir twister-out --no-clean --inline-logs \
--alt-config-root bridle/zephyr/alt-config/samples \
--integration --no-detailed-test-id \
--testsuite-root bridle/samples \
--testsuite-root zephyr/samples \
--tag bridle \
--tag zephyr
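# Note: --integration restricts every suite to the platforms listed under
# integration_platforms in its testcase.yaml/sample.yaml. A minimal sketch
# of such a sample.yaml (suite name, tag and platform are illustrative only):
#
#   sample:
#     name: hello world
#   tests:
#     sample.basic.helloworld:
#       tags: bridle
#       integration_platforms:
#         - magpie_f777ni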
- name: Upload integration test logging
uses: actions/upload-artifact@v4
with:
name: twister-samples.log
path: workspace/twister-out/twister.log
- name: Upload integration test results
uses: actions/upload-artifact@v4
with:
name: twister-samples.xml
path: workspace/twister-out/twister.xml
- name: Convert integration test reports to annotations
uses: mikepenz/action-junit-report@v4
with:
check_name: twister-report (samples)
report_paths: "**/twister-out/twister.xml"
require_tests: true
fail_on_failure: false
if: always()
qa-tests-integration:
name: Run integration tests from upstream
if: github.repository_owner == 'tiacsys'
runs-on: [self-hosted, ci-32g-x8, linux, x64, container]
container:
image: ghcr.io/zephyrproject-rtos/ci:v0.27.4
options: '--cpus 4 --memory 8g --entrypoint /bin/bash'
env:
ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.17.0
steps:
- name: Apply container owner mismatch workaround
run: |
# FIXME: The owner UID of the GITHUB_WORKSPACE directory may not
# match the container user UID because of the way the GitHub
# Actions runner is implemented. Remove this workaround once
# GitHub provides a proper fix for this problem.
git config --global --add safe.directory ${GITHUB_WORKSPACE}
- name: Apply container HTTP/2 framing layer workaround
run: |
# FIXME: For unknown reasons, both the local development host and CI
# intermittently run into "Error in the HTTP2 framing layer".
# Forcing the older but still supported HTTP/1.1 layer
# seems to be a stable workaround (first observed in Oct. 2023).
git config --global --add http.version HTTP/1.1
- name: Update GitHub PATH for west
run: |
echo "$HOME/.local/bin" >> $GITHUB_PATH
# It is important that this runs before any caching tasks, because cleanups
# run in reverse order (and you do not want to clean up before the
# cache is saved).
- name: Clean working directory
uses: tiacsys/clean-after-action@v3
#
# Could also be implemented with the new pre- and post-job hook scripts:
#
# - https://stackoverflow.com/questions/70483902
# - https://devopsjournal.io/blog/2023/06/21/GitHub-container-based-Action-cleanup
# - https://docs.github.com/actions/hosting-your-own-runners/running-scripts-before-or-after-a-job
#
with:
keep-git: false
- name: Checkout the code
uses: actions/checkout@v4
with:
fetch-depth: 100
show-progress: true
path: workspace/bridle
ref: ${{ github.ref }}
- name: Restore PIP Cache
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-qa-pip
- name: Install base dependencies
working-directory: workspace
run: |
pip3 install --upgrade pip
pip3 install --upgrade setuptools
pip3 install --upgrade --requirement bridle/scripts/requirements-base.txt
- name: West init and update
working-directory: workspace
run: |
west init --local bridle
west update --fetch-opt=--depth=100 --fetch=always --narrow --stats
west zephyr-export
west bridle-export
- name: Install build and test dependencies
working-directory: workspace
run: |
pip3 install --upgrade --requirement zephyr/scripts/requirements-base.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-build-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-run-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-extras.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-compliance.txt
pip3 install --upgrade --requirement bridle/scripts/requirements-build.txt
- name: Build tests
working-directory: workspace
run: |
west twister --verbose --jobs 6 \
--retry-failed 5 --retry-interval 60 \
--outdir twister-out --no-clean --inline-logs \
--alt-config-root bridle/zephyr/alt-config/tests \
--integration --no-detailed-test-id \
--testsuite-root bridle/tests \
--testsuite-root zephyr/tests \
--tag bridle \
--tag zephyr
- name: Upload integration test logging
uses: actions/upload-artifact@v4
with:
name: twister-tests.log
path: workspace/twister-out/twister.log
- name: Upload integration test results
uses: actions/upload-artifact@v4
with:
name: twister-tests.xml
path: workspace/twister-out/twister.xml
- name: Convert integration test reports to annotations
uses: mikepenz/action-junit-report@v4
with:
check_name: twister-report (tests)
report_paths: "**/twister-out/twister.xml"
require_tests: true
fail_on_failure: false
if: always()
qa-shield-integration:
name: Run integration tests for shields
if: github.repository_owner == 'tiacsys'
runs-on: [self-hosted, ci-32g-x8, linux, x64, container]
container:
image: ghcr.io/zephyrproject-rtos/ci:v0.27.4
options: '--cpus 4 --memory 8g --entrypoint /bin/bash'
env:
ZEPHYR_SDK_INSTALL_DIR: /opt/toolchains/zephyr-sdk-0.17.0
steps:
- name: Apply container owner mismatch workaround
run: |
# FIXME: The owner UID of the GITHUB_WORKSPACE directory may not
# match the container user UID because of the way the GitHub
# Actions runner is implemented. Remove this workaround once
# GitHub provides a proper fix for this problem.
git config --global --add safe.directory ${GITHUB_WORKSPACE}
- name: Apply container HTTP/2 framing layer workaround
run: |
# FIXME: For unknown reasons, both the local development host and CI
# intermittently run into "Error in the HTTP2 framing layer".
# Forcing the older but still supported HTTP/1.1 layer
# seems to be a stable workaround (first observed in Oct. 2023).
git config --global --add http.version HTTP/1.1
- name: Update GitHub PATH for west
run: |
echo "$HOME/.local/bin" >> $GITHUB_PATH
# It is important that this runs before any caching tasks, because cleanups
# run in reverse order (and you do not want to clean up before the
# cache is saved).
- name: Clean working directory
uses: tiacsys/clean-after-action@v3
#
# Could also be implemented with the new pre- and post-job hook scripts:
#
# - https://stackoverflow.com/questions/70483902
# - https://devopsjournal.io/blog/2023/06/21/GitHub-container-based-Action-cleanup
# - https://docs.github.com/actions/hosting-your-own-runners/running-scripts-before-or-after-a-job
#
with:
keep-git: false
- name: Checkout the code
uses: actions/checkout@v4
with:
fetch-depth: 100
show-progress: true
path: workspace/bridle
ref: ${{ github.ref }}
- name: Restore PIP Cache
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-qa-pip
- name: Install base dependencies
working-directory: workspace
run: |
pip3 install --upgrade pip
pip3 install --upgrade setuptools
pip3 install --upgrade --requirement bridle/scripts/requirements-base.txt
- name: West init and update
working-directory: workspace
run: |
west init --local bridle
west update --fetch-opt=--depth=100 --fetch=always --narrow --stats
west zephyr-export
west bridle-export
- name: Install build and test dependencies
working-directory: workspace
run: |
pip3 install --upgrade --requirement zephyr/scripts/requirements-base.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-build-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-run-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-extras.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-compliance.txt
pip3 install --upgrade --requirement bridle/scripts/requirements-build.txt
- name: Build integration tests for shields
working-directory: workspace
run: |
#
# Disabled for now because the execution time is still
# too high (up to about 45 min):
#
# --> Grove Button Shields, ~640% more time (~550 more tests)
# --testsuite-root bridle/tests/shields/grove_btn/dts_bindings \
#
# --> Grove LED Shields, ~880% more time (~890 more tests)
# --testsuite-root bridle/tests/shields/grove_led/dts_bindings \
#
west twister --verbose --jobs 6 \
--outdir twister-out --no-clean --inline-logs \
--integration --cmake-only \
--quarantine-list bridle/tests/quarantine.yaml \
--testsuite-root bridle/tests/shields/grove/dts_bindings \
--testsuite-root bridle/tests/shields/x_grove_testbed/dts_bindings
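# The quarantine list keeps known-bad combinations out of the run without
# deleting the test suites. A minimal sketch of a quarantine.yaml entry
# (scenario, platform and comment are illustrative only):
#
#   - scenarios:
#       - dts_bindings.shields.grove
#     platforms:
#       - magpie_f777ni
#     comment: "illustrative: flaky on this platform, tracked upstream"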
- name: Upload integration test logging
uses: actions/upload-artifact@v4
with:
name: twister-shields.log
path: workspace/twister-out/twister.log
- name: Upload integration test results
uses: actions/upload-artifact@v4
with:
name: twister-shields.xml
path: workspace/twister-out/twister.xml
- name: Convert integration test reports to annotations
uses: mikepenz/action-junit-report@v4
with:
check_name: twister-report (shields)
report_paths: "**/twister-out/twister.xml"
require_tests: true
fail_on_failure: false
if: always()
qa-target-integration:
name: Run integration tests on targets
if: github.repository_owner == 'tiacsys'
runs-on: [self-hosted, linux, gnuarmemb, zephyr-sdk, tiac_magpie]
strategy:
matrix:
board: [magpie_f777ni]
steps:
- name: Apply container HTTP/2 framing layer workaround
run: |
# FIXME: For unknown reasons, both the local development host and CI
# intermittently run into "Error in the HTTP2 framing layer".
# Forcing the older but still supported HTTP/1.1 layer
# seems to be a stable workaround (first observed in Oct. 2023).
git config --global --add http.version HTTP/1.1
# It is important that this runs before any caching tasks, because cleanups
# run in reverse order (and you do not want to clean up before the
# cache is saved).
- name: Clean working directory
uses: tiacsys/clean-after-action@v3
#
# Could also be implemented with the new pre- and post-job hook scripts:
#
# - https://stackoverflow.com/questions/70483902
# - https://devopsjournal.io/blog/2023/06/21/GitHub-container-based-Action-cleanup
# - https://docs.github.com/actions/hosting-your-own-runners/running-scripts-before-or-after-a-job
#
with:
keep-git: false
- name: Checkout the code
uses: actions/checkout@v4
with:
fetch-depth: 100
show-progress: true
path: workspace/bridle
ref: ${{ github.ref }}
- name: Set up Python 3.12 virtual environment
working-directory: workspace
run: |
python3.12 -m venv --clear --copies .env
source .env/bin/activate
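# Publishing the venv's PATH via GITHUB_PATH keeps the virtual environment
# active for all subsequent steps of this job.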
echo "$PATH" > $GITHUB_PATH
- name: Install base dependencies
working-directory: workspace
run: |
which python3 && python3 --version
which pip3 && pip3 --version
pip3 install --upgrade pip
pip3 install --upgrade setuptools
pip3 install --upgrade --requirement bridle/scripts/requirements-base.txt
- name: West init and update
working-directory: workspace
run: |
west init --local bridle
west update --fetch-opt=--depth=100 --fetch=always --narrow --stats
west zephyr-export
west bridle-export
- name: Install build and test dependencies
working-directory: workspace
run: |
pip3 install --upgrade --requirement zephyr/scripts/requirements-base.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-build-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-run-test.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-extras.txt
pip3 install --upgrade --requirement zephyr/scripts/requirements-compliance.txt
pip3 install --upgrade --requirement bridle/scripts/requirements-build.txt
- name: Execute integration tests on target
working-directory: workspace
env:
HARDWARE_MAP: bridle/.github/${{ github.job }}/map-${{ matrix.board }}.yml
run: |
#
# Disabled for now because the execution time is still
# too high and most of these core tests are already covered by
# upstream Zephyr anyway:
#
# --tag arm \
# --tag vector_relay \
# --tag kernel \
#
west twister --verbose --jobs 2 \
--retry-failed 5 --retry-interval 60 \
--outdir twister-out --no-clean --inline-logs \
--device-testing --hardware-map ${HARDWARE_MAP} \
--extra-args SHIELD="loopback_test_tmph" \
--alt-config-root bridle/zephyr/alt-config/tests \
--testsuite-root bridle/tests \
--testsuite-root zephyr/tests \
--tag bridle \
--tag hwinfo \
--tag random \
--tag entropy \
--tag watchdog \
--tag counter \
--tag gpio \
--tag spi \
--tag uart \
--tag can
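# The hardware map describes the boards physically attached to this runner so
# that twister can flash and talk to them. A minimal sketch of such a map
# file (id, product, runner and serial values are illustrative only):
#
#   - connected: true
#     id: "0123456789ABCDEF"
#     platform: magpie_f777ni
#     product: "STLINK-V3"
#     runner: openocd
#     serial: /dev/ttyACM0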
- name: Upload integration test logging
uses: actions/upload-artifact@v4
with:
name: twister-targets.log
path: workspace/twister-out/twister.log
- name: Upload integration test results
uses: actions/upload-artifact@v4
with:
name: twister-targets.xml
path: workspace/twister-out/twister.xml
- name: Convert integration test reports to annotations
uses: mikepenz/action-junit-report@v4
with:
check_name: twister-report (${{ matrix.board }})
report_paths: "**/twister-out/twister.xml"
require_tests: true
fail_on_failure: false
if: always()