name: Test actions
on:
  push:
    branches: ["**"]
  pull_request:
    branches: [main]
  workflow_dispatch:
env:
  # The bash escape character is \033
  bashPass: \033[32;1mPASSED -
  bashInfo: \033[33;1mWARN -
  bashFail: \033[31;1mFAILED -
  bashEnd: \033[0m
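  # These are ANSI colour prefixes used with "echo -e" in the steps below, e.g.
  # echo -e "${{ env.bashPass }} <message> ${{ env.bashEnd }}" prints a green "PASSED - <message>".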
jobs:
  test-format-check:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        # Test cases for the formatting check
        inputs:
          [
            {
              test-type: Functional,
              test-status: failure,
              description: "Whitespace, CRLF, and Format Failure",
              path: formatting
              # Also tests that omitting exclude-dirs/exclude-files causes no API errors
            },
            {
              test-type: Functional,
              test-status: failure,
              description: "CRLF and Formatting Error",
              path: formatting,
              exclude-dirs: "filesWithTrailingWhitespace"
            },
            {
              test-type: Functional,
              test-status: failure,
              description: "CRLF and Whitespace Error",
              path: formatting,
              exclude-dirs: "filesWithFormattingErrors"
            },
            {
              test-type: Functional,
              test-status: failure,
              description: "CRLF Error",
              path: formatting,
              exclude-dirs: "filesWithFormattingErrors,filesWithTrailingWhitespace"
            },
            {
              test-type: Functional,
              test-status: failure,
              description: "Formatting and Whitespace Error",
              path: formatting,
              exclude-dirs: "filesWithCRLFEndings"
            },
            {
              test-type: Functional,
              test-status: failure,
              description: "Formatting Error",
              path: formatting,
              exclude-dirs: "filesWithTrailingWhitespace,filesWithCRLFEndings"
            },
            {
              test-type: Functional,
              test-status: failure,
              description: "Whitespace Error",
              path: formatting,
              exclude-dirs: "filesWithFormattingErrors,filesWithCRLFEndings"
            },
            {
              test-type: Functional,
              test-status: success,
              description: "No Errors",
              path: formatting,
              exclude-dirs: "filesWithFormattingErrors,filesWithCRLFEndings,filesWithTrailingWhitespace",
              exclude-files: "badFile.c"
            },
          ]
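    # Each matrix entry above runs the ./formatting action once; the final step of this job
    # checks that the step outcome matches that entry's expected test-status.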
    steps:
      - uses: actions/checkout@v3
      - env:
          stepName: "${{ matrix.inputs.test-type }} | ${{ matrix.inputs.test-status }} | ${{ matrix.inputs.description }}"
        name: ${{ env.stepName }}
        id: format-test
        continue-on-error: true
        uses: ./formatting
        with:
          path: ${{ matrix.inputs.path }}
          exclude-files: ${{ matrix.inputs.exclude-files }}
          exclude-dirs: ${{ matrix.inputs.exclude-dirs }}
      - env:
          stepName: Check Failure Test Cases
        name: ${{ env.stepName }}
        id: check-failure-test-cases
        shell: bash
        run: |
          # Check Failure Test Cases
          set +e
          if [ "${{ steps.format-test.outcome }}" = "${{ matrix.inputs.test-status }}" ]; then
            echo -e "${{ env.bashPass }} ${{ matrix.inputs.test-type }} | ${{ matrix.inputs.description }} | Had Expected ${{ matrix.inputs.test-status }} ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} ${{ matrix.inputs.test-type }} | ${{ matrix.inputs.description }} | Had Unexpected ${{ matrix.inputs.test-status }} ${{ env.bashEnd }}"
            set -e
            exit 1
          fi
  test-exe-monitor-success-cases:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - env:
          stepName: Install Windows Build Tools
        name: ${{ env.stepName }}
        if: runner.os == 'Windows'
        id: install-windows-build-tools
        uses: microsoft/setup-msbuild@v1.1
      - env:
          stepName: Install Ubuntu Build Tools
        name: ${{ env.stepName }}
        if: runner.os == 'Linux'
        id: install-ubuntu-build-tools
        run: |
          # Install Ubuntu Build Tools
          echo "::group::${{ env.stepName }}"
          sudo apt install -y build-essential
          exitStatus=$?
          echo "::endgroup::"
          if [ "$exitStatus" = "0" ]; then
            echo -e "${{ env.bashPass }} ${{ env.stepName }} ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} ${{ env.stepName }} ${{ env.bashEnd }}"
            exit 1
          fi
      - env:
          stepName: Compile Executable Monitor Test Files
        name: ${{ env.stepName }}
        id: compile-executable-monitor-test-files
        shell: bash
        run: |
          # ${{ env.stepName }}
          echo "::group::${{ env.stepName }}"
          gcc executable-monitor/test.c -o executable-monitor/test.out
          gcc -DEXIT_WITH_MINUTES executable-monitor/test.c -o executable-monitor/test_exit_current_minutes.out
          readlink -f executable-monitor/test.out
          readlink -f executable-monitor/test_exit_current_minutes.out
          exitStatus=$?
          echo "::endgroup::"
          if [ "$exitStatus" = "0" ]; then
            echo -e "${{ env.bashPass }} ${{ env.stepName }} ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} ${{ env.stepName }} ${{ env.bashEnd }}"
            exit 1
          fi
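      # test.out is a plain test executable; test_exit_current_minutes.out is built with
      # -DEXIT_WITH_MINUTES and, as its name suggests, is expected to exit with the current
      # minute, which the "Retry Needed" test cases below rely on.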
      # Get future times from now, then look for those values in the executable's output
      - env:
          stepName: Get Future Times
        name: ${{ env.stepName }}
        id: future-time
        shell: bash
        run: |
          # ${{ env.stepName }}
          set -x
          echo "${{ env.bashInfo }} Getting Future Times to Compare Against ${{ env.bashEnd }}"
          echo "exitCodeTwoMinutes=$(date --date='2 minutes' +%M)" >> "$GITHUB_ENV"
          echo "successLineThreeMinutes=$(date --date='3 minutes' +%H:%M)" >> "$GITHUB_ENV"
          echo "successLineFiveMinutes=$(date --date='5 minutes' +%H:%M)" >> "$GITHUB_ENV"
          echo "exitCodeFourMinutes=$(date --date='4 minutes' +%M)" >> "$GITHUB_ENV"
      - env:
          stepName: "Functional | Exit Code | Exit Code Found"
        name: ${{ env.stepName }}
        id: exe-monitor-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-exit-code: 0
          log-dir: logDirectory
          timeout-seconds: 60
      - env:
          stepName: "Functional | Success Line | Success Line Found"
        name: ${{ env.stepName }}
        id: exe-monitor-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-line: "SLEEPING FOR 6 SECONDS"
          log-dir: logDirectory
          timeout-seconds: 30
      - env:
          stepName: "Functional | Exit Code and Success Line | Exit Code Found"
        name: ${{ env.stepName }}
        id: exe-monitor-find-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-line: "LINE_THAT_WILL_NOT_PRINT"
          success-exit-code: 0
          timeout-seconds: 60
      - env:
          stepName: "Functional | Exit Code and Success Line | Success Line Found"
        name: ${{ env.stepName }}
        id: exe-monitor-find-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-line: "SLEEPING FOR 6 SECONDS"
          success-exit-code: 0
          timeout-seconds: 30
      - env:
          stepName: "Functional | Retry Needed | Exit Code Found"
        name: ${{ env.stepName }}
        id: exe-monitor-retry-find-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test_exit_current_minutes.out
          success-exit-code: ${{ env.exitCodeTwoMinutes }}
          retry-attempts: 10
          timeout-seconds: 60
      - env:
          stepName: "Functional | Retry Needed | Success Line Found"
        name: ${{ env.stepName }}
        id: exe-monitor-retry-find-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test_exit_current_minutes.out
          success-line: ${{ env.successLineThreeMinutes }}
          retry-attempts: 10
          timeout-seconds: 60
      - env:
          stepName: "Functional | Retry Needed | Exit Code and Success Line | Exit Code Found"
        name: ${{ env.stepName }}
        id: exe-monitor-retry-both-inputs-find-exit
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test_exit_current_minutes.out
          success-line: "LINE_THAT_WILL_NOT_PRINT"
          success-exit-code: ${{ env.exitCodeFourMinutes }}
          retry-attempts: 10
          timeout-seconds: 60
      - env:
          stepName: "Functional | Retry Needed | Exit Code and Success Line | Success Line Found"
        name: ${{ env.stepName }}
        id: exe-monitor-retry-both-inputs-find-success-line
        uses: ./executable-monitor
        with:
          # Use the executable that does not exit with the current minutes
          exe-path: executable-monitor/test.out
          success-line: ${{ env.successLineFiveMinutes }}
          success-exit-code: 1
          retry-attempts: 10
          timeout-seconds: 60
  test-exe-monitor-failure-cases:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v3
      - env:
          stepName: Install Windows Build Tools
        name: ${{ env.stepName }}
        if: runner.os == 'Windows'
        id: install-windows-build-tools
        uses: microsoft/setup-msbuild@v1.1
      - env:
          stepName: Install Ubuntu Build Tools
        name: ${{ env.stepName }}
        if: runner.os == 'Linux'
        id: install-ubuntu-build-tools
        run: |
          # Install Ubuntu Build Tools
          echo "::group::${{ env.stepName }}"
          sudo apt install -y build-essential
          exitStatus=$?
          echo "::endgroup::"
          if [ "$exitStatus" = "0" ]; then
            echo -e "${{ env.bashPass }} ${{ env.stepName }} ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} ${{ env.stepName }} ${{ env.bashEnd }}"
            exit 1
          fi
      - env:
          stepName: Compile Executable Monitor Test Files
        name: ${{ env.stepName }}
        id: compile-executable-monitor-test-files
        shell: bash
        run: |
          # ${{ env.stepName }}
          echo "::group::${{ env.stepName }}"
          gcc executable-monitor/test.c -o executable-monitor/test.out
          gcc -DEXIT_WITH_MINUTES executable-monitor/test.c -o executable-monitor/test_exit_current_minutes.out
          readlink -f executable-monitor/test.out
          readlink -f executable-monitor/test_exit_current_minutes.out
          exitStatus=$?
          echo "::endgroup::"
          if [ "$exitStatus" = "0" ]; then
            echo -e "${{ env.bashPass }} ${{ env.stepName }} ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} ${{ env.stepName }} ${{ env.bashEnd }}"
            exit 1
          fi
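      # Every step below is expected to fail, so each one sets continue-on-error: true and the
      # final "Check Failure Test Cases" step asserts that its outcome was "failure".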
      - env:
          stepName: "API | Failure | No Executable Provided"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-no-exe
        uses: ./executable-monitor
        continue-on-error: true
        with:
          timeout-seconds: 30
      - env:
          stepName: "API | Failure | No Success Condition Provided"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-no-success-condition
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          log-dir: logDirectory
          timeout-seconds: 30
      - env:
          stepName: "Functional | Failure | Timeout Cause No Success Line To Print"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-timeout-no-success-line
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          # This is a line that would print if not for the timeout
          success-line: "SLEEPING FOR 9 SECONDS"
          timeout-seconds: 2
      - env:
          stepName: "Functional | Failure | Timeout Cause No Exit Code"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-timeout-no-exit-code
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          # This is an exit status that would be met if not for the timeout
          success-exit-code: 0
          timeout-seconds: 2
      - env:
          stepName: "Functional | Failure | Timeout Cause Neither Condition"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-timeout-no-condition
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          # These are exit conditions that would be met if not for the timeout
          success-line: "SLEEPING FOR 9 SECONDS"
          success-exit-code: 0
          timeout-seconds: 2
      - env:
          stepName: "Functional | Failure | Retries Timeout Cause No Success Line To Print"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-retries-timeout-no-success-line
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          # This is a line that would print if not for the timeout
          success-line: "SLEEPING FOR 9 SECONDS"
          timeout-seconds: 2
          retry-attempts: 2
      - env:
          stepName: "Functional | Failure | Retries Timeout Cause No Exit Code"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-retries-no-exit-code
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          # This is an exit status that would be met if not for the timeout
          success-exit-code: 0
          timeout-seconds: 2
          retry-attempts: 2
      - env:
          stepName: "Functional | Failure | Retries Timeout Cause Neither Condition"
        name: ${{ env.stepName }}
        id: exe-monitor-fail-retries-no-success-condition
        uses: ./executable-monitor
        continue-on-error: true
        with:
          exe-path: executable-monitor/test.out
          # These are exit conditions that would be met if not for the timeout
          success-line: "SLEEPING FOR 9 SECONDS"
          success-exit-code: 0
          timeout-seconds: 2
          retry-attempts: 2
      - env:
          stepName: Check Failure Test Cases
        name: ${{ env.stepName }}
        id: check-failure-test-cases
        shell: bash
        run: |
          # Check Failure Test Cases
          if [ "${{ steps.exe-monitor-fail-no-exe.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | No Executable Provided | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | No Executable Provided | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-no-success-condition.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | No Success Condition Provided | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | No Success Condition Provided | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-timeout-no-success-line.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | Timeout Cause No Success Line To Print | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | Timeout Cause No Success Line To Print | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-timeout-no-exit-code.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | Timeout Cause No Exit Code | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | Timeout Cause No Exit Code | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-timeout-no-condition.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | Timeout Cause Neither Condition | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | Timeout Cause Neither Condition | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-retries-timeout-no-success-line.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | Retries Timeout Cause No Success Line To Print | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | Retries Timeout Cause No Success Line To Print | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-retries-no-exit-code.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | Retries Timeout Cause No Exit Code | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | Retries Timeout Cause No Exit Code | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
          if [ "${{ steps.exe-monitor-fail-retries-no-success-condition.outcome }}" = "failure" ]; then
            echo -e "${{ env.bashPass }} | Retries Timeout Cause Neither Condition | Failed As Intended ${{ env.bashEnd }}"
          else
            echo -e "${{ env.bashFail }} | Retries Timeout Cause Neither Condition | Had Unexpected Pass ${{ env.bashEnd }}"
            exit 1
          fi
  test-complexity-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v3
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test complexity check action
        uses: ./complexity
        with:
          path: coreMQTT
          # For coreMQTT the code complexity threshold is 10.
          horrid_threshold: 10
  test-doxygen-zip-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: '3.11.0'
      - uses: actions/checkout@v3
        with:
          repository: aws/aws-iot-device-sdk-embedded-C
          submodules: recursive
          ref: main
          path: aws-iot-device-sdk-embedded-C
      - name: Test doxygen build action
        uses: ./doxygen
        with:
          path: ./aws-iot-device-sdk-embedded-C
          libs_parent_dir_path: libraries/standard,libraries/aws
          generate_zip: true
  test-doxygen-non-zip-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v3
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test doxygen build action
        uses: ./doxygen
        with:
          path: coreMQTT
  test-spell-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v3
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test spell check action
        uses: ./spellings
        with:
          path: coreMQTT
  test-coverage-cop:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v3
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Build
        run: |
          sudo apt-get install -y lcov
          cmake -S ./coreMQTT/test -B build/ \
            -G "Unix Makefiles" \
            -DCMAKE_BUILD_TYPE=Debug \
            -DBUILD_CLONE_SUBMODULES=ON \
            -DCMAKE_C_FLAGS='--coverage -Wall -Wextra -Werror -DNDEBUG -DLIBRARY_LOG_LEVEL=LOG_DEBUG'
          make -C build/ all
      - name: Test
        run: |
          cd build/
          ctest -E system --output-on-failure
          cd ..
      - name: Run Coverage
        run: |
          make -C build/ coverage
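          # Drop test harness, CMake compiler-id, and mock sources from coverage.info so that
          # only library code is measured against the coverage thresholds below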
          declare -a EXCLUDE=("\*test/\*" "\*CMakeCCompilerId\*" "\*mocks\*")
          echo ${EXCLUDE[@]} | xargs lcov --rc lcov_branch_coverage=1 -r build/coverage.info -o build/coverage.info
          lcov --rc lcov_branch_coverage=1 --list build/coverage.info
      - name: Test coverage cop action
        uses: ./coverage-cop
        with:
          path: ./build/coverage.info
          branch-coverage-min: 70
          line-coverage-min: 100
  test-memory-statistics:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
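      # The action presumably generates size_table_new.html from memory_statistics_config.json
      # and compares it against the checked-in size_table_expected.html via check_against.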
      - name: Test memory statistics action
        uses: ./memory_statistics
        with:
          path: memory_statistics/test
          config: ./memory_statistics_config.json
          output: ./size_table_new.html
          check_against: ./size_table_expected.html
  test-link-verifier:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Setup python environment
        uses: actions/setup-python@v3
        with:
          python-version: '3.11.0'
      - name: Test link verifier action
        uses: ./link-verifier
        with:
          path: ./
          exclude-dirs: complexity,formatting
          include-file-types: .c,.html
  test-manifest-verifier:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Setup python environment
        uses: actions/setup-python@v3
        with:
          python-version: '3.x'
      - name: Checkout the FreeRTOS/FreeRTOS repository to test the action on
        uses: actions/checkout@v3
        with:
          repository: FreeRTOS/FreeRTOS
          ref: '202107.00'
          path: FreeRTOS
          submodules: recursive
      - name: Test manifest verifier
        uses: ./manifest-verifier
        with:
          path: ./FreeRTOS
          exclude-submodules: FreeRTOS-Plus/Test/CMock,FreeRTOS/Test/CMock/CMock,FreeRTOS/Test/litani
          fail-on-incorrect-version: true