Bump certifi from 2022.12.7 to 2023.7.22 in /link-verifier #388
Workflow file for this run
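# This workflow exercises the composite actions defined in this repository
# (formatting, complexity, doxygen, spellings, coverage-cop, memory_statistics,
# link-verifier, manifest-verifier, and executable-monitor) against real
# FreeRTOS repositories, so changes to the actions are tested on pushes,
# pull requests to main, and manual runs.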
name: Test actions
on:
  push:
    branches: ["**"]
  pull_request:
    branches: [main]
  workflow_dispatch:

jobs:
  test-format-check:
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v2
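      # Each job checks out this repository first (for the local actions) and then
      # checks out a real library, such as coreMQTT, into a subdirectory to run the
      # action under test against.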
      - uses: actions/checkout@v2
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test formatting check action
        uses: ./formatting
        with:
          path: coreMQTT
          exclude-files: lexicon.txt
          exclude-dirs: build,docs

  test-executable-monitor:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Compile executable monitor test
        id: compile-executable-monitor-test
        shell: bash
        run: |
          # Compile executable monitor test
          echo "::group::Compile executable monitor test"
          if sudo apt install -y build-essential; then
            echo -e "\033[32;3mInstalled build-essential\033[0m"
          else
            echo "::endgroup::"
            echo -e "\033[32;31mInstall build-essential failed...\033[0m"
            exit 1
          fi
          # Check the compilers' exit status directly; checking "$?" after an
          # unrelated echo would always see zero.
          if gcc executable-monitor/test.c -o executable-monitor/test.out &&
             gcc -DEXIT_WITH_MINUTES executable-monitor/test.c -o executable-monitor/test_exit_current_minutes.out; then
            echo "::endgroup::"
            readlink -f executable-monitor/test.out
            echo -e "\033[32;3mCompiled executable-monitor/test.c\033[0m"
          else
            echo "::endgroup::"
            echo -e "\033[32;31mCompilation of executable-monitor/test.c failed...\033[0m"
            exit 1
          fi
      - name: Functional Test | Success Case | Retries, Success Line, and Exit Code | Pass on Success Line
        id: test-executable-monitor-action-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 20
          success-line: "SLEEPING FOR 6 SECONDS"
          success-exit-code: 0
          retry-attempts: 2
      - name: Functional Test | Success Case | Retries, Success Line, and Exit Code | Pass on Exit Code
        id: test-executable-monitor-action-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 75
          success-line: "LINE_THAT_WILL_NOT_PRINT_BUT_EXISTS"
          success-exit-code: 0
          retry-attempts: 2
      - name: Functional Test | Success Case | Retries, Success Line, and Exit Code, With log file
        id: test-executable-monitor-action-log-file
        uses: ./executable-monitor
        with:
          log-dir: demo_run_logs
          exe-path: executable-monitor/test.out
          timeout-seconds: 20
          success-line: "SLEEPING FOR 6 SECONDS"
          success-exit-code: 0
          retry-attempts: 2
      - name: Functional Test | Success Case | Retries, Success Line, No Exit Code
        id: test-executable-monitor-action-no-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 20
          success-line: "SLEEPING FOR 6 SECONDS"
          retry-attempts: 2
      - name: Functional Test | Success Case | Retries, No Success Line, Exit Code
        id: test-executable-monitor-action-no-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 75
          success-exit-code: 0
          retry-attempts: 2
      - name: Functional Test | Success Case | No Retries, Success Line, No Exit Code
        id: test-executable-monitor-action-no-retry-attempts-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-line: "SLEEPING FOR 6 SECONDS"
          timeout-seconds: 20
      - name: Functional Test | Success Case | No Retries, No Success Line, Exit Code
        id: test-executable-monitor-action-no-retry-attempts-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-exit-code: 0
          timeout-seconds: 75
      - name: API Test | Success Case | Retries, Success Line, No Exit Code, Use Default Timeout
        id: test-executable-monitor-action-no-exit-code-no-timeout
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-line: "SLEEPING FOR 6 SECONDS"
          retry-attempts: 2
      - name: API Test | Success Case | Retries, No Success Line, Exit Code, Use Default Timeout
        id: test-executable-monitor-action-no-success-line-no-timeout
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          success-exit-code: 0
          retry-attempts: 2
      # Get future times from now, then look for that in the executable
      - name: Get Future Times
        id: future-time
        run: |
          # Get times for future demos
          echo "time_success_line=$(date --date='2 minutes' +%H:%M)" >> "$GITHUB_ENV"
          echo "time_exit_code=$(date --date='5 minutes' +%M)" >> "$GITHUB_ENV"
      - name: Functional Test | Success Case | Retry On Wrong Success Line
        id: test-executable-monitor-action-retry-success-line
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 20
          retry-attempts: 10
          success-line: ${{ env.time_success_line }}
      - name: Functional Test | Success Case | Retry On Wrong Exit Code
        id: test-executable-monitor-action-retry-exit-code
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test_exit_current_minutes.out
          success-exit-code: ${{ env.time_exit_code }}
          timeout-seconds: 60
          retry-attempts: 10
      - name: Functional Test | Failure Case | Fail On Timeout
        id: test-executable-monitor-action-fail-on-timeout
        continue-on-error: true
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 1
          success-line: "SLEEPING FOR 12 SECONDS"
          success-exit-code: 0
          retry-attempts: 2
      - name: API Test | Failure Case | Retries, No Success Metric
        continue-on-error: true
        id: test-executable-monitor-API-no-success-metric
        uses: ./executable-monitor
        with:
          exe-path: executable-monitor/test.out
          timeout-seconds: 20
          retry-attempts: 2
      - name: API Test | Failure Case | No Exe Path
        continue-on-error: true
        id: test-executable-monitor-API-no-exe-path
        uses: ./executable-monitor
        with:
          timeout-seconds: 20
          success-line: "SLEEPING FOR 6 SECONDS"
          success-exit-code: 0
          retry-attempts: 2
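      # The three failure-case steps above use continue-on-error so the job keeps
      # running; the step below then asserts that each of them actually reported an
      # outcome of 'failure'.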
      - name: Check Failure Test Cases
        id: check-failure-test-cases
        run: |
          # Check that the failure test cases above failed as intended
          echo "::group::Check Failure Test Cases"
          if [ "${{ steps.test-executable-monitor-action-fail-on-timeout.outcome }}" = "failure" ]; then
            echo -e "\033[32;3mFunctional Test | Failure Case | Fail On Timeout had outcome '${{ steps.test-executable-monitor-action-fail-on-timeout.outcome }}' as intended\033[0m"
          else
            echo "::endgroup::"
            echo -e "\033[32;31mFunctional Test | Failure Case | Fail On Timeout had unexpected outcome '${{ steps.test-executable-monitor-action-fail-on-timeout.outcome }}'\033[0m"
            exit 1
          fi
          if [ "${{ steps.test-executable-monitor-API-no-success-metric.outcome }}" = "failure" ]; then
            echo -e "\033[32;3mAPI Test | Failure Case | Retries, No Success Metric had outcome '${{ steps.test-executable-monitor-API-no-success-metric.outcome }}' as intended\033[0m"
          else
            echo "::endgroup::"
            echo -e "\033[32;31mAPI Test | Failure Case | Retries, No Success Metric had unexpected outcome '${{ steps.test-executable-monitor-API-no-success-metric.outcome }}'\033[0m"
            exit 1
          fi
          if [ "${{ steps.test-executable-monitor-API-no-exe-path.outcome }}" = "failure" ]; then
            echo -e "\033[32;3mAPI Test | Failure Case | No Exe Path had outcome '${{ steps.test-executable-monitor-API-no-exe-path.outcome }}' as intended\033[0m"
          else
            echo "::endgroup::"
            echo -e "\033[32;31mAPI Test | Failure Case | No Exe Path had unexpected outcome '${{ steps.test-executable-monitor-API-no-exe-path.outcome }}'\033[0m"
            exit 1
          fi
          echo "::endgroup::"
          echo -e "All failure test cases failed as intended"

  test-complexity-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v2
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test complexity check action
        uses: ./complexity
        with:
          path: coreMQTT
          # For coreMQTT the code complexity threshold is 10.
          horrid_threshold: 10

  test-doxygen-zip-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: '3.11.0'
      - uses: actions/checkout@v2
        with:
          repository: aws/aws-iot-device-sdk-embedded-C
          submodules: recursive
          ref: main
          path: aws-iot-device-sdk-embedded-C
      - name: Test doxygen build action
        uses: ./doxygen
        with:
          path: ./aws-iot-device-sdk-embedded-C
          libs_parent_dir_path: libraries/standard,libraries/aws
          generate_zip: true

  test-doxygen-non-zip-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v2
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test doxygen build action
        uses: ./doxygen
        with:
          path: coreMQTT

  test-spell-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v2
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Test spell check action
        uses: ./spellings
        with:
          path: coreMQTT

  test-coverage-cop:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v2
        with:
          repository: FreeRTOS/coreMQTT
          ref: main
          path: coreMQTT
      - name: Build
        run: |
          sudo apt-get install -y lcov
          cmake -S ./coreMQTT/test -B build/ \
            -G "Unix Makefiles" \
            -DCMAKE_BUILD_TYPE=Debug \
            -DBUILD_CLONE_SUBMODULES=ON \
            -DCMAKE_C_FLAGS='--coverage -Wall -Wextra -Werror -DNDEBUG -DLIBRARY_LOG_LEVEL=LOG_DEBUG'
          make -C build/ all
      - name: Test
        run: |
          cd build/
          ctest -E system --output-on-failure
          cd ..
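      # The step below generates build/coverage.info with lcov, then strips test,
      # mock, and CMake compiler-check files from it before the coverage-cop action
      # checks the branch and line coverage minimums.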
      - name: Run Coverage
        run: |
          make -C build/ coverage
          declare -a EXCLUDE=("\*test/\*" "\*CMakeCCompilerId\*" "\*mocks\*")
          echo ${EXCLUDE[@]} | xargs lcov --rc lcov_branch_coverage=1 -r build/coverage.info -o build/coverage.info
          lcov --rc lcov_branch_coverage=1 --list build/coverage.info
      - name: Test coverage cop action
        uses: ./coverage-cop
        with:
          path: ./build/coverage.info
          branch-coverage-min: 70
          line-coverage-min: 100

  test-memory-statistics:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
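      # The memory_statistics action generates ./size_table_new.html for the test
      # project and, via check_against, compares it with the checked-in
      # ./size_table_expected.html.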
      - name: Test memory statistics action
        uses: ./memory_statistics
        with:
          path: memory_statistics/test
          config: ./memory_statistics_config.json
          output: ./size_table_new.html
          check_against: ./size_table_expected.html

  test-link-verifier:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Setup python environment
        uses: actions/setup-python@v2
        with:
          python-version: '3.11.0'
      - name: Test link verifier action
        uses: ./link-verifier
        with:
          path: ./
          exclude-dirs: complexity,formatting
          include-file-types: .c,.html

  test-manifest-verifier:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Setup python environment
        uses: actions/setup-python@v2
        with:
          python-version: '3.x'
      - name: Checkout the FreeRTOS/FreeRTOS repository to test the action on
        uses: actions/checkout@v2
        with:
          repository: FreeRTOS/FreeRTOS
          ref: '202107.00'
          path: FreeRTOS
          submodules: recursive
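      # Verify the manifest of the pinned FreeRTOS 202107.00 release, skipping the
      # test-only submodules listed in exclude-submodules and failing on incorrect
      # versions (fail-on-incorrect-version: true).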
      - name: Test manifest verifier
        uses: ./manifest-verifier
        with:
          path: ./FreeRTOS
          exclude-submodules: FreeRTOS-Plus/Test/CMock,FreeRTOS/Test/CMock/CMock,FreeRTOS/Test/litani
          fail-on-incorrect-version: true