From c883c46844d986e312d3be97658281b6e64a92f4 Mon Sep 17 00:00:00 2001 From: driazati Date: Mon, 18 Apr 2022 15:00:43 -0700 Subject: [PATCH 1/2] [ci] Add local test re-run info This adds a note with the relevant `ci.py` command when test failures are detected. This should help advertise `ci.py` and make it clearer how to reproduce failures locally. It also adds a prefilled link that makes it quick to report a test on a build as flaky. --- Jenkinsfile | 145 +++++++++++++++++------- jenkins/Jenkinsfile.j2 | 87 +++++++++++++++--- jenkins/macros.j2 | 9 +- tests/scripts/pytest_wrapper.py | 133 +++++++++++++++++++++++++++ tests/scripts/setup-pytest-env.sh | 5 +- 5 files changed, 299 insertions(+), 80 deletions(-) create mode 100755 tests/scripts/pytest_wrapper.py diff --git a/Jenkinsfile b/Jenkinsfile index 47b57cc7fcb1..226cda5dab50 100755 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -45,7 +45,7 @@ // 'python3 jenkins/generate.py' // Note: This timestamp is here to ensure that updates to the Jenkinsfile are // always rebased on main before merging: -// Generated at 2022-04-22T08:47:27.237503 +// Generated at 2022-04-22T12:59:15.071304 import org.jenkinsci.plugins.pipeline.modeldefinition.Utils // NOTE: these lines are scanned by docker/dev_common.sh. Please update the regex as needed. --> @@ -89,7 +89,7 @@ microtvm_lib = 'build/microtvm_template_projects.tar.gz, ' + tvm_lib upstream_revision = null // command to start a docker container -docker_run = 'docker/bash.sh --env CI --env TVM_SHARD_INDEX --env TVM_NUM_SHARDS' +docker_run = 'docker/bash.sh --env CI --env TVM_SHARD_INDEX --env TVM_NUM_SHARDS --env RUN_DISPLAY_URL --env PLATFORM' docker_build = 'docker/build.sh' // timeout in minutes max_time = 240 @@ -619,6 +619,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=0'], { unpack_lib('gpu2', tvm_multilib) @@ -658,6 +659,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=1'], { unpack_lib('gpu2', tvm_multilib) @@ -697,6 +699,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=cpu', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=0'], { unpack_lib('cpu', tvm_multilib_tsim) @@ -724,6 +727,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=cpu', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=1'], { unpack_lib('cpu', tvm_multilib_tsim) @@ -750,15 +754,17 @@ stage('Test') { timeout(time: max_time, unit: 'MINUTES') { try { init_git() - unpack_lib('cpu', tvm_multilib_tsim) - ci_setup(ci_cpu) - cpp_unittest(ci_cpu) - python_unittest(ci_cpu) - fsim_test(ci_cpu) - sh ( - script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_tsim.sh", - label: 'Run VTA tests in TSIM', - ) + withEnv(['PLATFORM=cpu'], { + unpack_lib('cpu', tvm_multilib_tsim) + ci_setup(ci_cpu) + cpp_unittest(ci_cpu) + python_unittest(ci_cpu) + fsim_test(ci_cpu) + sh ( + script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_vta_tsim.sh", + label: 'Run VTA tests in TSIM', + ) + }) } finally { junit 'build/pytest-results/*.xml' } @@ -777,6 +783,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=i386', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=0'], { unpack_lib('i386', tvm_multilib) @@ -807,6 +814,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=i386', 'TVM_NUM_SHARDS=2', 
'TVM_SHARD_INDEX=1'], { unpack_lib('i386', tvm_multilib) @@ -836,17 +844,19 @@ stage('Test') { timeout(time: max_time, unit: 'MINUTES') { try { init_git() - unpack_lib('hexagon', tvm_lib) - ci_setup(ci_hexagon) - cpp_unittest(ci_hexagon) - sh ( - script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_build_hexagon_api.sh", - label: 'Build Hexagon API', - ) - sh ( - script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_python_hexagon.sh", - label: 'Run Hexagon tests', - ) + withEnv(['PLATFORM=hexagon'], { + unpack_lib('hexagon', tvm_lib) + ci_setup(ci_hexagon) + cpp_unittest(ci_hexagon) + sh ( + script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_build_hexagon_api.sh", + label: 'Build Hexagon API', + ) + sh ( + script: "${docker_run} ${ci_hexagon} ./tests/scripts/task_python_hexagon.sh", + label: 'Run Hexagon tests', + ) + }) } finally { junit 'build/pytest-results/*.xml' } @@ -864,21 +874,23 @@ stage('Test') { timeout(time: max_time, unit: 'MINUTES') { try { init_git() - unpack_lib('qemu', microtvm_lib) - sh( - script: 'cd build && tar -xzvf microtvm_template_projects.tar.gz', - label: 'Unpack microtvm_template_projects' - ) - ci_setup(ci_qemu) - cpp_unittest(ci_qemu) - sh ( - script: "${docker_run} ${ci_qemu} ./tests/scripts/task_python_microtvm.sh", - label: 'Run microTVM tests', - ) - sh ( - script: "${docker_run} ${ci_qemu} ./tests/scripts/task_demo_microtvm.sh", - label: 'Run microTVM demos', - ) + withEnv(['PLATFORM=qemu'], { + unpack_lib('qemu', microtvm_lib) + sh( + script: 'cd build && tar -xzvf microtvm_template_projects.tar.gz', + label: 'Unpack microtvm_template_projects' + ) + ci_setup(ci_qemu) + cpp_unittest(ci_qemu) + sh ( + script: "${docker_run} ${ci_qemu} ./tests/scripts/task_python_microtvm.sh", + label: 'Run microTVM tests', + ) + sh ( + script: "${docker_run} ${ci_qemu} ./tests/scripts/task_demo_microtvm.sh", + label: 'Run microTVM demos', + ) + }) } finally { junit 'build/pytest-results/*.xml' } @@ -896,17 +908,19 @@ stage('Test') { timeout(time: max_time, unit: 'MINUTES') { try { init_git() - unpack_lib('arm', tvm_multilib) - ci_setup(ci_arm) - cpp_unittest(ci_arm) - sh ( - script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh", - label: 'Run test_arm_compute_lib test', - ) - sh ( - script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_topi.sh", - label: 'Run TOPI tests', - ) + withEnv(['PLATFORM=arm'], { + unpack_lib('arm', tvm_multilib) + ci_setup(ci_arm) + cpp_unittest(ci_arm) + sh ( + script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_arm_compute_library.sh", + label: 'Run test_arm_compute_lib test', + ) + sh ( + script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_topi.sh", + label: 'Run TOPI tests', + ) + }) } finally { junit 'build/pytest-results/*.xml' } @@ -925,6 +939,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=arm', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=0'], { unpack_lib('arm', tvm_multilib) @@ -953,6 +968,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=arm', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=1'], { unpack_lib('arm', tvm_multilib) @@ -981,6 +997,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=0'], { unpack_lib('gpu', tvm_multilib) @@ -1008,6 +1025,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=2', 'TVM_SHARD_INDEX=1'], { 
unpack_lib('gpu', tvm_multilib) @@ -1035,6 +1053,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=3', 'TVM_SHARD_INDEX=0'], { unpack_lib('gpu', tvm_multilib) @@ -1062,6 +1081,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=3', 'TVM_SHARD_INDEX=1'], { unpack_lib('gpu', tvm_multilib) @@ -1089,6 +1109,7 @@ stage('Test') { init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM=gpu', 'TVM_NUM_SHARDS=3', 'TVM_SHARD_INDEX=2'], { unpack_lib('gpu', tvm_multilib) @@ -1115,12 +1136,14 @@ stage('Test') { timeout(time: max_time, unit: 'MINUTES') { try { init_git() - unpack_lib('cpu', tvm_multilib) - ci_setup(ci_cpu) - sh ( - script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_frontend_cpu.sh", - label: 'Run Python frontend tests', - ) + withEnv(['PLATFORM=cpu'], { + unpack_lib('cpu', tvm_multilib) + ci_setup(ci_cpu) + sh ( + script: "${docker_run} ${ci_cpu} ./tests/scripts/task_python_frontend_cpu.sh", + label: 'Run Python frontend tests', + ) + }) } finally { junit 'build/pytest-results/*.xml' } @@ -1138,12 +1161,14 @@ stage('Test') { timeout(time: max_time, unit: 'MINUTES') { try { init_git() - unpack_lib('arm', tvm_multilib) - ci_setup(ci_arm) - sh ( - script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_frontend_cpu.sh", - label: 'Run Python frontend tests', - ) + withEnv(['PLATFORM=arm'], { + unpack_lib('arm', tvm_multilib) + ci_setup(ci_arm) + sh ( + script: "${docker_run} ${ci_arm} ./tests/scripts/task_python_frontend_cpu.sh", + label: 'Run Python frontend tests', + ) + }) } finally { junit 'build/pytest-results/*.xml' } diff --git a/jenkins/Jenkinsfile.j2 b/jenkins/Jenkinsfile.j2 index 6527ed080c45..8331db0082a2 100644 --- a/jenkins/Jenkinsfile.j2 +++ b/jenkins/Jenkinsfile.j2 @@ -86,7 +86,7 @@ microtvm_lib = 'build/microtvm_template_projects.tar.gz, ' + tvm_lib upstream_revision = null // command to start a docker container -docker_run = 'docker/bash.sh --env CI --env TVM_SHARD_INDEX --env TVM_NUM_SHARDS' +docker_run = 'docker/bash.sh --env CI --env TVM_SHARD_INDEX --env TVM_NUM_SHARDS --env RUN_DISPLAY_URL --env PLATFORM' docker_build = 'docker/build.sh' // timeout in minutes max_time = 240 @@ -608,7 +608,13 @@ stage('Test') { SKIP_SLOW_TESTS = "${skip_slow_tests}" } parallel( - {% call m.sharded_test_step(name="unittest: GPU", num_shards=2, node="GPU", ws="tvm/ut-python-gpu") %} + {% call m.sharded_test_step( + name="unittest: GPU", + num_shards=2, + node="GPU", + ws="tvm/ut-python-gpu", + platform="gpu", + ) %} unpack_lib('gpu2', tvm_multilib) cpp_unittest(ci_gpu) @@ -628,7 +634,13 @@ stage('Test') { label: 'Run Python GPU integration tests', ) {% endcall %} - {% call m.sharded_test_step(name="integration: CPU", node="CPU", num_shards=2, ws="tvm/integration-python-cpu") %} + {% call m.sharded_test_step( + name="integration: CPU", + node="CPU", + num_shards=2, + ws="tvm/integration-python-cpu", + platform="cpu", + ) %} unpack_lib('cpu', tvm_multilib_tsim) ci_setup(ci_cpu) sh ( @@ -636,7 +648,11 @@ stage('Test') { label: 'Run CPU integration tests', ) {% endcall %} - {% call m.test_step(name="unittest: CPU", node="CPU", ws="tvm/ut-python-cpu") %} + {% call m.test_step( + name="unittest: CPU", + node="CPU", ws="tvm/ut-python-cpu", + platform="cpu", + ) %} unpack_lib('cpu', tvm_multilib_tsim) ci_setup(ci_cpu) cpp_unittest(ci_cpu) @@ -647,7 +663,13 @@ stage('Test') { label: 'Run VTA tests in TSIM', ) {% endcall %} - {% 
call m.sharded_test_step(name="python: i386", node="CPU", num_shards=2, ws="tvm/integration-python-i386") %} + {% call m.sharded_test_step( + name="python: i386", + node="CPU", + num_shards=2, + ws="tvm/integration-python-i386", + platform="i386", + ) %} unpack_lib('i386', tvm_multilib) ci_setup(ci_i386) cpp_unittest(ci_i386) @@ -658,7 +680,11 @@ stage('Test') { ) fsim_test(ci_i386) {% endcall %} - {% call m.test_step(name="test: Hexagon", node="CPU", ws="tvm/test-hexagon") %} + {% call m.test_step( + name="test: Hexagon", + node="CPU", ws="tvm/test-hexagon", + platform="hexagon", + ) %} unpack_lib('hexagon', tvm_lib) ci_setup(ci_hexagon) cpp_unittest(ci_hexagon) @@ -671,7 +697,11 @@ stage('Test') { label: 'Run Hexagon tests', ) {% endcall %} - {% call m.test_step(name="test: QEMU", node="CPU", ws="tvm/test-qemu") %} + {% call m.test_step( + name="test: QEMU", + node="CPU", ws="tvm/test-qemu", + platform="qemu", + ) %} unpack_lib('qemu', microtvm_lib) sh( script: 'cd build && tar -xzvf microtvm_template_projects.tar.gz', @@ -688,7 +718,12 @@ stage('Test') { label: 'Run microTVM demos', ) {% endcall %} - {% call m.test_step(name="topi: aarch64", node="ARM", ws="tvm/ut-python-arm") %} + {% call m.test_step( + name="topi: aarch64", + node="ARM", + ws="tvm/ut-python-arm", + platform="arm", +) %} unpack_lib('arm', tvm_multilib) ci_setup(ci_arm) cpp_unittest(ci_arm) @@ -701,7 +736,12 @@ stage('Test') { label: 'Run TOPI tests', ) {% endcall %} - {% call m.sharded_test_step(name="integration: aarch64", num_shards=2, node="ARM", ws="tvm/ut-python-arm") %} + {% call m.sharded_test_step( + name="integration: aarch64", + num_shards=2, + node="ARM", ws="tvm/ut-python-arm", + platform="arm", + ) %} unpack_lib('arm', tvm_multilib) ci_setup(ci_arm) python_unittest(ci_arm) @@ -710,7 +750,13 @@ stage('Test') { label: 'Run CPU integration tests', ) {% endcall %} - {% call m.sharded_test_step(name="topi: GPU", node="GPU", num_shards=2, ws="tvm/topi-python-gpu") %} + {% call m.sharded_test_step( + name="topi: GPU", + node="GPU", + num_shards=2, + ws="tvm/topi-python-gpu", + platform="gpu", + ) %} unpack_lib('gpu', tvm_multilib) ci_setup(ci_gpu) sh ( @@ -718,7 +764,12 @@ stage('Test') { label: 'Run TOPI tests', ) {% endcall %} - {% call m.sharded_test_step(name="frontend: GPU", node="GPU", num_shards=3, ws="tvm/frontend-python-gpu") %} + {% call m.sharded_test_step( + name="frontend: GPU", node="GPU", + num_shards=3, + ws="tvm/frontend-python-gpu", + platform="gpu", + ) %} unpack_lib('gpu', tvm_multilib) ci_setup(ci_gpu) sh ( @@ -726,7 +777,12 @@ stage('Test') { label: 'Run Python frontend tests', ) {% endcall %} - {% call m.test_step(name="frontend: CPU", node="CPU", ws="tvm/frontend-python-cpu") %} + {% call m.test_step( + name="frontend: CPU", + node="CPU", + ws="tvm/frontend-python-cpu", + platform="cpu", +) %} unpack_lib('cpu', tvm_multilib) ci_setup(ci_cpu) sh ( @@ -734,7 +790,12 @@ stage('Test') { label: 'Run Python frontend tests', ) {% endcall %} - {% call m.test_step(name="frontend: aarch64", node="ARM", ws="tvm/frontend-python-arm") %} + {% call m.test_step( + name="frontend: aarch64", + node="ARM", + ws="tvm/frontend-python-arm", + platform="arm", +) %} unpack_lib('arm', tvm_multilib) ci_setup(ci_arm) sh ( diff --git a/jenkins/macros.j2 b/jenkins/macros.j2 index 033afbe94921..9e7c202b32f1 100644 --- a/jenkins/macros.j2 +++ b/jenkins/macros.j2 @@ -19,7 +19,7 @@ "workspace/exec_${env.EXECUTOR_NUMBER}/{{ folder }}" {%- endmacro -%} -{% macro sharded_test_step(name, num_shards, node, ws) %} +{% macro 
sharded_test_step(name, num_shards, node, ws, platform) %} {% for shard_index in range(1, num_shards + 1) %} '{{ name }} {{ shard_index }} of {{ num_shards }}': { if (!skip_ci && is_docs_only_build != 1) { @@ -29,6 +29,7 @@ init_git() timeout(time: max_time, unit: 'MINUTES') { withEnv([ + 'PLATFORM={{ platform }}', 'TVM_NUM_SHARDS={{ num_shards }}', 'TVM_SHARD_INDEX={{ shard_index - 1 }}'], { {{ caller() | trim | indent(width=12) }} @@ -47,7 +48,7 @@ {% endmacro %} -{% macro test_step(name, node, ws) %} +{% macro test_step(name, node, ws, platform) %} '{{ name }}': { if (!skip_ci && is_docs_only_build != 1) { node('{{ node }}') { @@ -55,7 +56,9 @@ timeout(time: max_time, unit: 'MINUTES') { try { init_git() - {{ caller() | indent(width=10) | trim }} + withEnv(['PLATFORM={{ platform }}'], { + {{ caller() | indent(width=12) | trim }} + }) } finally { junit 'build/pytest-results/*.xml' } diff --git a/tests/scripts/pytest_wrapper.py b/tests/scripts/pytest_wrapper.py new file mode 100755 index 000000000000..f34ada857c53 --- /dev/null +++ b/tests/scripts/pytest_wrapper.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import argparse +import textwrap +import junitparser +from pathlib import Path +from typing import List, Optional +import os +import urllib.parse + + +REPO_ROOT = Path(__file__).resolve().parent.parent.parent + + +def lstrip(s: str, prefix: str) -> str: + if s.startswith(prefix): + s = s[len(prefix) :] + return s + + +def classname_to_file(classname: str) -> str: + classname = lstrip(classname, "cython.") + classname = lstrip(classname, "ctypes.") + return classname.replace(".", "/") + ".py" + + +def failed_test_ids() -> List[str]: + FAILURE_TYPES = (junitparser.Failure, junitparser.Error) + junit_dir = REPO_ROOT / "build" / "pytest-results" + failed_node_ids = [] + for junit in junit_dir.glob("*.xml"): + xml = junitparser.JUnitXml.fromfile(str(junit)) + for suite in xml: + # check each suite's cases for failures or errors + for case in suite: + if len(case.result) > 0 and isinstance(case.result[0], FAILURE_TYPES): + node_id = classname_to_file(case.classname) + "::" + case.name + failed_node_ids.append(node_id) + + return list(set(failed_node_ids)) + + +def repro_command(build_type: str, failed_node_ids: List[str]) -> Optional[str]: + """ + Output a command that users can run to reproduce these CI failures locally + """ + test_args = [f"--tests {node_id}" for node_id in failed_node_ids] + test_args_str = " ".join(test_args) + return f"python3 tests/scripts/ci.py {build_type} {test_args_str}" + + +def make_issue_url(failed_node_ids: List[str]) -> str: + names = [f"`{node_id}`" for node_id in failed_node_ids] + run_url = os.getenv("RUN_DISPLAY_URL", "") + test_bullets = [f" - `{node_id}`" for node_id in failed_node_ids] + params = { + "labels": "test: flaky", + "title": "[Flaky Test] " + ", ".join(names), + "body": textwrap.dedent( + f""" + These tests were found to be flaky (intermittently failing on `main` or failing in a PR with unrelated changes). See [the docs](https://github.com/apache/tvm/blob/main/docs/contribute/ci.rst#handling-flaky-failures) for details. + + ### Test(s)\n + """ + ) + + "\n".join(test_bullets) + + f"\n\n### Jenkins Links\n\n - {run_url}", + } + return "https://github.com/apache/tvm/issues/new?" + urllib.parse.urlencode(params) + + +def show_failure_help(failed_suites: List[str]) -> None: + failed_node_ids = failed_test_ids() + + if len(failed_node_ids) == 0: + return + + build_type = os.getenv("PLATFORM") + + if build_type is None: + raise RuntimeError("build type was None, cannot show command") + + repro = repro_command(build_type=build_type, failed_node_ids=failed_node_ids) + if repro is None: + print("No test failures detected") + return + + print("=============================== PYTEST FAILURES ================================") + print( + "These pytest suites failed to execute. The results can be found in the " + "Jenkins 'Tests' tab or by scrolling up through the raw logs here. 
" + "If there is no test listed below, the failure likely came from a segmentation " + "fault which you can find in the logs above.\n" + ) + if len(failed_suites) > 0: + print("\n".join([f" - {suite}" for suite in failed_suites])) + print("") + + print("You can reproduce these specific failures locally with this command:\n") + print(textwrap.indent(repro, prefix=" ")) + print("") + + print( + "If you believe these test failures are spurious or are not due to this change, " + f"please file an issue: {make_issue_url(failed_node_ids)}" + ) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Print information about a failed pytest run") + args, other = parser.parse_known_args() + try: + show_failure_help(failed_suites=other) + except Exception as e: + # This script shouldn't ever introduce failures since it's just there to + # add extra information, so ignore any errors + print(e) diff --git a/tests/scripts/setup-pytest-env.sh b/tests/scripts/setup-pytest-env.sh index e6c2a39d7e64..63145c9909f7 100755 --- a/tests/scripts/setup-pytest-env.sh +++ b/tests/scripts/setup-pytest-env.sh @@ -39,10 +39,7 @@ function cleanup() { set +x if [ "${#pytest_errors[@]}" -gt 0 ]; then echo "These pytest invocations failed, the results can be found in the Jenkins 'Tests' tab or by scrolling up through the raw logs here." - echo "" - for e in "${pytest_errors[@]}"; do - echo " ${e}" - done + python3 tests/scripts/pytest_wrapper.py "${pytest_errors[@]}" exit 1 fi set -x From 4760b4f4a2c08da5d5f555b633dc3a9243ec7923 Mon Sep 17 00:00:00 2001 From: driazati Date: Fri, 22 Apr 2022 11:15:42 -0600 Subject: [PATCH 2/2] Use logging, move issue URL up in log --- tests/scripts/pytest_wrapper.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/scripts/pytest_wrapper.py b/tests/scripts/pytest_wrapper.py index f34ada857c53..a7b6f0dfa766 100755 --- a/tests/scripts/pytest_wrapper.py +++ b/tests/scripts/pytest_wrapper.py @@ -22,6 +22,9 @@ from typing import List, Optional import os import urllib.parse +import logging + +from cmd_utils import init_log REPO_ROOT = Path(__file__).resolve().parent.parent.parent @@ -101,6 +104,7 @@ def show_failure_help(failed_suites: List[str]) -> None: print("No test failures detected") return + print(f"Report flaky test shortcut: {make_issue_url(failed_node_ids)}") print("=============================== PYTEST FAILURES ================================") print( "These pytest suites failed to execute. The results can be found in the " @@ -116,18 +120,15 @@ def show_failure_help(failed_suites: List[str]) -> None: print(textwrap.indent(repro, prefix=" ")) print("") - print( - "If you believe these test failures are spurious or are not due to this change, " - f"please file an issue: {make_issue_url(failed_node_ids)}" - ) - if __name__ == "__main__": parser = argparse.ArgumentParser(description="Print information about a failed pytest run") args, other = parser.parse_known_args() + init_log() + try: show_failure_help(failed_suites=other) except Exception as e: # This script shouldn't ever introduce failures since it's just there to # add extra information, so ignore any errors - print(e) + logging.error(str(e))