From 20f8e7f17a931e3852f7c58a25db55bd78943697 Mon Sep 17 00:00:00 2001
From: Rod Vagg
Date: Thu, 12 Feb 2015 12:39:40 +1100
Subject: [PATCH] test: remove flaky test functionality
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Reverts https://github.com/joyent/node/pull/8689

PR-URL: https://github.com/iojs/io.js/pull/812
Reviewed-By: Jeremiah Senkpiel
Reviewed-By: Johan Bergström
Reviewed-By: Ben Noordhuis
Reviewed-By: Colin Ihrig
---
 test/internet/internet.status |  1 -
 test/parallel/simple.status   |  4 ---
 test/pummel/pummel.status     |  1 -
 tools/test.py                 | 48 ++++++++++------------------------
 4 files changed, 13 insertions(+), 41 deletions(-)
 delete mode 100644 test/internet/internet.status
 delete mode 100644 test/parallel/simple.status
 delete mode 100644 test/pummel/pummel.status

diff --git a/test/internet/internet.status b/test/internet/internet.status
deleted file mode 100644
index 34aea6a6af7eae..00000000000000
--- a/test/internet/internet.status
+++ /dev/null
@@ -1 +0,0 @@
-prefix internet
diff --git a/test/parallel/simple.status b/test/parallel/simple.status
deleted file mode 100644
index d310575f1bb086..00000000000000
--- a/test/parallel/simple.status
+++ /dev/null
@@ -1,4 +0,0 @@
-prefix simple
-
-[$system==linux]
-test-net-GH-5504 : PASS,FLAKY
diff --git a/test/pummel/pummel.status b/test/pummel/pummel.status
deleted file mode 100644
index 87224bb70bfeb6..00000000000000
--- a/test/pummel/pummel.status
+++ /dev/null
@@ -1 +0,0 @@
-prefix pummel
diff --git a/tools/test.py b/tools/test.py
index 074a918f2ad002..4ef61d4f945205 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -57,9 +57,8 @@ class ProgressIndicator(object):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     self.cases = cases
-    self.flaky_tests_mode = flaky_tests_mode
     self.parallel_queue = Queue(len(cases))
     self.sequential_queue = Queue(len(cases))
     for case in cases:
@@ -248,19 +247,13 @@ def HasRun(self, output):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      status_line = 'not ok %i - %s' % (self._done, command)
-      if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
-        status_line = status_line + " # TODO : Fix flaky test"
-      print status_line
+      print 'not ok %i - %s' % (self._done, command)
       for l in output.output.stderr.splitlines():
         print '#' + l
       for l in output.output.stdout.splitlines():
         print '#' + l
     else:
-      status_line = 'ok %i - %s' % (self._done, command)
-      if FLAKY in output.test.outcomes:
-        status_line = status_line + " # TODO : Fix flaky test"
-      print status_line
+      print 'ok %i - %s' % (self._done, command)
 
     duration = output.test.duration
 
@@ -278,8 +271,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode, templates):
-    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
+  def __init__(self, cases, templates):
+    super(CompactProgressIndicator, self).__init__(cases)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -334,13 +327,13 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(ColorProgressIndicator, self).__init__(cases, templates)
 
   def ClearLine(self, last_line_length):
     print "\033[1K\r",
@@ -348,7 +341,7 @@ def ClearLine(self, last_line_length):
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases, flaky_tests_mode):
+  def __init__(self, cases):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
@@ -356,7 +349,7 @@ def __init__(self, cases, flaky_tests_mode):
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, templates)
 
   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -776,8 +769,8 @@ def GetVmFlags(self, testcase, mode):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[mode]
 
-def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
+def RunTestCases(cases_to_run, progress, tasks):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run)
   return progress.Run(tasks)
 
@@ -801,7 +794,6 @@ def BuildRequirements(context, requirements, mode, scons_flags):
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
-FLAKY = 'flaky'
 
 
 class Expression(object):
@@ -1248,9 +1240,6 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
-  result.add_option("--flaky-tests",
-      help="Regard tests marked as flaky (run|skip|dontcare)",
-      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
@@ -1280,35 +1269,24 @@ def ProcessOptions(options):
   options.mode = options.mode.split(',')
   if options.J:
     options.j = multiprocessing.cpu_count()
-  def CheckTestMode(name, option):
-    if not option in ["run", "skip", "dontcare"]:
-      print "Unknown %s mode %s" % (name, option)
-      return False
-    return True
-  if not CheckTestMode("--flaky-tests", options.flaky_tests):
-    return False
   return True
 
 
 REPORT_TEMPLATE = """\
 Total: %(total)i tests
  * %(skipped)4d tests will be skipped
- * %(nocrash)4d tests are expected to be flaky but not crash
 * %(pass)4d tests are expected to pass
 * %(fail_ok)4d tests are expected to fail that we won't fix
 * %(fail)4d tests are expected to fail that we should fix\
 """
 
 def PrintReport(cases):
-  def IsFlaky(o):
-    return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o)
   def IsFailOk(o):
     return (len(o) == 2) and (FAIL in o) and (OKAY in o)
   unskipped = [c for c in cases if not SKIP in c.outcomes]
   print REPORT_TEMPLATE % {
     'total': len(cases),
     'skipped': len(cases) - len(unskipped),
-    'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]),
     'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]),
     'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]),
     'fail': len([t for t in unskipped if list(t.outcomes) == [FAIL]])
@@ -1486,7 +1464,7 @@ def Main():
 
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes or (FLAKY in case.outcomes and options.flaky_tests == "skip")
+    return SKIP in case.outcomes or SLOW in case.outcomes
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if len(cases_to_run) == 0:
     print "No tests to run."
@@ -1494,7 +1472,7 @@ def DoSkip(case):
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
+      if RunTestCases(cases_to_run, options.progress, options.j):
         result = 0
       else:
         result = 1