Commit eeaed58

test: runner support for flaky tests

Add a --flaky-tests option that allows flaky test failures to be regarded as
non-fatal. It is currently only observed by the TapProgressIndicator, which
adds a # TODO directive to tests classified as flaky. According to the TAP
specification, the test harness is supposed to treat failures that carry a
# TODO directive as non-fatal.

PR-URL: nodejs/node-v0.x-archive#25686
Reviewed-By: Julien Gilli <julien.gilli@joyent.com>

1 parent 3bb8174 commit eeaed58
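
The TAP detail that makes this work: a consumer treats a "not ok" line as a
real failure unless it carries a # TODO directive. A minimal, illustrative
sketch of that harness-side rule (not part of this commit; the test names
are made up):

import re

def tap_run_failed(tap_lines):
  # Assumed TAP semantics: "not ok" fails the run unless the line carries
  # a "# TODO" directive, which is what keeps flaky failures non-fatal.
  for line in tap_lines:
    if line.startswith('not ok') and not re.search(r'#\s*TODO', line):
      return True
  return False

print tap_run_failed(['ok 1 - test-http-simple.js',
                      'not ok 2 - test-fs-watch.js # TODO : Fix flaky test'])
# => False: the flaky failure is reported but does not fail the run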

1 file changed: tools/test.py (+33 -14)

@@ -55,8 +55,9 @@
 
 class ProgressIndicator(object):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     self.cases = cases
+    self.flaky_tests_mode = flaky_tests_mode
     self.queue = Queue(len(cases))
     for case in cases:
       self.queue.put_nowait(case)
@@ -234,13 +235,19 @@ def HasRun(self, output):
     self._done += 1
     command = basename(output.command[-1])
     if output.UnexpectedOutput():
-      print 'not ok %i - %s' % (self._done, command)
+      status_line = 'not ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
+        status_line = status_line + " # TODO : Fix flaky test"
+      print status_line
       for l in output.output.stderr.splitlines():
         print '#' + l
       for l in output.output.stdout.splitlines():
         print '#' + l
     else:
-      print 'ok %i - %s' % (self._done, command)
+      status_line = 'ok %i - %s' % (self._done, command)
+      if FLAKY in output.test.outcomes:
+        status_line = status_line + " # TODO : Fix flaky test"
+      print status_line
 
     duration = output.test.duration
 
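
The branch above reduces to one rule: a failing flaky test gets the TODO
directive only in "dontcare" mode, while a passing flaky test is always
annotated. A standalone sketch of that rule (hypothetical test name and
counter; the real values come from the runner's output objects):

FLAKY = 'flaky'

def tap_status_line(done, command, passed, outcomes, flaky_tests_mode):
  # Mirrors TapProgressIndicator.HasRun() in the hunk above.
  line = '%s %i - %s' % ('ok' if passed else 'not ok', done, command)
  if FLAKY in outcomes and (passed or flaky_tests_mode == "dontcare"):
    line = line + " # TODO : Fix flaky test"
  return line

print tap_status_line(3, 'test-fs-watch.js', False, [FLAKY], "dontcare")
# => not ok 3 - test-fs-watch.js # TODO : Fix flaky test
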
@@ -258,8 +265,8 @@ def Done(self):
 
 class CompactProgressIndicator(ProgressIndicator):
 
-  def __init__(self, cases, templates):
-    super(CompactProgressIndicator, self).__init__(cases)
+  def __init__(self, cases, flaky_tests_mode, templates):
+    super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
     self.templates = templates
     self.last_status_length = 0
     self.start_time = time.time()
@@ -314,29 +321,29 @@ def PrintProgress(self, name):
 
 class ColorProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
       'stdout': "\033[1m%s\033[0m",
       'stderr': "\033[31m%s\033[0m",
     }
-    super(ColorProgressIndicator, self).__init__(cases, templates)
+    super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print "\033[1K\r",
 
 
 class MonochromeProgressIndicator(CompactProgressIndicator):
 
-  def __init__(self, cases):
+  def __init__(self, cases, flaky_tests_mode):
     templates = {
       'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
       'stdout': '%s',
       'stderr': '%s',
       'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
       'max_length': 78
     }
-    super(MonochromeProgressIndicator, self).__init__(cases, templates)
+    super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
 
   def ClearLine(self, last_line_length):
     print ("\r" + (" " * last_line_length) + "\r"),
@@ -738,8 +745,8 @@ def GetVmFlags(self, testcase, mode):
   def GetTimeout(self, mode):
     return self.timeout * TIMEOUT_SCALEFACTOR[mode]
 
-def RunTestCases(cases_to_run, progress, tasks):
-  progress = PROGRESS_INDICATORS[progress](cases_to_run)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
+  progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
   return progress.Run(tasks)
 
 
@@ -763,6 +770,7 @@ def BuildRequirements(context, requirements, mode, scons_flags):
 TIMEOUT = 'timeout'
 CRASH = 'crash'
 SLOW = 'slow'
+FLAKY = 'flaky'
 
 
 class Expression(object):
@@ -1212,6 +1220,9 @@ def BuildOptions():
       default=False, action="store_true")
   result.add_option("--cat", help="Print the source of the tests",
       default=False, action="store_true")
+  result.add_option("--flaky-tests",
+      help="Regard tests marked as flaky (run|skip|dontcare)",
+      default="run")
   result.add_option("--warn-unused", help="Report unused rules",
       default=False, action="store_true")
   result.add_option("-j", help="The number of parallel tasks to run",
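
The new flag is plain optparse plumbing: dashes become underscores, so the
value surfaces as options.flaky_tests, defaulting to "run". A minimal
standalone sketch of that behaviour (outside the runner; invalid values are
only rejected later, by ProcessOptions below):

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--flaky-tests",
    help="Regard tests marked as flaky (run|skip|dontcare)",
    default="run")
(options, args) = parser.parse_args(["--flaky-tests", "dontcare"])
print options.flaky_tests  # => dontcare
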
@@ -1258,6 +1269,13 @@ def ProcessOptions(options):
     options.scons_flags.append("arch=" + options.arch)
   if options.snapshot:
     options.scons_flags.append("snapshot=on")
+  def CheckTestMode(name, option):
+    if not option in ["run", "skip", "dontcare"]:
+      print "Unknown %s mode %s" % (name, option)
+      return False
+    return True
+  if not CheckTestMode("--flaky-tests", options.flaky_tests):
+    return False
   return True
 
 
@@ -1450,15 +1468,16 @@ def wrap(processor):
 
   result = None
   def DoSkip(case):
-    return SKIP in case.outcomes or SLOW in case.outcomes
+    return (SKIP in case.outcomes or SLOW in case.outcomes or
+            (FLAKY in case.outcomes and options.flaky_tests == "skip"))
   cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
   if len(cases_to_run) == 0:
     print "No tests to run."
-    return 0
+    return 1
   else:
     try:
       start = time.time()
-      if RunTestCases(cases_to_run, options.progress, options.j):
+      if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
         result = 0
       else:
         result = 1
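
The net effect of the three modes on a flaky case: "skip" drops it before
the run, while "run" and "dontcare" both execute it and differ only in how
the TAP indicator reports a failure. A small standalone sketch of the skip
predicate above (hypothetical outcomes list standing in for real test
cases):

SKIP, SLOW, FLAKY = 'skip', 'slow', 'flaky'

def do_skip(outcomes, flaky_tests_mode):
  return (SKIP in outcomes or SLOW in outcomes or
          (FLAKY in outcomes and flaky_tests_mode == "skip"))

for mode in ["run", "skip", "dontcare"]:
  print mode, do_skip([FLAKY], mode)
# => run False / skip True / dontcare False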
