author    Alexis Campailla <alexis@janeasystems.com>  2014-11-07 14:15:24 +0100
committer Alexis Campailla <alexis@janeasystems.com>  2014-12-04 17:22:14 +0100
commit    df3a2b2cf21274fe7afc19d14ec0259b964e13f7
tree      19f7a719f6cb09e9a5fb2d406e2fc0a175db3ce3
parent    e67db0191db01aca6c25a36b05485026cfb309bb
test: runner support for flaky tests
Add a --flaky-tests option to allow regarding flaky test failures as non-fatal. It is currently only observed by the TapProgressIndicator, which adds a # TODO directive to tests classified as flaky. According to the TAP specification, the test harness is supposed to treat failures that carry a # TODO directive as non-fatal.
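For context, TAP consumers treat a failing assertion that carries a # TODO directive as expected to fail rather than fatal. Illustrative output (hypothetical test names, not taken from an actual run):

    ok 1 - test-http-basic.js
    not ok 2 - test-fs-watch.js # TODO : Fix flaky test
    # ...stderr/stdout of the failed test echoed as TAP comments...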
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/test.py | 46 ++++++++++++++++++++++++++++++++--------------
1 file changed, 32 insertions(+), 14 deletions(-)
diff --git a/tools/test.py b/tools/test.py
index 0772f9ad32..579d444f6c 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -55,8 +55,9 @@ VERBOSE = False
class ProgressIndicator(object):
- def __init__(self, cases):
+ def __init__(self, cases, flaky_tests_mode):
self.cases = cases
+ self.flaky_tests_mode = flaky_tests_mode
self.queue = Queue(len(cases))
for case in cases:
self.queue.put_nowait(case)
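The flaky_tests_mode threaded through here is the raw string value of the new --flaky-tests option, one of "run", "skip" or "dontcare" (validated later in ProcessOptions). A subclass would now be constructed roughly like:

    progress = TapProgressIndicator(cases, 'dontcare')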
@@ -234,13 +235,19 @@ class TapProgressIndicator(SimpleProgressIndicator):
self._done += 1
command = basename(output.command[-1])
if output.UnexpectedOutput():
- print 'not ok %i - %s' % (self._done, command)
+ status_line = 'not ok %i - %s' % (self._done, command)
+ if FLAKY in output.test.outcomes and self.flaky_tests_mode == "dontcare":
+ status_line = status_line + " # TODO : Fix flaky test"
+ print status_line
for l in output.output.stderr.splitlines():
print '#' + l
for l in output.output.stdout.splitlines():
print '#' + l
else:
- print 'ok %i - %s' % (self._done, command)
+ status_line = 'ok %i - %s' % (self._done, command)
+ if FLAKY in output.test.outcomes:
+ status_line = status_line + " # TODO : Fix flaky test"
+ print status_line
duration = output.test.duration
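Note the asymmetry between the two branches above: on failure the # TODO directive is appended only in "dontcare" mode, while a passing test marked FLAKY is annotated unconditionally. For a failing flaky test (hypothetical name), the two modes would print:

    not ok 5 - test-fs-watch.js                          (--flaky-tests=run)
    not ok 5 - test-fs-watch.js # TODO : Fix flaky test  (--flaky-tests=dontcare)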
@@ -258,8 +265,8 @@ class TapProgressIndicator(SimpleProgressIndicator):
class CompactProgressIndicator(ProgressIndicator):
- def __init__(self, cases, templates):
- super(CompactProgressIndicator, self).__init__(cases)
+ def __init__(self, cases, flaky_tests_mode, templates):
+ super(CompactProgressIndicator, self).__init__(cases, flaky_tests_mode)
self.templates = templates
self.last_status_length = 0
self.start_time = time.time()
@@ -314,13 +321,13 @@ class CompactProgressIndicator(ProgressIndicator):
class ColorProgressIndicator(CompactProgressIndicator):
- def __init__(self, cases):
+ def __init__(self, cases, flaky_tests_mode):
templates = {
'status_line': "[%(mins)02i:%(secs)02i|\033[34m%%%(remaining) 4d\033[0m|\033[32m+%(passed) 4d\033[0m|\033[31m-%(failed) 4d\033[0m]: %(test)s",
'stdout': "\033[1m%s\033[0m",
'stderr': "\033[31m%s\033[0m",
}
- super(ColorProgressIndicator, self).__init__(cases, templates)
+ super(ColorProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
def ClearLine(self, last_line_length):
print "\033[1K\r",
@@ -328,7 +335,7 @@ class ColorProgressIndicator(CompactProgressIndicator):
class MonochromeProgressIndicator(CompactProgressIndicator):
- def __init__(self, cases):
+ def __init__(self, cases, flaky_tests_mode):
templates = {
'status_line': "[%(mins)02i:%(secs)02i|%%%(remaining) 4d|+%(passed) 4d|-%(failed) 4d]: %(test)s",
'stdout': '%s',
@@ -336,7 +343,7 @@ class MonochromeProgressIndicator(CompactProgressIndicator):
'clear': lambda last_line_length: ("\r" + (" " * last_line_length) + "\r"),
'max_length': 78
}
- super(MonochromeProgressIndicator, self).__init__(cases, templates)
+ super(MonochromeProgressIndicator, self).__init__(cases, flaky_tests_mode, templates)
def ClearLine(self, last_line_length):
print ("\r" + (" " * last_line_length) + "\r"),
@@ -738,8 +745,8 @@ class Context(object):
def GetTimeout(self, mode):
return self.timeout * TIMEOUT_SCALEFACTOR[mode]
-def RunTestCases(cases_to_run, progress, tasks):
- progress = PROGRESS_INDICATORS[progress](cases_to_run)
+def RunTestCases(cases_to_run, progress, tasks, flaky_tests_mode):
+ progress = PROGRESS_INDICATORS[progress](cases_to_run, flaky_tests_mode)
return progress.Run(tasks)
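PROGRESS_INDICATORS maps the --progress name to an indicator class, so every class in the mapping must now accept the extra constructor argument. A minimal sketch of the dispatch (assumed shape; the real mapping lives elsewhere in tools/test.py):

    PROGRESS_INDICATORS = {
      'verbose': VerboseProgressIndicator,
      'tap': TapProgressIndicator,
      'color': ColorProgressIndicator,
      'mono': MonochromeProgressIndicator,
    }
    progress = PROGRESS_INDICATORS['tap'](cases_to_run, 'dontcare')
    result = progress.Run(tasks)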
@@ -763,6 +770,7 @@ OKAY = 'okay'
TIMEOUT = 'timeout'
CRASH = 'crash'
SLOW = 'slow'
+FLAKY = 'flaky'
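FLAKY joins the runner's existing outcome keywords (TIMEOUT, CRASH, SLOW, ...), so individual tests can be tagged in a status file. A hypothetical entry (the exact status-file grammar is defined by the Expression parser below and may differ):

    # mark a known-flaky test
    test-fs-watch : PASS,FLAKY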
class Expression(object):
@@ -1212,6 +1220,9 @@ def BuildOptions():
default=False, action="store_true")
result.add_option("--cat", help="Print the source of the tests",
default=False, action="store_true")
+ result.add_option("--flaky-tests",
+ help="Regard tests marked as flaky (run|skip|dontcare)",
+ default="run")
result.add_option("--warn-unused", help="Report unused rules",
default=False, action="store_true")
result.add_option("-j", help="The number of parallel tasks to run",
@@ -1258,6 +1269,13 @@ def ProcessOptions(options):
options.scons_flags.append("arch=" + options.arch)
if options.snapshot:
options.scons_flags.append("snapshot=on")
+ def CheckTestMode(name, option):
+ if not option in ["run", "skip", "dontcare"]:
+ print "Unknown %s mode %s" % (name, option)
+ return False
+ return True
+ if not CheckTestMode("--flaky-tests", options.flaky_tests):
+ return False
return True
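Anything other than the three known modes is rejected by CheckTestMode, causing ProcessOptions to fail:

    $ python tools/test.py --flaky-tests=sometimes
    Unknown --flaky-tests mode sometimes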
@@ -1457,15 +1475,15 @@ def Main():
result = None
def DoSkip(case):
- return SKIP in case.outcomes or SLOW in case.outcomes
+ return SKIP in case.outcomes or SLOW in case.outcomes or (FLAKY in case.outcomes and options.flaky_tests == "skip")
cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
if len(cases_to_run) == 0:
print "No tests to run."
- return 0
+ return 1
else:
try:
start = time.time()
- if RunTestCases(cases_to_run, options.progress, options.j):
+ if RunTestCases(cases_to_run, options.progress, options.j, options.flaky_tests):
result = 0
else:
result = 1