Support skipped tests

Knowing whether a test failed to run because its prerequisites were not
available, or whether those prerequisites were available but produced
unexpected/incorrect results, is a useful distinction.

Add support for skipped tests by checking for exit code 77, used by
autotools/piglit/etc. to denote a test which detected that its
prerequisites were missing and decided to skip.
pull/1111/head
Daniel Stone 8 years ago committed by Jussi Pakkanen
parent ac78ae47a9
commit 1cb9d2bc0d
  1. 1
      authors.txt
  2. 9
      mesontest.py
  3. 4
      test cases/common/124 test skip/meson.build
  4. 4
      test cases/common/124 test skip/test_skip.c

@ -56,3 +56,4 @@ Aurelien Jarno
Mark Schulte
Paulo Antonio Alvarez
Olexa Bilaniuk
Daniel Stone

@ -26,6 +26,10 @@ import concurrent.futures as conc
import platform
import signal
# GNU autotools interprets a return code of 77 from tests it executes to
# mean that the test should be skipped; piglit and other suites follow
# the same convention, so we honor it here as well.
GNU_SKIP_RETURNCODE = 77
def is_windows():
    """Return True when running on native Windows or under a MinGW environment."""
    system_name = platform.system().lower()
    return 'mingw' in system_name or system_name == 'windows'
@ -213,6 +217,8 @@ class TestHarness:
stde = decode(stde)
if timed_out:
res = 'TIMEOUT'
if p.returncode == GNU_SKIP_RETURNCODE:
res = 'SKIP'
elif (not test.should_fail and p.returncode == 0) or \
(test.should_fail and p.returncode != 0):
res = 'OK'
@ -230,7 +236,8 @@ class TestHarness:
(num, name, padding1, result.res, padding2, result.duration)
print(result_str)
result_str += "\n\n" + result.get_log()
if (result.returncode != 0) != result.should_fail:
if (result.returncode != GNU_SKIP_RETURNCODE) and \
(result.returncode != 0) != result.should_fail:
self.error_count += 1
if self.options.print_errorlogs:
self.collected_logs.append(result_str)

@ -0,0 +1,4 @@
# Regression test: a test binary exiting with code 77 must be reported
# as SKIP rather than FAIL (GNU autotools skip convention).
project('test skip', 'c')

# test_skip.c does nothing but return 77.
exe_test_skip = executable('test_skip', 'test_skip.c')
test('test_skip', exe_test_skip)

@ -0,0 +1,4 @@
int main(int argc, char *argv[])
{
return 77;
}
Loading…
Cancel
Save