@@ -63,9 +63,6 @@ class AutoDeletedDir():
                     raise
                 time.sleep(0.1 * (2**i))
 
-passing_tests = 0
-failing_tests = 0
-skipped_tests = 0
 failing_logs = []
 print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ
 do_debug = not {'MESON_PRINT_TEST_OUTPUT', 'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ)
@@ -370,14 +367,18 @@ def detect_tests_to_run():
     all_tests.append(('python3', gather_tests('test cases/python3'), False if using_backend('ninja') and shutil.which('python3') else True))
     return all_tests
 
-def run_tests(extra_args):
-    global install_commands, passing_tests, failing_tests, stop, executor, futures
-    all_tests = detect_tests_to_run()
-    logfile = open('meson-test-run.txt', 'w', encoding="utf_8")
+def run_tests(all_tests, log_name_base, extra_args):
+    global stop, executor, futures
+    txtname = log_name_base + '.txt'
+    xmlname = log_name_base + '.xml'
+    logfile = open(txtname, 'w', encoding="utf_8")
     junit_root = ET.Element('testsuites')
     conf_time = 0
     build_time = 0
     test_time = 0
+    passing_tests = 0
+    failing_tests = 0
+    skipped_tests = 0
 
     try:
         # This fails in some CI environments for unknown reasons.
@@ -412,7 +413,6 @@ def run_tests(extra_args):
                 current_test = ET.SubElement(current_suite, 'testcase', {'name' : testname,
                                                                          'classname' : name})
                 ET.SubElement(current_test, 'skipped', {})
-                global skipped_tests
                 skipped_tests += 1
             else:
                 without_install = "" if len(install_commands) > 0 else " (without install)"
@@ -442,7 +442,8 @@ def run_tests(extra_args):
     print("\nTotal configuration time: %.2fs" % conf_time)
     print("Total build time: %.2fs" % build_time)
     print("Total test time: %.2fs" % test_time)
-    ET.ElementTree(element=junit_root).write('meson-test-run.xml', xml_declaration=True, encoding='UTF-8')
+    ET.ElementTree(element=junit_root).write(xmlname, xml_declaration=True, encoding='UTF-8')
+    return (passing_tests, failing_tests, skipped_tests)
 
 def check_file(fname):
     linenum = 1
@@ -539,7 +540,8 @@ if __name__ == '__main__':
     check_format()
     pbfiles = generate_prebuilt()
     try:
-        run_tests(options.extra_args)
+        all_tests = detect_tests_to_run()
+        (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.extra_args)
     except StopException:
         pass
     for f in pbfiles: