
[lit] Include unexecuted tests in xUnit report

Pass in all discovered tests to report generators.

The XunitReport generator now creates testcase items for unexecuted
tests and documents why they have been skipped.  This makes it easier
to compare test runs with different filters or configurations, or across
platforms.
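For each unexecuted test, the report now emits a <skipped> element whose message
explains why the test did not run. A minimal sketch of that decision, assuming a
lit test object that exposes result.code and getMissingRequiredFeatures(); the
helper name skip_reason is illustrative, not the exact function in reports.py:

    import lit.Test

    def skip_reason(test):
        # Deselected by --filter, --max-tests, or --run-shard.
        if test.result.code == lit.Test.EXCLUDED:
            return 'Test not selected (--filter, --max-tests, --run-shard)'
        # Discovered, but a required feature is unavailable on this host.
        features = test.getMissingRequiredFeatures()
        if features:
            return 'Missing required feature(s): ' + ', '.join(features)
        # Any other test that was discovered but never executed
        # (a SKIPPED result, e.g. after a user interrupt, is not
        # distinguished in this sketch).
        return 'Unsupported configuration'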

I don't know who is using the JsonReport generator or what its expectations
are (it has no tests), so I decided to preserve the old behavior by filtering
out the unexecuted tests.
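A minimal sketch of that filtering, mirroring the two lines this change adds at
the top of JsonReport.write_results (see the diff below):

    # Drop tests that were never executed before serializing results to JSON.
    unexecuted_codes = {lit.Test.EXCLUDED, lit.Test.SKIPPED}
    tests = [t for t in tests if t.result.code not in unexecuted_codes]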

Reviewed By: jdenny

Differential Revision: https://reviews.llvm.org/D81316
Julian Lettner 2020-06-05 15:45:43 -07:00
parent d94eb15087
commit 74fefe7dbd
9 changed files with 50 additions and 17 deletions

@@ -96,17 +96,13 @@ def main(builtin_params={}):
     run_tests(selected_tests, lit_config, opts, len(discovered_tests))
     elapsed = time.time() - start
     # TODO(yln): eventually, all functions below should act on discovered_tests
-    executed_tests = [
-        t for t in selected_tests if t.result.code != lit.Test.SKIPPED]
     if opts.time_tests:
         print_histogram(discovered_tests)
     print_results(discovered_tests, elapsed, opts)
     for report in opts.reports:
-        report.write_results(executed_tests, elapsed)
+        report.write_results(discovered_tests, elapsed)
     if lit_config.numErrors:
         sys.stderr.write('\n%d error(s) in tests\n' % lit_config.numErrors)

@@ -17,7 +17,8 @@ class JsonReport(object):
         self.output_file = output_file
     def write_results(self, tests, elapsed):
-        assert not any(t.result.code in {lit.Test.EXCLUDED, lit.Test.SKIPPED} for t in tests)
+        unexecuted_codes = {lit.Test.EXCLUDED, lit.Test.SKIPPED}
+        tests = [t for t in tests if t.result.code not in unexecuted_codes]
         # Construct the data we will write.
         data = {}
         # Encode the current lit version as a schema version.
@@ -75,7 +76,6 @@ class XunitReport(object):
     # TODO(yln): elapsed unused, put it somewhere?
     def write_results(self, tests, elapsed):
-        assert not any(t.result.code in {lit.Test.EXCLUDED, lit.Test.SKIPPED} for t in tests)
         tests.sort(key=by_suite_and_test_path)
         tests_by_suite = itertools.groupby(tests, lambda t: t.suite)
@@ -136,4 +136,4 @@
         features = test.getMissingRequiredFeatures()
         if features:
             return 'Missing required feature(s): ' + ', '.join(features)
-        return 'Skipping because of configuration'
+        return 'Unsupported configuration'

@@ -23,6 +23,10 @@ class DummyFormat(lit.formats.FileBasedTest):
         result = lit.Test.Result(getattr(lit.Test, result_code),
                                  result_output)
+        required_feature = cfg.get('global', 'required_feature', fallback=None)
+        if required_feature:
+            test.requires.append(required_feature)
         # Load additional metrics.
         for key,value_str in cfg.items('results'):
             value = eval(value_str)

@@ -0,0 +1,5 @@
+[global]
+result_code = EXCLUDED
+result_output = not shown
+[results]

@@ -0,0 +1,7 @@
+[global]
+result_code = UNSUPPORTED
+result_output = not shown
+required_feature = dummy_feature
+[results]

@@ -0,0 +1,5 @@
+[global]
+result_code = PASS
+result_output = not shown
+[results]

@@ -0,0 +1,5 @@
+[global]
+result_code = UNSUPPORTED
+result_output = not shown
+[results]

@@ -134,7 +134,7 @@
 # XUNIT: <testcase classname="shtest-format.shtest-format" name="unsupported-expr-false.txt" time="{{[0-9]+\.[0-9]+}}"/>
 # XUNIT: <testcase classname="shtest-format.shtest-format" name="unsupported-expr-true.txt" time="{{[0-9]+\.[0-9]+}}">
-# XUNIT-NEXT:<skipped message="Skipping because of configuration"/>
+# XUNIT-NEXT:<skipped message="Unsupported configuration"/>
 # XUNIT: <testcase classname="shtest-format.shtest-format" name="unsupported-star.txt" time="{{[0-9]+\.[0-9]+}}">
 # XUNIT-NEXT: <failure{{[ ]*}}>
@@ -142,7 +142,7 @@
 # XUNIT-NEXT: </testcase>
 # XUNIT: <testcase classname="shtest-format.unsupported_dir" name="some-test.txt" time="{{[0-9]+\.[0-9]+}}">
-# XUNIT-NEXT:<skipped message="Skipping because of configuration"/>
+# XUNIT-NEXT:<skipped message="Unsupported configuration"/>
 # XUNIT: <testcase classname="shtest-format.shtest-format" name="xfail-expr-false.txt" time="{{[0-9]+\.[0-9]+}}"/>

@@ -7,10 +7,21 @@
 # RUN: sh -c 'if command -v xmllint 2>/dev/null; then xmllint --noout %t.xunit.xml; fi'
 # RUN: FileCheck < %t.xunit.xml %s
-# CHECK: <?xml version="1.0" encoding="UTF-8"?>
-# CHECK: <testsuites>
-# CHECK: <testsuite name="test-data" tests="1" failures="1" skipped="0">
-# CHECK: <testcase classname="test-data.test-data" name="bad&amp;name.ini" time="{{[0-1]}}.{{[0-9]+}}">
-# CHECK-NEXT: <failure><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
-# CHECK: </testsuite>
-# CHECK: </testsuites>
+# CHECK: <?xml version="1.0" encoding="UTF-8"?>
+# CHECK-NEXT: <testsuites>
+# CHECK-NEXT: <testsuite name="test-data" tests="5" failures="1" skipped="3">
+# CHECK-NEXT: <testcase classname="test-data.test-data" name="bad&amp;name.ini" time="{{[0-1]\.[0-9]+}}">
+# CHECK-NEXT: <failure><![CDATA[& < > ]]]]><![CDATA[> &"]]></failure>
+# CHECK-NEXT: </testcase>
+# CHECK-NEXT: <testcase classname="test-data.test-data" name="excluded.ini" time="{{[0-1]\.[0-9]+}}">
+# CHECK-NEXT: <skipped message="Test not selected (--filter, --max-tests, --run-shard)"/>
+# CHECK-NEXT: </testcase>
+# CHECK-NEXT: <testcase classname="test-data.test-data" name="missing_feature.ini" time="{{[0-1]\.[0-9]+}}">
+# CHECK-NEXT: <skipped message="Missing required feature(s): dummy_feature"/>
+# CHECK-NEXT: </testcase>
+# CHECK-NEXT: <testcase classname="test-data.test-data" name="pass.ini" time="{{[0-1]\.[0-9]+}}"/>
+# CHECK-NEXT: <testcase classname="test-data.test-data" name="unsupported.ini" time="{{[0-1]\.[0-9]+}}">
+# CHECK-NEXT: <skipped message="Unsupported configuration"/>
+# CHECK-NEXT: </testcase>
+# CHECK-NEXT: </testsuite>
+# CHECK-NEXT: </testsuites>