[LIT] Move xunit tests into their own location, and add failures

Failures will increase coverage.

llvm-svn: 332056
commit 3dee71508e
parent c50865580a
@@ -1,5 +1,5 @@
 [global]
-result_code = PASS
+result_code = FAIL
 result_output = & < > "

 [results]
utils/lit/tests/Inputs/xunit-output/dummy_format.py (new file, 38 lines)
@@ -0,0 +1,38 @@
import os
try:
    import ConfigParser
except ImportError:
    import configparser as ConfigParser

import lit.formats
import lit.Test

class DummyFormat(lit.formats.FileBasedTest):
    def execute(self, test, lit_config):
        # In this dummy format, expect that each test file is actually just a
        # .ini format dump of the results to report.

        source_path = test.getSourcePath()

        cfg = ConfigParser.ConfigParser()
        cfg.read(source_path)

        # Create the basic test result.
        result_code = cfg.get('global', 'result_code')
        result_output = cfg.get('global', 'result_output')
        result = lit.Test.Result(getattr(lit.Test, result_code),
                                 result_output)

        # Load additional metrics.
        for key,value_str in cfg.items('results'):
            value = eval(value_str)
            if isinstance(value, int):
                metric = lit.Test.IntMetricValue(value)
            elif isinstance(value, float):
                metric = lit.Test.RealMetricValue(value)
            else:
                raise RuntimeError("unsupported result type")
            result.addMetric(key, metric)

        return result
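As a point of reference, the parsing that DummyFormat performs can be tried outside lit with just the standard library. The sketch below is not part of the commit: the [global] fragment reuses the .ini hunk above, and the [results] values are borrowed from the metrics.ini CHECK lines elsewhere in this change; whether the failing input actually carries metrics is not visible in this diff.

# Standalone sketch (not from the commit): parse the same .ini shape that
# DummyFormat reads, using only the standard library. In the real format,
# getattr(lit.Test, result_code) turns 'FAIL' into lit's FAIL result code.
import configparser

SAMPLE = """\
[global]
result_code = FAIL
result_output = & < > "

[results]
value0 = 1
value1 = 2.3456
"""

cfg = configparser.ConfigParser()
cfg.read_string(SAMPLE)

code = cfg.get('global', 'result_code')        # 'FAIL'
output = cfg.get('global', 'result_output')    # '& < > "'
metrics = {k: eval(v) for k, v in cfg.items('results')}
print(code, output, metrics)                   # FAIL & < > " {'value0': 1, 'value1': 2.3456}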
utils/lit/tests/Inputs/xunit-output/lit.cfg (new file, 10 lines)
@@ -0,0 +1,10 @@
import site
site.addsitedir(os.path.dirname(__file__))
import dummy_format

config.name = 'test-data'
config.suffixes = ['.ini']
config.test_format = dummy_format.DummyFormat()
config.test_source_root = None
config.test_exec_root = None
config.target_triple = None
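This config wires the dummy format into lit's discovery: site.addsitedir puts the config's own directory on sys.path so that the plain import dummy_format can find the module sitting next to it, and config.test_format routes every .ini under the directory through DummyFormat. A minimal sketch of the sys.path effect, outside lit; the directory name used here is only a stand-in.

# Standalone sketch (not from the commit): the sys.path effect that the
# lit.cfg above relies on. site.addsitedir registers a directory so that
# modules next to the config (here, dummy_format.py) become importable.
import os
import site
import sys

config_dir = os.path.dirname(os.path.abspath(__file__))  # stand-in for the lit.cfg directory
site.addsitedir(config_dir)
print(config_dir in sys.path)                             # True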
@@ -12,8 +12,8 @@
 # CHECK-NEXT: "value0": 1,
 # CHECK-NEXT: "value1": 2.3456
 # CHECK-NEXT: }
-# CHECK: "name": "test-data :: bad&name.ini",
-# CHECK: "output": "& < > \""
-
-# CHECK: ]
+# CHECK-NEXT: "name": "test-data :: metrics.ini",
+# CHECK-NEXT: "output": "Test passed."
+# CHECK-NEXT: }
+# CHECK-NEXT: ]
 # CHECK-NEXT: }
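The CHECK lines above appear to belong to a JSON results check (the file header for this hunk was not captured); after this change only the metrics.ini entry is expected in the test-data output. Below is a sketch of the structure those checks describe, with the JSON hand-written from the checks themselves rather than produced by lit.

# Standalone sketch (not from the commit): the fields the checks above test,
# modeled as a hand-written JSON document. Only the pieces named in the
# CHECK lines are reproduced here.
import json

SAMPLE = """{
  "tests": [
    {
      "metrics": {"value0": 1, "value1": 2.3456},
      "name": "test-data :: metrics.ini",
      "output": "Test passed."
    }
  ]
}"""

data = json.loads(SAMPLE)
test = data["tests"][0]
assert test["metrics"]["value0"] == 1
assert test["metrics"]["value1"] == 2.3456
assert test["name"] == "test-data :: metrics.ini"
assert test["output"] == "Test passed."
print("results JSON matches the CHECK expectations")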
@@ -1,11 +1,13 @@
 # Check xunit output
-# RUN: %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/test-data
+# RUN: %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output || true
 # RUN: FileCheck < %t.xunit.xml %s

 # CHECK: <?xml version="1.0" encoding="UTF-8" ?>
 # CHECK: <testsuites>
-# CHECK: <testsuite name='test-data' tests='2' failures='0'>
-# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'/>
-# CHECK: <testcase classname='test-data.test-data' name='metrics.ini' time='{{[0-1]}}.{{[0-9]+}}'/>
+# CHECK: <testsuite name='test-data' tests='1' failures='1'>
+# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
+# CHECK-NEXT: <failure >
+# CHECK-NEXT:& < > "
+# CHECK-NEXT:</failure>
 # CHECK: </testsuite>
 # CHECK: </testsuites>
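The new expectations say the run now reports one test and one failure, with the failure body carrying the characters from result_output, XML-escaped in the file; the added || true keeps the RUN line green even though lit itself exits non-zero once a test fails. Below is a sketch checking that shape with the standard library against a hand-written stand-in for %t.xunit.xml; the </testcase> close tag is implied by the checks, not shown in them.

# Standalone sketch (not from the commit): parse an xunit document shaped like
# the one the CHECK lines above expect, and confirm the failure text is
# escaped in the file but decoded by the XML parser.
import xml.etree.ElementTree as ET

XML = """<?xml version="1.0" encoding="UTF-8" ?>
<testsuites>
<testsuite name='test-data' tests='1' failures='1'>
<testcase classname='test-data.test-data' name='bad&amp;name.ini' time='0.01'>
<failure >&amp; &lt; &gt; "</failure>
</testcase>
</testsuite>
</testsuites>"""

suite = ET.fromstring(XML.encode('utf-8')).find('testsuite')
assert suite.get('tests') == '1' and suite.get('failures') == '1'
case = suite.find('testcase')
assert case.get('name') == 'bad&name.ini'       # &amp; decoded by the parser
assert case.find('failure').text == '& < > "'   # matches result_output from the .ini
print("xunit structure matches the CHECK expectations")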