Index: docs/CommandGuide/lit.rst
===================================================================
--- docs/CommandGuide/lit.rst
+++ docs/CommandGuide/lit.rst
@@ -77,8 +77,11 @@
 
 .. option:: -v, --verbose
 
-   Show more information on test failures, for example the entire test output
-   instead of just the test result.
+   Show more information about test results, for example the entire test output
+   instead of just the test result. This option can be specified more than once.
+   If ``--verbose`` is specified only once, more information will be shown about
+   failing tests. If ``--verbose`` is specified more than once, more information
+   will be shown for passing tests as well.
 
 .. option:: --no-progress-bar
 
Index: utils/lit/lit/main.py
===================================================================
--- utils/lit/lit/main.py
+++ utils/lit/lit/main.py
@@ -42,7 +42,7 @@
             self.progressBar.update(float(self.completed)/self.numTests,
                                     test.getFullName())
 
-        shouldShow = test.result.code.isFailure or \
+        shouldShow = test.result.code.isFailure or self.opts.showOutput > 1 or \
             (not self.opts.quiet and not self.opts.succinct)
         if not shouldShow:
             return
@@ -55,10 +55,13 @@
         print('%s: %s (%d of %d)' % (test.result.code.name, test_name,
                                      self.completed, self.numTests))
 
+        showPassingTest = self.opts.showOutput > 1 and test.result.output
+        showFailingTest = test.result.code.isFailure and self.opts.showOutput
         # Show the test failure output, if requested.
-        if test.result.code.isFailure and self.opts.showOutput:
-            print("%s TEST '%s' FAILED %s" % ('*'*20, test.getFullName(),
-                                              '*'*20))
+        if showFailingTest or showPassingTest:
+            print("%s TEST '%s' %s %s" %
+                  ('*'*20, test.getFullName(),
+                   'FAILED' if showFailingTest else 'OUTPUT', '*'*20))
             print(test.result.output)
             print("*" * 20)
 
@@ -162,7 +165,7 @@
                      action="store_true", default=False)
     group.add_option("-v", "--verbose", dest="showOutput",
                      help="Show all test output",
-                     action="store_true", default=False)
+                     action="count", default=0)
     group.add_option("-o", "--output", dest="output_path",
                      help="Write test results to the provided path",
                      action="store", type=str, metavar="PATH")
@@ -446,9 +449,9 @@
         for suite_name, suite in by_suite.items():
             safe_suite_name = suite_name.replace(".", "-")
             xunit_output_file.write("\n")
             for result_test in suite['tests']:
                 xunit_output_file.write(result_test.getJUnitXML() + "\n")
Index: utils/lit/tests/Inputs/test-data/lit.cfg
===================================================================
--- utils/lit/tests/Inputs/test-data/lit.cfg
+++ utils/lit/tests/Inputs/test-data/lit.cfg
@@ -23,6 +23,9 @@
         result = lit.Test.Result(getattr(lit.Test, result_code),
                                  result_output)
 
+        if not cfg.has_section('results'):
+            return result
+
         # Load additional metrics.
         for key,value_str in cfg.items('results'):
             value = eval(value_str)
Index: utils/lit/tests/Inputs/test-data/metrics.ini
===================================================================
--- utils/lit/tests/Inputs/test-data/metrics.ini
+++ /dev/null
@@ -1,7 +0,0 @@
-[global]
-result_code = PASS
-result_output = Test passed.
-
-[results]
-value0 = 1
-value1 = 2.3456
\ No newline at end of file
Index: utils/lit/tests/Inputs/test-data/test-metrics.ini
===================================================================
--- /dev/null
+++ utils/lit/tests/Inputs/test-data/test-metrics.ini
@@ -0,0 +1,7 @@
+[global]
+result_code = PASS
+result_output = Test passed.
+
+[results]
+value0 = 1
+value1 = 2.3456
Index: utils/lit/tests/Inputs/test-data/test-verbose-failed.ini
===================================================================
--- /dev/null
+++ utils/lit/tests/Inputs/test-data/test-verbose-failed.ini
@@ -0,0 +1,7 @@
+[global]
+result_code = FAIL
+result_output = This is the FAILING output.
+
+[results]
+value0 = 1
+value1 = 2.3456
Index: utils/lit/tests/Inputs/test-data/test-verbose.ini
===================================================================
--- /dev/null
+++ utils/lit/tests/Inputs/test-data/test-verbose.ini
@@ -0,0 +1,7 @@
+[global]
+result_code = PASS
+result_output = This is the passing output.
+
+[results]
+value0 = 1
+value1 = 2.3456
Index: utils/lit/tests/test-output.py
===================================================================
--- utils/lit/tests/test-output.py
+++ utils/lit/tests/test-output.py
@@ -1,5 +1,5 @@
-# RUN: %{lit} -j 1 -v %{inputs}/test-data --output %t.results.out > %t.out
-# RUN: FileCheck < %t.results.out %s
+# RUN: %{lit} -v %{inputs}/test-data/test-metrics.ini --output %t.results.out > %t.out
+# RUN: FileCheck --check-prefix=CHECK < %t.results.out %s
 
 # CHECK: {
 # CHECK: "__version__"
@@ -12,8 +12,39 @@
 # CHECK-NEXT: "value0": 1,
 # CHECK-NEXT: "value1": 2.3456
 # CHECK-NEXT: }
-# CHECK-NEXT: "name": "test-data :: metrics.ini",
+# CHECK-NEXT: "name": "test-data :: test-metrics.ini",
 # CHECK-NEXT: "output": "Test passed."
 # CHECK-NEXT: }
 # CHECK-NEXT: ]
 # CHECK-NEXT: }
+
+# RUN: %{lit} -j 1 -vv %{inputs}/test-data/test-verbose.ini > %t.out
+# RUN: FileCheck -check-prefix=CHECK-VERBOSE %s < %t.out
+
+# CHECK-VERBOSE: {{\*+}} TEST 'test-data :: test-verbose.ini' OUTPUT {{\*+}}
+# CHECK-VERBOSE-NEXT: This is the passing output.
+# CHECK-VERBOSE-NEXT: {{\*+}}
+# CHECK-VERBOSE-NEXT: {{\*+}} TEST 'test-data :: test-verbose.ini' RESULTS {{\*+}}
+
+# RUN: %{lit} -j 1 -v %{inputs}/test-data/test-verbose.ini > %t.out
+# RUN: FileCheck -check-prefix=CHECK-NONVERBOSE %s < %t.out
+
+# CHECK-NONVERBOSE-NOT: {{\*+}} TEST 'test-data :: test-verbose.ini' OUTPUT {{\*+}}
+# CHECK-NONVERBOSE-NOT: This is the passing output.
+# CHECK-NONVERBOSE: {{\*+}} TEST 'test-data :: test-verbose.ini' RESULTS {{\*+}}
+
+# RUN: not %{lit} -j 1 -vv %{inputs}/test-data/test-verbose-failed.ini > %t.out
+# RUN: FileCheck -check-prefix=CHECK-VERBOSE-FAIL %s < %t.out
+
+# CHECK-VERBOSE-FAIL: {{\*+}} TEST 'test-data :: test-verbose-failed.ini' FAILED {{\*+}}
+# CHECK-VERBOSE-FAIL-NEXT: This is the FAILING output.
+# CHECK-VERBOSE-FAIL-NEXT: {{\*+}}
+# CHECK-VERBOSE-FAIL-NEXT: {{\*+}} TEST 'test-data :: test-verbose-failed.ini' RESULTS {{\*+}}
+
+# RUN: not %{lit} -j 1 -v %{inputs}/test-data/test-verbose-failed.ini > %t.out
+# RUN: FileCheck -check-prefix=CHECK-NONVERBOSE-FAIL %s < %t.out
+
+# CHECK-NONVERBOSE-FAIL: {{\*+}} TEST 'test-data :: test-verbose-failed.ini' FAILED {{\*+}}
+# CHECK-NONVERBOSE-FAIL-NEXT: This is the FAILING output.
+# CHECK-NONVERBOSE-FAIL-NEXT: {{\*+}}
+# CHECK-NONVERBOSE-FAIL-NEXT: {{\*+}} TEST 'test-data :: test-verbose-failed.ini' RESULTS {{\*+}}
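
As a footnote to the option-parsing change in utils/lit/lit/main.py above, the sketch below (not part of the patch) shows how optparse's "count" action turns repeated -v flags into the integer showOutput value that the new showPassingTest/showFailingTest logic keys off. The Result class and should_print_output helper here are illustrative stand-ins under assumed names, not lit's real API.

# Minimal standalone sketch (not part of the patch): optparse's "count"
# action accumulates repeated -v flags into an integer, and that integer
# decides whether a test's output is echoed.  Result and
# should_print_output are hypothetical stand-ins, not lit's real classes.
from optparse import OptionParser


class Result(object):
    def __init__(self, is_failure, output):
        self.is_failure = is_failure
        self.output = output


def should_print_output(result, show_output):
    # Mirrors the patched logic: failing-test output prints at -v and above,
    # passing-test output prints only at -vv (show_output > 1).
    show_passing = show_output > 1 and result.output
    show_failing = result.is_failure and show_output
    return bool(show_failing or show_passing)


parser = OptionParser()
parser.add_option("-v", "--verbose", dest="showOutput",
                  help="Show all test output",
                  action="count", default=0)

opts, args = parser.parse_args(["-v", "-v"])
print(opts.showOutput)                                        # 2
print(should_print_output(Result(False, "pass output"), 1))   # False: pass, only -v
print(should_print_output(Result(False, "pass output"), 2))   # True: pass, -vv
print(should_print_output(Result(True, "fail output"), 1))    # True: failure, -v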