Index: test-suite/trunk/MicroBenchmarks/CMakeLists.txt
===================================================================
--- test-suite/trunk/MicroBenchmarks/CMakeLists.txt
+++ test-suite/trunk/MicroBenchmarks/CMakeLists.txt
@@ -1,2 +1,4 @@
+file(COPY lit.local.cfg DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
+
 add_subdirectory(libs)
 add_subdirectory(XRay)
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/CMakeLists.txt
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/CMakeLists.txt
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/CMakeLists.txt
@@ -1,23 +1,8 @@
 check_cxx_compiler_flag(-fxray-instrument COMPILER_HAS_FXRAY_INSTRUMENT)
 if(ARCH STREQUAL "x86" AND COMPILER_HAS_FXRAY_INSTRUMENT)
-  file(COPY lit.local.cfg DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-
   list(APPEND CPPFLAGS -std=c++11 -Wl,--gc-sections -fxray-instrument)
   list(APPEND LDFLAGS -fxray-instrument)
-  llvm_test_run(--benchmark_filter=dummy_skip_ignore)
+  llvm_test_run()
   llvm_test_executable(fdrmode-bench fdrmode-bench.cc)
   target_link_libraries(fdrmode-bench benchmark)
-
-  file(COPY fdrmode-bench_BM_XRayFDRMultiThreaded_1_thread.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY fdrmode-bench_BM_XRayFDRMultiThreaded_2_thread.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY fdrmode-bench_BM_XRayFDRMultiThreaded_4_thread.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY fdrmode-bench_BM_XRayFDRMultiThreaded_8_thread.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY fdrmode-bench_BM_XRayFDRMultiThreaded_16_thread.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY fdrmode-bench_BM_XRayFDRMultiThreaded_32_thread.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
 endif()
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_16_thread.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_16_thread.test
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_16_thread.test
@@ -1 +0,0 @@
-RUN: %S/fdrmode-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_XRayFDRMultiThreaded/threads:16$
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_1_thread.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_1_thread.test
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_1_thread.test
@@ -1 +0,0 @@
-RUN: %S/fdrmode-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_XRayFDRMultiThreaded/threads:1$
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_2_thread.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_2_thread.test
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_2_thread.test
@@ -1 +0,0 @@
-RUN: %S/fdrmode-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_XRayFDRMultiThreaded/threads:2$
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_32_thread.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_32_thread.test
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_32_thread.test
@@ -1 +0,0 @@
-RUN: %S/fdrmode-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_XRayFDRMultiThreaded/threads:32$
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_4_thread.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_4_thread.test
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_4_thread.test
@@ -1 +0,0 @@
-RUN: %S/fdrmode-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_XRayFDRMultiThreaded/threads:4$
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_8_thread.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_8_thread.test
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/fdrmode-bench_BM_XRayFDRMultiThreaded_8_thread.test
@@ -1 +0,0 @@
-RUN: %S/fdrmode-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_XRayFDRMultiThreaded/threads:8$
Index: test-suite/trunk/MicroBenchmarks/XRay/FDRMode/lit.local.cfg
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/FDRMode/lit.local.cfg
+++ test-suite/trunk/MicroBenchmarks/XRay/FDRMode/lit.local.cfg
@@ -1,8 +0,0 @@
-config.environment['XRAY_OPTIONS'] = 'patch_premain=false xray_naive_log=false xray_fdr_log=true'
-test_modules = config.test_modules
-if 'run' in test_modules:
-    # Insert microbenchmark module behind 'run'
-    test_modules.insert(test_modules.index('run')+1, 'microbenchmark')
-    # Timeit results are not useful for microbenchmarks
-    if 'timeit' in test_modules:
-        test_modules.remove('timeit')
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/CMakeLists.txt
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/CMakeLists.txt
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/CMakeLists.txt
@@ -1,23 +1,8 @@
 check_cxx_compiler_flag(-fxray-instrument COMPILER_HAS_FXRAY_INSTRUMENT)
 if(ARCH STREQUAL "x86" AND COMPILER_HAS_FXRAY_INSTRUMENT)
-  file(COPY lit.local.cfg DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-
   list(APPEND CPPFLAGS -std=c++11 -Wl,--gc-sections -fxray-instrument)
   list(APPEND LDFLAGS -fxray-instrument)
-  llvm_test_run(--benchmark_filter=dummy_skip_ignore)
+  llvm_test_run()
   llvm_test_executable(retref-bench retref-bench.cc)
   target_link_libraries(retref-bench benchmark)
-
-  file(COPY retref-bench_BM_ReturnNeverInstrumented.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY retref-bench_BM_ReturnInstrumentedUnPatched.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY retref-bench_BM_ReturnInstrumentedPatchedThenUnpatched.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY retref-bench_BM_ReturnInstrumentedPatched.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY retref-bench_BM_RDTSCP_Cost.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
-  file(COPY retref-bench_BM_ReturnInstrumentedPatchedWithLogHandler.test
-    DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
 endif()
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/lit.local.cfg
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/lit.local.cfg
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/lit.local.cfg
@@ -1,8 +0,0 @@
-config.environment['XRAY_OPTIONS'] = 'patch_premain=false xray_naive_log=false'
-test_modules = config.test_modules
-if 'run' in test_modules:
-    # Insert microbenchmark module behind 'run'
-    test_modules.insert(test_modules.index('run')+1, 'microbenchmark')
-    # Timeit results are not useful for microbenchmarks
-    if 'timeit' in test_modules:
-        test_modules.remove('timeit')
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_RDTSCP_Cost.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_RDTSCP_Cost.test
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_RDTSCP_Cost.test
@@ -1 +0,0 @@
-RUN: %S/retref-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_RDTSCP_Cost
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatched.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatched.test
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatched.test
@@ -1 +0,0 @@
-RUN: %S/retref-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_ReturnInstrumentedPatched
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatchedThenUnpatched.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatchedThenUnpatched.test
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatchedThenUnpatched.test
@@ -1 +0,0 @@
-RUN: %S/retref-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_ReturnInstrumentedPatchedThenUnpatched
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatchedWithLogHandler.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatchedWithLogHandler.test
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedPatchedWithLogHandler.test
@@ -1 +0,0 @@
-RUN: %S/retref-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_ReturnInstrumentedPatchedWithLogHandler
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedUnPatched.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedUnPatched.test
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnInstrumentedUnPatched.test
@@ -1 +0,0 @@
-RUN: %S/retref-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_ReturnInstrumentedUnPatched
Index: test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnNeverInstrumented.test
===================================================================
--- test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnNeverInstrumented.test
+++ test-suite/trunk/MicroBenchmarks/XRay/ReturnReference/retref-bench_BM_ReturnNeverInstrumented.test
@@ -1 +0,0 @@
-RUN: %S/retref-bench --benchmark_repetitions=10 --benchmark_report_aggregates_only=true --benchmark_filter=BM_ReturnNeverInstrumented
Index: test-suite/trunk/MicroBenchmarks/lit.local.cfg
===================================================================
--- test-suite/trunk/MicroBenchmarks/lit.local.cfg
+++ test-suite/trunk/MicroBenchmarks/lit.local.cfg
@@ -0,0 +1,8 @@
+config.environment['XRAY_OPTIONS'] = 'patch_premain=false xray_naive_log=false'
+test_modules = config.test_modules
+if 'run' in test_modules:
+    # Insert microbenchmark module behind 'run'
+    test_modules.insert(test_modules.index('run')+1, 'microbenchmark')
+    # Timeit results are not useful for microbenchmarks
+    if 'timeit' in test_modules:
+        test_modules.remove('timeit')
Index: test-suite/trunk/litsupport/modules/microbenchmark.py
===================================================================
--- test-suite/trunk/litsupport/modules/microbenchmark.py
+++ test-suite/trunk/litsupport/modules/microbenchmark.py
@@ -23,7 +23,6 @@
 
 
 def _collectMicrobenchmarkTime(context, microbenchfiles):
-    result = 0.0
     for f in microbenchfiles:
         with open(f) as inp:
             lines = csv.reader(inp)
@@ -31,10 +30,19 @@
             for line in lines:
                 if line[0] == 'name':
                     continue
-                # Note that we cannot create new tests here, so for now we just
-                # add up all the numbers here.
-                result += float(line[3])
-    return {'microbenchmark_time_ns': lit.Test.toMetricValue(result)}
+                # Name for MicroBenchmark
+                name = line[0]
+                # Create Result object with PASS
+                microBenchmark = lit.Test.Result(lit.Test.PASS)
+
+                # Index 3 is cpu_time
+                microBenchmark.addMetric('exec_time', lit.Test.toMetricValue(float(line[3])))
+
+                # Add Micro Result
+                context.micro_results[name] = microBenchmark
+
+    # returning the number of microbenchmarks collected as a metric for the base test
+    return ({'MicroBenchmarks': lit.Test.toMetricValue(len(context.micro_results))})
 
 
 def mutatePlan(context, plan):
Index: test-suite/trunk/litsupport/testplan.py
===================================================================
--- test-suite/trunk/litsupport/testplan.py
+++ test-suite/trunk/litsupport/testplan.py
@@ -143,6 +143,7 @@
     lit.test.Result() object for the results."""
     context.result_output = ""
     context.result_metrics = {}
+    context.micro_results = {}
 
     result_code = _executePlan(context, testplan)
 
@@ -150,6 +151,9 @@
     result = lit.Test.Result(result_code, context.result_output)
     for key, value in context.result_metrics.items():
         result.addMetric(key, value)
+    for key, value in context.micro_results.items():
+        result.addMicroResult(key, value)
+
     return result
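Note (not part of the patch): the microbenchmark.py change above stops summing all cpu_time values into a single microbenchmark_time_ns metric and instead records one lit micro result per Google Benchmark row, which testplan.py then attaches to the parent test via addMicroResult. The sketch below mirrors that logic outside of litsupport so it can be tried in isolation. collect_micro_results and the file paths are hypothetical names introduced here; the sketch assumes a lit version that provides lit.Test.Result.addMicroResult and a CSV produced with Google Benchmark's --benchmark_format=csv output.

# Standalone sketch (hypothetical helper, not part of the patch).
import csv

import lit.Test


def collect_micro_results(csv_path, parent_result):
    """Attach one lit micro result per benchmark row found in csv_path."""
    count = 0
    with open(csv_path) as inp:
        for row in csv.reader(inp):
            # Skip the header row ("name,iterations,real_time,cpu_time,...").
            if not row or row[0] == 'name':
                continue
            micro = lit.Test.Result(lit.Test.PASS)
            # Column 3 of Google Benchmark's CSV output is cpu_time.
            micro.addMetric('exec_time', lit.Test.toMetricValue(float(row[3])))
            # Register the row as a micro result on the parent test.
            parent_result.addMicroResult(row[0], micro)
            count += 1
    # Record how many micro results were attached, as the patch does.
    parent_result.addMetric('MicroBenchmarks', lit.Test.toMetricValue(count))


# Hypothetical usage:
# parent = lit.Test.Result(lit.Test.PASS)
# collect_micro_results('retref-bench.bench.csv', parent)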