Index: LNTBased/MicroBenchmarks/XRay/CMakeLists.txt
===================================================================
--- /dev/null
+++ LNTBased/MicroBenchmarks/XRay/CMakeLists.txt
@@ -0,0 +1,14 @@
+cmake_minimum_required(VERSION 3.7)
+
+include(CheckCXXCompilerFlag)
+check_cxx_compiler_flag(-fxray-instrument COMPILER_HAS_FXRAY_INSTRUMENT)
+
+if("${ARCH}" STREQUAL "x86_64" AND "${COMPILER_HAS_FXRAY_INSTRUMENT}")
+  add_subdirectory(benchmark-1.2.0)
+
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wl,--gc-sections -fxray-instrument")
+  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fxray-instrument")
+  add_executable(retref-bench XRay/retref-bench.cc)
+  target_link_libraries(retref-bench benchmark)
+endif()
+
Index: LNTBased/MicroBenchmarks/XRay/TestModule
===================================================================
--- /dev/null
+++ LNTBased/MicroBenchmarks/XRay/TestModule
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# -*- Python -*-
+
+import microbenchmark
+
+class RetrefBench(microbenchmark.TestModule):
+    def execute_test(self, options, make_variables, config):
+        result = self.build_microbenchmark(options, make_variables)
+        if result is not None:
+            return result
+
+        return self.run_microbenchmark(options, 'retref-bench')
+
+test_class = RetrefBench
+
+if __name__ == '__main__':
+    test_class().main()
Index: LNTBased/MicroBenchmarks/XRay/XRay
===================================================================
--- /dev/null
+++ LNTBased/MicroBenchmarks/XRay/XRay
@@ -0,0 +1 @@
+../../../MicroBenchmarks/XRay/
\ No newline at end of file
Index: LNTBased/MicroBenchmarks/XRay/benchmark-1.2.0
===================================================================
--- /dev/null
+++ LNTBased/MicroBenchmarks/XRay/benchmark-1.2.0
@@ -0,0 +1 @@
+../../../MicroBenchmarks/libs/benchmark-1.2.0
\ No newline at end of file
Index: LNTBased/lib/microbenchmark.py
===================================================================
--- /dev/null
+++ LNTBased/lib/microbenchmark.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# -*- Python -*-
+
+import csv
+import os
+import StringIO
+import subprocess
+
+from lnt.tests import nt
+from lnt.testing import TestSamples, PASS, FAIL, XFAIL
+
+class TestModule(nt.TestModule):
+    def build_microbenchmark(self, options, make_variables):
+        """Build the benchmark binary in the object directory."""
+        objroot = options['OBJROOT']
+        module = options['MODULENAME']
+
+        rc = subprocess.Popen(
+            ['cmake',
+             '-DARCH={}'.format(make_variables['ARCH']),
+             options['SRCROOT']],
+            cwd=objroot,
+            env=dict(os.environ, **{
+                'CXX': options['CXX'],
+                'CXXFLAGS': options['CXXFLAGS'],
+                'CC': options['CC'],
+                'CFLAGS': options['CFLAGS'],
+            })).wait()
+        if rc != 0:
+            return TestSamples('nts.LNT/{}/compile.status'.format(module), [FAIL])
+
+        rc = subprocess.Popen(
+            ['make', '-j{}'.format(options['BUILD_THREADS'])],
+            cwd=objroot).wait()
+        if rc != 0:
+            return TestSamples('nts.LNT/{}/compile.status'.format(module), [FAIL])
+
+        return None
+
+    def run_microbenchmark(self, options, name):
+        """Run the benchmark binary and parse its CSV output."""
+
+        objroot = options['OBJROOT']
+        module = options['MODULENAME']
+
+        p = subprocess.Popen(
+            ['{}/{}'.format(objroot, name), '--benchmark_format=csv'],
+            cwd=objroot,
+            env=dict(os.environ, **{
+                'XRAY_OPTIONS': 'patch_premain=false xray_naive_log=false'}),
+            stdout=subprocess.PIPE)
+        bm_output = p.communicate()
+        if p.returncode != 0:
+            return TestSamples(
+                'nts.LNT/{}/{}.exec.status'.format(module, name), [FAIL])
+
+        csv_lines = csv.reader(StringIO.StringIO(bm_output[0]))
+        results = []
+        for line in csv_lines:
+            # The first line of output from the benchmark in CSV output mode
+            # should look like "name,iterations,real_time,cpu_time,time_unit...".
+            if line[0] == "name":
+                continue
+            results.append(
+                TestSamples('nts.LNT/{}/{}.{}.exec'.format(module, name, line[0]),
+                            [float(line[3])]))
+        return results
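
For reference, a standalone sketch of the parsing step in run_microbenchmark(), assuming the benchmark binary emits Google Benchmark's CSV layout ("name,iterations,real_time,cpu_time,time_unit,..."). The benchmark row, module name, and binary name below are invented for illustration and are not taken from the patch:

    import csv
    import StringIO

    # Illustrative --benchmark_format=csv output; the row values are made up.
    example_output = (
        "name,iterations,real_time,cpu_time,time_unit\n"
        "BM_Example,1000000,2.1,2.0,ns\n")

    # Hypothetical MODULENAME / binary name; the real values depend on the LNT run.
    module, name = 'XRay', 'retref-bench'

    for line in csv.reader(StringIO.StringIO(example_output)):
        if line[0] == "name":
            continue  # skip the CSV header row
        # run_microbenchmark() keys each sample on the benchmark name (column 0)
        # and records cpu_time (column 3) as the sample value.
        print 'nts.LNT/{}/{}.{}.exec = {}'.format(module, name, line[0], float(line[3]))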