Merge topic 'benchmark-options'

7742df755 Split up the filters benchmark tests
1f34e0eaa Add performance configuration options

Acked-by: Kitware Robot <kwrobot@kitware.com>
Acked-by: Vicente Bolea <vicente.bolea@kitware.com>
Merge-request: !2941

commit d1ec992078
@@ -11,18 +11,25 @@
 #-----------------------------------------------------------------------------
 # Adds a performance benchmark test
 #
-# add_benchmark_test(benchmark [ <filter_regex...> ])
-#
 # Usage:
-# add_benchmark_test(FiltersBenchmark BenchThreshold BenchTetrahedralize)
+# add_benchmark_test(benchmark
+#   [ NAME <name> ]
+#   [ ARGS <args...> ]
+#   [ REGEX <benchmark_regex...> ]
+#   )
 #
 # benchmark: Target of an executable that uses Google Benchmark.
 #
-# filter_regex: CMake regexes that selects the specific benchmarks within the binary
+# NAME: The name given to the CMake tests. The benchmark target name is used
+#       if NAME is not specified.
+#
+# ARGS: Extra arguments passed to the benchmark executable when run.
+#
+# REGEX: Regular expressions that select the specific benchmarks within the binary
 #        to be used. It populates the Google Benchmark
 #        --benchmark_filter parameter. When multiple regexes are passed
 #        as independent positional arguments, they are joined using the "|"
-#        regex operator before populating the `--benchmark_filter` parameter
+#        regex operator before populating the `--benchmark_filter` parameter.
 #
 function(add_benchmark_test benchmark)
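For orientation, a minimal sketch of a call using the new keyword signature
(the values mirror the BenchContourPartitioned test registered further below;
the harness prefixes CMake test names with "PerformanceTest"):

  add_benchmark_test(BenchmarkFilters
    NAME BenchContourPartitioned                # CTest name: PerformanceTestBenchContourPartitionedRun
    ARGS --wavelet-dim=32 --num-partitions=128  # forwarded to the benchmark executable at run time
    REGEX BenchContour.*                        # populates Google Benchmark's --benchmark_filter
    )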
@@ -34,10 +41,22 @@ function(add_benchmark_test benchmark)

   ###TEST VARIABLES############################################################

-  # Optional positional parameters for filter_regex
-  set(VTKm_PERF_FILTER_NAME ".*")
-  if (${ARGC} GREATER_EQUAL 2)
-    string(REPLACE ";" "|" VTKm_PERF_FILTER_NAME "${ARGN}")
+  set(options)
+  set(one_value_keywords NAME)
+  set(multi_value_keywords ARGS REGEX)
+  cmake_parse_arguments(PARSE_ARGV 1 VTKm_PERF "${options}" "${one_value_keywords}" "${multi_value_keywords}")
+  if (VTKm_PERF_UNPARSED_ARGUMENTS)
+    message(FATAL_ERROR "Bad arguments to add_benchmark_test (${VTKm_PERF_UNPARSED_ARGUMENTS}).")
   endif()
+
+  if (NOT VTKm_PERF_NAME)
+    set(VTKm_PERF_NAME ${benchmark})
+  endif()
+
+  if (VTKm_PERF_REGEX)
+    string(REPLACE ";" "|" VTKm_PERF_REGEX "${VTKm_PERF_REGEX}")
+  else()
+    set(VTKm_PERF_REGEX ".*")
+  endif()

   set(VTKm_PERF_REMOTE_URL "https://gitlab.kitware.com/vbolea/vtk-m-benchmark-records.git")
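Note how the REGEX branch leans on CMake list semantics: a list is stored as a
";"-separated string, so a single string(REPLACE ";" "|" ...) joins all the
regexes into one Google Benchmark filter expression. A standalone sketch with
hypothetical variable names (runnable with cmake -P):

  set(regexes BenchThreshold BenchTetrahedralize)  # stored as "BenchThreshold;BenchTetrahedralize"
  string(REPLACE ";" "|" joined "${regexes}")
  message(STATUS "${joined}")                      # prints: BenchThreshold|BenchTetrahedralize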
@@ -49,22 +68,23 @@ function(add_benchmark_test benchmark)
   set(VTKm_PERF_DIST "normal")

   set(VTKm_PERF_REPO "${CMAKE_BINARY_DIR}/vtk-m-benchmark-records")
-  set(VTKm_PERF_COMPARE_JSON "${CMAKE_BINARY_DIR}/nocommit_${benchmark}.json")
-  set(VTKm_PERF_STDOUT "${CMAKE_BINARY_DIR}/benchmark_${benchmark}.stdout")
-  set(VTKm_PERF_COMPARE_STDOUT "${CMAKE_BINARY_DIR}/compare_${benchmark}.stdout")
+  set(VTKm_PERF_COMPARE_JSON "${CMAKE_BINARY_DIR}/nocommit_${VTKm_PERF_NAME}.json")
+  set(VTKm_PERF_STDOUT "${CMAKE_BINARY_DIR}/benchmark_${VTKm_PERF_NAME}.stdout")
+  set(VTKm_PERF_COMPARE_STDOUT "${CMAKE_BINARY_DIR}/compare_${VTKm_PERF_NAME}.stdout")

   if (DEFINED ENV{CI_COMMIT_SHA})
-    set(VTKm_PERF_COMPARE_JSON "${CMAKE_BINARY_DIR}/$ENV{CI_COMMIT_SHA}_${benchmark}.json")
+    set(VTKm_PERF_COMPARE_JSON "${CMAKE_BINARY_DIR}/$ENV{CI_COMMIT_SHA}_${VTKm_PERF_NAME}.json")
   endif()

-  set(test_name "PerformanceTest${benchmark}")
+  set(test_name "PerformanceTest${VTKm_PERF_NAME}")

   ###TEST INVOKATIONS##########################################################
   add_test(NAME "${test_name}Run"
     COMMAND ${CMAKE_COMMAND}
       "-DVTKm_PERF_BENCH_DEVICE=Any"
       "-DVTKm_PERF_BENCH_PATH=${CMAKE_BINARY_DIR}/bin/${benchmark}"
-      "-DVTKm_PERF_FILTER_NAME=${VTKm_PERF_FILTER_NAME}"
+      "-DVTKm_PERF_ARGS=${VTKm_PERF_ARGS}"
+      "-DVTKm_PERF_REGEX=${VTKm_PERF_REGEX}"
       "-DVTKm_PERF_REPETITIONS=${VTKm_PERF_REPETITIONS}"
       "-DVTKm_PERF_MIN_TIME=${VTKm_PERF_MIN_TIME}"
       "-DVTKm_PERF_COMPARE_JSON=${VTKm_PERF_COMPARE_JSON}"
@@ -11,7 +11,7 @@
 include("${VTKm_SOURCE_DIR}/CMake/testing/VTKmPerformanceTestLib.cmake")

 REQUIRE_FLAG("VTKm_PERF_BENCH_PATH")
-REQUIRE_FLAG("VTKm_PERF_FILTER_NAME")
+REQUIRE_FLAG("VTKm_PERF_REGEX")
 REQUIRE_FLAG("VTKm_PERF_COMPARE_JSON")
 REQUIRE_FLAG("VTKm_PERF_STDOUT")

@@ -22,7 +22,8 @@ REQUIRE_FLAG_MUTABLE("VTKm_PERF_MIN_TIME")
 execute(
   COMMAND "${VTKm_PERF_BENCH_PATH}"
     --vtkm-device "${VTKm_PERF_BENCH_DEVICE}"
-    "--benchmark_filter=${VTKm_PERF_FILTER_NAME}"
+    ${VTKm_PERF_ARGS}
+    "--benchmark_filter=${VTKm_PERF_REGEX}"
    "--benchmark_out=${VTKm_PERF_COMPARE_JSON}"
     "--benchmark_repetitions=${VTKm_PERF_REPETITIONS}"
     "--benchmark_min_time=${VTKm_PERF_MIN_TIME}"
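The two added lines splice any user ARGS into the command ahead of the Google
Benchmark flags. As a hedged sketch (paths, output file, and the collapsed
filter value are illustrative, not the harness's exact strings), the
BenchContourPartitioned test defined below ends up launching roughly:

  execute_process(
    COMMAND "${CMAKE_BINARY_DIR}/bin/BenchmarkFilters"
      --vtkm-device Any
      --wavelet-dim=32 --num-partitions=128                   # expanded from ${VTKm_PERF_ARGS}
      "--benchmark_filter=BenchContour.*/MultiPartitioned:1"  # the "|"-joined REGEX list
      "--benchmark_out=${CMAKE_BINARY_DIR}/bench.json"
    )

Leaving ${VTKm_PERF_ARGS} unquoted is deliberate: each list element becomes a
separate command-line argument.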
@@ -499,8 +499,7 @@ void BenchContourGenerator(::benchmark::internal::Benchmark* bm)
   helper(12);
 }

-// :TODO: Disabled until SIGSEGV in Countour when passings field is resolved
-//VTKM_BENCHMARK_APPLY(BenchContour, BenchContourGenerator);
+VTKM_BENCHMARK_APPLY(BenchContour, BenchContourGenerator);

 void BenchExternalFaces(::benchmark::State& state)
 {
@@ -72,14 +72,35 @@ target_compile_definitions(BenchmarkDeviceAdapter PUBLIC VTKm_BENCHS_RANGE_UPPER
 if(VTKm_ENABLE_PERFORMANCE_TESTING)
   include("${VTKm_SOURCE_DIR}/CMake/testing/VTKmPerformanceTest.cmake")
   add_benchmark_test(BenchmarkFilters
-    BenchThreshold
-    BenchContour/IsStructuredDataSet:1/NIsoVals:12/MergePts:1/GenNormals:0
-    BenchContour/IsStructuredDataSet:1/NIsoVals:12/MergePts:0/GenNormals:1/FastNormals:1
-    BenchContour/IsStructuredDataSet:0/NIsoVals:12/MergePts:1/GenNormals:0
-    BenchContour/IsStructuredDataSet:0/NIsoVals:12/MergePts:0/GenNormals:/FastNormals:1
-    BenchTetrahedralize
-    BenchVertexClustering/NumDivs:256)
+    NAME BenchThreshold
+    REGEX BenchThreshold
+    )
+  add_benchmark_test(BenchmarkFilters
+    NAME BenchTetrahedralize
+    REGEX BenchTetrahedralize
+    )
+  add_benchmark_test(BenchmarkFilters
+    NAME BenchContour
+    REGEX
+      BenchContour/IsStructuredDataSet:1/NIsoVals:12/MergePts:1/GenNormals:0.*/MultiPartitioned:0
+      BenchContour/IsStructuredDataSet:1/NIsoVals:12/MergePts:0/GenNormals:1/FastNormals:1.*/MultiPartitioned:0
+      BenchContour/IsStructuredDataSet:0/NIsoVals:12/MergePts:1/GenNormals:0.*/MultiPartitioned:0
+      BenchContour/IsStructuredDataSet:0/NIsoVals:12/MergePts:0/GenNormals:1/FastNormals:1.*/MultiPartitioned:0
+    )
+  add_benchmark_test(BenchmarkFilters
+    NAME BenchContourPartitioned
+    ARGS --wavelet-dim=32 --num-partitions=128
+    REGEX
+      BenchContour/IsStructuredDataSet:1/NIsoVals:12/MergePts:1/GenNormals:0.*/MultiPartitioned:1
+      BenchContour/IsStructuredDataSet:1/NIsoVals:12/MergePts:0/GenNormals:1/FastNormals:1.*/MultiPartitioned:1
+      BenchContour/IsStructuredDataSet:0/NIsoVals:12/MergePts:1/GenNormals:0.*/MultiPartitioned:1
+      BenchContour/IsStructuredDataSet:0/NIsoVals:12/MergePts:0/GenNormals:1/FastNormals:1.*/MultiPartitioned:1
+    )
+  add_benchmark_test(BenchmarkFilters
+    NAME BenchVertexClustering
+    REGEX BenchVertexClustering/NumDivs:256
+    )
   if(TARGET vtkm_rendering)
-    add_benchmark_test(BenchmarkInSitu "BenchContour")
+    add_benchmark_test(BenchmarkInSitu REGEX "BenchContour")
   endif()
 endif()
docs/changelog/benchmark-options.md (new file)
@@ -0,0 +1,9 @@
+# More performance test options
+
+More options are available for adding performance regression tests. These
+options allow you to pass custom options to the benchmark test so that you
+are not limited to the default values. They also allow multiple tests to be
+created from the same benchmark executable. Separating out the benchmarks
+allows the null hypothesis testing to better catch performance problems
+when only one of the tested filters regresses. It also allows passing
+different arguments to different benchmarks.
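Concretely, each NAME above becomes its own CTest entry (the harness names the
run step "PerformanceTest${VTKm_PERF_NAME}Run"), so after this change:

  # ctest -N -R PerformanceTest now lists, among others:
  #   PerformanceTestBenchThresholdRun
  #   PerformanceTestBenchTetrahedralizeRun
  #   PerformanceTestBenchContourRun
  #   PerformanceTestBenchContourPartitionedRun
  #   PerformanceTestBenchVertexClusteringRun

A regression in a single filter therefore surfaces in exactly one test's
null-hypothesis comparison instead of being diluted across one monolithic
BenchmarkFilters run.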