# Enable the tests

set(THREADS_PREFER_PTHREAD_FLAG ON)

find_package(Threads REQUIRED)
include(CheckCXXCompilerFlag)

add_cxx_compiler_flag(-Wno-unused-variable)

# NOTE: Some tests use `<cassert>` to perform their checks. Therefore we must
# strip -DNDEBUG from the default CMake flags for non-Debug configurations.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
            "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()

if (NOT BUILD_SHARED_LIBS)
  add_definitions(-DBENCHMARK_STATIC_DEFINE)
endif()

check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
set(BENCHMARK_O3_FLAG "")
if (BENCHMARK_HAS_O3_FLAG)
  set(BENCHMARK_O3_FLAG "-O3")
endif()

# NOTE: These flags must be added after find_package(Threads REQUIRED),
# otherwise they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
endif()

add_library(output_test_helper STATIC output_test_helper.cc output_test.h)
target_link_libraries(output_test_helper PRIVATE benchmark::benchmark)

macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark ${CMAKE_THREAD_LIBS_INIT})
  if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "NVHPC")
    target_compile_options( ${name} PRIVATE --diag_suppress partial_override )
  endif()
endmacro(compile_benchmark_test)

macro(compile_benchmark_test_with_main name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark_main)
endmacro(compile_benchmark_test_with_main)

macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark::benchmark_main
      ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)

macro(benchmark_add_test)
  add_test(${ARGV})
  if(WIN32 AND BUILD_SHARED_LIBS)
    cmake_parse_arguments(TEST "" "NAME" "" ${ARGN})
    set_tests_properties(${TEST_NAME} PROPERTIES ENVIRONMENT_MODIFICATION "PATH=path_list_prepend:$<TARGET_FILE_DIR:benchmark::benchmark>")
  endif()
endmacro(benchmark_add_test)

# Demonstration executable
compile_benchmark_test(benchmark_test)
benchmark_add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01s)

compile_benchmark_test(spec_arg_test)
benchmark_add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)

compile_benchmark_test(spec_arg_verbosity_test)
benchmark_add_test(NAME spec_arg_verbosity COMMAND spec_arg_verbosity_test --v=42)

compile_benchmark_test(benchmark_setup_teardown_test)
benchmark_add_test(NAME benchmark_setup_teardown COMMAND benchmark_setup_teardown_test)

compile_benchmark_test(filter_test)
macro(add_filter_test name filter expect)
  benchmark_add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01s --benchmark_filter=${filter} ${expect})
  benchmark_add_test(NAME ${name}_list_only COMMAND
    filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)

compile_benchmark_test(benchmark_min_time_flag_time_test)
benchmark_add_test(NAME min_time_flag_time COMMAND benchmark_min_time_flag_time_test)

compile_benchmark_test(benchmark_min_time_flag_iters_test)
benchmark_add_test(NAME min_time_flag_iters COMMAND benchmark_min_time_flag_iters_test)

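# For each add_filter_test call below, the second argument is the regular
# expression passed to --benchmark_filter and the third is the number of
# benchmarks filter_test expects that filter to select (a leading '-' in the
# expression inverts the match); the counts correspond to the benchmarks
# registered in filter_test.cc.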
add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_simple_negative "-Foo" 2)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_suffix_negative "-BM_.*" 1)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_all_negative "-.*" 0)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_blank_negative "-" 0)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_none_negative "-monkey" 5)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_begin2_negative "-^N" 4)
add_filter_test(filter_regex_end ".*Ba$" 1)
add_filter_test(filter_regex_end_negative "-.*Ba$" 4)

compile_benchmark_test(options_test)
benchmark_add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01s)

compile_benchmark_test(basic_test)
benchmark_add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01s)

compile_output_test(repetitions_test)
benchmark_add_test(NAME repetitions_benchmark COMMAND repetitions_test --benchmark_min_time=0.01s --benchmark_repetitions=3)

compile_benchmark_test(diagnostics_test)
benchmark_add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01s)

compile_benchmark_test(skip_with_error_test)
benchmark_add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01s)

compile_benchmark_test(donotoptimize_test)
# Enable errors for uses of deprecated declarations (DoNotOptimize(Tp const& value)).
check_cxx_compiler_flag(-Werror=deprecated-declarations BENCHMARK_HAS_DEPRECATED_DECLARATIONS_FLAG)
if (BENCHMARK_HAS_DEPRECATED_DECLARATIONS_FLAG)
  target_compile_options (donotoptimize_test PRIVATE "-Werror=deprecated-declarations")
endif()
# Some of the issues with DoNotOptimize only occur when optimization is enabled.
check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
if (BENCHMARK_HAS_O3_FLAG)
  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
endif()
benchmark_add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01s)

compile_benchmark_test(fixture_test)
benchmark_add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01s)

compile_benchmark_test(register_benchmark_test)
benchmark_add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01s)

compile_benchmark_test(map_test)
benchmark_add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01s)

compile_benchmark_test(multiple_ranges_test)
benchmark_add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01s)

compile_benchmark_test(args_product_test)
benchmark_add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01s)

compile_benchmark_test_with_main(link_main_test)
benchmark_add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01s)

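# The compile_output_test targets below link against output_test_helper (see
# output_test.h), the shared harness these tests use to compare the console,
# JSON, and CSV reporter output against expected patterns.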
compile_output_test(reporter_output_test)
benchmark_add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01s)

compile_output_test(templated_fixture_test)
benchmark_add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01s)

compile_output_test(user_counters_test)
benchmark_add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01s)

compile_output_test(perf_counters_test)
benchmark_add_test(NAME perf_counters_test COMMAND perf_counters_test --benchmark_min_time=0.01s --benchmark_perf_counters=CYCLES,INSTRUCTIONS)

compile_output_test(internal_threading_test)
benchmark_add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01s)

compile_output_test(report_aggregates_only_test)
benchmark_add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01s)

compile_output_test(display_aggregates_only_test)
benchmark_add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01s)

compile_output_test(user_counters_tabular_test)
benchmark_add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01s)

compile_output_test(user_counters_thousands_test)
benchmark_add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01s)

compile_output_test(memory_manager_test)
benchmark_add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01s)

# MSVC does not allow setting the language standard to C++98/03.
if(NOT (MSVC OR CMAKE_CXX_SIMULATE_ID STREQUAL "MSVC"))
  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
      PROPERTIES
      CXX_STANDARD 98
      CXX_STANDARD_REQUIRED YES)
  # libstdc++ provides different definitions within <map> between dialects. When
  # LTO is enabled and -Werror is specified, GCC diagnoses this ODR violation,
  # causing the test to fail to compile. To prevent this, we explicitly disable
  # the warning.
  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
  check_cxx_compiler_flag(-Wno-lto-type-mismatch BENCHMARK_HAS_WNO_LTO_TYPE_MISMATCH)
  # Cannot call set_target_properties multiple times here because the warning
  # flags would be overwritten on each call.
  set (DISABLE_LTO_WARNINGS "")
  if (BENCHMARK_HAS_WNO_ODR)
    set(DISABLE_LTO_WARNINGS "${DISABLE_LTO_WARNINGS} -Wno-odr")
  endif()
  if (BENCHMARK_HAS_WNO_LTO_TYPE_MISMATCH)
    set(DISABLE_LTO_WARNINGS "${DISABLE_LTO_WARNINGS} -Wno-lto-type-mismatch")
  endif()
  set_target_properties(cxx03_test PROPERTIES LINK_FLAGS "${DISABLE_LTO_WARNINGS}")
  benchmark_add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01s)
endif()

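# Note: the "<N>x" form of --benchmark_min_time below requests a fixed number
# of iterations (here 1000000) rather than a minimum wall-clock time ("<T>s"),
# presumably so the complexity fit always runs over a predictable amount of
# work.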
compile_output_test(complexity_test)
benchmark_add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=1000000x)

###############################################################################
# GoogleTest Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_GTEST_TESTS)
  macro(compile_gtest name)
    add_executable(${name} "${name}.cc")
    target_link_libraries(${name} benchmark::benchmark
        gmock_main ${CMAKE_THREAD_LIBS_INIT})
  endmacro(compile_gtest)

  macro(add_gtest name)
    compile_gtest(${name})
    benchmark_add_test(NAME ${name} COMMAND ${name})
    if(WIN32 AND BUILD_SHARED_LIBS)
      set_tests_properties(${name} PROPERTIES
        ENVIRONMENT_MODIFICATION "PATH=path_list_prepend:$<TARGET_FILE_DIR:benchmark::benchmark>;PATH=path_list_prepend:$<TARGET_FILE_DIR:gmock_main>"
      )
    endif()
  endmacro()

  add_gtest(benchmark_gtest)
  add_gtest(benchmark_name_gtest)
  add_gtest(benchmark_random_interleaving_gtest)
  add_gtest(commandlineflags_gtest)
  add_gtest(statistics_gtest)
  add_gtest(string_util_gtest)
  add_gtest(perf_counters_gtest)
  add_gtest(time_unit_gtest)
  add_gtest(min_time_parse_gtest)
endif(BENCHMARK_ENABLE_GTEST_TESTS)

###############################################################################
# Assembly Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
  if (NOT LLVM_FILECHECK_EXE)
    message(FATAL_ERROR "LLVM FileCheck is required when including this file")
  endif()
  include(AssemblyTests.cmake)
  add_filecheck_test(donotoptimize_assembly_test)
  add_filecheck_test(state_assembly_test)
  add_filecheck_test(clobber_memory_assembly_test)
endif()

###############################################################################
# Code Coverage Configuration
###############################################################################

# Add the coverage command(s)
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
if (CMAKE_BUILD_TYPE_LOWER MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
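    # The custom command below drives lcov end to end: zero any existing
    # counters, capture a pre-test baseline, run the suite via ctest, capture
    # the post-test counters, merge the two captures, strip coverage that is
    # attributed to the test/ sources themselves, and render an HTML report
    # under ${CMAKE_BINARY_DIR}/lcov.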
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
      "Coverage not available:\n"
      "  gcov: ${GCOV}\n"
      "  lcov: ${LCOV}\n"
      "  genhtml: ${GENHTML}\n"
      "  ctest: ${CTEST}\n"
      "  --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()
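# Usage sketch (illustrative; assumes the top-level project defines the
# coverage compile flags checked via HAVE_CXX_FLAG_COVERAGE): configure with a
# build type whose name contains "coverage", e.g.
#   cmake -DCMAKE_BUILD_TYPE=Coverage <source-dir>
# then build the `coverage` target to run the tests under lcov and produce
# lcov/index.html in the build directory.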