# Enable the tests

find_package(Threads REQUIRED)
include(CheckCXXCompilerFlag)

# NOTE: Some tests use `<cassert>` to perform the test. Therefore we must
# strip -DNDEBUG from the default CMake flags in non-Debug modes, where it
# is otherwise defined.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
      "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()

check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
set(BENCHMARK_O3_FLAG "")
if (BENCHMARK_HAS_O3_FLAG)
  set(BENCHMARK_O3_FLAG "-O3")
endif()

# NOTE: These flags must be added after find_package(Threads REQUIRED),
# otherwise they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  # CMAKE_EXE_LINKER_FLAGS is a space-separated string rather than a CMake
  # list, so append with string(APPEND) to avoid injecting semicolons into
  # the link line.
  string(APPEND CMAKE_EXE_LINKER_FLAGS " ${BENCHMARK_CXX_LINKER_FLAGS}")
endif()

add_library(output_test_helper STATIC output_test_helper.cc output_test.h)

macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_benchmark_test)

macro(compile_benchmark_test_with_main name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark::benchmark_main)
endmacro(compile_benchmark_test_with_main)

macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark::benchmark
    ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)

# Demonstration executable
compile_benchmark_test(benchmark_test)
add_test(NAME benchmark COMMAND benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(spec_arg_test)
add_test(NAME spec_arg COMMAND spec_arg_test --benchmark_filter=BM_NotChosen)

compile_benchmark_test(benchmark_setup_teardown_test)
add_test(NAME benchmark_setup_teardown COMMAND benchmark_setup_teardown_test)

compile_benchmark_test(filter_test)
# Runs filter_test twice per case: once executing the selected benchmarks and
# once with --benchmark_list_tests. The trailing `expect` argument is the
# number of benchmarks (out of the 5 registered) that the filter must match.
macro(add_filter_test name filter expect)
  add_test(NAME ${name} COMMAND filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
  add_test(NAME ${name}_list_only COMMAND filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)

add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_simple_negative "-Foo" 2)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_suffix_negative "-BM_.*" 1)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_all_negative "-.*" 0)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_blank_negative "-" 0)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_none_negative "-monkey" 5)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_wildcard_negative "-.*Foo.*" 2)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin_negative "-^BM_.*" 1)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_begin2_negative "-^N" 4)
add_filter_test(filter_regex_end ".*Ba$" 1)
add_filter_test(filter_regex_end_negative "-.*Ba$" 4)

compile_benchmark_test(options_test)
add_test(NAME options_benchmarks COMMAND options_test --benchmark_min_time=0.01)

compile_benchmark_test(basic_test)
add_test(NAME basic_benchmark COMMAND basic_test --benchmark_min_time=0.01)

compile_output_test(repetitions_test)
add_test(NAME repetitions_benchmark COMMAND repetitions_test --benchmark_min_time=0.01 --benchmark_repetitions=3)

compile_benchmark_test(diagnostics_test)
add_test(NAME diagnostics_test COMMAND diagnostics_test --benchmark_min_time=0.01)

compile_benchmark_test(skip_with_error_test)
add_test(NAME skip_with_error_test COMMAND skip_with_error_test --benchmark_min_time=0.01)

compile_benchmark_test(donotoptimize_test)
# Some of the issues with DoNotOptimize only occur when optimization is
# enabled, so build this test with -O3 where available (the flag was already
# probed above).
if (BENCHMARK_HAS_O3_FLAG)
  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
endif()
add_test(NAME donotoptimize_test COMMAND donotoptimize_test --benchmark_min_time=0.01)

compile_benchmark_test(fixture_test)
add_test(NAME fixture_test COMMAND fixture_test --benchmark_min_time=0.01)

compile_benchmark_test(register_benchmark_test)
add_test(NAME register_benchmark_test COMMAND register_benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(map_test)
add_test(NAME map_test COMMAND map_test --benchmark_min_time=0.01)

compile_benchmark_test(multiple_ranges_test)
add_test(NAME multiple_ranges_test COMMAND multiple_ranges_test --benchmark_min_time=0.01)

compile_benchmark_test(args_product_test)
add_test(NAME args_product_test COMMAND args_product_test --benchmark_min_time=0.01)

compile_benchmark_test_with_main(link_main_test)
add_test(NAME link_main_test COMMAND link_main_test --benchmark_min_time=0.01)

compile_output_test(reporter_output_test)
add_test(NAME reporter_output_test COMMAND reporter_output_test --benchmark_min_time=0.01)

compile_output_test(templated_fixture_test)
add_test(NAME templated_fixture_test COMMAND templated_fixture_test --benchmark_min_time=0.01)

compile_output_test(user_counters_test)
add_test(NAME user_counters_test COMMAND user_counters_test --benchmark_min_time=0.01)

compile_output_test(perf_counters_test)
add_test(NAME perf_counters_test COMMAND perf_counters_test --benchmark_min_time=0.01 --benchmark_perf_counters=CYCLES,BRANCHES)

compile_output_test(internal_threading_test)
add_test(NAME internal_threading_test COMMAND internal_threading_test --benchmark_min_time=0.01)

compile_output_test(report_aggregates_only_test)
add_test(NAME report_aggregates_only_test COMMAND report_aggregates_only_test --benchmark_min_time=0.01)

compile_output_test(display_aggregates_only_test)
add_test(NAME display_aggregates_only_test COMMAND display_aggregates_only_test --benchmark_min_time=0.01)

compile_output_test(user_counters_tabular_test)
add_test(NAME user_counters_tabular_test COMMAND user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)

compile_output_test(user_counters_thousands_test)
add_test(NAME user_counters_thousands_test COMMAND user_counters_thousands_test --benchmark_min_time=0.01)

compile_output_test(memory_manager_test)
add_test(NAME memory_manager_test COMMAND memory_manager_test --benchmark_min_time=0.01)

check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
if (BENCHMARK_HAS_CXX03_FLAG)
  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
    PROPERTIES
    CXX_STANDARD 98
    CXX_STANDARD_REQUIRED YES)
  # libstdc++ provides different definitions within <map> between dialects.
  # When LTO is enabled and -Werror is specified, GCC diagnoses this ODR
  # violation, causing the test to fail to compile. To prevent this we
  # explicitly disable the warning.
  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
  if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
    set_target_properties(cxx03_test
      PROPERTIES
      LINK_FLAGS "-Wno-odr")
  endif()
  add_test(NAME cxx03 COMMAND cxx03_test --benchmark_min_time=0.01)
endif()

# Attempt to work around flaky test failures when running on Appveyor servers.
if (DEFINED ENV{APPVEYOR})
  set(COMPLEXITY_MIN_TIME "0.5")
else()
  set(COMPLEXITY_MIN_TIME "0.01")
endif()
compile_output_test(complexity_test)
add_test(NAME complexity_benchmark COMMAND complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})

###############################################################################
# GoogleTest Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_GTEST_TESTS)
  macro(compile_gtest name)
    add_executable(${name} "${name}.cc")
    target_link_libraries(${name} benchmark::benchmark
      gmock_main ${CMAKE_THREAD_LIBS_INIT})
  endmacro(compile_gtest)

  macro(add_gtest name)
    compile_gtest(${name})
    add_test(NAME ${name} COMMAND ${name})
  endmacro()

  add_gtest(benchmark_gtest)
  add_gtest(benchmark_name_gtest)
  add_gtest(benchmark_random_interleaving_gtest)
  add_gtest(commandlineflags_gtest)
  add_gtest(statistics_gtest)
  add_gtest(string_util_gtest)
  add_gtest(perf_counters_gtest)
endif(BENCHMARK_ENABLE_GTEST_TESTS)

###############################################################################
# Assembly Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_ASSEMBLY_TESTS)
  if (NOT LLVM_FILECHECK_EXE)
    message(FATAL_ERROR "LLVM FileCheck is required when including this file")
  endif()
  include(AssemblyTests.cmake)
  add_filecheck_test(donotoptimize_assembly_test)
  add_filecheck_test(state_assembly_test)
  add_filecheck_test(clobber_memory_assembly_test)
endif()
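
# A configure-time sketch for enabling the assembly tests above. The FileCheck
# path is an assumption and varies by LLVM installation:
#
#   cmake -DBENCHMARK_ENABLE_ASSEMBLY_TESTS=ON \
#         -DLLVM_FILECHECK_EXE=/usr/bin/FileCheck <path-to-source>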

###############################################################################
# Code Coverage Configuration
###############################################################################

# Add the coverage command(s)
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
# Reference the variable by name so the check is simply false, rather than a
# configure error, when CMAKE_BUILD_TYPE_LOWER is unset.
if (CMAKE_BUILD_TYPE_LOWER MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
      "Coverage not available:\n"
      "  gcov: ${GCOV}\n"
      "  lcov: ${LCOV}\n"
      "  genhtml: ${GENHTML}\n"
      "  ctest: ${CTEST}\n"
      "  --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()
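
# A minimal invocation sketch for the coverage report above, assuming the
# top-level project maps the "Coverage" build type to a compiler that accepts
# --coverage (per the HAVE_CXX_FLAG_COVERAGE check):
#
#   cmake -DCMAKE_BUILD_TYPE=Coverage <path-to-source>
#   cmake --build . --target coverage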