# Enable the tests

find_package(Threads REQUIRED)
include(CheckCXXCompilerFlag)

# NOTE: Some tests use `<cassert>` to perform their checks. Therefore we must
# strip -DNDEBUG from the default CMake flags in the non-Debug configurations,
# which define NDEBUG by default.
string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
if( NOT uppercase_CMAKE_BUILD_TYPE STREQUAL "DEBUG" )
  add_definitions( -UNDEBUG )
  add_definitions(-DTEST_BENCHMARK_LIBRARY_HAS_NO_ASSERTIONS)
  # Also remove /D NDEBUG to avoid MSVC warnings about conflicting defines.
  foreach (flags_var_to_scrub
      CMAKE_CXX_FLAGS_RELEASE
      CMAKE_CXX_FLAGS_RELWITHDEBINFO
      CMAKE_CXX_FLAGS_MINSIZEREL
      CMAKE_C_FLAGS_RELEASE
      CMAKE_C_FLAGS_RELWITHDEBINFO
      CMAKE_C_FLAGS_MINSIZEREL)
    string (REGEX REPLACE "(^| )[/-]D *NDEBUG($| )" " "
      "${flags_var_to_scrub}" "${${flags_var_to_scrub}}")
  endforeach()
endif()

# NOTE: These flags must be added after find_package(Threads REQUIRED),
# otherwise they will break the configuration check.
if (DEFINED BENCHMARK_CXX_LINKER_FLAGS)
  list(APPEND CMAKE_EXE_LINKER_FLAGS ${BENCHMARK_CXX_LINKER_FLAGS})
endif()

add_library(output_test_helper STATIC output_test_helper.cc output_test.h)

# compile_benchmark_test: build a stand-alone test executable from <name>.cc
# and link it against the benchmark library.
macro(compile_benchmark_test name)
  add_executable(${name} "${name}.cc")
  target_link_libraries(${name} benchmark ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_benchmark_test)

# compile_output_test: like compile_benchmark_test, but also links
# output_test_helper, which the output-checking tests use to verify
# reporter output.
macro(compile_output_test name)
  add_executable(${name} "${name}.cc" output_test.h)
  target_link_libraries(${name} output_test_helper benchmark
    ${BENCHMARK_CXX_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
endmacro(compile_output_test)

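# Usage sketch for the helpers above (the test name is hypothetical; a real
# test would also need a matching new_feature_test.cc in this directory):
#
#   compile_benchmark_test(new_feature_test)
#   add_test(new_feature_test new_feature_test --benchmark_min_time=0.01)
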
# Demonstration executable
compile_benchmark_test(benchmark_test)
add_test(benchmark benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(filter_test)
macro(add_filter_test name filter expect)
  add_test(${name} filter_test --benchmark_min_time=0.01 --benchmark_filter=${filter} ${expect})
  add_test(${name}_list_only filter_test --benchmark_list_tests --benchmark_filter=${filter} ${expect})
endmacro(add_filter_test)

add_filter_test(filter_simple "Foo" 3)
add_filter_test(filter_suffix "BM_.*" 4)
add_filter_test(filter_regex_all ".*" 5)
add_filter_test(filter_regex_blank "" 5)
add_filter_test(filter_regex_none "monkey" 0)
add_filter_test(filter_regex_wildcard ".*Foo.*" 3)
add_filter_test(filter_regex_begin "^BM_.*" 4)
add_filter_test(filter_regex_begin2 "^N" 1)
add_filter_test(filter_regex_end ".*Ba$" 1)

compile_benchmark_test(options_test)
add_test(options_benchmarks options_test --benchmark_min_time=0.01)

compile_benchmark_test(basic_test)
add_test(basic_benchmark basic_test --benchmark_min_time=0.01)

compile_benchmark_test(diagnostics_test)
add_test(diagnostics_test diagnostics_test --benchmark_min_time=0.01)

compile_benchmark_test(skip_with_error_test)
add_test(skip_with_error_test skip_with_error_test --benchmark_min_time=0.01)

compile_benchmark_test(donotoptimize_test)
# Some of the issues with DoNotOptimize only occur when optimization is enabled
check_cxx_compiler_flag(-O3 BENCHMARK_HAS_O3_FLAG)
if (BENCHMARK_HAS_O3_FLAG)
  set_target_properties(donotoptimize_test PROPERTIES COMPILE_FLAGS "-O3")
endif()
add_test(donotoptimize_test donotoptimize_test --benchmark_min_time=0.01)

compile_benchmark_test(fixture_test)
add_test(fixture_test fixture_test --benchmark_min_time=0.01)

compile_benchmark_test(register_benchmark_test)
add_test(register_benchmark_test register_benchmark_test --benchmark_min_time=0.01)

compile_benchmark_test(map_test)
add_test(map_test map_test --benchmark_min_time=0.01)

compile_benchmark_test(multiple_ranges_test)
add_test(multiple_ranges_test multiple_ranges_test --benchmark_min_time=0.01)

compile_output_test(reporter_output_test)
add_test(reporter_output_test reporter_output_test --benchmark_min_time=0.01)

compile_output_test(templated_fixture_test)
add_test(templated_fixture_test templated_fixture_test --benchmark_min_time=0.01)

compile_output_test(user_counters_test)
add_test(user_counters_test user_counters_test --benchmark_min_time=0.01)

compile_output_test(user_counters_tabular_test)
add_test(user_counters_tabular_test user_counters_tabular_test --benchmark_counters_tabular=true --benchmark_min_time=0.01)

check_cxx_compiler_flag(-std=c++03 BENCHMARK_HAS_CXX03_FLAG)
if (BENCHMARK_HAS_CXX03_FLAG)
  compile_benchmark_test(cxx03_test)
  set_target_properties(cxx03_test
    PROPERTIES
    COMPILE_FLAGS "-std=c++03")
  # libstdc++ provides different definitions within <map> between dialects. When
  # LTO is enabled and -Werror is specified GCC diagnoses this ODR violation
  # causing the test to fail to compile. To prevent this we explicitly disable
  # the warning.
  check_cxx_compiler_flag(-Wno-odr BENCHMARK_HAS_WNO_ODR)
  if (BENCHMARK_ENABLE_LTO AND BENCHMARK_HAS_WNO_ODR)
    set_target_properties(cxx03_test
      PROPERTIES
      LINK_FLAGS "-Wno-odr")
  endif()
  add_test(cxx03 cxx03_test --benchmark_min_time=0.01)
endif()

# Attempt to work around flaky test failures when running on Appveyor servers.
if (DEFINED ENV{APPVEYOR})
  set(COMPLEXITY_MIN_TIME "0.5")
else()
  set(COMPLEXITY_MIN_TIME "0.01")
endif()
compile_output_test(complexity_test)
add_test(complexity_benchmark complexity_test --benchmark_min_time=${COMPLEXITY_MIN_TIME})

###############################################################################
# GoogleTest Unit Tests
###############################################################################

if (BENCHMARK_ENABLE_GTEST_TESTS)
  macro(compile_gtest name)
    add_executable(${name} "${name}.cc")
    if (TARGET googletest)
      add_dependencies(${name} googletest)
    endif()
    target_link_libraries(${name} benchmark
      "${GTEST_BOTH_LIBRARIES}" ${CMAKE_THREAD_LIBS_INIT})
  endmacro(compile_gtest)

  macro(add_gtest name)
    compile_gtest(${name})
    add_test(${name} ${name})
  endmacro()

  add_gtest(statistics_test)
endif(BENCHMARK_ENABLE_GTEST_TESTS)

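# The coverage setup below only activates when the build type matches
# "coverage". The custom command drives lcov/genhtml as follows: zero any
# existing counters (-z), capture a baseline before the tests run (-i), run
# the suite through ctest, capture counters again, merge the two captures,
# drop coverage data for the test/ sources themselves, and render an HTML
# report into <build>/lcov with genhtml.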
# Add the coverage command(s)
if(CMAKE_BUILD_TYPE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_LOWER)
endif()
# Quote the variable so the check still works when CMAKE_BUILD_TYPE is unset.
if ("${CMAKE_BUILD_TYPE_LOWER}" MATCHES "coverage")
  find_program(GCOV gcov)
  find_program(LCOV lcov)
  find_program(GENHTML genhtml)
  find_program(CTEST ctest)
  if (GCOV AND LCOV AND GENHTML AND CTEST AND HAVE_CXX_FLAG_COVERAGE)
    add_custom_command(
      OUTPUT ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMAND ${LCOV} -q -z -d .
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o before.lcov -i
      COMMAND ${CTEST} --force-new-ctest-process
      COMMAND ${LCOV} -q --no-external -c -b "${CMAKE_SOURCE_DIR}" -d . -o after.lcov
      COMMAND ${LCOV} -q -a before.lcov -a after.lcov --output-file final.lcov
      COMMAND ${LCOV} -q -r final.lcov "'${CMAKE_SOURCE_DIR}/test/*'" -o final.lcov
      COMMAND ${GENHTML} final.lcov -o lcov --demangle-cpp --sort -p "${CMAKE_BINARY_DIR}" -t benchmark
      DEPENDS filter_test benchmark_test options_test basic_test fixture_test cxx03_test complexity_test
      WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
      COMMENT "Running LCOV"
    )
    add_custom_target(coverage
      DEPENDS ${CMAKE_BINARY_DIR}/lcov/index.html
      COMMENT "LCOV report at lcov/index.html"
    )
    message(STATUS "Coverage command added")
  else()
    if (HAVE_CXX_FLAG_COVERAGE)
      set(CXX_FLAG_COVERAGE_MESSAGE supported)
    else()
      set(CXX_FLAG_COVERAGE_MESSAGE unavailable)
    endif()
    message(WARNING
      "Coverage not available:\n"
      "  gcov: ${GCOV}\n"
      "  lcov: ${LCOV}\n"
      "  genhtml: ${GENHTML}\n"
      "  ctest: ${CTEST}\n"
      "  --coverage flag: ${CXX_FLAG_COVERAGE_MESSAGE}")
  endif()
endif()
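
# A rough sketch of driving this file from the repository root (the option and
# workflow below are assumptions about the top-level build, not something this
# file enforces):
#
#   cmake -S . -B build -DBENCHMARK_ENABLE_TESTING=ON
#   cmake --build build
#   (cd build && ctest)
#
# For the coverage target above, configure with -DCMAKE_BUILD_TYPE=Coverage on
# a toolchain where the --coverage flag is supported (the top-level build is
# expected to set HAVE_CXX_FLAG_COVERAGE), then build the `coverage` target;
# the report is written to <build>/lcov/index.html.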