# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import("//build_overrides/pigweed.gni")

import("$dir_pw_arduino_build/arduino.gni")
import("$dir_pw_bloat/bloat.gni")
import("$dir_pw_build/cc_blob_library.gni")
import("$dir_pw_build/module_config.gni")
import("$dir_pw_build/target_types.gni")
import("$dir_pw_docgen/docs.gni")
import("$dir_pw_fuzzer/fuzzer.gni")
import("$dir_pw_protobuf_compiler/proto.gni")
import("$dir_pw_unit_test/test.gni")

declare_args() {
  # The build target that overrides the default configuration options for this
  # module. This should point to a source set that provides defines through a
  # public config (which may -include a file or add defines directly).
  pw_tokenizer_CONFIG = pw_build_DEFAULT_MODULE_CONFIG
}

config("public_include_path") {
  include_dirs = [ "public" ]
  visibility = [ ":*" ]
}

config("linker_script") {
  inputs = [ "pw_tokenizer_linker_sections.ld" ]
  lib_dirs = [ "." ]

  # Automatically add the tokenizer linker sections when cross-compiling or
  # building for Linux. macOS and Windows executables are not supported.
  if (current_os == "" || current_os == "freertos") {
    ldflags = [
      "-T",
      rebase_path("pw_tokenizer_linker_sections.ld", root_build_dir),
    ]
  } else if (current_os == "linux" && !pw_toolchain_OSS_FUZZ_ENABLED) {
    # When building for Linux, the linker provides a default linker script.
    # The add_tokenizer_sections_to_default_script.ld wrapper includes the
    # pw_tokenizer_linker_sections.ld script in a way that appends to the
    # default linker script instead of overriding it.
    ldflags = [
      "-T",
      rebase_path("add_tokenizer_sections_to_default_script.ld",
                  root_build_dir),
    ]

    inputs += [ "add_tokenizer_sections_to_default_script.ld" ]
  }
  visibility = [ ":*" ]
}

pw_source_set("config") {
  public = [ "public/pw_tokenizer/config.h" ]
  public_configs = [ ":public_include_path" ]
  public_deps = [ pw_tokenizer_CONFIG ]
}

pw_source_set("pw_tokenizer") {
  public_configs = [ ":public_include_path" ]
  all_dependent_configs = [ ":linker_script" ]
  public_deps = [
    ":config",
    "$dir_pw_containers:to_array",
    dir_pw_polyfill,
    dir_pw_preprocessor,
    dir_pw_span,
    dir_pw_varint,
  ]
  public = [
    "public/pw_tokenizer/encode_args.h",
    "public/pw_tokenizer/enum.h",
    "public/pw_tokenizer/hash.h",
    "public/pw_tokenizer/nested_tokenization.h",
    "public/pw_tokenizer/tokenize.h",
  ]
  sources = [
    "encode_args.cc",
    "hash.cc",
    "public/pw_tokenizer/internal/argument_types.h",
    "public/pw_tokenizer/internal/argument_types_macro_4_byte.h",
    "public/pw_tokenizer/internal/argument_types_macro_8_byte.h",
    "public/pw_tokenizer/internal/enum.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_128_hash_macro.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_256_hash_macro.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_80_hash_macro.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_96_hash_macro.h",
    "public/pw_tokenizer/internal/tokenize_string.h",
    "tokenize.cc",
  ]
  friend = [ ":*" ]
}

pw_source_set("base64") {
  public_configs = [ ":public_include_path" ]
  public = [ "public/pw_tokenizer/base64.h" ]
  sources = [ "base64.cc" ]
  public_deps = [
    ":pw_tokenizer",
    "$dir_pw_string:string",
    dir_pw_base64,
    dir_pw_preprocessor,
  ]
}

pw_source_set("decoder") {
  public_configs = [ ":public_include_path" ]
  public_deps = [
    dir_pw_preprocessor,
    dir_pw_result,
    dir_pw_span,
    dir_pw_stream,
  ]
  deps = [
    ":base64",
    ":csv",
    "$dir_pw_bytes:bit",
    "$dir_pw_elf:reader",
    dir_pw_base64,
    dir_pw_bytes,
    dir_pw_varint,
  ]
  public = [
    "public/pw_tokenizer/detokenize.h",
    "public/pw_tokenizer/token_database.h",
  ]
  sources = [
    "decode.cc",
    "detokenize.cc",
    "public/pw_tokenizer/internal/decode.h",
    "token_database.cc",
  ]
  friend = [ ":*" ]
}

pw_source_set("csv") {
  public = [ "pw_tokenizer_private/csv.h" ]
  sources = [ "csv.cc" ]
  deps = [ dir_pw_log ]
  visibility = [ ":*" ]
}

pw_test("csv_test") {
  sources = [ "csv_test.cc" ]
  deps = [ ":csv" ]
}

# Executable for generating test data for the C++ and Python detokenizers. This
# target should only be built for the host.
pw_executable("generate_decoding_test_data") {
  deps = [
    ":decoder",
    ":pw_tokenizer",
    dir_pw_varint,
  ]
  sources = [ "generate_decoding_test_data.cc" ]
}

# Executable for generating a test ELF file for elf_reader_test.py. A host
# version of this binary is checked in for use in elf_reader_test.py.
pw_executable("elf_reader_test_binary") {
  deps = [
    ":pw_tokenizer",
    dir_pw_varint,
  ]
  sources = [ "py/elf_reader_test_binary.c" ]
  ldflags = [ "-Wl,--unresolved-symbols=ignore-all" ]  # main is not defined
}

pw_test_group("tests") {
  # Kept in alphabetical order.
  tests = [
    ":argument_types_test",
    ":base64_test",
    ":csv_test",
    ":decode_test",
    ":detokenize_test",
    ":encode_args_test",
    ":enum_test",
    ":hash_test",
    ":simple_tokenize_test",
    ":token_database_test",
    ":tokenize_c99_test",
    ":tokenize_test",
  ]
  group_deps = [
    ":fuzzers",
    "$dir_pw_preprocessor:tests",
  ]
}

pw_fuzzer_group("fuzzers") {
  fuzzers = [
    ":detokenize_fuzzer",
    ":token_database_fuzzer",
  ]
}

pw_test("argument_types_test") {
  sources = [
    "argument_types_test.cc",
    "argument_types_test_c.c",
    "pw_tokenizer_private/argument_types_test.h",
  ]
  deps = [ ":pw_tokenizer" ]

  if (pw_arduino_build_CORE_PATH != "") {
    remove_configs = [ "$dir_pw_build:strict_warnings" ]
  }
}

pw_test("base64_test") {
  sources = [ "base64_test.cc" ]
  deps = [
    ":base64",
    dir_pw_span,
  ]
}

pw_test("decode_test") {
  sources = [
    "decode_test.cc",
    "pw_tokenizer_private/tokenized_string_decoding_test_data.h",
    "pw_tokenizer_private/varint_decoding_test_data.h",
  ]
  deps = [
    ":decoder",
    dir_pw_varint,
  ]

  # TODO(tonymd): This fails on Teensyduino 1.54 beta core. It may be related to
  # linking in stl functions. Will debug when 1.54 is released.
  enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable"
}

pw_test("detokenize_test") {
  sources = [ "detokenize_test.cc" ]
  deps = [
    ":decoder",
    ":detokenizer_elf_test_blob",
    dir_pw_stream,
  ]

  # TODO(tonymd): This fails on Teensyduino 1.54 beta core. It may be related to
  # linking in stl functions. Will debug when 1.54 is released.
  enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable"
}

pw_test("encode_args_test") {
  sources = [ "encode_args_test.cc" ]
  deps = [ ":pw_tokenizer" ]
}

pw_test("enum_test") {
  sources = [ "enum_test.cc" ]
  deps = [ ":pw_tokenizer" ]
  negative_compilation_tests = true
}

pw_test("hash_test") {
  sources = [
    "hash_test.cc",
    "pw_tokenizer_private/generated_hash_test_cases.h",
  ]
  deps = [ ":pw_tokenizer" ]
}

pw_test("simple_tokenize_test") {
  sources = [ "simple_tokenize_test.cc" ]
  deps = [ ":pw_tokenizer" ]
}

pw_test("token_database_test") {
  sources = [ "token_database_test.cc" ]
  deps = [ ":decoder" ]
}

pw_test("tokenize_test") {
  sources = [
    "pw_tokenizer_private/tokenize_test.h",
    "tokenize_test.cc",
    "tokenize_test_c.c",
  ]
  deps = [
    ":pw_tokenizer",
    dir_pw_varint,
  ]
}

pw_test("tokenize_c99_test") {
  cflags_c = [
    "-std=c99",

    # pw_tokenizer uses static_assert, so this test uses a static_assert to
    # verify that it works. Silence warnings about pre-adopting C11 features.
    "-Wno-c11-extensions",
  ]
  sources = [
    "tokenize_c99_test.c",
    "tokenize_c99_test_entry_point.cc",
  ]
  deps = [
    ":pw_tokenizer",
    "$dir_pw_containers:inline_var_len_entry_queue",
  ]
}

pw_fuzzer("token_database_fuzzer") {
  sources = [ "token_database_fuzzer.cc" ]
  deps = [
    ":decoder",
    dir_pw_preprocessor,
    dir_pw_span,
  ]
}

pw_fuzzer("detokenize_fuzzer") {
  sources = [ "detokenize_fuzzer.cc" ]
  deps = [
    ":decoder",
    dir_pw_preprocessor,
  ]
}

pw_proto_library("proto") {
  sources = [ "pw_tokenizer_proto/options.proto" ]
  python_package = "py"
}

declare_args() {
  # pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS specifies the paths to use for
  # building Java Native Interface libraries. If no paths are provided, targets
  # that require JNI may not build correctly.
  #
  # Example JNI include paths for a Linux system:
  #
  #   pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS = [
  #     "/usr/local/buildtools/java/jdk/include/",
  #     "/usr/local/buildtools/java/jdk/include/linux",
  #   ]
  #
  pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS = []
}

# Create a shared library for the tokenizer JNI wrapper. The include paths for
# the JNI headers must be available in the system or provided with the
# pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS variable.
pw_shared_library("detokenizer_jni") {
  public_configs = [ ":public_include_path" ]
  include_dirs = pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS
  sources = [ "java/dev/pigweed/tokenizer/detokenizer.cc" ]
  public_deps = [
    ":decoder",
    dir_pw_preprocessor,
  ]
  deps = [ dir_pw_span ]
}

pw_doc_group("docs") {
  inputs = [
    "Kconfig",
    "py/pw_tokenizer/encode.py",
    "public/pw_tokenizer/internal/tokenize_string.h",
  ]
  sources = [
    "api.rst",
    "detokenization.rst",
    "docs.rst",
    "get_started.rst",
    "token_databases.rst",
    "tokenization.rst",
  ]
  report_deps = [ ":tokenizer_size_report" ]
}

# Pigweed tokenizer size report.
pw_size_diff("tokenizer_size_report") {
  title = "Pigweed tokenizer size report"
  binaries = [
    {
      target = "size_report:tokenize_string"
      base = "size_report:tokenize_string_base"
      label = "tokenize a string"
    },
    {
      target = "size_report:tokenize_string_expr"
      base = "size_report:tokenize_string_expr_base"
      label = "tokenize a string expression"
    },
  ]
}

pw_cc_blob_library("detokenizer_elf_test_blob") {
  out_header = "pw_tokenizer/example_binary_with_tokenized_strings.h"
  namespace = "test::ns"
  blobs = [
    {
      file_path = "py/example_binary_with_tokenized_strings.elf"
      symbol_name = "kElfSection"
    },
  ]
  visibility = [ ":*" ]
}