# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import("//build_overrides/pigweed.gni")

import("$dir_pw_arduino_build/arduino.gni")
import("$dir_pw_bloat/bloat.gni")
import("$dir_pw_build/cc_blob_library.gni")
import("$dir_pw_build/module_config.gni")
import("$dir_pw_build/target_types.gni")
import("$dir_pw_docgen/docs.gni")
import("$dir_pw_fuzzer/fuzzer.gni")
import("$dir_pw_protobuf_compiler/proto.gni")
import("$dir_pw_unit_test/test.gni")

declare_args() {
  # The build target that overrides the default configuration options for this
  # module. This should point to a source set that provides defines through a
  # public config (which may -include a file or add defines directly).
  pw_tokenizer_CONFIG = pw_build_DEFAULT_MODULE_CONFIG
}
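
# As a sketch of how such an override works (the target names below are
# hypothetical, not part of this build), a downstream project could define:
#
#   config("tokenizer_config_overrides") {
#     defines = [ "PW_TOKENIZER_CFG_C_HASH_LENGTH=96" ]
#   }
#
#   pw_source_set("my_tokenizer_config") {
#     public_configs = [ ":tokenizer_config_overrides" ]
#   }
#
# and then point pw_tokenizer_CONFIG at that source set's label in args.gn.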

config("public_include_path") {
  include_dirs = [ "public" ]
  visibility = [ ":*" ]
}

config("linker_script") {
  inputs = [ "pw_tokenizer_linker_sections.ld" ]
  lib_dirs = [ "." ]

  # Automatically add the tokenizer linker sections when cross-compiling or
  # building for Linux. macOS and Windows executables are not supported.
  if (current_os == "" || current_os == "freertos") {
    ldflags = [
      "-T",
      rebase_path("pw_tokenizer_linker_sections.ld", root_build_dir),
    ]
  } else if (current_os == "linux" && !pw_toolchain_OSS_FUZZ_ENABLED) {
    # When building for Linux, the linker provides a default linker script.
    # The add_tokenizer_sections_to_default_script.ld wrapper includes the
    # pw_tokenizer_linker_sections.ld script in a way that appends to the
    # default linker script instead of overriding it.
    ldflags = [
      "-T",
      rebase_path("add_tokenizer_sections_to_default_script.ld",
                  root_build_dir),
    ]

    inputs += [ "add_tokenizer_sections_to_default_script.ld" ]
  }
  visibility = [ ":*" ]
}
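
# For context, a wrapper script like the one referenced above can append
# sections to the linker's default script with the INCLUDE and INSERT
# directives. A minimal sketch (not the verbatim contents of the file):
#
#   INCLUDE pw_tokenizer_linker_sections.ld
#
#   INSERT AFTER .rodata;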

pw_source_set("config") {
  public = [ "public/pw_tokenizer/config.h" ]
  public_configs = [ ":public_include_path" ]
  public_deps = [ pw_tokenizer_CONFIG ]
}

pw_source_set("pw_tokenizer") {
  public_configs = [ ":public_include_path" ]
  all_dependent_configs = [ ":linker_script" ]
  public_deps = [
    ":config",
    "$dir_pw_containers:to_array",
    dir_pw_polyfill,
    dir_pw_preprocessor,
    dir_pw_span,
    dir_pw_varint,
  ]
  public = [
    "public/pw_tokenizer/encode_args.h",
    "public/pw_tokenizer/hash.h",
    "public/pw_tokenizer/nested_tokenization.h",
    "public/pw_tokenizer/tokenize.h",
  ]
  sources = [
    "encode_args.cc",
    "hash.cc",
    "public/pw_tokenizer/internal/argument_types.h",
    "public/pw_tokenizer/internal/argument_types_macro_4_byte.h",
    "public/pw_tokenizer/internal/argument_types_macro_8_byte.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_128_hash_macro.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_256_hash_macro.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_80_hash_macro.h",
    "public/pw_tokenizer/internal/pw_tokenizer_65599_fixed_length_96_hash_macro.h",
    "public/pw_tokenizer/internal/tokenize_string.h",
    "tokenize.cc",
  ]
  friend = [ ":*" ]
}
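
# As a sketch of how downstream C++ code typically uses this target (names
# here are illustrative; the macro comes from public/pw_tokenizer/tokenize.h):
#
#   #include "pw_tokenizer/tokenize.h"
#
#   // Hashes the string literal to a 32-bit token at compile time.
#   constexpr uint32_t kGreetingToken = PW_TOKENIZE_STRING("Hello, %s!");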

pw_source_set("base64") {
  public_configs = [ ":public_include_path" ]
  public = [ "public/pw_tokenizer/base64.h" ]
  sources = [ "base64.cc" ]
  public_deps = [
    ":pw_tokenizer",
    "$dir_pw_string:string",
    dir_pw_base64,
    dir_pw_preprocessor,
  ]
}

pw_source_set("decoder") {
  public_configs = [ ":public_include_path" ]
  public_deps = [
    dir_pw_preprocessor,
    dir_pw_result,
    dir_pw_span,
  ]
  deps = [
    ":base64",
    "$dir_pw_bytes:bit",
    dir_pw_base64,
    dir_pw_bytes,
    dir_pw_varint,
  ]
  public = [
    "public/pw_tokenizer/detokenize.h",
    "public/pw_tokenizer/token_database.h",
  ]
  sources = [
    "decode.cc",
    "detokenize.cc",
    "public/pw_tokenizer/internal/decode.h",
    "token_database.cc",
  ]
  friend = [ ":*" ]
}
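
# Sketch of the C++ decoding flow this target provides (kDatabaseData and
# encoded_message are hypothetical placeholders; error handling omitted):
#
#   #include "pw_tokenizer/detokenize.h"
#   #include "pw_tokenizer/token_database.h"
#
#   pw::tokenizer::Detokenizer detokenizer(
#       pw::tokenizer::TokenDatabase::Create(kDatabaseData));
#   auto result = detokenizer.Detokenize(encoded_message);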

# Executable for generating test data for the C++ and Python detokenizers. This
# target should only be built for the host.
pw_executable("generate_decoding_test_data") {
  deps = [
    ":decoder",
    ":pw_tokenizer",
    dir_pw_varint,
  ]
  sources = [ "generate_decoding_test_data.cc" ]
}

# Executable for generating a test ELF file for elf_reader_test.py. A host
# version of this binary is checked in for use in elf_reader_test.py.
pw_executable("elf_reader_test_binary") {
  deps = [
    ":pw_tokenizer",
    "$dir_pw_varint",
  ]
  sources = [ "py/elf_reader_test_binary.c" ]
  ldflags = [ "-Wl,--unresolved-symbols=ignore-all" ]  # main is not defined
}

pw_test_group("tests") {
  tests = [
    ":argument_types_test",
    ":base64_test",
    ":decode_test",
    ":detokenize_test",
    ":encode_args_test",
    ":hash_test",
    ":simple_tokenize_test",
    ":token_database_test",
    ":tokenize_c99_test",
    ":tokenize_test",
  ]
  group_deps = [
    ":fuzzers",
    "$dir_pw_preprocessor:tests",
  ]
}

pw_fuzzer_group("fuzzers") {
  fuzzers = [
    ":detokenize_fuzzer",
    ":token_database_fuzzer",
  ]
}

pw_test("argument_types_test") {
  sources = [
    "argument_types_test.cc",
    "argument_types_test_c.c",
    "pw_tokenizer_private/argument_types_test.h",
  ]
  deps = [ ":pw_tokenizer" ]

  if (pw_arduino_build_CORE_PATH != "") {
    remove_configs = [ "$dir_pw_build:strict_warnings" ]
  }
}

pw_test("base64_test") {
  sources = [ "base64_test.cc" ]
  deps = [
    ":base64",
    dir_pw_span,
  ]
}

pw_test("decode_test") {
  sources = [
    "decode_test.cc",
    "pw_tokenizer_private/tokenized_string_decoding_test_data.h",
    "pw_tokenizer_private/varint_decoding_test_data.h",
  ]
  deps = [
    ":decoder",
    "$dir_pw_varint",
  ]

  # TODO(tonymd): This fails on the Teensyduino 1.54 beta core. It may be
  # related to linking in STL functions. Will debug when 1.54 is released.
  enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable"
}

pw_test("detokenize_test") {
  sources = [ "detokenize_test.cc" ]
  deps = [
    ":decoder",
    ":detokenizer_elf_test_blob",
  ]

  # TODO(tonymd): This fails on the Teensyduino 1.54 beta core. It may be
  # related to linking in STL functions. Will debug when 1.54 is released.
  enable_if = pw_build_EXECUTABLE_TARGET_TYPE != "arduino_executable"
}

pw_test("encode_args_test") {
  sources = [ "encode_args_test.cc" ]
  deps = [ ":pw_tokenizer" ]
}

pw_test("hash_test") {
  sources = [
    "hash_test.cc",
    "pw_tokenizer_private/generated_hash_test_cases.h",
  ]
  deps = [ ":pw_tokenizer" ]
}

pw_test("simple_tokenize_test") {
  sources = [ "simple_tokenize_test.cc" ]
  deps = [ ":pw_tokenizer" ]
}

pw_test("token_database_test") {
  sources = [ "token_database_test.cc" ]
  deps = [ ":decoder" ]
}

pw_test("tokenize_test") {
  sources = [
    "pw_tokenizer_private/tokenize_test.h",
    "tokenize_test.cc",
    "tokenize_test_c.c",
  ]
  deps = [
    ":pw_tokenizer",
    "$dir_pw_varint",
  ]
}

pw_test("tokenize_c99_test") {
  cflags_c = [
    "-std=c99",

    # pw_tokenizer uses static_assert, so this test uses a static_assert to
    # verify that it works. Silence warnings about using C11 features in
    # C99 mode.
    "-Wno-c11-extensions",
  ]
  sources = [
    "tokenize_c99_test.c",
    "tokenize_c99_test_entry_point.cc",
  ]
  deps = [
    ":pw_tokenizer",
    "$dir_pw_containers:inline_var_len_entry_queue",
  ]
}
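
# For context, once the warning above is silenced, Clang accepts C11 static
# assertions in C99 mode, so the test can contain checks roughly like this
# sketch (not the actual test source):
#
#   _Static_assert(sizeof(uint32_t) == 4, "tokens are 32-bit");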

pw_fuzzer("token_database_fuzzer") {
  sources = [ "token_database_fuzzer.cc" ]
  deps = [
    ":decoder",
    dir_pw_preprocessor,
    dir_pw_span,
  ]
}

pw_fuzzer("detokenize_fuzzer") {
  sources = [ "detokenize_fuzzer.cc" ]
  deps = [
    ":decoder",
    dir_pw_preprocessor,
  ]
}

pw_proto_library("proto") {
  sources = [ "pw_tokenizer_proto/options.proto" ]
  python_package = "py"
}

declare_args() {
  # pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS specifies the paths to use for
  # building Java Native Interface libraries. If no paths are provided, targets
  # that require JNI may not build correctly.
  #
  # Example JNI include paths for a Linux system:
  #
  #   pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS = [
  #     "/usr/local/buildtools/java/jdk/include/",
  #     "/usr/local/buildtools/java/jdk/include/linux",
  #   ]
  #
  pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS = []
}

# Create a shared library for the tokenizer JNI wrapper. The include paths for
# the JNI headers must be available in the system or provided with the
# pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS variable.
pw_shared_library("detokenizer_jni") {
  public_configs = [ ":public_include_path" ]
  include_dirs = pw_JAVA_NATIVE_INTERFACE_INCLUDE_DIRS
  sources = [ "java/dev/pigweed/tokenizer/detokenizer.cc" ]
  public_deps = [
    ":decoder",
    "$dir_pw_preprocessor",
  ]
  deps = [ dir_pw_span ]
}

pw_doc_group("docs") {
  sources = [
    "api.rst",
    "detokenization.rst",
    "docs.rst",
    "get_started.rst",
    "token_databases.rst",
    "tokenization.rst",
  ]
  inputs = [ "py/pw_tokenizer/encode.py" ]
  report_deps = [ ":tokenizer_size_report" ]
}

# Pigweed tokenizer size report.
pw_size_diff("tokenizer_size_report") {
  title = "Pigweed tokenizer size report"
  binaries = [
    {
      target = "size_report:tokenize_string"
      base = "size_report:tokenize_string_base"
      label = "tokenize a string"
    },
    {
      target = "size_report:tokenize_string_expr"
      base = "size_report:tokenize_string_expr_base"
      label = "tokenize a string expression"
    },
  ]
}

pw_cc_blob_library("detokenizer_elf_test_blob") {
  out_header = "pw_tokenizer/example_binary_with_tokenized_strings.h"
  namespace = "test::ns"
  blobs = [
    {
      file_path = "py/example_binary_with_tokenized_strings.elf"
      symbol_name = "kElfSection"
    },
  ]
  visibility = [ ":*" ]
}
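
# The blob library above generates the named header, which exposes the ELF
# contents to detokenize_test. As a rough sketch of the generated declaration
# (the array size is filled in at build time; exact output may differ):
#
#   #include <array>
#   #include <cstddef>
#
#   namespace test::ns {
#   extern const std::array<std::byte, kBlobSizeBytes> kElfSection;
#   }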