# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.

import("//build_overrides/pigweed.gni")

import("$dir_pw_build/python.gni")
import("$dir_pw_protobuf_compiler/proto.gni")

pw_python_package("py") {
  generate_setup = {
    metadata = {
      name = "pw_tokenizer"
      version = "0.0.1"
    }
    options = {
      extras_require = {
        serial_detokenizer = [ "serial" ]
      }
    }
  }
  sources = [
    "generate_argument_types_macro.py",
    "generate_hash_macro.py",
    "generate_hash_test_data.py",
    "pw_tokenizer/__init__.py",
    "pw_tokenizer/__main__.py",
    "pw_tokenizer/database.py",
    "pw_tokenizer/decode.py",
    "pw_tokenizer/detokenize.py",
    "pw_tokenizer/elf_reader.py",
    "pw_tokenizer/encode.py",
    "pw_tokenizer/parse_message.py",
    "pw_tokenizer/proto/__init__.py",
    "pw_tokenizer/serial_detokenizer.py",
    "pw_tokenizer/tokens.py",
  ]
  tests = [
    "database_test.py",
    "decode_test.py",
    "detokenize_proto_test.py",
    "detokenize_test.py",
    "elf_reader_test.py",
    "encode_test.py",
    "tokenized_string_decoding_test_data.py",
    "tokens_test.py",
    "varint_test_data.py",
  ]
  python_test_deps = [ ":test_proto.python" ]
  inputs = [
    "elf_reader_test_binary.elf",
    "example_binary_with_tokenized_strings.elf",
    "example_legacy_binary_with_tokenized_strings.elf",
  ]
  proto_library = "..:proto"
  pylintrc = "$dir_pigweed/.pylintrc"
}

# This setup.py may be used to install pw_tokenizer without GN. It does not
# include the pw_tokenizer.proto subpackage, since it contains a generated
# protobuf module.
pw_python_script("setup") {
  sources = [ "setup.py" ]
  inputs = [
    "setup.cfg",
    "pyproject.toml",
  ]
}

pw_proto_library("test_proto") {
  sources = [ "detokenize_proto_test.proto" ]
  deps = [ "..:proto" ]
  prefix = "pw_tokenizer_tests"
}
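
# Usage sketch (not part of the build): the ":setup" script above supports
# installing this package with plain pip, assuming the commands are run from
# the directory containing setup.py. The "serial_detokenizer" extra matches
# the extras_require entry declared in the ":py" target.
#
#   python -m pip install .
#   python -m pip install ".[serial_detokenizer]"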