# Copyright 2022 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""
bloat is a script that generates a size report card for binary files.
"""

import argparse
import json
import logging
import os
from pathlib import Path
import subprocess
import sys
import tempfile
from typing import Iterable, Optional

import pw_cli.log

from pw_bloat.bloaty_config import generate_bloaty_config
from pw_bloat.label import DataSourceMap, Label
from pw_bloat.label_output import (
    BloatTableOutput,
    LineCharset,
    RstOutput,
    AsciiCharset,
)

_LOG = logging.getLogger(__name__)

MAX_COL_WIDTH = 50
BINARY_SIZES_EXTENSION = '.binary_sizes.json'


def parse_args() -> argparse.Namespace:
    """Parses the script's arguments."""

    parser = argparse.ArgumentParser(
        description='Generate a size report card for binaries'
    )
    parser.add_argument(
        '--gn-arg-path',
        type=str,
        required=True,
        help='Path to a JSON file describing the binaries to report on',
    )
    parser.add_argument(
        '--single-report',
        action='store_true',
        help='Generate a size report for a single binary rather than a diff',
    )

    return parser.parse_args()


def run_bloaty(
    filename: str,
    config: str,
    base_file: Optional[str] = None,
    data_sources: Iterable[str] = (),
    extra_args: Iterable[str] = (),
) -> bytes:
    """Executes a Bloaty size report on some binary file(s).

    Args:
        filename: Path to the binary.
        config: Path to Bloaty config file.
        base_file: Path to a base binary. If provided, a size diff is performed.
        data_sources: List of Bloaty data sources for the report.
        extra_args: Additional command-line arguments to pass to Bloaty.

    Returns:
        Binary output of the Bloaty invocation.

    Raises:
        subprocess.CalledProcessError: The Bloaty invocation failed.
    """

    default_bloaty = 'bloaty'
    bloaty_path = os.getenv('BLOATY_PATH', default_bloaty)

    # Report sizes in the VM (runtime memory) domain and do not limit the
    # number of output rows (-n 0).
    cmd = [
        bloaty_path,
        '-c',
        config,
        '-d',
        ','.join(data_sources),
        '--domain',
        'vm',
        '-n',
        '0',
        filename,
        *extra_args,
    ]

    if base_file is not None:
        cmd.extend(['--', base_file])

    return subprocess.check_output(cmd)


class NoMemoryRegions(Exception):
    """Exception raised if an ELF does not define any memory region symbols."""

    def __init__(self, elf: Path):
        super().__init__(f'ELF {elf} does not define memory region symbols')
        self.elf = elf


def memory_regions_size_report(
    elf: Path,
    data_sources: Iterable[str] = (),
    extra_args: Iterable[str] = (),
) -> Iterable[str]:
    """Runs a size report on an ELF file using pw_bloat memory region symbols.

    Args:
        elf: The ELF binary on which to run.
        data_sources: Hierarchical data sources to display.
        extra_args: Additional command line arguments forwarded to bloaty.

    Returns:
        The bloaty TSV output detailing the size report.

    Raises:
        NoMemoryRegions: The ELF does not define memory region symbols.
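
    Example (illustrative; the ELF path and the 'memoryregions' data source
    name are assumptions that depend on how the binary was built and on the
    Bloaty config generated for it):

        tsv_lines = memory_regions_size_report(
            Path('out/app.elf'),
            data_sources=('memoryregions', 'sections'),
            extra_args=('--tsv',),
        )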
135 """ 136 with tempfile.NamedTemporaryFile() as bloaty_config: 137 with open(elf.resolve(), "rb") as infile, open( 138 bloaty_config.name, "w" 139 ) as outfile: 140 result = generate_bloaty_config( 141 infile, 142 enable_memoryregions=True, 143 enable_utilization=False, 144 out_file=outfile, 145 ) 146 147 if not result.has_memoryregions: 148 raise NoMemoryRegions(elf) 149 150 return ( 151 run_bloaty( 152 str(elf.resolve()), 153 bloaty_config.name, 154 data_sources=data_sources, 155 extra_args=extra_args, 156 ) 157 .decode('utf-8') 158 .splitlines() 159 ) 160 161 162def write_file(filename: str, contents: str, out_dir_file: str) -> None: 163 path = os.path.join(out_dir_file, filename) 164 with open(path, 'w') as output_file: 165 output_file.write(contents) 166 _LOG.debug('Output written to %s', path) 167 168 169def create_binary_sizes_json(binary_name: str, labels: Iterable[Label]) -> str: 170 """Creates a binary_sizes.json file content from a list of labels. 171 172 Args: 173 binary_name: the single binary name to attribute segment sizes to. 174 labels: the label.Label content to include 175 176 Returns: 177 a string of content to write to binary_sizes.json file. 178 """ 179 json_content = { 180 f'{binary_name} {label.name}': label.size for label in labels 181 } 182 return json.dumps(json_content, sort_keys=True, indent=2) 183 184 185def single_target_output( 186 target: str, 187 bloaty_config: str, 188 target_out_file: str, 189 out_dir: str, 190 data_sources: Iterable[str], 191 extra_args: Iterable[str], 192) -> int: 193 """TODO(frolv) Add docstring.""" 194 195 try: 196 single_output = run_bloaty( 197 target, 198 bloaty_config, 199 data_sources=data_sources, 200 extra_args=extra_args, 201 ) 202 203 except subprocess.CalledProcessError: 204 _LOG.error('%s: failed to run size report on %s', sys.argv[0], target) 205 return 1 206 207 single_tsv = single_output.decode().splitlines() 208 single_report = BloatTableOutput( 209 DataSourceMap.from_bloaty_tsv(single_tsv), MAX_COL_WIDTH, LineCharset 210 ) 211 212 data_source_map = DataSourceMap.from_bloaty_tsv(single_tsv) 213 rst_single_report = BloatTableOutput( 214 data_source_map, 215 MAX_COL_WIDTH, 216 AsciiCharset, 217 True, 218 ) 219 220 single_report_table = single_report.create_table() 221 222 # Generates contents for top level summary for binary_sizes.json 223 binary_json_content = create_binary_sizes_json( 224 target, data_source_map.labels(ds_index=0) 225 ) 226 227 print(single_report_table) 228 write_file(target_out_file, rst_single_report.create_table(), out_dir) 229 write_file(f'{target_out_file}.txt', single_report_table, out_dir) 230 write_file( 231 f'{target_out_file}{BINARY_SIZES_EXTENSION}', 232 binary_json_content, 233 out_dir, 234 ) 235 236 return 0 237 238 239def main() -> int: 240 """Program entry point.""" 241 242 args = parse_args() 243 extra_args = ['--tsv'] 244 data_sources = ['segment_names', 'symbols'] 245 gn_arg_dict = {} 246 json_file = open(args.gn_arg_path) 247 gn_arg_dict = json.load(json_file) 248 249 if args.single_report: 250 single_binary_args = gn_arg_dict['binaries'][0] 251 if single_binary_args['source_filter']: 252 extra_args.extend( 253 ['--source-filter', single_binary_args['source_filter']] 254 ) 255 if single_binary_args['data_sources']: 256 data_sources = single_binary_args['data_sources'] 257 258 return single_target_output( 259 single_binary_args['target'], 260 single_binary_args['bloaty_config'], 261 gn_arg_dict['target_name'], 262 gn_arg_dict['out_dir'], 263 data_sources, 264 extra_args, 265 ) 266 
    default_data_sources = ['segment_names', 'symbols']

    diff_report = ''
    rst_diff_report = ''
    for curr_diff_binary in gn_arg_dict['binaries']:
        curr_extra_args = extra_args.copy()
        data_sources = default_data_sources

        if curr_diff_binary['source_filter']:
            curr_extra_args.extend(
                ['--source-filter', curr_diff_binary['source_filter']]
            )

        if curr_diff_binary['data_sources']:
            data_sources = curr_diff_binary['data_sources']

        try:
            single_output_base = run_bloaty(
                curr_diff_binary['base'],
                curr_diff_binary['bloaty_config'],
                data_sources=data_sources,
                extra_args=curr_extra_args,
            )

        except subprocess.CalledProcessError:
            _LOG.error(
                '%s: failed to run base size report on %s',
                sys.argv[0],
                curr_diff_binary['base'],
            )
            return 1

        try:
            single_output_target = run_bloaty(
                curr_diff_binary['target'],
                curr_diff_binary['bloaty_config'],
                data_sources=data_sources,
                extra_args=curr_extra_args,
            )

        except subprocess.CalledProcessError:
            _LOG.error(
                '%s: failed to run target size report on %s',
                sys.argv[0],
                curr_diff_binary['target'],
            )
            return 1

        # Skip binaries for which Bloaty produced no output.
        if not single_output_target or not single_output_base:
            continue

        base_dsm = DataSourceMap.from_bloaty_tsv(
            single_output_base.decode().splitlines()
        )
        target_dsm = DataSourceMap.from_bloaty_tsv(
            single_output_target.decode().splitlines()
        )
        diff_dsm = target_dsm.diff(base_dsm)

        diff_report += BloatTableOutput(
            diff_dsm,
            MAX_COL_WIDTH,
            LineCharset,
            diff_label=curr_diff_binary['label'],
        ).create_table()

        # The first diff starts the RST table; later diffs append rows to it.
        curr_rst_report = RstOutput(diff_dsm, curr_diff_binary['label'])
        if rst_diff_report == '':
            rst_diff_report = curr_rst_report.create_table()
        else:
            rst_diff_report += f"{curr_rst_report.add_report_row()}\n"

    print(diff_report)
    write_file(
        gn_arg_dict['target_name'], rst_diff_report, gn_arg_dict['out_dir']
    )
    write_file(
        f"{gn_arg_dict['target_name']}.txt", diff_report, gn_arg_dict['out_dir']
    )

    return 0


if __name__ == '__main__':
    pw_cli.log.install()
    sys.exit(main())
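
# The JSON file passed via --gn-arg-path is expected to contain at least the
# keys read in main(): 'target_name', 'out_dir', and a 'binaries' list whose
# entries follow the shape sketched in main() above. A minimal illustrative
# example (the specific names and paths are assumptions, not values this
# script requires):
#
#   {
#     "target_name": "my_size_report",
#     "out_dir": "out/gen/size_reports",
#     "binaries": [ ... ]
#   }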