# Copyright 2022 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""
bloat is a script which generates a size report card for binary files.
"""

import argparse
import json
import logging
import os
from pathlib import Path
import subprocess
import sys
import tempfile
from typing import Iterable

import pw_cli.log

from pw_bloat.bloaty_config import generate_bloaty_config
from pw_bloat.label import DataSourceMap
from pw_bloat.label_output import (
    BloatTableOutput,
    LineCharset,
    RstOutput,
    AsciiCharset,
)

_LOG = logging.getLogger(__name__)

MAX_COL_WIDTH = 50
BINARY_SIZES_EXTENSION = '.binary_sizes.json'

# Bloaty data sources used when a binary does not specify its own.
DEFAULT_DATA_SOURCES = ('segment_names', 'symbols')


def parse_args() -> argparse.Namespace:
    """Parses the script's arguments."""

    parser = argparse.ArgumentParser('Generate a size report card for binaries')
    parser.add_argument(
        '--gn-arg-path',
        type=str,
        required=True,
        help='File path to json of binaries',
    )
    parser.add_argument(
        '--single-report',
        action="store_true",
        help='Determine if calling single size report',
    )
    parser.add_argument(
        '--json-key-prefix',
        type=str,
        help='Prefix for json keys in size report, default = target name',
        default=None,
    )
    parser.add_argument(
        '--full-json-summary',
        action="store_true",
        help='Include all levels of data sources in json binary report',
    )
    parser.add_argument(
        '--ignore-unused-labels',
        action="store_true",
        help='Do not include labels with size equal to zero in report',
    )

    return parser.parse_args()


def run_bloaty(
    filename: str,
    config: str,
    base_file: str | None = None,
    data_sources: Iterable[str] = (),
    extra_args: Iterable[str] = (),
) -> bytes:
    """Executes a Bloaty size report on some binary file(s).

    Args:
        filename: Path to the binary.
        config: Path to Bloaty config file.
        base_file: Path to a base binary. If provided, a size diff is performed.
        data_sources: List of Bloaty data sources for the report.
        extra_args: Additional command-line arguments to pass to Bloaty.

    Returns:
        Binary output of the Bloaty invocation.

    Raises:
        subprocess.CalledProcessError: The Bloaty invocation failed.
    """

    # Allow the environment to point at a specific Bloaty binary; fall back
    # to whatever `bloaty` resolves to on PATH.
    default_bloaty = 'bloaty'
    bloaty_path = os.getenv('BLOATY_PATH', default_bloaty)

    # '-n 0' disables row collapsing so the full report is emitted; the
    # 'vm' domain reports in-memory (virtual address) sizes.
    cmd = [
        bloaty_path,
        '-c',
        config,
        '-d',
        ','.join(data_sources),
        '--domain',
        'vm',
        '-n',
        '0',
        filename,
        *extra_args,
    ]

    if base_file is not None:
        # Bloaty interprets files after '--' as the diff base.
        cmd.extend(['--', base_file])

    return subprocess.check_output(cmd)


class NoMemoryRegions(Exception):
    """Exception raised if an ELF does not define any memory region symbols."""

    def __init__(self, elf: Path):
        super().__init__(f'ELF {elf} does not define memory region symbols')
        self.elf = elf


def memory_regions_size_report(
    elf: Path,
    data_sources: Iterable[str] = (),
    extra_args: Iterable[str] = (),
) -> Iterable[str]:
    """Runs a size report on an ELF file using pw_bloat memory region symbols.

    Args:
        elf: The ELF binary on which to run.
        data_sources: Hierarchical data sources to display.
        extra_args: Additional command line arguments forwarded to bloaty.

    Returns:
        The bloaty TSV output detailing the size report.

    Raises:
        NoMemoryRegions: The ELF does not define memory region symbols.
    """
    # NOTE: the named temporary file is reopened by name so that
    # generate_bloaty_config and bloaty can both access it; this pattern
    # relies on POSIX semantics (it does not work on Windows).
    with tempfile.NamedTemporaryFile() as bloaty_config:
        with open(elf.resolve(), "rb") as infile, open(
            bloaty_config.name, "w"
        ) as outfile:
            result = generate_bloaty_config(
                infile,
                enable_memoryregions=True,
                enable_utilization=False,
                out_file=outfile,
            )

        if not result.has_memoryregions:
            raise NoMemoryRegions(elf)

        return (
            run_bloaty(
                str(elf.resolve()),
                bloaty_config.name,
                data_sources=data_sources,
                extra_args=extra_args,
            )
            .decode('utf-8')
            .splitlines()
        )


def write_file(filename: str, contents: str, out_dir_file: str) -> None:
    """Writes contents to filename within the out_dir_file directory."""
    path = os.path.join(out_dir_file, filename)
    # Explicit encoding so output does not depend on the platform locale.
    with open(path, 'w', encoding='utf-8') as output_file:
        output_file.write(contents)
    _LOG.debug('Output written to %s', path)


def create_binary_sizes_json(
    key_prefix: str,
    data_source_map: DataSourceMap,
    full_json: bool,
    ignore_unused_labels: bool,
) -> str:
    """Creates a binary_sizes.json file content from a list of labels.

    Args:
        key_prefix: Prefix for the json keys.
        data_source_map: Hierarchical structure containing size of sources.
        full_json: Report contains all sources, otherwise just top level.
        ignore_unused_labels: Doesn't include labels of size zero in json.

    Returns:
        A string of content to write to binary_sizes.json file.
    """
    json_content = {}
    if full_json:
        # Keys are dotted paths through the data source hierarchy, e.g.
        # '<prefix>.<ds>.<parent>...<leaf_ds>.<label>'.
        *ds_parents, last = data_source_map.get_ds_names()
        for label in data_source_map.labels():
            key = f'{key_prefix}.'
            for ds_parent, label_parent in zip(ds_parents, label.parents):
                key += f'{ds_parent}.{label_parent}.'
            key += f'{last}.{label.name}'
            if label.size != 0 or not ignore_unused_labels:
                json_content[key] = label.size
    else:
        # Top-level only: report just the first data source's labels.
        for label in data_source_map.labels(ds_index=0):
            if label.size != 0 or not ignore_unused_labels:
                json_content[f'{key_prefix}.{label.name}'] = label.size
    return json.dumps(json_content, sort_keys=True, indent=2)


def single_target_output(
    target: str,
    bloaty_config: str,
    target_out_file: str,
    out_dir: str,
    data_sources: Iterable[str],
    extra_args: Iterable[str],
    json_key_prefix: str,
    full_json: bool,
    ignore_unused_labels: bool,
) -> int:
    """Generates size report for a single target.

    Args:
        target: The ELF binary on which to run.
        bloaty_config: Path to Bloaty config file.
        target_out_file: Output file name for the generated reports.
        out_dir: Path to write size reports to.
        data_sources: Hierarchical data sources to display.
        extra_args: Additional command-line arguments to pass to Bloaty.
        json_key_prefix: Prefix for the json keys, uses target name by default.
        full_json: Json report contains all hierarchical data source totals.
        ignore_unused_labels: Omit labels of size zero from the json report.

    Returns:
        Zero on success, one if the Bloaty invocation failed.
    """

    try:
        single_output = run_bloaty(
            target,
            bloaty_config,
            data_sources=data_sources,
            extra_args=extra_args,
        )

    except subprocess.CalledProcessError:
        _LOG.error('%s: failed to run size report on %s', sys.argv[0], target)
        return 1

    # Parse the TSV once and reuse the map for all three output formats
    # (previously it was parsed twice).
    single_tsv = single_output.decode().splitlines()
    data_source_map = DataSourceMap.from_bloaty_tsv(single_tsv)

    single_report = BloatTableOutput(
        data_source_map, MAX_COL_WIDTH, LineCharset
    )

    rst_single_report = BloatTableOutput(
        data_source_map,
        MAX_COL_WIDTH,
        AsciiCharset,
        True,
    )

    single_report_table = single_report.create_table()

    # Generates contents for summary printed to binary_sizes.json
    binary_json_content = create_binary_sizes_json(
        json_key_prefix, data_source_map, full_json, ignore_unused_labels
    )

    print(single_report_table)
    write_file(target_out_file, rst_single_report.create_table(), out_dir)
    write_file(f'{target_out_file}.txt', single_report_table, out_dir)
    write_file(
        f'{target_out_file}{BINARY_SIZES_EXTENSION}',
        binary_json_content,
        out_dir,
    )

    return 0


def main() -> int:
    """Program entry point."""

    args = parse_args()
    extra_args = ['--tsv']

    # Fix: the GN args file was previously opened without ever being closed.
    with open(args.gn_arg_path) as json_file:
        gn_arg_dict = json.load(json_file)

    json_key_prefix = args.json_key_prefix

    if args.single_report:
        single_binary_args = gn_arg_dict['binaries'][0]
        data_sources = list(DEFAULT_DATA_SOURCES)

        if single_binary_args['source_filter']:
            extra_args.extend(
                ['--source-filter', single_binary_args['source_filter']]
            )
        if single_binary_args['data_sources']:
            data_sources = single_binary_args['data_sources']

        # Use target binary name as json key prefix if none given
        if not json_key_prefix:
            json_key_prefix = single_binary_args['target']

        return single_target_output(
            single_binary_args['target'],
            single_binary_args['bloaty_config'],
            gn_arg_dict['target_name'],
            gn_arg_dict['out_dir'],
            data_sources,
            extra_args,
            json_key_prefix,
            args.full_json_summary,
            args.ignore_unused_labels,
        )

    # Diff mode: each binary entry is compared against its 'base' binary and
    # the per-binary reports are concatenated.
    diff_report = ''
    rst_diff_report = ''
    for curr_diff_binary in gn_arg_dict['binaries']:
        curr_extra_args = extra_args.copy()
        data_sources = DEFAULT_DATA_SOURCES

        if curr_diff_binary['source_filter']:
            curr_extra_args.extend(
                ['--source-filter', curr_diff_binary['source_filter']]
            )

        if curr_diff_binary['data_sources']:
            data_sources = curr_diff_binary['data_sources']

        try:
            single_output_base = run_bloaty(
                curr_diff_binary['base'],
                curr_diff_binary['bloaty_config'],
                data_sources=data_sources,
                extra_args=curr_extra_args,
            )

        except subprocess.CalledProcessError:
            _LOG.error(
                '%s: failed to run base size report on %s',
                sys.argv[0],
                curr_diff_binary["base"],
            )
            return 1

        try:
            single_output_target = run_bloaty(
                curr_diff_binary['target'],
                curr_diff_binary['bloaty_config'],
                data_sources=data_sources,
                extra_args=curr_extra_args,
            )

        except subprocess.CalledProcessError:
            _LOG.error(
                '%s: failed to run target size report on %s',
                sys.argv[0],
                curr_diff_binary['target'],
            )
            return 1

        # Skip binaries for which Bloaty produced no output at all.
        if not single_output_target or not single_output_base:
            continue

        base_dsm = DataSourceMap.from_bloaty_tsv(
            single_output_base.decode().splitlines()
        )
        target_dsm = DataSourceMap.from_bloaty_tsv(
            single_output_target.decode().splitlines()
        )
        diff_dsm = target_dsm.diff(base_dsm)

        diff_report += BloatTableOutput(
            diff_dsm,
            MAX_COL_WIDTH,
            LineCharset,
            diff_label=curr_diff_binary['label'],
        ).create_table()

        # The first RST entry emits the full table (with headers); later
        # entries only append their data rows.
        curr_rst_report = RstOutput(diff_dsm, curr_diff_binary['label'])
        if rst_diff_report == '':
            rst_diff_report = curr_rst_report.create_table()
        else:
            rst_diff_report += f"{curr_rst_report.add_report_row()}\n"

    print(diff_report)
    write_file(
        gn_arg_dict['target_name'], rst_diff_report, gn_arg_dict['out_dir']
    )
    write_file(
        f"{gn_arg_dict['target_name']}.txt", diff_report, gn_arg_dict['out_dir']
    )

    return 0


if __name__ == '__main__':
    pw_cli.log.install()
    sys.exit(main())