# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Contains common helpers for GN action()s."""

import collections
import contextlib
import filecmp
import fnmatch
import json
import os
import pipes
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import zipfile

# Any new non-system import must be added to:
# //build/config/android/internal_rules.gni

# Some clients do not add //build/android/gyp to PYTHONPATH.
import md5_check  # pylint: disable=relative-import

# pylib conflicts with mojo/public/tools/bindings/pylib. Prioritize
# build/android/pylib.
# PYTHONPATH wouldn't help in this case, because soong puts source files
# under a temp directory for each build, so the abspath is unknown until
# execution.
#sys.path.append(os.path.join(os.path.dirname(__file__),
#                             os.pardir, os.pardir, os.pardir))
sys.path.insert(0, os.path.join(os.path.dirname(__file__),
                                os.pardir, os.pardir))

import gn_helpers

# Definition copied from pylib/constants/__init__.py to avoid adding
# a dependency on pylib.
DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
    os.path.abspath(os.path.join(os.path.dirname(__file__),
                                 os.pardir, os.pardir, os.pardir, os.pardir)))

HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
_HERMETIC_FILE_ATTR = (0644 << 16L)


@contextlib.contextmanager
def TempDir():
  dirname = tempfile.mkdtemp()
  try:
    yield dirname
  finally:
    shutil.rmtree(dirname)


def MakeDirectory(dir_path):
  try:
    os.makedirs(dir_path)
  except OSError:
    pass


def DeleteDirectory(dir_path):
  if os.path.exists(dir_path):
    shutil.rmtree(dir_path)


def Touch(path, fail_if_missing=False):
  if fail_if_missing and not os.path.exists(path):
    raise Exception(path + ' doesn\'t exist.')

  MakeDirectory(os.path.dirname(path))
  with open(path, 'a'):
    os.utime(path, None)


def FindInDirectory(directory, filename_filter):
  files = []
  for root, _dirnames, filenames in os.walk(directory):
    matched_files = fnmatch.filter(filenames, filename_filter)
    files.extend((os.path.join(root, f) for f in matched_files))
  return files


def ReadBuildVars(path):
  """Parses a build_vars.txt into a dict."""
  with open(path) as f:
    return dict(l.rstrip().split('=', 1) for l in f)


def ParseGnList(gn_string):
  """Converts a command-line parameter into a list.

  If the input starts with a '[' it is assumed to be a GN-formatted list and
  it will be parsed accordingly. When empty, an empty list will be returned.
  Otherwise, the parameter will be treated as a single raw string (not
  GN-formatted in that it's not assumed to have literal quotes that must be
  removed) and a list will be returned containing that string.

  The common use for this behavior is in the Android build, where things can
  take lists of @FileArg references that are expanded via ExpandFileArgs.
  """
  if gn_string.startswith('['):
    parser = gn_helpers.GNValueParser(gn_string)
    return parser.ParseList()
  if len(gn_string):
    return [ gn_string ]
  return []
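

# Illustrative sketch (not part of the original helpers): how ParseGnList
# treats its two accepted input shapes. A GN-formatted list is parsed with
# gn_helpers, while a bare string is wrapped in a single-element list.
# The file names below are hypothetical.
def _ParseGnListExample():
  assert ParseGnList('["foo.java", "bar.java"]') == ['foo.java', 'bar.java']
  assert ParseGnList('foo.java') == ['foo.java']
  assert ParseGnList('') == []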


def CheckOptions(options, parser, required=None):
  if not required:
    return
  for option_name in required:
    if getattr(options, option_name) is None:
      parser.error('--%s is required' % option_name.replace('_', '-'))


def WriteJson(obj, path, only_if_changed=False):
  old_dump = None
  if os.path.exists(path):
    with open(path, 'r') as oldfile:
      old_dump = oldfile.read()

  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))

  if not only_if_changed or old_dump != new_dump:
    with open(path, 'w') as outfile:
      outfile.write(new_dump)


@contextlib.contextmanager
def AtomicOutput(path, only_if_changed=True):
  """Helper to prevent half-written outputs.

  Args:
    path: Path to the final output file, which will be written atomically.
    only_if_changed: If True (the default), do not touch the filesystem
      if the content has not changed.
  Returns:
    A python context manager that yields a NamedTemporaryFile instance
    that clients must use to write the data to. On exit, the manager will
    try to replace the final output file with the temporary one if
    necessary. The temporary file is always destroyed on exit.
  Example:
    with build_utils.AtomicOutput(output_path) as tmp_file:
      subprocess.check_call(['prog', '--output', tmp_file.name])
  """
  # Create in same directory to ensure same filesystem when moving.
  with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
                                   dir=os.path.dirname(path),
                                   delete=False) as f:
    try:
      yield f

      # file should be closed before comparison/move.
      f.close()
      if not (only_if_changed and os.path.exists(path) and
              filecmp.cmp(f.name, path)):
        shutil.move(f.name, path)
    finally:
      if os.path.exists(f.name):
        os.unlink(f.name)


class CalledProcessError(Exception):
  """This exception is raised when the process run by CheckOutput
  exits with a non-zero exit code."""

  def __init__(self, cwd, args, output):
    super(CalledProcessError, self).__init__()
    self.cwd = cwd
    self.args = args
    self.output = output

  def __str__(self):
    # A user should be able to simply copy and paste the command that failed
    # into their shell.
    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
        ' '.join(map(pipes.quote, self.args)))
    return 'Command failed: {}\n{}'.format(copyable_command, self.output)


# This can be used in most cases like subprocess.check_output(). When the
# command fails, the captured output better highlights what went wrong.
# Raises a build_utils.CalledProcessError if the command fails.
def CheckOutput(args, cwd=None, env=None,
                print_stdout=False, print_stderr=True,
                stdout_filter=None,
                stderr_filter=None,
                fail_func=lambda returncode, stderr: returncode != 0):
  if not cwd:
    cwd = os.getcwd()

  child = subprocess.Popen(args,
      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env)
  stdout, stderr = child.communicate()

  if stdout_filter is not None:
    stdout = stdout_filter(stdout)

  if stderr_filter is not None:
    stderr = stderr_filter(stderr)

  if fail_func(child.returncode, stderr):
    raise CalledProcessError(cwd, args, stdout + stderr)

  if print_stdout:
    sys.stdout.write(stdout)
  if print_stderr:
    sys.stderr.write(stderr)

  return stdout
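

# Illustrative sketch (not part of the original helpers): typical CheckOutput
# usage showing the stderr_filter and fail_func hooks. The tool name and its
# output conventions are hypothetical.
def _CheckOutputExample():
  return CheckOutput(
      ['some_codegen_tool', '--out', 'gen/output.txt'],
      # Drop warning lines so they are not attached to a CalledProcessError.
      stderr_filter=lambda stderr: '\n'.join(
          l for l in stderr.splitlines() if 'warning:' not in l),
      # Treat any remaining stderr output as a failure.
      fail_func=lambda returncode, stderr: returncode != 0 or bool(stderr))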


def GetModifiedTime(path):
  # For a symlink, the modified time should be the greater of the link's
  # modified time and the modified time of the target.
  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)


def IsTimeStale(output, inputs):
  if not os.path.exists(output):
    return True

  output_time = GetModifiedTime(output)
  for i in inputs:
    if GetModifiedTime(i) > output_time:
      return True
  return False


def _CheckZipPath(name):
  if os.path.normpath(name) != name:
    raise Exception('Non-canonical zip path: %s' % name)
  if os.path.isabs(name):
    raise Exception('Absolute zip path: %s' % name)


def _IsSymlink(zip_file, name):
  zi = zip_file.getinfo(name)

  # The two high-order bytes of ZipInfo.external_attr represent
  # UNIX permissions and file type bits.
  return stat.S_ISLNK(zi.external_attr >> 16L)


def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None,
               predicate=None):
  if path is None:
    path = os.getcwd()
  elif not os.path.exists(path):
    MakeDirectory(path)

  if not zipfile.is_zipfile(zip_path):
    raise Exception('Invalid zip file: %s' % zip_path)

  extracted = []
  with zipfile.ZipFile(zip_path) as z:
    for name in z.namelist():
      if name.endswith('/'):
        MakeDirectory(os.path.join(path, name))
        continue
      if pattern is not None:
        if not fnmatch.fnmatch(name, pattern):
          continue
      if predicate and not predicate(name):
        continue
      _CheckZipPath(name)
      if no_clobber:
        output_path = os.path.join(path, name)
        if os.path.exists(output_path):
          raise Exception(
              'Path already exists from zip: %s %s %s'
              % (zip_path, name, output_path))
      if _IsSymlink(z, name):
        dest = os.path.join(path, name)
        MakeDirectory(os.path.dirname(dest))
        os.symlink(z.read(name), dest)
        extracted.append(dest)
      else:
        z.extract(name, path)
        extracted.append(os.path.join(path, name))

  return extracted
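

# Illustrative sketch (not part of the original helpers): extracting only the
# .dex entries of an archive into a scratch directory. The archive name is
# hypothetical.
def _ExtractAllExample():
  with TempDir() as tmp_dir:
    dex_files = ExtractAll('Example.apk', path=tmp_dir, pattern='*.dex')
    return [os.path.basename(p) for p in dex_files]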


def AddToZipHermetic(zip_file, zip_path, src_path=None, data=None,
                     compress=None):
  """Adds a file to the given ZipFile with a hard-coded modified time.

  Args:
    zip_file: ZipFile instance to add the file to.
    zip_path: Destination path within the zip file.
    src_path: Path of the source file. Mutually exclusive with |data|.
    data: File data as a string.
    compress: Whether to enable compression. Default is taken from ZipFile
        constructor.
  """
  assert (src_path is None) != (data is None), (
      '|src_path| and |data| are mutually exclusive.')
  _CheckZipPath(zip_path)
  zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
  zipinfo.external_attr = _HERMETIC_FILE_ATTR

  if src_path and os.path.islink(src_path):
    zipinfo.filename = zip_path
    zipinfo.external_attr |= stat.S_IFLNK << 16L  # mark as a symlink
    zip_file.writestr(zipinfo, os.readlink(src_path))
    return

  if src_path:
    with open(src_path) as f:
      data = f.read()

  # zipfile will deflate even when it makes the file bigger. To avoid
  # growing files, disable compression at an arbitrary cut off point.
  if len(data) < 16:
    compress = False

  # None converts to ZIP_STORED, when passed explicitly rather than the
  # default passed to the ZipFile constructor.
  compress_type = zip_file.compression
  if compress is not None:
    compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
  zip_file.writestr(zipinfo, data, compress_type)


def DoZip(inputs, output, base_dir=None, compress_fn=None):
  """Creates a zip file from a list of files.

  Args:
    inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
    output: Destination .zip file.
    base_dir: Prefix to strip from inputs.
    compress_fn: Applied to each input to determine whether or not to compress.
        By default, items will be |zipfile.ZIP_STORED|.
  """
  input_tuples = []
  for tup in inputs:
    if isinstance(tup, basestring):
      tup = (os.path.relpath(tup, base_dir), tup)
    input_tuples.append(tup)

  # Sort by zip path to ensure stable zip ordering.
  input_tuples.sort(key=lambda tup: tup[0])
  with zipfile.ZipFile(output, 'w') as outfile:
    for zip_path, fs_path in input_tuples:
      compress = compress_fn(zip_path) if compress_fn else None
      AddToZipHermetic(outfile, zip_path, src_path=fs_path, compress=compress)


def ZipDir(output, base_dir, compress_fn=None):
  """Creates a zip file from a directory."""
  inputs = []
  for root, _, files in os.walk(base_dir):
    for f in files:
      inputs.append(os.path.join(root, f))

  with AtomicOutput(output) as f:
    DoZip(inputs, f, base_dir, compress_fn=compress_fn)


def MatchesGlob(path, filters):
  """Returns whether the given path matches any of the given glob patterns."""
  return filters and any(fnmatch.fnmatch(path, f) for f in filters)


def MergeZips(output, input_zips, path_transform=None):
  """Combines all files from |input_zips| into |output|.

  Args:
    output: Path or ZipFile instance to add files to.
    input_zips: Iterable of paths to zip files to merge.
    path_transform: Called for each entry path. Returns a new path, or None to
        skip the file.
  """
  path_transform = path_transform or (lambda p: p)
  added_names = set()

  output_is_already_open = not isinstance(output, basestring)
  if output_is_already_open:
    assert isinstance(output, zipfile.ZipFile)
    out_zip = output
  else:
    out_zip = zipfile.ZipFile(output, 'w')

  try:
    for in_file in input_zips:
      with zipfile.ZipFile(in_file, 'r') as in_zip:
        # ijar creates zips with null CRCs.
        in_zip._expected_crc = None
        for info in in_zip.infolist():
          # Ignore directories.
          if info.filename[-1] == '/':
            continue
          dst_name = path_transform(info.filename)
          if not dst_name:
            continue
          already_added = dst_name in added_names
          if not already_added:
            AddToZipHermetic(out_zip, dst_name, data=in_zip.read(info),
                             compress=info.compress_type != zipfile.ZIP_STORED)
          added_names.add(dst_name)
  finally:
    if not output_is_already_open:
      out_zip.close()
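

# Illustrative sketch (not part of the original helpers): building a
# deterministic zip with DoZip and merging it with another archive via
# MergeZips. All file names here are hypothetical; compress_fn and
# path_transform mirror the hooks documented above.
def _HermeticZipExample():
  DoZip(['gen/foo/A.class', 'gen/foo/B.class'], 'foo.jar', base_dir='gen',
        compress_fn=lambda zip_path: zip_path.endswith('.class'))
  MergeZips('combined.jar', ['foo.jar', 'bar.jar'],
            path_transform=lambda p: None if p.startswith('META-INF/') else p)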


def GetSortedTransitiveDependencies(top, deps_func):
  """Gets the list of all transitive dependencies in sorted order.

  There should be no cycles in the dependency graph (crashes if cycles exist).

  Args:
    top: A list of the top level nodes
    deps_func: A function that takes a node and returns a list of its direct
        dependencies.
  Returns:
    A list of all transitive dependencies of nodes in top, in order (a node
    will appear in the list at a higher index than all of its dependencies).
  """
  # Find all deps depth-first, maintaining original order in the case of ties.
  deps_map = collections.OrderedDict()
  def discover(nodes):
    for node in nodes:
      if node in deps_map:
        continue
      deps = deps_func(node)
      discover(deps)
      deps_map[node] = deps

  discover(top)
  return deps_map.keys()
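

# Illustrative sketch (not part of the original helpers): a tiny dependency
# graph showing the ordering guarantee of GetSortedTransitiveDependencies.
# The node names are hypothetical; every node appears after all of its
# dependencies.
def _TransitiveDepsExample():
  graph = {
      'app': ['libnet', 'libbase'],
      'libnet': ['libbase'],
      'libbase': [],
  }
  order = GetSortedTransitiveDependencies(['app'], lambda n: graph[n])
  assert order == ['libbase', 'libnet', 'app']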


def _ComputePythonDependencies():
  """Gets the paths of imported non-system python modules.

  A path is assumed to be a "system" import if it is outside of chromium's
  src/. The paths will be relative to the current directory.
  """
  _ForceLazyModulesToLoad()
  module_paths = (m.__file__ for m in sys.modules.itervalues()
                  if m is not None and hasattr(m, '__file__'))
  abs_module_paths = map(os.path.abspath, module_paths)

  assert os.path.isabs(DIR_SOURCE_ROOT)
  non_system_module_paths = [
      p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)]
  def ConvertPycToPy(s):
    if s.endswith('.pyc'):
      return s[:-1]
    return s

  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
  return sorted(set(non_system_module_paths))


def _ForceLazyModulesToLoad():
  """Forces any lazily imported modules to fully load themselves.

  Inspecting the modules' __file__ attribute causes lazily imported modules
  (e.g. from email) to get fully imported and update sys.modules. Iterate
  over the values until sys.modules stabilizes so that no modules are missed.
  """
  while True:
    num_modules_before = len(sys.modules.keys())
    for m in sys.modules.values():
      if m is not None and hasattr(m, '__file__'):
        _ = m.__file__
    num_modules_after = len(sys.modules.keys())
    if num_modules_before == num_modules_after:
      break


def AddDepfileOption(parser):
  # TODO(agrieve): Get rid of this once we've moved to argparse.
  if hasattr(parser, 'add_option'):
    func = parser.add_option
  else:
    func = parser.add_argument
  func('--depfile',
       help='Path to depfile (refer to `gn help depfile`)')


def WriteDepfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
  assert depfile_path != first_gn_output  # http://crbug.com/646165
  inputs = inputs or []
  if add_pydeps:
    inputs = _ComputePythonDependencies() + inputs
  MakeDirectory(os.path.dirname(depfile_path))
  # Ninja does not support multiple outputs in depfiles.
  with open(depfile_path, 'w') as depfile:
    depfile.write(first_gn_output.replace(' ', '\\ '))
    depfile.write(': ')
    depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
    depfile.write('\n')


def ExpandFileArgs(args):
  """Replaces file-arg placeholders in args.

  These placeholders have the form:
    @FileArg(filename:key1:key2:...:keyn)

  The value of such a placeholder is calculated by reading 'filename' as json
  and then extracting the value at [key1][key2]...[keyn].

  Note: This intentionally does not return the list of files that appear in
  such placeholders. An action that uses file-args *must* know the paths of
  those files prior to the parsing of the arguments (typically by explicitly
  listing them in the action's inputs in build files).
  """
  new_args = list(args)
  file_jsons = dict()
  r = re.compile('@FileArg\((.*?)\)')
  for i, arg in enumerate(args):
    match = r.search(arg)
    if not match:
      continue

    if match.end() != len(arg):
      raise Exception('Unexpected characters after FileArg: ' + arg)

    lookup_path = match.group(1).split(':')
    file_path = lookup_path[0]
    if file_path not in file_jsons:
      with open(file_path) as f:
        file_jsons[file_path] = json.load(f)

    expansion = file_jsons[file_path]
    for k in lookup_path[1:]:
      expansion = expansion[k]

    # This should match ParseGnList. The output is either a GN-formatted list
    # or a literal (with no quotes).
    if isinstance(expansion, list):
      new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(expansion)
    else:
      new_args[i] = arg[:match.start()] + str(expansion)

  return new_args
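

# Illustrative sketch (not part of the original helpers): how an @FileArg
# placeholder is expanded. The config file name and keys are hypothetical;
# the placeholder must be the trailing part of the argument, and list values
# are re-emitted in GN list syntax so that ParseGnList can round-trip them.
def _ExpandFileArgsExample():
  with TempDir() as tmp_dir:
    config_path = os.path.join(tmp_dir, 'build.config')
    WriteJson({'deps_info': {'java_sources': ['A.java', 'B.java']}},
              config_path)
    expanded = ExpandFileArgs(
        ['--srcs=@FileArg(%s:deps_info:java_sources)' % config_path])
    assert ParseGnList(expanded[0][len('--srcs='):]) == ['A.java', 'B.java']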


def ReadSourcesList(sources_list_file_name):
  """Reads a GN-written file containing a list of file names and returns it.

  Note that this function should not be used to parse response files.
  """
  with open(sources_list_file_name) as f:
    return [file_name.strip() for file_name in f]


def CallAndWriteDepfileIfStale(function, options, record_path=None,
                               input_paths=None, input_strings=None,
                               output_paths=None, force=False,
                               pass_changes=False, depfile_deps=None,
                               add_pydeps=True):
  """Wraps md5_check.CallAndRecordIfStale() and writes a depfile if applicable.

  Depfiles are automatically added to output_paths when present in the
  |options| argument. They are then created after |function| is called.

  By default, only python dependencies are added to the depfile. If there are
  other input paths that are not captured by GN deps, then they should be
  listed in depfile_deps. It's important to write paths to the depfile that
  are already captured by GN deps, since GN args can cause GN deps to change,
  and such changes are not immediately reflected in depfiles
  (http://crbug.com/589311).
  """
  if not output_paths:
    raise Exception('At least one output_path must be specified.')
  input_paths = list(input_paths or [])
  input_strings = list(input_strings or [])
  output_paths = list(output_paths or [])

  python_deps = None
  if hasattr(options, 'depfile') and options.depfile:
    python_deps = _ComputePythonDependencies()
    input_paths += python_deps
    output_paths += [options.depfile]

  def on_stale_md5(changes):
    args = (changes,) if pass_changes else ()
    function(*args)
    if python_deps is not None:
      all_depfile_deps = list(python_deps) if add_pydeps else []
      if depfile_deps:
        all_depfile_deps.extend(depfile_deps)
      WriteDepfile(options.depfile, output_paths[0], all_depfile_deps,
                   add_pydeps=False)

  md5_check.CallAndRecordIfStale(
      on_stale_md5,
      record_path=record_path,
      input_paths=input_paths,
      input_strings=input_strings,
      output_paths=output_paths,
      force=force,
      pass_changes=True)
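

# Illustrative sketch (not part of the original helpers): a typical way a GN
# action script might drive CallAndWriteDepfileIfStale. The option names and
# the _OnStaleMd5 helper are hypothetical; with the default pass_changes=False
# the callback is invoked with no arguments only when inputs have changed.
def _CallAndWriteDepfileIfStaleExample(options):
  def _OnStaleMd5():
    with AtomicOutput(options.output) as f:
      f.write('generated contents')

  CallAndWriteDepfileIfStale(
      _OnStaleMd5,
      options,
      input_paths=[options.input],
      input_strings=[options.some_flag],
      output_paths=[options.output])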