• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3# Copyright 2013 The Chromium Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6"""Contains common helpers for GN action()s."""
7
import collections
import contextlib
import filecmp
import fnmatch
import json
import optparse
import os
import pipes
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import zipfile
from distutils import extension
24
25# Any new non-system import must be added to:
26
27sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
28import gn_helpers
29
30# Some clients do not add //build/scripts/util to PYTHONPATH.
31from . import md5_check  # pylint: disable=relative-import
32
# Definition copied from pylib/constants/__init__.py to avoid adding
# a dependency on pylib.
# Absolute path of the checkout root: four directories above this file,
# unless overridden via the CHECKOUT_SOURCE_ROOT environment variable.
DIR_SOURCE_ROOT = os.environ.get(
    'CHECKOUT_SOURCE_ROOT',
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                     os.pardir, os.pardir)))

# Fixed timestamp stamped onto every zip entry so archives are
# byte-for-byte reproducible across builds (see add_to_zip_hermetic).
HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
# rw-r--r-- permissions, stored in the high 16 bits of
# ZipInfo.external_attr (the UNIX mode field).
_HERMETIC_FILE_ATTR = (0o644 << 16)
43
44
@contextlib.contextmanager
def temp_dir():
    """Context manager yielding a scratch directory that is removed on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
52
53
def make_directory(dir_path):
    """Best-effort recursive mkdir of |dir_path|.

    Failures are deliberately swallowed: callers routinely pass the result
    of os.path.dirname(), which may be '' (an invalid path), and treat
    directory creation as advisory.
    """
    with contextlib.suppress(OSError):
        os.makedirs(dir_path, exist_ok=True)
59
60
def delete_directory(dir_path):
    """Recursively removes |dir_path|; no-op when it does not exist."""
    if not os.path.exists(dir_path):
        return
    shutil.rmtree(dir_path)
64
65
def touch(path, fail_if_missing=False):
    """Creates |path| if necessary and bumps its modification time.

    Raises:
      Exception: if fail_if_missing is set and |path| does not exist.
    """
    exists = os.path.exists(path)
    if fail_if_missing and not exists:
        raise Exception(path + ' doesn\'t exist.')

    make_directory(os.path.dirname(path))
    with open(path, 'a'):
        os.utime(path, None)
73
74
def find_in_directory(directory, filename_filter):
    """Returns paths under |directory| whose basename matches the glob."""
    matches = []
    for root, _, names in os.walk(directory):
        matches += [os.path.join(root, name)
                    for name in fnmatch.filter(names, filename_filter)]
    return matches
81
82
def read_build_vars(path):
    """Parses a build_vars.txt into a dict (splitting on the first '=')."""
    result = {}
    with open(path) as f:
        for line in f:
            key, value = line.rstrip().split('=', 1)
            result[key] = value
    return result
87
88
def parse_gn_list(gn_string):
    """Converts a command-line parameter into a list.

    Input starting with '[' is treated as a GN-formatted list and parsed
    with gn_helpers; an empty string yields []. Anything else is treated
    as a single raw string (no literal quotes are stripped) and returned
    as a one-element list.

    The common use for this behavior is in the ohos build where things can
    take lists of @FileArg references that are expanded via
    expand_file_args.
    """
    if gn_string.startswith('['):
        return gn_helpers.GNValueParser(gn_string).ParseList()
    return [gn_string] if gn_string else []
107
108
def parse_and_flatten_gn_lists(gn_lists):
    """Parses each GN-list argument and concatenates the results."""
    return [item for gn_list in gn_lists for item in parse_gn_list(gn_list)]
114
115
def check_options(options, parser, required=None):
    """Calls parser.error() for every |required| option that is unset."""
    for option_name in required or ():
        if getattr(options, option_name) is None:
            parser.error('--%s is required' % option_name.replace('_', '-'))
122
123
def write_json(obj, path, only_if_changed=False):
    """Serializes |obj| as pretty-printed, key-sorted JSON to |path|.

    When only_if_changed is True, the file is left untouched if the
    serialized form is identical to what is already on disk.
    """
    new_dump = json.dumps(obj,
                          sort_keys=True,
                          indent=2,
                          separators=(',', ': '))

    if only_if_changed and os.path.exists(path):
        with open(path, 'r') as oldfile:
            if oldfile.read() == new_dump:
                return

    with open(path, 'w') as outfile:
        outfile.write(new_dump)
138
139
@contextlib.contextmanager
def atomic_output(path, only_if_changed=True):
    """Yields a temp file that atomically replaces |path| on clean exit.

    Args:
      path: Final output file, written atomically.
      only_if_changed: If True (the default), leave the filesystem alone
        when the new content equals the existing content.
    Returns:
      A context manager yielding a NamedTemporaryFile that clients write
      to. On exit the temporary file replaces |path| if needed, and is
      always destroyed.
    Example:
      with build_utils.atomic_output(output_path) as tmp_file:
        subprocess.check_call(['prog', '--output', tmp_file.name])
    """
    # The temp file lives in the destination directory so the final move
    # stays within one filesystem (and is therefore atomic).
    tmp_file = tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
                                           dir=os.path.dirname(path),
                                           delete=False)
    try:
        # Group-writable (664), unlike mkstemp's default 600.
        os.fchmod(tmp_file.fileno(), 0o664)
        yield tmp_file

        # Close before comparing/moving.
        tmp_file.close()
        unchanged = (only_if_changed and os.path.exists(path)
                     and filecmp.cmp(tmp_file.name, path))
        if not unchanged:
            shutil.move(tmp_file.name, path)
    finally:
        tmp_file.close()
        if os.path.exists(tmp_file.name):
            os.unlink(tmp_file.name)
175
176
class called_process_error(Exception):
    """Raised by check_output() when the launched process exits non-zero.

    Attributes:
      cwd: Working directory the command was run in.
      args: Argument list of the failed command.
      output: Combined stdout/stderr of the failed command, as text.
    """
    def __init__(self, cwd, args, output):
        super(called_process_error, self).__init__()
        self.cwd = cwd
        self.args = args
        # Normalize to text so __str__ never mixes bytes and str.
        if isinstance(output, bytes):
            self.output = output.decode()
        else:
            self.output = output

    def __str__(self):
        # A user should be able to simply copy and paste the command that
        # failed into their shell. Note: pipes.quote is deprecated and
        # removed in Python 3.13; shlex.quote is its documented
        # replacement with identical quoting behavior.
        copyable_command = '( cd {}; {} )'.format(
            os.path.abspath(self.cwd), ' '.join(map(shlex.quote, self.args)))
        return 'Command failed: {}\n{}'.format(copyable_command, self.output)
195
196
def filter_lines(output, filter_string):
    """Output filter from build_utils.check_output.

    Args:
      output: Executable output as from build_utils.check_output.
      filter_string: A regex; lines matching it are removed from |output|.

    Returns:
      The filtered output, as a single string.
    """
    pattern = re.compile(filter_string)
    kept = [line for line in output.splitlines()
            if not pattern.search(line)]
    return '\n'.join(kept)
211
212
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.called_process_error.
def check_output(args,
                 cwd=None,
                 env=None,
                 print_stdout=False,
                 print_stderr=True,
                 stdout_filter=None,
                 stderr_filter=None,
                 fail_func=lambda returncode, stderr: returncode != 0):
    """Runs |args| and returns its decoded stdout.

    Args:
      args: Command argument list.
      cwd: Working directory; defaults to the current directory.
      env: Environment mapping for the child process.
      print_stdout: Forward the child's stdout to this process' stdout.
      print_stderr: Forward the child's stderr to this process' stderr.
      stdout_filter: Optional callable applied to the raw captured stdout
          (before decoding).
      stderr_filter: Optional callable applied to the raw captured stderr
          (before decoding).
      fail_func: Decides failure from (returncode, decoded stderr).

    Raises:
      called_process_error: when fail_func reports failure.
    """
    if not cwd:
        cwd = os.getcwd()

    child = subprocess.Popen(args,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=cwd,
                             env=env)
    stdout, stderr = child.communicate()

    # Filters see the raw (bytes) streams; decoding happens afterwards.
    if stdout_filter is not None:
        stdout = stdout_filter(stdout)
    if stderr_filter is not None:
        stderr = stderr_filter(stderr)
    if isinstance(stdout, bytes):
        stdout = stdout.decode()
    if isinstance(stderr, bytes):
        stderr = stderr.decode()

    if fail_func(child.returncode, stderr):
        raise called_process_error(cwd, args, stdout + stderr)

    # Both streams are guaranteed str here, so the historical re-decode
    # checks in these branches were dead code and have been removed.
    if print_stdout and stdout:
        sys.stdout.write(stdout)
    if print_stderr and stderr:
        sys.stderr.write(stderr)
    return stdout
258
259
def get_modified_time(path):
    """Returns the mtime of |path|.

    For a symlink this is the newer of the link's own mtime and the
    target's mtime.
    """
    link_mtime = os.lstat(path).st_mtime
    target_mtime = os.stat(path).st_mtime
    return max(link_mtime, target_mtime)
264
265
def is_time_stale(output, inputs):
    """True if |output| is missing or older than any path in |inputs|."""
    if not os.path.exists(output):
        return True

    threshold = get_modified_time(output)
    return any(get_modified_time(i) > threshold for i in inputs)
275
276
277def _check_zip_path(name):
278    if os.path.normpath(name) != name:
279        raise Exception('Non-canonical zip path: %s' % name)
280    if os.path.isabs(name):
281        raise Exception('Absolute zip path: %s' % name)
282
283
284def _is_symlink(zip_file, name):
285    zi = zip_file.getinfo(name)
286
287    # The two high-order bytes of ZipInfo.external_attr represent
288    # UNIX permissions and file type bits.
289    return stat.S_ISLNK(zi.external_attr >> 16)
290
291
def extract_all(zip_path,
                path=None,
                no_clobber=True,
                pattern=None,
                predicate=None):
    """Extracts |zip_path| into |path|, preserving stored symlinks.

    Args:
      zip_path: Zip archive to read.
      path: Destination directory; defaults to the current directory.
      no_clobber: If True, raise when an output path already exists.
      pattern: fnmatch glob; non-matching entries are skipped.
      predicate: Optional callable(name) -> bool; falsy return skips the
          entry.

    Returns:
      List of extracted file paths.
    """
    if path is None:
        path = os.getcwd()
    elif not os.path.exists(path):
        make_directory(path)

    if not zipfile.is_zipfile(zip_path):
        raise Exception('Invalid zip file: %s' % zip_path)

    extracted = []
    with zipfile.ZipFile(zip_path) as z:
        for name in z.namelist():
            if name.endswith('/'):
                make_directory(os.path.join(path, name))
                continue
            if pattern is not None:
                if not fnmatch.fnmatch(name, pattern):
                    continue
            if predicate and not predicate(name):
                continue
            _check_zip_path(name)
            if no_clobber:
                output_path = os.path.join(path, name)
                if os.path.exists(output_path):
                    raise Exception('Path already exists from zip: %s %s %s' %
                                    (zip_path, name, output_path))
            if _is_symlink(z, name):
                dest = os.path.join(path, name)
                make_directory(os.path.dirname(dest))
                # ZipFile.read() returns bytes; os.symlink() raises
                # ValueError when given a bytes target with a str link
                # path, so decode the target first.
                os.symlink(z.read(name).decode(), dest)
                extracted.append(dest)
            else:
                z.extract(name, path)
                extracted.append(os.path.join(path, name))

    return extracted
332
333
def add_to_zip_hermetic(zip_file,
                        zip_path,
                        src_path=None,
                        data=None,
                        compress=None,
                        compress_level=6):
    """Writes one entry into |zip_file| with a fixed timestamp and mode.

    Args:
      zip_file: Destination ZipFile.
      zip_path: Entry name inside the archive.
      src_path: File to read. Mutually exclusive with |data|.
      data: Entry contents as a string. Mutually exclusive with
          |src_path|.
      compress: Force compression on/off; by default the ZipFile's own
          compression setting is used.
      compress_level: Deflate level; overridable through the
          ZIP_COMPRESS_LEVEL environment variable.
    """
    assert (src_path is None) != (data is None), (
        '|src_path| and |data| are mutually exclusive.')
    _check_zip_path(zip_path)
    zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
    zipinfo.external_attr = _HERMETIC_FILE_ATTR

    if src_path and os.path.islink(src_path):
        zipinfo.filename = zip_path
        zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
        zip_file.writestr(zipinfo, os.readlink(src_path))
        return

    if src_path:
        # Only the execute bits are carried over from the source file;
        # everything else comes from _HERMETIC_FILE_ATTR so that archives
        # stay reproducible.
        src_mode = os.stat(src_path).st_mode
        exec_bits = src_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
        zipinfo.external_attr |= exec_bits << 16
        with open(src_path, 'rb') as f:
            data = f.read()

    # Deflate can grow tiny files; store anything under 16 bytes as-is.
    if len(data) < 16:
        compress = False

    # An explicit compress flag overrides the ZipFile's default; None
    # keeps whatever the constructor was given.
    compress_type = zip_file.compression
    if compress is not None:
        compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if os.getenv("ZIP_COMPRESS_LEVEL"):
        compress_level = int(os.getenv("ZIP_COMPRESS_LEVEL"))
    zip_file.writestr(zipinfo, data, compress_type, compress_level)
387
388
def do_zip(inputs,
           output,
           base_dir=None,
           compress_fn=None,
           zip_prefix_path=None):
    """Creates a zip file from a list of files.

    Args:
      inputs: Paths to zip, or (zip_path, fs_path) tuples.
      output: Destination .zip file (path or file object).
      base_dir: Prefix stripped from plain-path inputs.
      compress_fn: Called per entry to decide compression; when absent,
          entries default to |zipfile.ZIP_STORED|.
      zip_prefix_path: Prefix prepended to each in-zip path.
    """
    input_tuples = []
    for entry in inputs:
        if isinstance(entry, str):
            entry = (os.path.relpath(entry, base_dir), entry)
        input_tuples.append(entry)

    # Sort by in-zip path (stable) for deterministic archive ordering.
    input_tuples.sort(key=lambda entry: entry[0])
    with zipfile.ZipFile(output, 'w') as outfile:
        for zip_path, fs_path in input_tuples:
            if zip_prefix_path:
                zip_path = os.path.join(zip_prefix_path, zip_path)
            should_compress = compress_fn(zip_path) if compress_fn else None
            add_to_zip_hermetic(outfile,
                                zip_path,
                                src_path=fs_path,
                                compress=should_compress)
421
422
def zip_dir(output, base_dir, compress_fn=None, zip_prefix_path=None):
    """Atomically zips every file under |base_dir| into |output|."""
    inputs = [os.path.join(root, name)
              for root, _, files in os.walk(base_dir)
              for name in files]

    with atomic_output(output) as f:
        do_zip(inputs,
               f,
               base_dir,
               compress_fn=compress_fn,
               zip_prefix_path=zip_prefix_path)
436
437
def matches_glob(path, filters):
    """Returns whether |path| matches any glob in |filters| (falsy -> falsy)."""
    if not filters:
        return filters
    return any(fnmatch.fnmatch(path, f) for f in filters)
441
442
443def _strip_dst_name(dst_name, options):
444    # Strip specific directories and file if options is not None
445    if options and options.stripFile:
446        for f in options.stripFile:
447            if fnmatch.fnmatch(dst_name, '*/' + f):
448                return True
449    if options and options.stripDir:
450        for d in options.stripDir:
451            if fnmatch.fnmatch(dst_name, d + '/*'):
452                return True
453    return False
454
455
def merge_zips(output, input_zips, path_transform=None, merge_args=None):
    """Merges every file from |input_zips| into |output|.

    Args:
      output: Destination path, or an already-open ZipFile instance.
      input_zips: Iterable of zip file paths to merge.
      path_transform: Maps each entry name to its new name; a falsy
          return skips the entry.
      merge_args: Optional --stripDir/--stripFile arguments (possibly
          containing @FileArg references) selecting entries to drop.
    """
    options = None
    if merge_args:
        parser = optparse.OptionParser()
        parser.add_option('--stripDir',
                          action='append',
                          help='strip specific directory')
        parser.add_option('--stripFile',
                          action='append',
                          help='strip specific file.')
        options, _ = parser.parse_args(expand_file_args(merge_args))

    path_transform = path_transform or (lambda p: p)
    added_names = set()

    manage_output = isinstance(output, str)
    if manage_output:
        out_zip = zipfile.ZipFile(output, 'w')
    else:
        assert isinstance(output, zipfile.ZipFile)
        out_zip = output

    try:
        for in_file in input_zips:
            with zipfile.ZipFile(in_file, 'r') as in_zip:
                # ijar creates zips with null CRCs.
                in_zip._expected_crc = None
                for info in in_zip.infolist():
                    # Directory entries carry no data; skip them.
                    if info.filename[-1] == '/':
                        continue
                    dst_name = path_transform(info.filename)
                    if not dst_name:
                        continue
                    if _strip_dst_name(dst_name, options):
                        continue
                    # First occurrence of a name wins.
                    if dst_name in added_names:
                        continue
                    add_to_zip_hermetic(
                        out_zip,
                        dst_name,
                        data=in_zip.read(info),
                        compress=info.compress_type != zipfile.ZIP_STORED)
                    added_names.add(dst_name)
    finally:
        if manage_output:
            out_zip.close()
513
514
def get_sorted_transitive_dependencies(top, deps_func):
    """Topologically sorts the transitive dependencies of |top|.

    There should be no cycles in the dependency graph (crashes if cycles
    exist).

    Args:
      top: A list of the top level nodes.
      deps_func: Takes a node and returns a list of its direct
          dependencies.
    Returns:
      All transitive dependencies of nodes in top, ordered so that every
      node appears after all of its dependencies.
    """
    # Depth-first walk; first-visit order breaks ties deterministically.
    visited = collections.OrderedDict()

    def visit(nodes):
        for node in nodes:
            if node not in visited:
                deps = deps_func(node)
                visit(deps)
                visited[node] = deps

    visit(top)
    return list(visited)
541
542
def _compute_python_dependencies():
    """Returns sorted relative paths of imported in-checkout modules.

    A module counts as a "system" import (and is excluded) when its file
    lives outside DIR_SOURCE_ROOT. Returned paths are relative to the
    current directory.
    """
    _force_lazy_modules_to_load()
    assert os.path.isabs(DIR_SOURCE_ROOT)

    paths = set()
    for module in sys.modules.values():
        module_file = getattr(module, '__file__', None)
        if not module_file:
            continue
        abs_path = os.path.abspath(module_file)
        if not abs_path.startswith(DIR_SOURCE_ROOT):
            continue
        # Record the source file, not the compiled artifact.
        if abs_path.endswith('.pyc'):
            abs_path = abs_path[:-1]
        paths.add(os.path.relpath(abs_path))
    return sorted(paths)
569
570
571def _force_lazy_modules_to_load():
572    """Forces any lazily imported modules to fully load themselves.
573
574    Inspecting the modules' __file__ attribute causes lazily imported modules
575    (e.g. from email) to get fully imported and update sys.modules. Iterate
576    over the values until sys.modules stabilizes so that no modules are missed.
577    """
578    while True:
579        num_modules_before = len(list(sys.modules.keys()))
580        for m in list(sys.modules.values()):
581            if m is not None and hasattr(m, '__file__'):
582                _ = m.__file__
583        num_modules_after = len(list(sys.modules.keys()))
584        if num_modules_before == num_modules_after:
585            break
586
587
def add_depfile_option(parser):
    """Adds --depfile to an optparse or argparse parser."""
    add = getattr(parser, 'add_option', None) or parser.add_argument
    add('--depfile', help='Path to depfile (refer to `gn help depfile`)')
594
595
def write_depfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
    """Writes a ninja depfile mapping |first_gn_output| to its inputs.

    Args:
      depfile_path: Destination depfile (must differ from the output).
      first_gn_output: Single output path recorded in the depfile; ninja
          does not support multiple outputs in depfiles.
      inputs: Dependency paths to record.
      add_pydeps: Also record this script's python imports as deps.
    """
    assert depfile_path != first_gn_output  # http://crbug.com/646165
    deps = list(inputs or [])
    if add_pydeps:
        deps = _compute_python_dependencies() + deps
    deps.sort()
    make_directory(os.path.dirname(depfile_path))

    def escape(p):
        # ninja expects spaces in paths to be backslash-escaped.
        return p.replace(' ', '\\ ')

    with open(depfile_path, 'w') as depfile:
        depfile.write('{}: {}\n'.format(
            escape(first_gn_output), ' '.join(escape(d) for d in deps)))
609
610
def expand_file_args(args):
    """Replaces file-arg placeholders in args.

    These placeholders have the form:
      @FileArg(filename:key1:key2:...:keyn)

    The value of such a placeholder is calculated by reading 'filename' as
    json and extracting the value at [key1][key2]...[keyn]. Missing keys
    expand to '' (with a warning) rather than failing.

    Note: This intentionally does not return the list of files that appear
    in such placeholders. An action that uses file-args *must* know the
    paths of those files prior to the parsing of the arguments (typically
    by explicitly listing them in the action's inputs in build files).
    """
    new_args = list(args)
    file_jsons = dict()
    r = re.compile(r'@FileArg\((.*?)\)')
    for i, arg in enumerate(args):
        match = r.search(arg)
        if not match:
            continue

        if match.end() != len(arg):
            raise Exception(
                'Unexpected characters after FileArg: {}'.format(arg))

        lookup_path = match.group(1).split(':')
        file_path = lookup_path[0]
        # Each referenced json file is parsed at most once.
        if file_path not in file_jsons:
            with open(file_path) as f:
                file_jsons[file_path] = json.load(f)

        expansion = file_jsons[file_path]

        for k in lookup_path[1:]:
            if k in expansion:
                expansion = expansion[k]
            else:
                expansion = ""
                # Fixed typo: was "WARNNING".
                print("WARNING", lookup_path[1:],
                      "is not in metadata file, set default ''")
        # This should match parse_gn_list. The output is either a
        # GN-formatted list or a literal (with no quotes).
        if isinstance(expansion, list):
            new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(
                expansion)
        else:
            new_args[i] = arg[:match.start()] + str(expansion)

    return new_args
660
661
def read_sources_list(sources_list_file_name):
    """Reads a GN-written file containing list of file names and returns a list.

    Note that this function should not be used to parse response files.
    """
    with open(sources_list_file_name) as f:
        return [line.strip() for line in f]
669
670
def call_and_write_depfile_if_stale(function,
                                    options,
                                    record_path=None,
                                    input_paths=None,
                                    input_strings=None,
                                    output_paths=None,
                                    force=False,
                                    pass_changes=False,
                                    depfile_deps=None,
                                    add_pydeps=True):
    """Runs |function| via md5_check and writes a depfile when configured.

    A depfile is automatically added to output_paths when present in the
    |options| argument, and is written after |function| runs.

    By default only python dependencies are written to the depfile; input
    paths not captured by GN deps belong in |depfile_deps|. Paths already
    captured by GN deps must not be written to the depfile, since GN arg
    changes can alter GN deps and such changes are not immediately
    reflected in depfiles (http://crbug.com/589311).
    """
    if not output_paths:
        raise Exception('At least one output_path must be specified.')
    input_paths = list(input_paths or [])
    input_strings = list(input_strings or [])
    output_paths = list(output_paths or [])

    python_deps = None
    if getattr(options, 'depfile', None):
        python_deps = _compute_python_dependencies()
        input_paths += python_deps
        output_paths += [options.depfile]

    def on_stale_md5(changes):
        function(*((changes,) if pass_changes else ()))
        if python_deps is None:
            return
        all_depfile_deps = list(python_deps) if add_pydeps else []
        all_depfile_deps.extend(depfile_deps or [])
        write_depfile(options.depfile,
                      output_paths[0],
                      all_depfile_deps,
                      add_pydeps=False)

    md5_check.call_and_record_if_stale(on_stale_md5,
                                       record_path=record_path,
                                       input_paths=input_paths,
                                       input_strings=input_strings,
                                       output_paths=output_paths,
                                       force=force,
                                       pass_changes=True)
724
725
def get_all_files(base, follow_symlinks=False):
    """Returns every file path under |base|.

    Returned paths are the os.walk join of root and filename, i.e. they
    start with the |base| prefix as passed in.
    """
    found = []
    for root, _, files in os.walk(base, followlinks=follow_symlinks):
        found += [os.path.join(root, name) for name in files]
    return found
734
735
def rebase_path(input, new_base=None, current_base="."):
    """Re-expresses |input| (relative to |current_base|) against |new_base|.

    Without |new_base|, returns the resolved absolute path instead.
    """
    combined = os.path.join(current_base, input)
    if new_base:
        return os.path.relpath(combined, new_base)
    return os.path.realpath(combined)
741