#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Contains common helpers for GN action()s."""

import collections
import contextlib
from distutils import extension
import filecmp
import fnmatch
import json
import os
import pipes
import re
import shutil
import stat
import subprocess
import sys
import tempfile
import zipfile
import optparse

# Any new non-system import must be added to:

# Some clients do not add //build/scripts/util to PYTHONPATH.
from . import md5_check  # pylint: disable=relative-import

sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
import gn_helpers

# Definition copied from pylib/constants/__init__.py to avoid adding
# a dependency on pylib.
DIR_SOURCE_ROOT = os.environ.get(
    'CHECKOUT_SOURCE_ROOT',
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                     os.pardir, os.pardir)))

HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
_HERMETIC_FILE_ATTR = (0o644 << 16)


@contextlib.contextmanager
def temp_dir():
    dirname = tempfile.mkdtemp()
    try:
        yield dirname
    finally:
        shutil.rmtree(dirname)


def make_directory(dir_path):
    """Creates dir_path (including parents); errors such as the directory
    already existing are ignored."""
    try:
        os.makedirs(dir_path)
    except OSError:
        pass


def delete_directory(dir_path):
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)


def touch(path, fail_if_missing=False):
    """Updates the mtime of |path|, creating it first unless fail_if_missing."""
    if fail_if_missing and not os.path.exists(path):
        raise Exception(path + ' doesn\'t exist.')

    make_directory(os.path.dirname(path))
    with open(path, 'a'):
        os.utime(path, None)


def find_in_directory(directory, filename_filter):
    """Returns paths under |directory| whose basenames match |filename_filter|
    (an fnmatch pattern)."""
    files = []
    for root, _dirnames, filenames in os.walk(directory):
        matched_files = fnmatch.filter(filenames, filename_filter)
        files.extend((os.path.join(root, f) for f in matched_files))
    return files


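# Illustrative input for read_build_vars() below (format assumed from the
# parsing logic: one KEY=VALUE pair per line):
#   java_home=/usr/lib/jvm/java-8
#   version_code=42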
def read_build_vars(path):
    """Parses a build_vars.txt into a dict."""
    with open(path) as f:
        return dict(l.rstrip().split('=', 1) for l in f)


def parse_gn_list(gn_string):
    """Converts a command-line parameter into a list.

    If the input starts with a '[' it is assumed to be a GN-formatted list and
    is parsed accordingly. When empty, an empty list is returned.
    Otherwise, the parameter is treated as a single raw string (not
    GN-formatted, in that it is not assumed to have literal quotes that must be
    removed) and a list containing that string is returned.

    The common use for this behavior is in the OHOS build, where arguments can
    be lists of @FileArg references that are expanded via expand_file_args().
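
    Example (illustrative; assumes gn_helpers parses standard GN list syntax):
      parse_gn_list('["a", "b"]')  # -> ['a', 'b']
      parse_gn_list('plain')       # -> ['plain']
      parse_gn_list('')            # -> []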
100    """
101    if gn_string.startswith('['):
102        parser = gn_helpers.GNValueParser(gn_string)
103        return parser.ParseList()
104    if len(gn_string):
105        return [gn_string]
106    return []
107
108
109def parse_and_flatten_gn_lists(gn_lists):
110    ret = []
111    for arg in gn_lists:
112        ret.extend(parse_gn_list(arg))
113    return ret
114
115
116def check_options(options, parser, required=None):
117    if not required:
118        return
119    for option_name in required:
120        if getattr(options, option_name) is None:
121            parser.error('--%s is required' % option_name.replace('_', '-'))
122
123
def write_json(obj, path, only_if_changed=False):
    """Writes |obj| as JSON to |path|, skipping the write when only_if_changed
    is set and the serialized content is unchanged."""
    old_dump = None
    if os.path.exists(path):
        with open(path, 'r') as oldfile:
            old_dump = oldfile.read()

    new_dump = json.dumps(obj,
                          sort_keys=True,
                          indent=2,
                          separators=(',', ': '))

    if not only_if_changed or old_dump != new_dump:
        with open(path, 'w') as outfile:
            outfile.write(new_dump)


@contextlib.contextmanager
def atomic_output(path, only_if_changed=True):
    """Helper to prevent half-written outputs.

    Args:
      path: Path to the final output file, which will be written atomically.
      only_if_changed: If True (the default), do not touch the filesystem
        if the content has not changed.
    Returns:
      A python context manager that yields a NamedTemporaryFile instance
      that must be used by clients to write the data to. On exit, the
      manager will try to replace the final output file with the
      temporary one if necessary. The temporary file is always destroyed
      on exit.
    Example:
      with build_utils.atomic_output(output_path) as tmp_file:
        subprocess.check_call(['prog', '--output', tmp_file.name])
    """
    # Create in same directory to ensure same filesystem when moving.
    with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
                                     dir=os.path.dirname(path),
                                     delete=False) as f:
        try:
            # Change the tempfile permission to 0o664.
            os.fchmod(f.fileno(), 0o664)
            yield f

            # The file should be closed before comparison/move.
            f.close()
            if not (only_if_changed and os.path.exists(path)
                    and filecmp.cmp(f.name, path)):
                shutil.move(f.name, path)
        finally:
            if os.path.exists(f.name):
                os.unlink(f.name)


class called_process_error(Exception):
    """This exception is raised when the process run by check_output
    exits with a non-zero exit code."""
    def __init__(self, cwd, args, output):
        super(called_process_error, self).__init__()
        self.cwd = cwd
        self.args = args
        if isinstance(output, bytes):
            self.output = output.decode()
        else:
            self.output = output

    def __str__(self):
        # A user should be able to simply copy and paste the command that failed
        # into their shell.
        copyable_command = '( cd {}; {} )'.format(
            os.path.abspath(self.cwd), ' '.join(map(pipes.quote, self.args)))
        return 'Command failed: {}\n{}'.format(copyable_command, self.output)


def filter_lines(output, filter_string):
    """Output filter from build_utils.check_output.

    Args:
      output: Executable output as from build_utils.check_output.
      filter_string: An RE string that will filter (remove) matching
          lines from |output|.

    Returns:
      The filtered output, as a single string.
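
    Example (illustrative; |ninja_output| is a hypothetical string):
      filter_lines(ninja_output, r'^warning:') drops every line of
      |ninja_output| that starts with "warning:".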
207    """
208    re_filter = re.compile(filter_string)
209    return '\n'.join(line for line in output.splitlines()
210                     if not re_filter.search(line))
211
212
213# This can be used in most cases like subprocess.check_output(). The output,
214# particularly when the command fails, better highlights the command's failure.
215# If the command fails, raises a build_utils.called_process_error.
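# Illustrative usage (a sketch, not taken from callers of this module):
#   version = check_output(['gn', '--version'])
#   # Tolerate exit code 1 (e.g. "no matches" from grep):
#   check_output(['grep', '-r', 'TODO', 'src'],
#                fail_func=lambda returncode, stderr: returncode not in (0, 1))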
def check_output(args,
                 cwd=None,
                 env=None,
                 print_stdout=False,
                 print_stderr=True,
                 stdout_filter=None,
                 stderr_filter=None,
                 fail_func=lambda returncode, stderr: returncode != 0):
    if not cwd:
        cwd = os.getcwd()

    child = subprocess.Popen(args,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             cwd=cwd,
                             env=env)
    stdout, stderr = child.communicate()

    if stdout_filter is not None:
        stdout = stdout_filter(stdout)

    if stderr_filter is not None:
        stderr = stderr_filter(stderr)
    if isinstance(stdout, bytes):
        stdout = stdout.decode()
    if isinstance(stderr, bytes):
        stderr = stderr.decode()

    if fail_func(child.returncode, stderr):
        raise called_process_error(cwd, args, stdout + stderr)

    if print_stdout:
        if isinstance(stdout, bytes):
            stdout = stdout.decode()
        if stdout:
            sys.stdout.write(stdout)
    if print_stderr:
        if isinstance(stderr, bytes):
            stderr = stderr.decode()
        if stderr:
            sys.stderr.write(stderr)
    return stdout


def get_modified_time(path):
    # For a symlink, the modified time should be the greater of the link's
    # modified time and the modified time of the target.
    return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)


def is_time_stale(output, inputs):
    if not os.path.exists(output):
        return True

    output_time = get_modified_time(output)
    for i in inputs:
        if get_modified_time(i) > output_time:
            return True
    return False


def _check_zip_path(name):
    if os.path.normpath(name) != name:
        raise Exception('Non-canonical zip path: %s' % name)
    if os.path.isabs(name):
        raise Exception('Absolute zip path: %s' % name)


def _is_symlink(zip_file, name):
    zi = zip_file.getinfo(name)

    # The two high-order bytes of ZipInfo.external_attr represent
    # UNIX permissions and file type bits.
    return stat.S_ISLNK(zi.external_attr >> 16)


def extract_all(zip_path,
                path=None,
                no_clobber=True,
                pattern=None,
                predicate=None):
    """Extracts all files from |zip_path| into |path| (defaults to the current
    directory), optionally filtering entries by |pattern| (fnmatch) or
    |predicate|. Returns the list of extracted paths."""
    if path is None:
        path = os.getcwd()
    elif not os.path.exists(path):
        make_directory(path)

    if not zipfile.is_zipfile(zip_path):
        raise Exception('Invalid zip file: %s' % zip_path)

    extracted = []
    with zipfile.ZipFile(zip_path) as z:
        for name in z.namelist():
            if name.endswith('/'):
                make_directory(os.path.join(path, name))
                continue
            if pattern is not None:
                if not fnmatch.fnmatch(name, pattern):
                    continue
            if predicate and not predicate(name):
                continue
            _check_zip_path(name)
            if no_clobber:
                output_path = os.path.join(path, name)
                if os.path.exists(output_path):
                    raise Exception('Path already exists from zip: %s %s %s' %
                                    (zip_path, name, output_path))
            if _is_symlink(z, name):
                dest = os.path.join(path, name)
                make_directory(os.path.dirname(dest))
                os.symlink(z.read(name), dest)
                extracted.append(dest)
            else:
                z.extract(name, path)
                extracted.append(os.path.join(path, name))

    return extracted


def add_to_zip_hermetic(zip_file,
                        zip_path,
                        src_path=None,
                        data=None,
                        compress=None):
    """Adds a file to the given ZipFile with a hard-coded modified time.

    Args:
      zip_file: ZipFile instance to add the file to.
      zip_path: Destination path within the zip file.
      src_path: Path of the source file. Mutually exclusive with |data|.
      data: File data as a string.
      compress: Whether to enable compression. Default is taken from ZipFile
          constructor.
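
    Example (illustrative):
      with zipfile.ZipFile('out.zip', 'w') as z:
        add_to_zip_hermetic(z, 'assets/cfg.json', src_path='gen/cfg.json')
        add_to_zip_hermetic(z, 'NOTICE', data='generated text', compress=True)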
348    """
349    assert (src_path is None) != (data is None), (
350        '|src_path| and |data| are mutually exclusive.')
351    _check_zip_path(zip_path)
352    zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
353    zipinfo.external_attr = _HERMETIC_FILE_ATTR
354
355    if src_path and os.path.islink(src_path):
356        zipinfo.filename = zip_path
357        zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
358        zip_file.writestr(zipinfo, os.readlink(src_path))
359        return
360
361    # we want to use _HERMETIC_FILE_ATTR, so manually set
362    # the few attr bits we care about.
363    if src_path:
364        st = os.stat(src_path)
365        for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
366            if st.st_mode & mode:
367                zipinfo.external_attr |= mode << 16
368
369    if src_path:
370        with open(src_path, 'rb') as f:
371            data = f.read()
372
373    # zipfile will deflate even when it makes the file bigger. To avoid
374    # growing files, disable compression at an arbitrary cut off point.
375    if len(data) < 16:
376        compress = False
377
378    # None converts to ZIP_STORED, when passed explicitly rather than the
379    # default passed to the ZipFile constructor.
380    compress_type = zip_file.compression
381    if compress is not None:
382        compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
383    zip_file.writestr(zipinfo, data, compress_type)
384
385
386def do_zip(inputs,
387           output,
388           base_dir=None,
389           compress_fn=None,
390           zip_prefix_path=None):
391    """Creates a zip file from a list of files.
392
393    Args:
394      inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
395      output: Destination .zip file.
396      base_dir: Prefix to strip from inputs.
397      compress_fn: Applied to each input to determine whether or not to compress.
398          By default, items will be |zipfile.ZIP_STORED|.
399      zip_prefix_path: Path prepended to file path in zip file.
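
    Example (illustrative):
      do_zip(['gen/res/a.txt', ('b.txt', 'gen/b.txt')], 'out.zip',
             base_dir='gen', zip_prefix_path='pkg')
      # Writes out.zip containing pkg/res/a.txt and pkg/b.txt.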
400    """
401    input_tuples = []
402    for tup in inputs:
403        if isinstance(tup, str):
404            tup = (os.path.relpath(tup, base_dir), tup)
405        input_tuples.append(tup)
406
407    # Sort by zip path to ensure stable zip ordering.
408    input_tuples.sort(key=lambda tup: tup[0])
409    with zipfile.ZipFile(output, 'w') as outfile:
410        for zip_path, fs_path in input_tuples:
411            if zip_prefix_path:
412                zip_path = os.path.join(zip_prefix_path, zip_path)
413            compress = compress_fn(zip_path) if compress_fn else None
414            add_to_zip_hermetic(outfile,
415                                zip_path,
416                                src_path=fs_path,
417                                compress=compress)
418
419
420def zip_dir(output, base_dir, compress_fn=None, zip_prefix_path=None):
421    """Creates a zip file from a directory."""
422    inputs = []
423    for root, _, files in os.walk(base_dir):
424        for f in files:
425            inputs.append(os.path.join(root, f))
426
427    with atomic_output(output) as f:
428        do_zip(inputs,
429               f,
430               base_dir,
431               compress_fn=compress_fn,
432               zip_prefix_path=zip_prefix_path)
433
434
435def matches_glob(path, filters):
436    """Returns whether the given path matches any of the given glob patterns."""
437    return filters and any(fnmatch.fnmatch(path, f) for f in filters)
438
439
440def _strip_dst_name(dst_name, options):
441    # Strip specific directories and file if options is not None
442    if options and options.stripFile:
443        for f in options.stripFile:
444            if fnmatch.fnmatch(dst_name, '*/' + f):
445                return True
446    if options and options.stripDir:
447        for d in options.stripDir:
448            if fnmatch.fnmatch(dst_name, d + '/*'):
449                return True
450    return False
451
452
453def merge_zips(output, input_zips, path_transform=None, merge_args=None):
454    """Combines all files from |input_zips| into |output|.
455
456    Args:
457      output: Path or ZipFile instance to add files to.
458      input_zips: Iterable of paths to zip files to merge.
459      path_transform: Called for each entry path. Returns a new path, or None to
460          skip the file.
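
    Example (illustrative):
      merge_zips('merged.zip', ['a.zip', 'b.zip'],
                 path_transform=lambda p: None if p.endswith('.bak') else p)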
461    """
462    options = None
463    if merge_args:
464        parser = optparse.OptionParser()
465        parser.add_option('--stripDir',
466                          action='append',
467                          help='strip specific directory')
468        parser.add_option('--stripFile',
469                          action='append',
470                          help='strip specific file.')
471
472        args = expand_file_args(merge_args)
473        options, _ = parser.parse_args(args)
474
475    path_transform = path_transform or (lambda p: p)
476    added_names = set()
477
478    output_is_already_open = not isinstance(output, str)
479    if output_is_already_open:
480        assert isinstance(output, zipfile.ZipFile)
481        out_zip = output
482    else:
483        out_zip = zipfile.ZipFile(output, 'w')
484
485    try:
486        for in_file in input_zips:
487            with zipfile.ZipFile(in_file, 'r') as in_zip:
488                # ijar creates zips with null CRCs.
489                in_zip._expected_crc = None
490                for info in in_zip.infolist():
491                    # Ignore directories.
492                    if info.filename[-1] == '/':
493                        continue
494                    dst_name = path_transform(info.filename)
495                    if not dst_name:
496                        continue
497                    if _strip_dst_name(dst_name, options):
498                        continue
499                    already_added = dst_name in added_names
500                    if not already_added:
501                        add_to_zip_hermetic(
502                            out_zip,
503                            dst_name,
504                            data=in_zip.read(info),
505                            compress=info.compress_type != zipfile.ZIP_STORED)
506                        added_names.add(dst_name)
507    finally:
508        if not output_is_already_open:
509            out_zip.close()
510
511
512def get_sorted_transitive_dependencies(top, deps_func):
513    """Gets the list of all transitive dependencies in sorted order.
514
515    There should be no cycles in the dependency graph (crashes if cycles exist).
516
517    Args:
518      top: A list of the top level nodes
519      deps_func: A function that takes a node and returns a list of its direct
520          dependencies.
521    Returns:
522      A list of all transitive dependencies of nodes in top, in order (a node
523      will appear in the list at a higher index than all of its dependencies).
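
    Example (illustrative):
      deps = {'a': ['b', 'c'], 'b': ['c'], 'c': []}
      get_sorted_transitive_dependencies(['a'], deps.get)  # -> ['c', 'b', 'a']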
524    """
525    # Find all deps depth-first, maintaining original order in the case of ties.
526    deps_map = collections.OrderedDict()
527
528    def discover(nodes):
529        for node in nodes:
530            if node in deps_map:
531                continue
532            deps = deps_func(node)
533            discover(deps)
534            deps_map[node] = deps
535
536    discover(top)
537    return list(deps_map.keys())
538
539
540def _compute_python_dependencies():
541    """Gets the paths of imported non-system python modules.
542
543    A path is assumed to be a "system" import if it is outside of chromium's
544    src/. The paths will be relative to the current directory.
545    """
546    _force_lazy_modules_to_load()
547    module_paths = (m.__file__ for m in sys.modules.values()
548                    if m is not None and hasattr(m, '__file__') and m.__file__)
549    abs_module_paths = list(map(os.path.abspath, module_paths))
550
551    assert os.path.isabs(DIR_SOURCE_ROOT)
552    non_system_module_paths = [
553        p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)
554    ]
555
556    def convert_pyc_to_py(s):
557        if s.endswith('.pyc'):
558            return s[:-1]
559        return s
560
561    non_system_module_paths = list(
562        map(convert_pyc_to_py, non_system_module_paths))
563    non_system_module_paths = list(
564        map(os.path.relpath, non_system_module_paths))
565    return sorted(set(non_system_module_paths))
566
567
568def _force_lazy_modules_to_load():
569    """Forces any lazily imported modules to fully load themselves.
570
571    Inspecting the modules' __file__ attribute causes lazily imported modules
572    (e.g. from email) to get fully imported and update sys.modules. Iterate
573    over the values until sys.modules stabilizes so that no modules are missed.
574    """
575    while True:
576        num_modules_before = len(list(sys.modules.keys()))
577        for m in list(sys.modules.values()):
578            if m is not None and hasattr(m, '__file__'):
579                _ = m.__file__
580        num_modules_after = len(list(sys.modules.keys()))
581        if num_modules_before == num_modules_after:
582            break
583
584
585def add_depfile_option(parser):
586    if hasattr(parser, 'add_option'):
587        func = parser.add_option
588    else:
589        func = parser.add_argument
590    func('--depfile', help='Path to depfile (refer to `gn help depfile`)')
591
592
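# Illustrative output (a sketch): write_depfile('foo.d', 'gen/foo.stamp',
# inputs=['a.py', 'b.py'], add_pydeps=False) writes a single dependency line:
#   gen/foo.stamp: a.py b.py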
def write_depfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
    assert depfile_path != first_gn_output  # http://crbug.com/646165
    inputs = inputs or []
    if add_pydeps:
        inputs = _compute_python_dependencies() + inputs
    inputs = sorted(inputs)
    make_directory(os.path.dirname(depfile_path))
    # Ninja does not support multiple outputs in depfiles.
    with open(depfile_path, 'w') as depfile:
        depfile.write(first_gn_output.replace(' ', '\\ '))
        depfile.write(': ')
        depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
        depfile.write('\n')


def expand_file_args(args):
    """Replaces file-arg placeholders in args.

    These placeholders have the form:
      @FileArg(filename:key1:key2:...:keyn)

    The value of such a placeholder is calculated by reading 'filename' as JSON
    and then extracting the value at [key1][key2]...[keyn].

    Note: This intentionally does not return the list of files that appear in
    such placeholders. An action that uses file-args *must* know the paths of
    those files prior to the parsing of the arguments (typically by explicitly
    listing them in the action's inputs in build files).
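
    Example (illustrative): if deps.json contains {"outputs": {"jar": "a.jar"}},
    the argument '--input=@FileArg(deps.json:outputs:jar)' expands to
    '--input=a.jar'.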
621    """
622    new_args = list(args)
623    file_jsons = dict()
624    r = re.compile(r'@FileArg\((.*?)\)')
625    for i, arg in enumerate(args):
626        match = r.search(arg)
627        if not match:
628            continue
629
630        if match.end() != len(arg):
631            raise Exception(
632                'Unexpected characters after FileArg: {}'.format(arg))
633
634        lookup_path = match.group(1).split(':')
635        file_path = lookup_path[0]
636        if file_path not in file_jsons:
637            with open(file_path) as f:
638                file_jsons[file_path] = json.load(f)
639
640        expansion = file_jsons[file_path]
641
642        for k in lookup_path[1:]:
643            if k in expansion:
644                expansion = expansion[k]
645            else:
646                expansion = ""
647                print("WARNNING", lookup_path[1:], "is not in metadata file, set default ''")
648        # This should match parse_gn_list. The output is either a GN-formatted list
649        # or a literal (with no quotes).
650        if isinstance(expansion, list):
651            new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(
652                expansion)
653        else:
654            new_args[i] = arg[:match.start()] + str(expansion)
655
656    return new_args
657
658
659def read_sources_list(sources_list_file_name):
660    """Reads a GN-written file containing list of file names and returns a list.
661
662    Note that this function should not be used to parse response files.
663    """
664    with open(sources_list_file_name) as f:
665        return [file_name.strip() for file_name in f]
666
667
668def call_and_write_depfile_if_stale(function,
669                                    options,
670                                    record_path=None,
671                                    input_paths=None,
672                                    input_strings=None,
673                                    output_paths=None,
674                                    force=False,
675                                    pass_changes=False,
676                                    depfile_deps=None,
677                                    add_pydeps=True):
678    """Wraps md5_check.call_and_record_if_stale() and writes a depfile if applicable.
679
680    Depfiles are automatically added to output_paths when present in the
681    |options| argument. They are then created after |function| is called.
682
683    By default, only python dependencies are added to the depfile. If there are
684    other input paths that are not captured by GN deps, then they should be
685    listed in depfile_deps. It's important to write paths to the depfile that
686    are already captured by GN deps since GN args can cause GN deps to change,
687    and such changes are not immediately reflected in depfiles
688    (http://crbug.com/589311).
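
    Example (illustrative sketch; do_build and the option names are
    hypothetical):
      call_and_write_depfile_if_stale(lambda: do_build(options),
                                      options,
                                      input_paths=[options.input],
                                      input_strings=[options.extra_args],
                                      output_paths=[options.output])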
689    """
690    if not output_paths:
691        raise Exception('At least one output_path must be specified.')
692    input_paths = list(input_paths or [])
693    input_strings = list(input_strings or [])
694    output_paths = list(output_paths or [])
695
696    python_deps = None
697    if hasattr(options, 'depfile') and options.depfile:
698        python_deps = _compute_python_dependencies()
699        input_paths += python_deps
700        output_paths += [options.depfile]
701
702    def on_stale_md5(changes):
703        args = (changes, ) if pass_changes else ()
704        function(*args)
705        if python_deps is not None:
706            all_depfile_deps = list(python_deps) if add_pydeps else []
707            if depfile_deps:
708                all_depfile_deps.extend(depfile_deps)
709            write_depfile(options.depfile,
710                          output_paths[0],
711                          all_depfile_deps,
712                          add_pydeps=False)
713
714    md5_check.call_and_record_if_stale(on_stale_md5,
715                                       record_path=record_path,
716                                       input_paths=input_paths,
717                                       input_strings=input_strings,
718                                       output_paths=output_paths,
719                                       force=force,
720                                       pass_changes=True)
721
722
723def get_all_files(base, follow_symlinks=False):
724    """Returns a list of all the files in |base|. Each entry is relative to the
725    last path entry of |base|."""
726    result = []
727    for root, _, files in os.walk(base, followlinks=follow_symlinks):
728        result.extend([os.path.join(root, f) for f in files])
729
730    return result
731
732
733def rebase_path(input, new_base=None, current_base="."):
734    if new_base:
735        return os.path.relpath(os.path.join(current_base, input), new_base)
736    else:
737        return os.path.realpath(os.path.join(current_base, input))
738