• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3# Copyright 2013 The Chromium Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6"""Contains common helpers for GN action()s."""
7
8import collections
9import contextlib
10from distutils import extension
11import filecmp
12import fnmatch
13import json
14import os
15import pipes
16import re
17import shutil
18import stat
19import subprocess
20import sys
21import tempfile
22import zipfile
23import optparse
24
25# Any new non-system import must be added to:
26
27sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
28import gn_helpers
29
30# Some clients do not add //build/scripts/util to PYTHONPATH.
31from . import md5_check  # pylint: disable=relative-import
32
33# Definition copied from pylib/constants/__init__.py to avoid adding
34# a dependency on pylib.
35DIR_SOURCE_ROOT = os.environ.get(
36    'CHECKOUT_SOURCE_ROOT',
37    os.path.abspath(
38        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
39                     os.pardir, os.pardir)))
40
41HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
42_HERMETIC_FILE_ATTR = (0o644 << 16)
43
44
45@contextlib.contextmanager
46def temp_dir():
47    dirname = tempfile.mkdtemp()
48    try:
49        yield dirname
50    finally:
51        shutil.rmtree(dirname)
52
53
54def make_directory(dir_path):
55    try:
56        os.makedirs(dir_path, exist_ok=True)
57    except OSError:
58        pass
59
60
61def delete_directory(dir_path):
62    if os.path.exists(dir_path):
63        shutil.rmtree(dir_path)
64
65
66def check_instance(check_target, instance_name, check_type):
67    if not isinstance(check_target, check_type):
68        raise Exception(f'{instance_name} config type is not {check_type}')
69
70
71def touch(path, fail_if_missing=False):
72    if fail_if_missing and not os.path.exists(path):
73        raise Exception(path + ' doesn\'t exist.')
74
75    make_directory(os.path.dirname(path))
76    with open(path, 'a'):
77        os.utime(path, None)
78
79
80def find_in_directory(directory, filename_filter):
81    files = []
82    for root, _dirnames, filenames in os.walk(directory):
83        matched_files = fnmatch.filter(filenames, filename_filter)
84        files.extend((os.path.join(root, f) for f in matched_files))
85    return files
86
87
88def read_build_vars(path):
89    """Parses a build_vars.txt into a dict."""
90    with open(path) as f:
91        return dict(l.rstrip().split('=', 1) for l in f)
92
93
94def parse_gn_list(gn_string):
95    """Converts a command-line parameter into a list.
96
97    If the input starts with a '[' it is assumed to be a GN-formatted list and
98    it will be parsed accordingly. When empty an empty list will be returned.
99    Otherwise, the parameter will be treated as a single raw string (not
100    GN-formatted in that it's not assumed to have literal quotes that must be
101    removed) and a list will be returned containing that string.
102
103    The common use for this behavior is in the ohos build where things can
104    take lists of @FileArg references that are expanded via expand_file_args.
105    """
106    if gn_string.startswith('['):
107        parser = gn_helpers.GNValueParser(gn_string)
108        return parser.parse_list()
109    if len(gn_string):
110        return [gn_string]
111    return []
112
113
114def parse_and_flatten_gn_lists(gn_lists):
115    ret = []
116    for arg in gn_lists:
117        ret.extend(parse_gn_list(arg))
118    return ret
119
120
121def check_options(options, parser, required=None):
122    if not required:
123        return
124    for option_name in required:
125        if getattr(options, option_name) is None:
126            parser.error('--%s is required' % option_name.replace('_', '-'))
127
128
129def write_json(obj, path, only_if_changed=False):
130    old_dump = None
131    if os.path.exists(path):
132        with open(path, 'r') as oldfile:
133            old_dump = oldfile.read()
134
135    new_dump = json.dumps(obj,
136                          sort_keys=True,
137                          indent=2,
138                          separators=(',', ': '))
139
140    if not only_if_changed or old_dump != new_dump:
141        with open(path, 'w') as outfile:
142            outfile.write(new_dump)
143
144
145@contextlib.contextmanager
146def atomic_output(path, only_if_changed=True):
147    """Helper to prevent half-written outputs.
148
149    Args:
150      path: Path to the final output file, which will be written atomically.
151      only_if_changed: If True (the default), do not touch the filesystem
152        if the content has not changed.
153    Returns:
154      A python context manager that yields a NamedTemporaryFile instance
155      that must be used by clients to write the data to. On exit, the
156      manager will try to replace the final output file with the
157      temporary one if necessary. The temporary file is always destroyed
158      on exit.
159    Example:
160      with build_utils.atomic_output(output_path) as tmp_file:
161        subprocess.check_call(['prog', '--output', tmp_file.name])
162    """
163    # Create in same directory to ensure same filesystem when moving.
164    with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
165                                     dir=os.path.dirname(path),
166                                     delete=False) as f:
167        try:
168            # Change tempfile permission to 664
169            os.fchmod(f.fileno(), 0o664)
170            yield f
171
172            # file should be closed before comparison/move.
173            f.close()
174            if not (only_if_changed and os.path.exists(path)
175                    and filecmp.cmp(f.name, path)):
176                shutil.move(f.name, path)
177        finally:
178            if os.path.exists(f.name):
179                os.unlink(f.name)
180
181
182class CalledProcessError(Exception):
183    """This exception is raised when the process run by check_output
184    exits with a non-zero exit code.
185    """
186    def __init__(self, cwd, args, output):
187        super(CalledProcessError, self).__init__()
188        self.cwd = cwd
189        self.args = args
190        if isinstance(output, bytes):
191            self.output = output.decode()
192        else:
193            self.output = output
194
195    def __str__(self):
196        # A user should be able to simply copy and paste the command that failed
197        # into their shell.
198        copyable_command = '( cd {}; {} )'.format(
199            os.path.abspath(self.cwd), ' '.join(map(pipes.quote, self.args)))
200        return 'Command failed: {}\n{}'.format(copyable_command, self.output)
201
202
203def filter_lines(output, filter_string):
204    """Output filter from build_utils.check_output.
205
206    Args:
207      output: Executable output as from build_utils.check_output.
208      filter_string: An RE string that will filter (remove) matching
209          lines from |output|.
210
211    Returns:
212      The filtered output, as a single string.
213    """
214    re_filter = re.compile(filter_string)
215    return '\n'.join(line for line in output.splitlines()
216                     if not re_filter.search(line))
217
218
219# This can be used in most cases like subprocess.check_output(). The output,
220# particularly when the command fails, better highlights the command's failure.
221# If the command fails, raises a build_utils.CalledProcessError.
222def check_output(args,
223                 cwd=None,
224                 env=None,
225                 print_stdout=False,
226                 print_stderr=True,
227                 stdout_filter=None,
228                 stderr_filter=None,
229                 fail_func=lambda returncode, stderr: returncode != 0):
230    if not cwd:
231        cwd = os.getcwd()
232
233    cache_exec = None
234    if env and env.pop("useCompileCache", False):
235        cache_exec = os.environ.get("COMPILE_CACHE_EXEC")
236    if cache_exec:
237        execute_args = [cache_exec, "--cwd", cwd]
238        execute_args.extend(args)
239        execute_args.extend(["--build-env"] + [f"{k}={v}" for k, v in env.items() if k != "addTestRunner"])
240        if env.pop("addTestRunner", False):
241            execute_args.append("--add-test-runner")
242        child = subprocess.Popen(execute_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
243    else:
244        child = subprocess.Popen(args,
245                                stdout=subprocess.PIPE,
246                                stderr=subprocess.PIPE,
247                                cwd=cwd,
248                                env=env)
249    stdout, stderr = child.communicate()
250
251    if stdout_filter is not None:
252        stdout = stdout_filter(stdout)
253
254    if stderr_filter is not None:
255        stderr = stderr_filter(stderr)
256    if isinstance(stdout, bytes):
257        stdout = stdout.decode()
258    if isinstance(stderr, bytes):
259        stderr = stderr.decode()
260
261    if fail_func(child.returncode, stderr):
262        raise CalledProcessError(cwd, args, stdout + stderr)
263
264    if print_stdout:
265        if isinstance(stdout, bytes):
266            stdout = stdout.decode()
267        if stdout:
268            sys.stdout.write(stdout)
269    if print_stderr:
270        if isinstance(stderr, bytes):
271            stderr = stderr.decode()
272        if stderr:
273            sys.stderr.write(stderr)
274    return stdout
275
276
277def get_modified_time(path):
278    # For a symlink, the modified time should be the greater of the link's
279    # modified time and the modified time of the target.
280    return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
281
282
283def is_time_stale(output, inputs):
284    if not os.path.exists(output):
285        return True
286
287    output_time = get_modified_time(output)
288    for i in inputs:
289        if get_modified_time(i) > output_time:
290            return True
291    return False
292
293
294def _check_zip_path(name):
295    if os.path.normpath(name) != name:
296        raise Exception('Non-canonical zip path: %s' % name)
297    if os.path.isabs(name):
298        raise Exception('Absolute zip path: %s' % name)
299
300
301def _is_symlink(zip_file, name):
302    zi = zip_file.getinfo(name)
303
304    # The two high-order bytes of ZipInfo.external_attr represent
305    # UNIX permissions and file type bits.
306    return stat.S_ISLNK(zi.external_attr >> 16)
307
308
309def extract_all(zip_path,
310                path=None,
311                no_clobber=True,
312                pattern=None,
313                predicate=None):
314    if path is None:
315        path = os.getcwd()
316    elif not os.path.exists(path):
317        make_directory(path)
318
319    if not zipfile.is_zipfile(zip_path):
320        raise Exception('Invalid zip file: %s' % zip_path)
321
322    extracted = []
323    with zipfile.ZipFile(zip_path) as z:
324        for name in z.namelist():
325            if name.endswith('/'):
326                make_directory(os.path.join(path, name))
327                continue
328            if pattern is not None:
329                if not fnmatch.fnmatch(name, pattern):
330                    continue
331            if predicate and not predicate(name):
332                continue
333            _check_zip_path(name)
334            if no_clobber:
335                output_path = os.path.join(path, name)
336                if os.path.exists(output_path):
337                    raise Exception('Path already exists from zip: %s %s %s' %
338                                    (zip_path, name, output_path))
339            if _is_symlink(z, name):
340                dest = os.path.join(path, name)
341                make_directory(os.path.dirname(dest))
342                os.symlink(z.read(name), dest)
343                extracted.append(dest)
344            else:
345                z.extract(name, path)
346                extracted.append(os.path.join(path, name))
347
348    return extracted
349
350
351def add_to_zip_hermetic(zip_file,
352                        zip_path,
353                        src_path=None,
354                        data=None,
355                        compress=None,
356                        compress_level=6):
357    """Adds a file to the given ZipFile with a hard-coded modified time.
358
359    Args:
360      zip_file: ZipFile instance to add the file to.
361      zip_path: Destination path within the zip file.
362      src_path: Path of the source file. Mutually exclusive with |data|.
363      data: File data as a string.
364      compress: Whether to enable compression. Default is taken from ZipFile
365          constructor.
366    """
367    assert (src_path is None) != (data is None), (
368        '|src_path| and |data| are mutually exclusive.')
369    _check_zip_path(zip_path)
370    zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
371    zipinfo.external_attr = _HERMETIC_FILE_ATTR
372
373    if src_path and os.path.islink(src_path):
374        zipinfo.filename = zip_path
375        zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
376        zip_file.writestr(zipinfo, os.readlink(src_path))
377        return
378
379    # we want to use _HERMETIC_FILE_ATTR, so manually set
380    # the few attr bits we care about.
381    if src_path:
382        st = os.stat(src_path)
383        for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
384            if st.st_mode & mode:
385                zipinfo.external_attr |= mode << 16
386
387    if src_path:
388        with open(src_path, 'rb') as f:
389            data = f.read()
390
391    # zipfile will deflate even when it makes the file bigger. To avoid
392    # growing files, disable compression at an arbitrary cut off point.
393    if len(data) < 16:
394        compress = False
395
396    # None converts to ZIP_STORED, when passed explicitly rather than the
397    # default passed to the ZipFile constructor.
398    compress_type = zip_file.compression
399    if compress is not None:
400        compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
401    if os.getenv("ZIP_COMPRESS_LEVEL"):
402        compress_level = int(os.getenv("ZIP_COMPRESS_LEVEL"))
403    zip_file.writestr(zipinfo, data, compress_type, compress_level)
404
405
406def do_zip(inputs,
407           output,
408           base_dir=None,
409           compress_fn=None,
410           zip_prefix_path=None):
411    """Creates a zip file from a list of files.
412
413    Args:
414      inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
415      output: Destination .zip file.
416      base_dir: Prefix to strip from inputs.
417      compress_fn: Applied to each input to determine whether or not to compress.
418          By default, items will be |zipfile.ZIP_STORED|.
419      zip_prefix_path: Path prepended to file path in zip file.
420    """
421    input_tuples = []
422    for tup in inputs:
423        if isinstance(tup, str):
424            tup = (os.path.relpath(tup, base_dir), tup)
425        input_tuples.append(tup)
426
427    # Sort by zip path to ensure stable zip ordering.
428    input_tuples.sort(key=lambda tup: tup[0])
429    with zipfile.ZipFile(output, 'w') as outfile:
430        for zip_path, fs_path in input_tuples:
431            if zip_prefix_path:
432                zip_path = os.path.join(zip_prefix_path, zip_path)
433            compress = compress_fn(zip_path) if compress_fn else None
434            add_to_zip_hermetic(outfile,
435                                zip_path,
436                                src_path=fs_path,
437                                compress=compress)
438
439
440def zip_dir(output, base_dir, compress_fn=None, zip_prefix_path=None):
441    """Creates a zip file from a directory."""
442    inputs = []
443    for root, _, files in os.walk(base_dir):
444        for f in files:
445            inputs.append(os.path.join(root, f))
446
447    with atomic_output(output) as f:
448        do_zip(inputs,
449               f,
450               base_dir,
451               compress_fn=compress_fn,
452               zip_prefix_path=zip_prefix_path)
453
454
455def matches_glob(path, filters):
456    """Returns whether the given path matches any of the given glob patterns."""
457    return filters and any(fnmatch.fnmatch(path, f) for f in filters)
458
459
460def _strip_dst_name(dst_name, options):
461    # Strip specific directories and file if options is not None
462    if options and options.stripFile:
463        for f in options.stripFile:
464            if fnmatch.fnmatch(dst_name, '*/' + f):
465                return True
466    if options and options.stripDir:
467        for d in options.stripDir:
468            if fnmatch.fnmatch(dst_name, d + '/*'):
469                return True
470    return False
471
472
473def merge_zips(output, input_zips, path_transform=None, merge_args=None):
474    """Combines all files from |input_zips| into |output|.
475
476    Args:
477      output: Path or ZipFile instance to add files to.
478      input_zips: Iterable of paths to zip files to merge.
479      path_transform: Called for each entry path. Returns a new path, or None to
480          skip the file.
481    """
482    options = None
483    if merge_args:
484        parser = optparse.OptionParser()
485        parser.add_option('--stripDir',
486                          action='append',
487                          help='strip specific directory')
488        parser.add_option('--stripFile',
489                          action='append',
490                          help='strip specific file.')
491
492        args = expand_file_args(merge_args)
493        options, _ = parser.parse_args(args)
494
495    path_transform = path_transform or (lambda p: p)
496    added_names = set()
497
498    output_is_already_open = not isinstance(output, str)
499    if output_is_already_open:
500        assert isinstance(output, zipfile.ZipFile)
501        out_zip = output
502    else:
503        out_zip = zipfile.ZipFile(output, 'w')
504
505    try:
506        for in_file in input_zips:
507            with zipfile.ZipFile(in_file, 'r') as in_zip:
508                # ijar creates zips with null CRCs.
509                in_zip._expected_crc = None
510                for info in in_zip.infolist():
511                    # Ignore directories.
512                    if info.filename[-1] == '/':
513                        continue
514                    dst_name = path_transform(info.filename)
515                    if not dst_name:
516                        continue
517                    if _strip_dst_name(dst_name, options):
518                        continue
519                    already_added = dst_name in added_names
520                    if not already_added:
521                        add_to_zip_hermetic(
522                            out_zip,
523                            dst_name,
524                            data=in_zip.read(info),
525                            compress=info.compress_type != zipfile.ZIP_STORED)
526                        added_names.add(dst_name)
527    finally:
528        if not output_is_already_open:
529            out_zip.close()
530
531
532def get_sorted_transitive_dependencies(top, deps_func):
533    """Gets the list of all transitive dependencies in sorted order.
534
535    There should be no cycles in the dependency graph (crashes if cycles exist).
536
537    Args:
538      top: A list of the top level nodes
539      deps_func: A function that takes a node and returns a list of its direct
540          dependencies.
541    Returns:
542      A list of all transitive dependencies of nodes in top, in order (a node
543      will appear in the list at a higher index than all of its dependencies).
544    """
545    # Find all deps depth-first, maintaining original order in the case of ties.
546    deps_map = collections.OrderedDict()
547
548    def discover(nodes):
549        for node in nodes:
550            if node in deps_map:
551                continue
552            deps = deps_func(node)
553            discover(deps)
554            deps_map[node] = deps
555
556    discover(top)
557    return list(deps_map.keys())
558
559
560def _compute_python_dependencies():
561    """Gets the paths of imported non-system python modules.
562
563    A path is assumed to be a "system" import if it is outside of chromium's
564    src/. The paths will be relative to the current directory.
565    """
566    _force_lazy_modules_to_load()
567    module_paths = (m.__file__ for m in sys.modules.values()
568                    if m is not None and hasattr(m, '__file__') and m.__file__)
569    abs_module_paths = list(map(os.path.abspath, module_paths))
570
571    assert os.path.isabs(DIR_SOURCE_ROOT)
572    non_system_module_paths = [
573        p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)
574    ]
575
576    def convert_pyc_to_py(s):
577        if s.endswith('.pyc'):
578            return s[:-1]
579        return s
580
581    non_system_module_paths = list(
582        map(convert_pyc_to_py, non_system_module_paths))
583    non_system_module_paths = list(
584        map(os.path.relpath, non_system_module_paths))
585    return sorted(set(non_system_module_paths))
586
587
588def _force_lazy_modules_to_load():
589    """Forces any lazily imported modules to fully load themselves.
590
591    Inspecting the modules' __file__ attribute causes lazily imported modules
592    (e.g. from email) to get fully imported and update sys.modules. Iterate
593    over the values until sys.modules stabilizes so that no modules are missed.
594    """
595    while True:
596        num_modules_before = len(list(sys.modules.keys()))
597        for m in list(sys.modules.values()):
598            if m is not None and hasattr(m, '__file__'):
599                _ = m.__file__
600        num_modules_after = len(list(sys.modules.keys()))
601        if num_modules_before == num_modules_after:
602            break
603
604
605def add_depfile_option(parser):
606    if hasattr(parser, 'add_option'):
607        func = parser.add_option
608    else:
609        func = parser.add_argument
610    func('--depfile', help='Path to depfile (refer to `gn help depfile`)')
611
612
613def write_depfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
614    assert depfile_path != first_gn_output  # http://crbug.com/646165
615    inputs = inputs or []
616    if add_pydeps:
617        inputs = _compute_python_dependencies() + inputs
618    inputs = sorted(inputs)
619    make_directory(os.path.dirname(depfile_path))
620    # Ninja does not support multiple outputs in depfiles.
621    with open(depfile_path, 'w') as depfile:
622        depfile.write(first_gn_output.replace(' ', '\\ '))
623        depfile.write(': ')
624        depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
625        depfile.write('\n')
626
627
628def expand_file_args(args):
629    """Replaces file-arg placeholders in args.
630
631    These placeholders have the form:
632      @FileArg(filename:key1:key2:...:keyn)
633
634    The value of such a placeholder is calculated by reading 'filename' as json.
635    And then extracting the value at [key1][key2]...[keyn].
636
637    Note: This intentionally does not return the list of files that appear in
638    such placeholders. An action that uses file-args *must* know the paths of
639    those files prior to the parsing of the arguments (typically by explicitly
640    listing them in the action's inputs in build files).
641    """
642    new_args = list(args)
643    file_jsons = dict()
644    r = re.compile(r'@FileArg\((.*?)\)')
645    for i, arg in enumerate(args):
646        match = r.search(arg)
647        if not match:
648            continue
649
650        if match.end() != len(arg):
651            raise Exception(
652                'Unexpected characters after FileArg: {}'.format(arg))
653
654        lookup_path = match.group(1).split(':')
655        file_path = lookup_path[0]
656        if file_path not in file_jsons:
657            with open(file_path) as f:
658                file_jsons[file_path] = json.load(f)
659
660        expansion = file_jsons[file_path]
661
662        for k in lookup_path[1:]:
663            if k in expansion:
664                expansion = expansion[k]
665            else:
666                expansion = ""
667                print("WARNNING", lookup_path[1:], "is not in metadata file, set default ''")
668        # This should match parse_gn_list. The output is either a GN-formatted list
669        # or a literal (with no quotes).
670        if isinstance(expansion, list):
671            new_args[i] = arg[:match.start()] + gn_helpers.to_gn_string(
672                expansion)
673        else:
674            new_args[i] = arg[:match.start()] + str(expansion)
675
676    return new_args
677
678
679def read_sources_list(sources_list_file_name):
680    """Reads a GN-written file containing list of file names and returns a list.
681
682    Note that this function should not be used to parse response files.
683    """
684    with open(sources_list_file_name) as f:
685        return [file_name.strip() for file_name in f]
686
687
688def call_and_write_depfile_if_stale(function,
689                                    options,
690                                    record_path=None,
691                                    input_paths=None,
692                                    input_strings=None,
693                                    output_paths=None,
694                                    force=False,
695                                    pass_changes=False,
696                                    depfile_deps=None,
697                                    add_pydeps=True):
698    """Wraps md5_check.call_and_record_if_stale() and writes a depfile if applicable.
699
700    Depfiles are automatically added to output_paths when present in the
701    |options| argument. They are then created after |function| is called.
702
703    By default, only python dependencies are added to the depfile. If there are
704    other input paths that are not captured by GN deps, then they should be
705    listed in depfile_deps. It's important to write paths to the depfile that
706    are already captured by GN deps since GN args can cause GN deps to change,
707    and such changes are not immediately reflected in depfiles
708    (http://crbug.com/589311).
709    """
710    if not output_paths:
711        raise Exception('At least one output_path must be specified.')
712    input_paths = list(input_paths or [])
713    input_strings = list(input_strings or [])
714    output_paths = list(output_paths or [])
715
716    python_deps = None
717    if hasattr(options, 'depfile') and options.depfile:
718        python_deps = _compute_python_dependencies()
719        input_paths += python_deps
720        output_paths += [options.depfile]
721
722    def on_stale_md5(changes):
723        args = (changes, ) if pass_changes else ()
724        function(*args)
725        if python_deps is not None:
726            all_depfile_deps = list(python_deps) if add_pydeps else []
727            if depfile_deps:
728                all_depfile_deps.extend(depfile_deps)
729            write_depfile(options.depfile,
730                          output_paths[0],
731                          all_depfile_deps,
732                          add_pydeps=False)
733
734    md5_check.call_and_record_if_stale(on_stale_md5,
735                                       record_path=record_path,
736                                       input_paths=input_paths,
737                                       input_strings=input_strings,
738                                       output_paths=output_paths,
739                                       force=force,
740                                       pass_changes=True)
741
742
743def get_all_files(base, follow_symlinks=False):
744    """Returns a list of all the files in |base|. Each entry is relative to the
745    last path entry of |base|.
746    """
747    result = []
748    for root, _, files in os.walk(base, followlinks=follow_symlinks):
749        result.extend([os.path.join(root, f) for f in files])
750
751    return result
752
753
754def rebase_path(path_to_rebase, new_base=None, current_base="."):
755    if new_base:
756        return os.path.relpath(os.path.join(current_base, path_to_rebase), new_base)
757    else:
758        return os.path.realpath(os.path.join(current_base, path_to_rebase))
759