• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3# Copyright 2013 The Chromium Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6"""Contains common helpers for GN action()s."""
7
import collections
import contextlib
import filecmp
import fnmatch
import json
import optparse
import os
import pipes
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import zipfile

from distutils import extension
24
25# Any new non-system import must be added to:
26
27sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
28import gn_helpers
29
30# Some clients do not add //build/scripts/util to PYTHONPATH.
31from . import md5_check  # pylint: disable=relative-import
32
# Definition copied from pylib/constants/__init__.py to avoid adding
# a dependency on pylib.
# Root of the checkout: CHECKOUT_SOURCE_ROOT if set in the environment,
# otherwise four directory levels above this file.
DIR_SOURCE_ROOT = os.environ.get(
    'CHECKOUT_SOURCE_ROOT',
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                     os.pardir, os.pardir)))

# Fixed timestamp used for every zip entry so output zips are reproducible.
HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
# rw-r--r-- permissions placed in the top 16 bits of ZipInfo.external_attr
# (the two high-order bytes hold the UNIX st_mode bits).
_HERMETIC_FILE_ATTR = (0o644 << 16)
43
44
@contextlib.contextmanager
def temp_dir():
    """Yields a freshly created temporary directory, deleted on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
52
53
def make_directory(dir_path):
    """Creates |dir_path| (including parents) if needed; errors are ignored.

    Best-effort by design: OSError (e.g. permission problems) is swallowed,
    matching callers that treat directory creation as advisory.
    """
    with contextlib.suppress(OSError):
        os.makedirs(dir_path, exist_ok=True)
59
60
def delete_directory(dir_path):
    """Recursively removes |dir_path|; a missing directory is a no-op."""
    if not os.path.exists(dir_path):
        return
    shutil.rmtree(dir_path)
64
65
def touch(path, fail_if_missing=False):
    """Creates |path| (and its parent dirs) if needed and updates its mtime.

    Raises:
      Exception: If fail_if_missing is set and |path| does not already exist.
    """
    if fail_if_missing and not os.path.exists(path):
        raise Exception(path + " doesn't exist.")

    make_directory(os.path.dirname(path))
    # Append mode creates the file without truncating existing content.
    with open(path, 'a'):
        os.utime(path, None)
73
74
def find_in_directory(directory, filename_filter):
    """Recursively collects files under |directory| matching the glob."""
    matches = []
    for root, _, names in os.walk(directory):
        matches.extend(os.path.join(root, name)
                       for name in fnmatch.filter(names, filename_filter))
    return matches
81
82
def read_build_vars(path):
    """Parses a build_vars.txt (KEY=VALUE per line) into a dict."""
    result = {}
    with open(path) as f:
        for line in f:
            # Split on the first '=' only so values may themselves contain '='.
            key, value = line.rstrip().split('=', 1)
            result[key] = value
    return result
87
88
def parse_gn_list(gn_string):
    """Converts a command-line parameter into a list.

    Inputs starting with '[' are parsed as GN-formatted lists. An empty
    string yields an empty list. Any other input is treated as a single raw
    string (no literal quotes assumed) and wrapped in a one-element list.

    The common use for this behavior is in the ohos build where things can
    take lists of @FileArg references that are expanded via expand_file_args.
    """
    if gn_string.startswith('['):
        return gn_helpers.GNValueParser(gn_string).ParseList()
    return [gn_string] if gn_string else []
107
108
def parse_and_flatten_gn_lists(gn_lists):
    """Parses each GN-list argument and concatenates the results."""
    return [item for gn_list in gn_lists for item in parse_gn_list(gn_list)]
114
115
def check_options(options, parser, required=None):
    """Calls parser.error() for each attribute in |required| that is None."""
    for option_name in required or ():
        if getattr(options, option_name) is None:
            parser.error('--%s is required' % option_name.replace('_', '-'))
122
123
def write_json(obj, path, only_if_changed=False):
    """Serializes |obj| as stable, pretty-printed JSON to |path|.

    When only_if_changed is True the file is left untouched if its current
    contents already equal the new serialization.
    """
    new_dump = json.dumps(obj,
                          sort_keys=True,
                          indent=2,
                          separators=(',', ': '))

    if only_if_changed and os.path.exists(path):
        with open(path, 'r') as oldfile:
            if oldfile.read() == new_dump:
                return

    with open(path, 'w') as outfile:
        outfile.write(new_dump)
138
139
@contextlib.contextmanager
def atomic_output(path, only_if_changed=True):
    """Helper to prevent half-written outputs.

    Args:
      path: Path to the final output file, which will be written atomically.
      only_if_changed: If True (the default), do not touch the filesystem
        if the content has not changed.
    Returns:
      A python context manager that yields a NamedTemporaryFile instance
      that must be used by clients to write the data to. On exit, the
      manager will try to replace the final output file with the
      temporary one if necessary. The temporary file is always destroyed
      on exit.
    Example:
      with build_utils.atomic_output(output_path) as tmp_file:
        subprocess.check_call(['prog', '--output', tmp_file.name])
    """
    # Create in same directory to ensure same filesystem when moving.
    with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
                                     dir=os.path.dirname(path),
                                     delete=False) as f:
        try:
            # Change tempfile permission to 664 (NamedTemporaryFile creates
            # 0600 files, which would otherwise leak into the final output).
            os.fchmod(f.fileno(), 0o664)
            yield f

            # file should be closed before comparison/move.
            f.close()
            # Replace the output unless it already exists with identical
            # content (and only_if_changed asked us to skip in that case).
            if not (only_if_changed and os.path.exists(path)
                    and filecmp.cmp(f.name, path)):
                shutil.move(f.name, path)
        finally:
            # The temp file survives if the body raised or the move was
            # skipped; always clean it up.
            if os.path.exists(f.name):
                os.unlink(f.name)
175
176
class called_process_error(Exception):
    """Raised when the process run by check_output exits non-zero.

    Attributes:
      cwd: Working directory the command was run in.
      args: The command's argv list.
      output: Combined stdout/stderr, always normalized to str.
    """
    def __init__(self, cwd, args, output):
        super(called_process_error, self).__init__()
        self.cwd = cwd
        self.args = args
        # Normalize to str once so __str__ never fails on bytes output.
        if isinstance(output, bytes):
            self.output = output.decode()
        else:
            self.output = output

    def __str__(self):
        # A user should be able to simply copy and paste the command that failed
        # into their shell. shlex.quote is the documented replacement for
        # pipes.quote (the pipes module was removed in Python 3.13).
        copyable_command = '( cd {}; {} )'.format(
            os.path.abspath(self.cwd), ' '.join(map(shlex.quote, self.args)))
        return 'Command failed: {}\n{}'.format(copyable_command, self.output)
195
196
def filter_lines(output, filter_string):
    """Output filter from build_utils.check_output.

    Args:
      output: Executable output as from build_utils.check_output.
      filter_string: An RE string; lines it matches are removed from |output|.

    Returns:
      The filtered output, as a single string.
    """
    pattern = re.compile(filter_string)
    kept = [line for line in output.splitlines() if not pattern.search(line)]
    return '\n'.join(kept)
211
212
def check_output(args,
                 cwd=None,
                 env=None,
                 print_stdout=False,
                 print_stderr=True,
                 stdout_filter=None,
                 stderr_filter=None,
                 fail_func=lambda returncode, stderr: returncode != 0):
    """Like subprocess.check_output(), with richer failure diagnostics.

    Args:
      args: Command argv list.
      cwd: Working directory; defaults to the current directory.
      env: Environment dict for the child. NOTE: the keys 'useCompileCache'
          and 'addTestRunner' are popped (the dict is mutated) and control
          the optional compile-cache wrapper below.
      print_stdout: Forward child stdout to sys.stdout on success.
      print_stderr: Forward child stderr to sys.stderr on success.
      stdout_filter: Optional callable applied to raw (bytes) stdout.
      stderr_filter: Optional callable applied to raw (bytes) stderr.
      fail_func: Decides failure from (returncode, stderr).

    Returns:
      The child's stdout, decoded to str.

    Raises:
      called_process_error: If fail_func() reports failure.
    """
    if not cwd:
        cwd = os.getcwd()

    # Optionally route the command through an external compile-cache wrapper
    # named by COMPILE_CACHE_EXEC; note this path does not forward env/cwd to
    # Popen (cwd travels via --cwd, env via --build-env).
    cache_exec = None
    if env and env.pop("useCompileCache", False):
        cache_exec = os.environ.get("COMPILE_CACHE_EXEC")
    if cache_exec:
        execute_args = [cache_exec, "--cwd", cwd]
        execute_args.extend(args)
        execute_args.extend(["--build-env"] +
                            [f"{k}={v}" for k, v in env.items()
                             if k != "addTestRunner"])
        if env.pop("addTestRunner", False):
            execute_args.append("--add-test-runner")
        child = subprocess.Popen(execute_args,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
    else:
        child = subprocess.Popen(args,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 cwd=cwd,
                                 env=env)
    stdout, stderr = child.communicate()

    # Filters see the raw bytes, before decoding.
    if stdout_filter is not None:
        stdout = stdout_filter(stdout)
    if stderr_filter is not None:
        stderr = stderr_filter(stderr)

    # Normalize to str exactly once; everything below relies on this
    # (the original re-checked bytes in the print paths — dead code).
    if isinstance(stdout, bytes):
        stdout = stdout.decode()
    if isinstance(stderr, bytes):
        stderr = stderr.decode()

    if fail_func(child.returncode, stderr):
        raise called_process_error(cwd, args, stdout + stderr)

    if print_stdout and stdout:
        sys.stdout.write(stdout)
    if print_stderr and stderr:
        sys.stderr.write(stderr)
    return stdout
269
270
def get_modified_time(path):
    """Returns the mtime of |path|.

    For a symlink this is the newer of the link's own mtime and the
    target's mtime.
    """
    link_mtime = os.lstat(path).st_mtime
    target_mtime = os.stat(path).st_mtime
    return max(link_mtime, target_mtime)
275
276
def is_time_stale(output, inputs):
    """Returns True if |output| is missing or older than any of |inputs|."""
    if not os.path.exists(output):
        return True

    newest_allowed = get_modified_time(output)
    return any(get_modified_time(i) > newest_allowed for i in inputs)
286
287
288def _check_zip_path(name):
289    if os.path.normpath(name) != name:
290        raise Exception('Non-canonical zip path: %s' % name)
291    if os.path.isabs(name):
292        raise Exception('Absolute zip path: %s' % name)
293
294
295def _is_symlink(zip_file, name):
296    zi = zip_file.getinfo(name)
297
298    # The two high-order bytes of ZipInfo.external_attr represent
299    # UNIX permissions and file type bits.
300    return stat.S_ISLNK(zi.external_attr >> 16)
301
302
def extract_all(zip_path,
                path=None,
                no_clobber=True,
                pattern=None,
                predicate=None):
    """Extracts entries of |zip_path| beneath |path|.

    Args:
      zip_path: Zip archive to extract.
      path: Destination directory. Defaults to the current directory;
          created if it does not exist.
      no_clobber: When True, raises if a destination file already exists.
      pattern: Optional fnmatch glob; non-matching entries are skipped.
      predicate: Optional callable(name) -> bool; falsy result skips entry.

    Returns:
      List of filesystem paths of the extracted files (directories are not
      included).

    Raises:
      Exception: If |zip_path| is not a valid zip, an entry path is
          non-canonical/absolute, or (with no_clobber) a destination exists.
    """
    if path is None:
        path = os.getcwd()
    elif not os.path.exists(path):
        make_directory(path)

    if not zipfile.is_zipfile(zip_path):
        raise Exception('Invalid zip file: %s' % zip_path)

    extracted = []
    with zipfile.ZipFile(zip_path) as z:
        for name in z.namelist():
            # Directory entries end with '/'; just create them.
            if name.endswith('/'):
                make_directory(os.path.join(path, name))
                continue
            if pattern is not None:
                if not fnmatch.fnmatch(name, pattern):
                    continue
            if predicate and not predicate(name):
                continue
            # Reject non-canonical/absolute entry names before writing.
            _check_zip_path(name)
            if no_clobber:
                output_path = os.path.join(path, name)
                if os.path.exists(output_path):
                    raise Exception('Path already exists from zip: %s %s %s' %
                                    (zip_path, name, output_path))
            if _is_symlink(z, name):
                # Symlink entries store the link target as the entry data.
                dest = os.path.join(path, name)
                make_directory(os.path.dirname(dest))
                os.symlink(z.read(name), dest)
                extracted.append(dest)
            else:
                z.extract(name, path)
                extracted.append(os.path.join(path, name))

    return extracted
343
344
def add_to_zip_hermetic(zip_file,
                        zip_path,
                        src_path=None,
                        data=None,
                        compress=None,
                        compress_level=6):
    """Adds a file to the given ZipFile with a hard-coded modified time.

    Args:
      zip_file: ZipFile instance to add the file to.
      zip_path: Destination path within the zip file.
      src_path: Path of the source file. Mutually exclusive with |data|.
      data: File data as a string.
      compress: Whether to enable compression. Default is taken from ZipFile
          constructor.
      compress_level: Deflate compression level (default 6); overridden by
          the ZIP_COMPRESS_LEVEL environment variable when it is set.
    """
    assert (src_path is None) != (data is None), (
        '|src_path| and |data| are mutually exclusive.')
    _check_zip_path(zip_path)
    # Fixed timestamp + fixed attrs keep output zips bit-for-bit reproducible.
    zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
    zipinfo.external_attr = _HERMETIC_FILE_ATTR

    if src_path and os.path.islink(src_path):
        zipinfo.filename = zip_path
        zipinfo.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
        # Symlink entries store the link target as the entry data.
        zip_file.writestr(zipinfo, os.readlink(src_path))
        return

    # we want to use _HERMETIC_FILE_ATTR, so manually set
    # the few attr bits we care about (only the execute bits survive).
    if src_path:
        st = os.stat(src_path)
        for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
            if st.st_mode & mode:
                zipinfo.external_attr |= mode << 16

    if src_path:
        with open(src_path, 'rb') as f:
            data = f.read()

    # zipfile will deflate even when it makes the file bigger. To avoid
    # growing files, disable compression at an arbitrary cut off point.
    if len(data) < 16:
        compress = False

    # None converts to ZIP_STORED, when passed explicitly rather than the
    # default passed to the ZipFile constructor.
    compress_type = zip_file.compression
    if compress is not None:
        compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if os.getenv("ZIP_COMPRESS_LEVEL"):
        compress_level = int(os.getenv("ZIP_COMPRESS_LEVEL"))
    zip_file.writestr(zipinfo, data, compress_type, compress_level)
398
399
def do_zip(inputs,
           output,
           base_dir=None,
           compress_fn=None,
           zip_prefix_path=None):
    """Creates a zip file from a list of files.

    Args:
      inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
      output: Destination .zip file (path or writable file object).
      base_dir: Prefix to strip from inputs.
      compress_fn: Applied to each zip path to decide whether to compress it.
          By default, items will be |zipfile.ZIP_STORED|.
      zip_prefix_path: Path prepended to file path in zip file.
    """
    entries = []
    for entry in inputs:
        if isinstance(entry, str):
            entry = (os.path.relpath(entry, base_dir), entry)
        entries.append(entry)

    # Deterministic output: order entries by their in-archive path.
    entries.sort(key=lambda entry: entry[0])

    with zipfile.ZipFile(output, 'w') as outfile:
        for zip_path, fs_path in entries:
            if zip_prefix_path:
                zip_path = os.path.join(zip_prefix_path, zip_path)
            should_compress = compress_fn(zip_path) if compress_fn else None
            add_to_zip_hermetic(outfile,
                                zip_path,
                                src_path=fs_path,
                                compress=should_compress)
432
433
def zip_dir(output, base_dir, compress_fn=None, zip_prefix_path=None):
    """Creates a zip file from a directory, written atomically."""
    inputs = [os.path.join(root, name)
              for root, _, names in os.walk(base_dir)
              for name in names]

    with atomic_output(output) as f:
        do_zip(inputs,
               f,
               base_dir,
               compress_fn=compress_fn,
               zip_prefix_path=zip_prefix_path)
447
448
def matches_glob(path, filters):
    """Returns whether |path| matches any of the given glob patterns.

    A falsy |filters| (None/empty) is returned unchanged, mirroring the
    short-circuit behavior of `filters and any(...)`.
    """
    if not filters:
        return filters
    return any(fnmatch.fnmatch(path, f) for f in filters)
452
453
454def _strip_dst_name(dst_name, options):
455    # Strip specific directories and file if options is not None
456    if options and options.stripFile:
457        for f in options.stripFile:
458            if fnmatch.fnmatch(dst_name, '*/' + f):
459                return True
460    if options and options.stripDir:
461        for d in options.stripDir:
462            if fnmatch.fnmatch(dst_name, d + '/*'):
463                return True
464    return False
465
466
def merge_zips(output, input_zips, path_transform=None, merge_args=None):
    """Combines all files from |input_zips| into |output|.

    Only the first occurrence of each (possibly transformed) entry name is
    kept; later duplicates are silently dropped. Directory entries are
    skipped. Entries are re-added hermetically (fixed timestamp/attrs).

    Args:
      output: Path or ZipFile instance to add files to.
      input_zips: Iterable of paths to zip files to merge.
      path_transform: Called for each entry path. Returns a new path, or None to
          skip the file.
      merge_args: Optional command-line style args (--stripDir/--stripFile,
          possibly via @FileArg) selecting entries to drop from the output.
    """
    options = None
    if merge_args:
        parser = optparse.OptionParser()
        parser.add_option('--stripDir',
                          action='append',
                          help='strip specific directory')
        parser.add_option('--stripFile',
                          action='append',
                          help='strip specific file.')

        args = expand_file_args(merge_args)
        options, _ = parser.parse_args(args)

    path_transform = path_transform or (lambda p: p)
    added_names = set()

    output_is_already_open = not isinstance(output, str)
    if output_is_already_open:
        assert isinstance(output, zipfile.ZipFile)
        out_zip = output
    else:
        out_zip = zipfile.ZipFile(output, 'w')

    try:
        for in_file in input_zips:
            with zipfile.ZipFile(in_file, 'r') as in_zip:
                # ijar creates zips with null CRCs; disable CRC verification.
                in_zip._expected_crc = None
                for info in in_zip.infolist():
                    # Ignore directories.
                    if info.filename[-1] == '/':
                        continue
                    dst_name = path_transform(info.filename)
                    if not dst_name:
                        continue
                    if _strip_dst_name(dst_name, options):
                        continue
                    # First occurrence wins; duplicates are dropped.
                    already_added = dst_name in added_names
                    if not already_added:
                        add_to_zip_hermetic(
                            out_zip,
                            dst_name,
                            data=in_zip.read(info),
                            compress=info.compress_type != zipfile.ZIP_STORED)
                        added_names.add(dst_name)
    finally:
        # Only close the zip if we opened it ourselves.
        if not output_is_already_open:
            out_zip.close()
524
525
def get_sorted_transitive_dependencies(top, deps_func):
    """Gets the list of all transitive dependencies in sorted order.

    There should be no cycles in the dependency graph (crashes if cycles exist).

    Args:
      top: A list of the top level nodes
      deps_func: A function that takes a node and returns a list of its direct
          dependencies.
    Returns:
      A list of all transitive dependencies of nodes in top, in order (a node
      will appear in the list at a higher index than all of its dependencies).
    """
    # Depth-first post-order traversal; insertion order of the dict gives
    # the final ordering (deps always inserted before their dependents).
    deps_map = collections.OrderedDict()

    def visit(node):
        if node in deps_map:
            return
        direct_deps = deps_func(node)
        for dep in direct_deps:
            visit(dep)
        deps_map[node] = direct_deps

    for node in top:
        visit(node)
    return list(deps_map.keys())
552
553
def _compute_python_dependencies():
    """Gets the paths of imported non-system python modules.

    A path is assumed to be a "system" import if it is outside of chromium's
    src/. The paths will be relative to the current directory.
    """
    _force_lazy_modules_to_load()
    module_paths = (m.__file__ for m in sys.modules.values()
                    if m is not None and hasattr(m, '__file__') and m.__file__)
    abs_module_paths = list(map(os.path.abspath, module_paths))

    assert os.path.isabs(DIR_SOURCE_ROOT)
    # Keep only modules that live inside the source checkout.
    non_system_module_paths = [
        p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)
    ]

    def convert_pyc_to_py(s):
        # Depend on the .py source, not the compiled .pyc cache file.
        if s.endswith('.pyc'):
            return s[:-1]
        return s

    non_system_module_paths = list(
        map(convert_pyc_to_py, non_system_module_paths))
    non_system_module_paths = list(
        map(os.path.relpath, non_system_module_paths))
    # Sorted and de-duplicated for deterministic depfile contents.
    return sorted(set(non_system_module_paths))
580
581
582def _force_lazy_modules_to_load():
583    """Forces any lazily imported modules to fully load themselves.
584
585    Inspecting the modules' __file__ attribute causes lazily imported modules
586    (e.g. from email) to get fully imported and update sys.modules. Iterate
587    over the values until sys.modules stabilizes so that no modules are missed.
588    """
589    while True:
590        num_modules_before = len(list(sys.modules.keys()))
591        for m in list(sys.modules.values()):
592            if m is not None and hasattr(m, '__file__'):
593                _ = m.__file__
594        num_modules_after = len(list(sys.modules.keys()))
595        if num_modules_before == num_modules_after:
596            break
597
598
def add_depfile_option(parser):
    """Registers --depfile on either an optparse or argparse parser."""
    register = (parser.add_option
                if hasattr(parser, 'add_option') else parser.add_argument)
    register('--depfile', help='Path to depfile (refer to `gn help depfile`)')
605
606
def write_depfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
    """Writes a ninja depfile mapping |first_gn_output| to sorted |inputs|.

    When add_pydeps is True, the current process's python module deps are
    prepended to |inputs|.
    """
    assert depfile_path != first_gn_output  # http://crbug.com/646165
    all_inputs = list(inputs or [])
    if add_pydeps:
        all_inputs = _compute_python_dependencies() + all_inputs
    all_inputs.sort()
    make_directory(os.path.dirname(depfile_path))

    def escape(path):
        # Spaces in depfile paths must be backslash-escaped for ninja.
        return path.replace(' ', '\\ ')

    # Ninja does not support multiple outputs in depfiles.
    with open(depfile_path, 'w') as depfile:
        depfile.write('%s: %s\n' % (escape(first_gn_output),
                                    ' '.join(escape(i) for i in all_inputs)))
620
621
def expand_file_args(args):
    """Replaces file-arg placeholders in args.

    These placeholders have the form:
      @FileArg(filename:key1:key2:...:keyn)

    The value of such a placeholder is calculated by reading 'filename' as json.
    And then extracting the value at [key1][key2]...[keyn]. Missing keys expand
    to '' with a warning. List values are rendered as GN lists; everything else
    is str()'d.

    Note: This intentionally does not return the list of files that appear in
    such placeholders. An action that uses file-args *must* know the paths of
    those files prior to the parsing of the arguments (typically by explicitly
    listing them in the action's inputs in build files).

    Raises:
      Exception: If a placeholder is not at the end of its argument.
    """
    new_args = list(args)
    file_jsons = dict()
    r = re.compile(r'@FileArg\((.*?)\)')
    for i, arg in enumerate(args):
        match = r.search(arg)
        if not match:
            continue

        # A placeholder must terminate its argument.
        if match.end() != len(arg):
            raise Exception(
                'Unexpected characters after FileArg: {}'.format(arg))

        lookup_path = match.group(1).split(':')
        file_path = lookup_path[0]
        # Each referenced JSON file is parsed at most once.
        if file_path not in file_jsons:
            with open(file_path) as f:
                file_jsons[file_path] = json.load(f)

        expansion = file_jsons[file_path]

        for k in lookup_path[1:]:
            if k in expansion:
                expansion = expansion[k]
            else:
                expansion = ""
                # Fixed typo in the warning message (was "WARNNING").
                print("WARNING", lookup_path[1:],
                      "is not in metadata file, set default ''")
        # This should match parse_gn_list. The output is either a GN-formatted list
        # or a literal (with no quotes).
        if isinstance(expansion, list):
            new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(
                expansion)
        else:
            new_args[i] = arg[:match.start()] + str(expansion)

    return new_args
671
672
def read_sources_list(sources_list_file_name):
    """Reads a GN-written file containing list of file names and returns a list.

    Note that this function should not be used to parse response files.
    """
    with open(sources_list_file_name) as f:
        return [line.strip() for line in f]
680
681
def call_and_write_depfile_if_stale(function,
                                    options,
                                    record_path=None,
                                    input_paths=None,
                                    input_strings=None,
                                    output_paths=None,
                                    force=False,
                                    pass_changes=False,
                                    depfile_deps=None,
                                    add_pydeps=True):
    """Wraps md5_check.call_and_record_if_stale() and writes a depfile if applicable.

    Depfiles are automatically added to output_paths when present in the
    |options| argument. They are then created after |function| is called.

    By default, only python dependencies are added to the depfile. If there are
    other input paths that are not captured by GN deps, then they should be
    listed in depfile_deps. It's important to write paths to the depfile that
    are already captured by GN deps since GN args can cause GN deps to change,
    and such changes are not immediately reflected in depfiles
    (http://crbug.com/589311).

    Args:
      function: Callable invoked only when inputs/outputs are stale.
      options: Parsed options; options.depfile (if set) enables depfile output.
      record_path: Forwarded to md5_check (stamp/record file location).
      input_paths: Files whose contents affect the output.
      input_strings: Strings (e.g. flags) that affect the output.
      output_paths: Files produced by |function|; must be non-empty.
      force: Run |function| even when nothing appears stale.
      pass_changes: If True, |function| receives the changes object.
      depfile_deps: Extra inputs recorded only in the depfile.
      add_pydeps: Whether python module deps go into the depfile.

    Raises:
      Exception: If output_paths is empty.
    """
    if not output_paths:
        raise Exception('At least one output_path must be specified.')
    input_paths = list(input_paths or [])
    input_strings = list(input_strings or [])
    output_paths = list(output_paths or [])

    python_deps = None
    if hasattr(options, 'depfile') and options.depfile:
        python_deps = _compute_python_dependencies()
        input_paths += python_deps
        output_paths += [options.depfile]

    def on_stale_md5(changes):
        args = (changes, ) if pass_changes else ()
        function(*args)
        # Depfile is (re)written only after a successful run of |function|.
        if python_deps is not None:
            all_depfile_deps = list(python_deps) if add_pydeps else []
            if depfile_deps:
                all_depfile_deps.extend(depfile_deps)
            write_depfile(options.depfile,
                          output_paths[0],
                          all_depfile_deps,
                          add_pydeps=False)

    # pass_changes=True here so on_stale_md5 always receives |changes|;
    # the outer pass_changes flag controls whether |function| sees them.
    md5_check.call_and_record_if_stale(on_stale_md5,
                                       record_path=record_path,
                                       input_paths=input_paths,
                                       input_strings=input_strings,
                                       output_paths=output_paths,
                                       force=force,
                                       pass_changes=True)
735
736
def get_all_files(base, follow_symlinks=False):
    """Returns the paths of all files under |base|.

    Each returned path is |base| joined with the file's subpath (i.e. the
    paths still start with |base| itself).
    """
    return [os.path.join(root, name)
            for root, _, names in os.walk(base, followlinks=follow_symlinks)
            for name in names]
745
746
def rebase_path(input, new_base=None, current_base="."):
    """Rebases |input| (interpreted relative to |current_base|).

    Returns a path relative to |new_base| when one is given; otherwise the
    absolute, symlink-resolved path.
    """
    combined = os.path.join(current_base, input)
    if new_base:
        return os.path.relpath(combined, new_base)
    return os.path.realpath(combined)
752