1#!/usr/bin/env python
2# -*- coding: utf-8 -*-
3# Copyright 2013 The Chromium Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6"""Contains common helpers for GN action()s."""
7
import collections
import contextlib
import filecmp
import fnmatch
import json
import optparse
import os
import pipes
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tempfile
import zipfile
from distutils import extension
24
# NOTE(review): Any new non-system import must be added to the build's
# dependency tracking (the original comment is truncated here — confirm the
# intended target, likely a .pydeps-style list).
27sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
28import gn_helpers
29
30# Some clients do not add //build/scripts/util to PYTHONPATH.
31from . import md5_check  # pylint: disable=relative-import
32
# Definition copied from pylib/constants/__init__.py to avoid adding
# a dependency on pylib.
# Root of the checkout; overridable via the CHECKOUT_SOURCE_ROOT env var,
# otherwise derived as four directories above this file.
DIR_SOURCE_ROOT = os.environ.get(
    'CHECKOUT_SOURCE_ROOT',
    os.path.abspath(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
                     os.pardir, os.pardir)))

# Fixed (y, m, d, h, min, s) timestamp stamped on every zip entry so that
# archive bytes depend only on contents (hermetic/reproducible builds).
HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
# rw-r--r-- permission bits, stored in the two high-order bytes of
# ZipInfo.external_attr (the UNIX mode field).
_HERMETIC_FILE_ATTR = (0o644 << 16)
43
44
@contextlib.contextmanager
def temp_dir():
    """Context manager yielding a fresh temporary directory, removed on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        # Always clean up, even if the body raised.
        shutil.rmtree(path)
52
53
def make_directory(dir_path):
    """Best-effort recursive mkdir of dir_path; never raises OSError.

    Existing directories are fine; other OS-level failures (e.g. races,
    permissions) are deliberately swallowed — callers treat this as advisory.
    """
    try:
        os.makedirs(dir_path, exist_ok=True)
    except OSError:
        pass
59
60
def delete_directory(dir_path):
    """Recursively removes dir_path if present; silently no-ops otherwise."""
    if not os.path.exists(dir_path):
        return
    shutil.rmtree(dir_path)
64
65
def touch(path, fail_if_missing=False):
    """Updates the mtime of path, creating it (and parent dirs) when absent.

    Raises:
      Exception: If fail_if_missing is set and path does not already exist.
    """
    if fail_if_missing and not os.path.exists(path):
        raise Exception(path + " doesn't exist.")

    make_directory(os.path.dirname(path))
    # Opening in append mode creates the file without truncating it.
    with open(path, 'a'):
        os.utime(path, None)
73
74
def find_in_directory(directory, filename_filter):
    """Recursively finds files under directory whose basename matches the
    given fnmatch pattern. Returns the full (joined) paths.
    """
    matches = []
    for root, _, names in os.walk(directory):
        matches.extend(
            os.path.join(root, name)
            for name in fnmatch.filter(names, filename_filter))
    return matches
81
82
def read_build_vars(path):
    """Parses a build_vars.txt into a dict.

    Each non-blank line must look like KEY=VALUE; only the first '=' splits,
    so values may themselves contain '='. Blank lines are skipped — the
    original implementation crashed on a trailing newline.
    """
    with open(path) as f:
        return dict(
            line.rstrip().split('=', 1) for line in f if line.strip())
87
88
def parse_gn_list(gn_string):
    """Converts a command-line parameter into a list.

    A value beginning with '[' is treated as a GN-formatted list and parsed
    with gn_helpers; an empty string yields an empty list; any other value is
    returned as a single-element list, verbatim (no quote stripping).

    The common use for this behavior is in the ohos build where things can
    take lists of @FileArg references that are expanded via expand_file_args.
    """
    if gn_string.startswith('['):
        return gn_helpers.GNValueParser(gn_string).parse_list()
    return [gn_string] if gn_string else []
107
108
def parse_and_flatten_gn_lists(gn_lists):
    """Parses each entry with parse_gn_list and concatenates the results."""
    flattened = []
    for gn_list in gn_lists:
        flattened += parse_gn_list(gn_list)
    return flattened
114
115
def check_options(options, parser, required=None):
    """Calls parser.error() for each name in required whose option is unset."""
    for option_name in required or ():
        if getattr(options, option_name) is None:
            parser.error('--%s is required' % option_name.replace('_', '-'))
122
123
def write_json(obj, path, only_if_changed=False):
    """Serializes obj as stable (sorted-key, indented) JSON into path.

    With only_if_changed set, the file is left untouched when the new
    serialization matches the existing contents.
    """
    new_dump = json.dumps(obj,
                          sort_keys=True,
                          indent=2,
                          separators=(',', ': '))

    if only_if_changed and os.path.exists(path):
        with open(path, 'r') as oldfile:
            if oldfile.read() == new_dump:
                return

    with open(path, 'w') as outfile:
        outfile.write(new_dump)
138
139
@contextlib.contextmanager
def atomic_output(path, only_if_changed=True):
    """Helper to prevent half-written outputs.

    Args:
      path: Path to the final output file, which will be written atomically.
      only_if_changed: If True (the default), do not touch the filesystem
        if the content has not changed.
    Returns:
      A python context manager that yields a NamedTemporaryFile instance
      that must be used by clients to write the data to. On exit, the
      manager will try to replace the final output file with the
      temporary one if necessary. The temporary file is always destroyed
      on exit.
    Example:
      with build_utils.atomic_output(output_path) as tmp_file:
        subprocess.check_call(['prog', '--output', tmp_file.name])
    """
    # Same directory as the target keeps the final move on one filesystem.
    tmp = tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
                                      dir=os.path.dirname(path),
                                      delete=False)
    try:
        # Loosen mkstemp's restrictive default mode to rw-rw-r--.
        os.fchmod(tmp.fileno(), 0o664)
        yield tmp

        # Must be closed before comparing/moving.
        tmp.close()
        unchanged = (only_if_changed and os.path.exists(path)
                     and filecmp.cmp(tmp.name, path))
        if not unchanged:
            shutil.move(tmp.name, path)
    finally:
        tmp.close()
        # If the move happened, the temp name no longer exists.
        if os.path.exists(tmp.name):
            os.unlink(tmp.name)
175
176
class CalledProcessError(Exception):
    """Raised when the process run by check_output exits with a non-zero code.

    Attributes:
      cwd: Working directory the command ran in.
      args: The command argument list.
      output: Combined stdout+stderr text (decoded to str when given bytes).
    """
    def __init__(self, cwd, args, output):
        super(CalledProcessError, self).__init__()
        self.cwd = cwd
        self.args = args
        if isinstance(output, bytes):
            self.output = output.decode()
        else:
            self.output = output

    def __str__(self):
        # A user should be able to simply copy and paste the command that
        # failed into their shell. shlex.quote replaces the former
        # pipes.quote: the pipes module was removed in Python 3.13, and
        # shlex.quote is its documented replacement with identical output.
        copyable_command = '( cd {}; {} )'.format(
            os.path.abspath(self.cwd), ' '.join(map(shlex.quote, self.args)))
        return 'Command failed: {}\n{}'.format(copyable_command, self.output)
196
197
def filter_lines(output, filter_string):
    """Output filter from build_utils.check_output.

    Args:
      output: Executable output as from build_utils.check_output.
      filter_string: Regex; any line it matches (via search) is removed.

    Returns:
      The surviving lines joined with newlines, as a single string.
    """
    pattern = re.compile(filter_string)
    kept = [line for line in output.splitlines()
            if not pattern.search(line)]
    return '\n'.join(kept)
212
213
# This can be used in most cases like subprocess.check_output(). The output,
# particularly when the command fails, better highlights the command's failure.
# If the command fails, raises a build_utils.CalledProcessError.
def check_output(args,
                 cwd=None,
                 env=None,
                 print_stdout=False,
                 print_stderr=True,
                 stdout_filter=None,
                 stderr_filter=None,
                 fail_func=lambda returncode, stderr: returncode != 0):
    """Runs args as a subprocess and returns its stdout as str.

    Args:
      args: Command argument list.
      cwd: Working directory; defaults to the current directory.
      env: Child environment. NOTE: mutated in place — the keys
        'useCompileCache' and 'addTestRunner' are popped below.
      print_stdout: Forward the child's stdout to this process's stdout.
      print_stderr: Forward the child's stderr to this process's stderr.
      stdout_filter: Optional callable applied to the raw stdout (possibly
        bytes) before decoding.
      stderr_filter: Same, for stderr.
      fail_func: Predicate on (returncode, stderr) deciding failure.

    Raises:
      CalledProcessError: When fail_func returns True.
    """
    if not cwd:
        cwd = os.getcwd()

    # Optionally wrap the command with an external compile-cache tool,
    # located via the COMPILE_CACHE_EXEC environment variable.
    cache_exec = None
    if env and env.pop("useCompileCache", False):
        cache_exec = os.environ.get("COMPILE_CACHE_EXEC")
    if cache_exec:
        execute_args = [cache_exec, "--cwd", cwd]
        execute_args.extend(args)
        # The build environment is forwarded to the cache tool as KEY=VALUE
        # pairs rather than via Popen's env argument.
        execute_args.extend(["--build-env"] + [f"{k}={v}" for k, v in env.items() if k != "addTestRunner"])
        if env.pop("addTestRunner", False):
            execute_args.append("--add-test-runner")
        # NOTE(review): cwd/env are intentionally not passed to Popen on this
        # path — the cache tool receives them as flags; confirm intended.
        child = subprocess.Popen(execute_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        child = subprocess.Popen(args,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                cwd=cwd,
                                env=env)
    stdout, stderr = child.communicate()

    # Filters see the raw (bytes) streams; decoding happens afterwards.
    if stdout_filter is not None:
        stdout = stdout_filter(stdout)

    if stderr_filter is not None:
        stderr = stderr_filter(stderr)
    if isinstance(stdout, bytes):
        stdout = stdout.decode()
    if isinstance(stderr, bytes):
        stderr = stderr.decode()

    if fail_func(child.returncode, stderr):
        raise CalledProcessError(cwd, args, stdout + stderr)

    if print_stdout:
        # Re-check bytes in case a filter returned bytes unchanged above.
        if isinstance(stdout, bytes):
            stdout = stdout.decode()
        if stdout:
            sys.stdout.write(stdout)
    if print_stderr:
        if isinstance(stderr, bytes):
            stderr = stderr.decode()
        if stderr:
            sys.stderr.write(stderr)
    return stdout
270
271
def get_modified_time(path):
    """Returns path's mtime; for a symlink, the newer of the link itself and
    its target."""
    link_mtime = os.lstat(path).st_mtime
    target_mtime = os.stat(path).st_mtime
    return max(link_mtime, target_mtime)
276
277
def is_time_stale(output, inputs):
    """Returns True when output is missing or older than any of inputs."""
    if not os.path.exists(output):
        return True
    newest_allowed = get_modified_time(output)
    return any(get_modified_time(i) > newest_allowed for i in inputs)
287
288
def _check_zip_path(name):
    """Raises unless name is a canonical, relative zip entry path."""
    canonical = os.path.normpath(name)
    if canonical != name:
        raise Exception('Non-canonical zip path: %s' % name)
    if os.path.isabs(name):
        raise Exception('Absolute zip path: %s' % name)
294
295
def _is_symlink(zip_file, name):
    """Returns whether the zip entry name is stored as a symlink.

    The two high-order bytes of ZipInfo.external_attr hold the UNIX mode
    (file-type and permission) bits.
    """
    mode = zip_file.getinfo(name).external_attr >> 16
    return stat.S_ISLNK(mode)
302
303
def extract_all(zip_path,
                path=None,
                no_clobber=True,
                pattern=None,
                predicate=None):
    """Extracts entries of zip_path into path.

    Args:
      zip_path: Archive to extract; must be a valid zip file.
      path: Destination directory (created if missing); defaults to cwd.
      no_clobber: When True, raise if a destination file already exists.
      pattern: Optional fnmatch pattern entry names must match.
      predicate: Optional callable; entries for which it is falsy are skipped.

    Returns:
      The list of extracted file paths (directory entries are not listed).
    """
    if path is None:
        path = os.getcwd()
    elif not os.path.exists(path):
        make_directory(path)

    if not zipfile.is_zipfile(zip_path):
        raise Exception('Invalid zip file: %s' % zip_path)

    extracted = []
    with zipfile.ZipFile(zip_path) as z:
        for name in z.namelist():
            # Directory entries only need to exist; nothing gets recorded.
            if name.endswith('/'):
                make_directory(os.path.join(path, name))
                continue
            if pattern is not None and not fnmatch.fnmatch(name, pattern):
                continue
            if predicate and not predicate(name):
                continue
            _check_zip_path(name)
            dest = os.path.join(path, name)
            if no_clobber and os.path.exists(dest):
                raise Exception('Path already exists from zip: %s %s %s' %
                                (zip_path, name, dest))
            if _is_symlink(z, name):
                # Recreate the link instead of materializing its target.
                make_directory(os.path.dirname(dest))
                os.symlink(z.read(name), dest)
                extracted.append(dest)
            else:
                z.extract(name, path)
                extracted.append(dest)

    return extracted
344
345
def add_to_zip_hermetic(zip_file,
                        zip_path,
                        src_path=None,
                        data=None,
                        compress=None,
                        compress_level=6):
    """Adds one entry to zip_file with a fixed timestamp and permissions.

    Args:
      zip_file: ZipFile instance to add the file to.
      zip_path: Destination path within the zip file.
      src_path: Path of the source file. Mutually exclusive with |data|.
      data: File data as a string.
      compress: True/False forces ZIP_DEFLATED/ZIP_STORED; None keeps the
          ZipFile's configured compression.
      compress_level: Deflate level; the ZIP_COMPRESS_LEVEL environment
          variable, when set, overrides it.
    """
    assert (src_path is None) != (data is None), (
        '|src_path| and |data| are mutually exclusive.')
    _check_zip_path(zip_path)

    entry = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
    entry.external_attr = _HERMETIC_FILE_ATTR

    # Symlinks are stored as entries whose contents are the link target.
    if src_path and os.path.islink(src_path):
        entry.filename = zip_path
        entry.external_attr |= stat.S_IFLNK << 16  # mark as a symlink
        zip_file.writestr(entry, os.readlink(src_path))
        return

    # We want to keep _HERMETIC_FILE_ATTR, so carry over only the execute
    # bits from the source file.
    if src_path:
        src_mode = os.stat(src_path).st_mode
        for exec_bit in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
            if src_mode & exec_bit:
                entry.external_attr |= exec_bit << 16
        with open(src_path, 'rb') as f:
            data = f.read()

    # zipfile will deflate even when it makes the file bigger; avoid growing
    # tiny files by disabling compression below an arbitrary cutoff.
    if len(data) < 16:
        compress = False

    # compress=None must fall back to the ZipFile's configured compression —
    # passing None straight through to writestr would mean ZIP_STORED.
    compress_type = zip_file.compression
    if compress is not None:
        compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if os.getenv("ZIP_COMPRESS_LEVEL"):
        compress_level = int(os.getenv("ZIP_COMPRESS_LEVEL"))
    zip_file.writestr(entry, data, compress_type, compress_level)
399
400
def do_zip(inputs,
           output,
           base_dir=None,
           compress_fn=None,
           zip_prefix_path=None):
    """Creates a zip file from a list of files.

    Args:
      inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
      output: Destination .zip file (path or file object).
      base_dir: Prefix stripped from plain-string inputs.
      compress_fn: Maps a zip path to the desired compress flag; when absent,
          entries use the archive default (zipfile.ZIP_STORED).
      zip_prefix_path: Path prepended to each file path inside the archive.
    """
    entries = []
    for item in inputs:
        if isinstance(item, str):
            item = (os.path.relpath(item, base_dir), item)
        entries.append(item)

    # Stable ordering by in-archive path => reproducible archives.
    entries.sort(key=lambda entry: entry[0])

    with zipfile.ZipFile(output, 'w') as outfile:
        for zip_path, fs_path in entries:
            if zip_prefix_path:
                zip_path = os.path.join(zip_prefix_path, zip_path)
            should_compress = compress_fn(zip_path) if compress_fn else None
            add_to_zip_hermetic(outfile,
                                zip_path,
                                src_path=fs_path,
                                compress=should_compress)
433
434
def zip_dir(output, base_dir, compress_fn=None, zip_prefix_path=None):
    """Zips every file under base_dir into output, written atomically."""
    inputs = [
        os.path.join(root, name)
        for root, _, files in os.walk(base_dir)
        for name in files
    ]

    with atomic_output(output) as f:
        do_zip(inputs,
               f,
               base_dir,
               compress_fn=compress_fn,
               zip_prefix_path=zip_prefix_path)
448
449
def matches_glob(path, filters):
    """Returns whether path matches any pattern in filters (falsy when filters
    is empty/None, mirroring the short-circuit behavior)."""
    if not filters:
        return filters
    return any(fnmatch.fnmatch(path, pattern) for pattern in filters)
453
454
def _strip_dst_name(dst_name, options):
    """Returns True when options request dst_name to be stripped, either by
    basename pattern (options.stripFile) or directory prefix (options.stripDir).
    """
    if options:
        for pattern in options.stripFile or ():
            if fnmatch.fnmatch(dst_name, '*/' + pattern):
                return True
        for directory in options.stripDir or ():
            if fnmatch.fnmatch(dst_name, directory + '/*'):
                return True
    return False
466
467
def merge_zips(output, input_zips, path_transform=None, merge_args=None):
    """Combines all files from |input_zips| into |output|.

    Args:
      output: Path or ZipFile instance to add files to.
      input_zips: Iterable of paths to zip files to merge.
      path_transform: Called for each entry path. Returns a new path, or None to
          skip the file.
      merge_args: Optional flag list (--stripDir/--stripFile, possibly using
          @FileArg references) selecting entries to drop from the merge.
    """
    # Parse optional strip rules; expand_file_args resolves any @FileArg()
    # placeholders before option parsing.
    options = None
    if merge_args:
        parser = optparse.OptionParser()
        parser.add_option('--stripDir',
                          action='append',
                          help='strip specific directory')
        parser.add_option('--stripFile',
                          action='append',
                          help='strip specific file.')

        args = expand_file_args(merge_args)
        options, _ = parser.parse_args(args)

    path_transform = path_transform or (lambda p: p)
    added_names = set()

    # Accept either a path (we own and must close the ZipFile) or an
    # already-open ZipFile (the caller owns it).
    output_is_already_open = not isinstance(output, str)
    if output_is_already_open:
        assert isinstance(output, zipfile.ZipFile)
        out_zip = output
    else:
        out_zip = zipfile.ZipFile(output, 'w')

    try:
        for in_file in input_zips:
            with zipfile.ZipFile(in_file, 'r') as in_zip:
                # ijar creates zips with null CRCs.
                # NOTE: relies on a private zipfile attribute to disable
                # CRC verification while reading.
                in_zip._expected_crc = None
                for info in in_zip.infolist():
                    # Ignore directories.
                    if info.filename[-1] == '/':
                        continue
                    dst_name = path_transform(info.filename)
                    if not dst_name:
                        continue
                    if _strip_dst_name(dst_name, options):
                        continue
                    # First occurrence wins; duplicates from later zips drop.
                    already_added = dst_name in added_names
                    if not already_added:
                        add_to_zip_hermetic(
                            out_zip,
                            dst_name,
                            data=in_zip.read(info),
                            compress=info.compress_type != zipfile.ZIP_STORED)
                        added_names.add(dst_name)
    finally:
        if not output_is_already_open:
            out_zip.close()
525
526
def get_sorted_transitive_dependencies(top, deps_func):
    """Gets the list of all transitive dependencies in sorted order.

    There should be no cycles in the dependency graph (crashes if cycles exist).

    Args:
      top: A list of the top level nodes.
      deps_func: Maps a node to the list of its direct dependencies.
    Returns:
      All transitive dependencies of nodes in top, ordered so that every node
      appears at a higher index than all of its dependencies.
    """
    # Depth-first post-order; the dict preserves first-visit order for ties.
    visited = collections.OrderedDict()

    def visit(nodes):
        for node in nodes:
            if node not in visited:
                children = deps_func(node)
                visit(children)
                visited[node] = children

    visit(top)
    return list(visited)
553
554
def _compute_python_dependencies():
    """Gets the paths of imported non-system python modules.

    A module counts as "system" when it lives outside the checkout's source
    root. Returned paths are relative to the current directory.
    """
    _force_lazy_modules_to_load()
    assert os.path.isabs(DIR_SOURCE_ROOT)

    def as_py(path):
        # Map compiled .pyc files back to their .py sources.
        return path[:-1] if path.endswith('.pyc') else path

    module_files = (m.__file__ for m in sys.modules.values()
                    if m is not None and getattr(m, '__file__', None))
    in_checkout = (p for p in map(os.path.abspath, module_files)
                   if p.startswith(DIR_SOURCE_ROOT))
    return sorted({os.path.relpath(as_py(p)) for p in in_checkout})
581
582
def _force_lazy_modules_to_load():
    """Forces any lazily imported modules to fully load themselves.

    Touching a module's __file__ makes lazy importers (e.g. email) finish
    importing and register entries in sys.modules; keep iterating until the
    module count is stable so that none are missed.
    """
    while True:
        count_before = len(sys.modules)
        for module in list(sys.modules.values()):
            if module is not None and hasattr(module, '__file__'):
                _ = module.__file__
        if len(sys.modules) == count_before:
            break
598
599
def add_depfile_option(parser):
    """Registers --depfile on either an optparse or argparse parser."""
    if hasattr(parser, 'add_option'):
        register = parser.add_option
    else:
        register = parser.add_argument
    register('--depfile', help='Path to depfile (refer to `gn help depfile`)')
606
607
def write_depfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
    """Writes a Ninja depfile mapping first_gn_output onto its sorted inputs.

    Spaces in paths are backslash-escaped. When add_pydeps is set, the current
    python module dependencies are prepended to inputs.
    """
    assert depfile_path != first_gn_output  # http://crbug.com/646165
    deps = list(inputs or [])
    if add_pydeps:
        deps = _compute_python_dependencies() + deps
    deps = sorted(deps)
    make_directory(os.path.dirname(depfile_path))

    def escape(p):
        return p.replace(' ', '\\ ')

    # Ninja does not support multiple outputs in depfiles.
    with open(depfile_path, 'w') as depfile:
        depfile.write('{}: {}\n'.format(
            escape(first_gn_output), ' '.join(escape(d) for d in deps)))
621
622
def expand_file_args(args):
    """Replaces file-arg placeholders in args.

    These placeholders have the form:
      @FileArg(filename:key1:key2:...:keyn)

    The value of such a placeholder is calculated by reading 'filename' as
    json and then extracting the value at [key1][key2]...[keyn]. A missing
    key expands to the empty string, with a warning printed.

    Note: This intentionally does not return the list of files that appear in
    such placeholders. An action that uses file-args *must* know the paths of
    those files prior to the parsing of the arguments (typically by explicitly
    listing them in the action's inputs in build files).
    """
    new_args = list(args)
    file_jsons = dict()
    r = re.compile(r'@FileArg\((.*?)\)')
    for i, arg in enumerate(args):
        match = r.search(arg)
        if not match:
            continue

        # A placeholder must terminate its argument; trailing text is an error.
        if match.end() != len(arg):
            raise Exception(
                'Unexpected characters after FileArg: {}'.format(arg))

        lookup_path = match.group(1).split(':')
        file_path = lookup_path[0]
        # Each referenced json file is parsed at most once.
        if file_path not in file_jsons:
            with open(file_path) as f:
                file_jsons[file_path] = json.load(f)

        expansion = file_jsons[file_path]

        for k in lookup_path[1:]:
            if k in expansion:
                expansion = expansion[k]
            else:
                expansion = ""
                # Fixed typo in the warning text ("WARNNING" -> "WARNING").
                print("WARNING", lookup_path[1:], "is not in metadata file, set default ''")
        # This should match parse_gn_list. The output is either a GN-formatted
        # list or a literal (with no quotes).
        if isinstance(expansion, list):
            new_args[i] = arg[:match.start()] + gn_helpers.to_gn_string(
                expansion)
        else:
            new_args[i] = arg[:match.start()] + str(expansion)

    return new_args
672
673
def read_sources_list(sources_list_file_name):
    """Reads a GN-written file containing list of file names and returns a list.

    Note that this function should not be used to parse response files.
    """
    with open(sources_list_file_name) as f:
        return [line.strip() for line in f.read().splitlines()]
681
682
def call_and_write_depfile_if_stale(function,
                                    options,
                                    record_path=None,
                                    input_paths=None,
                                    input_strings=None,
                                    output_paths=None,
                                    force=False,
                                    pass_changes=False,
                                    depfile_deps=None,
                                    add_pydeps=True):
    """Wraps md5_check.call_and_record_if_stale() and writes a depfile if applicable.

    Depfiles are automatically added to output_paths when present in the
    |options| argument, and are written after |function| runs.

    By default only python dependencies go into the depfile. Other input paths
    not captured by GN deps belong in depfile_deps. It's important to write
    paths to the depfile that are already captured by GN deps since GN args
    can cause GN deps to change, and such changes are not immediately
    reflected in depfiles (http://crbug.com/589311).
    """
    if not output_paths:
        raise Exception('At least one output_path must be specified.')
    input_paths = list(input_paths or [])
    input_strings = list(input_strings or [])
    output_paths = list(output_paths or [])

    # When a depfile was requested, the python deps feed both the staleness
    # check (as inputs) and the generated depfile itself (as an output).
    python_deps = None
    if getattr(options, 'depfile', None):
        python_deps = _compute_python_dependencies()
        input_paths += python_deps
        output_paths += [options.depfile]

    def on_stale_md5(changes):
        function(*((changes, ) if pass_changes else ()))
        if python_deps is None:
            return
        all_depfile_deps = list(python_deps) if add_pydeps else []
        all_depfile_deps.extend(depfile_deps or [])
        write_depfile(options.depfile,
                      output_paths[0],
                      all_depfile_deps,
                      add_pydeps=False)

    md5_check.call_and_record_if_stale(on_stale_md5,
                                       record_path=record_path,
                                       input_paths=input_paths,
                                       input_strings=input_strings,
                                       output_paths=output_paths,
                                       force=force,
                                       pass_changes=True)
736
737
def get_all_files(base, follow_symlinks=False):
    """Returns the paths of all files under |base| (each prefixed with |base|
    as given, per os.walk)."""
    found = []
    for root, _, files in os.walk(base, followlinks=follow_symlinks):
        found += [os.path.join(root, name) for name in files]
    return found
747
748
def rebase_path(path_to_rebase, new_base=None, current_base="."):
    """Re-expresses path_to_rebase (relative to current_base) against
    new_base; without new_base, returns its realpath instead."""
    combined = os.path.join(current_base, path_to_rebase)
    if new_base:
        return os.path.relpath(combined, new_base)
    return os.path.realpath(combined)
754