# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from __future__ import print_function

import ast

import gyp.common
import gyp.simple_copy
import multiprocessing
import os.path
import re
import shlex
import signal
import subprocess
import sys
import threading
import traceback
from distutils.version import StrictVersion
from gyp.common import GypError
from gyp.common import OrderedSet

PY3 = bytes != str

# A list of types that are treated as linkable.
linkable_types = [
    "executable",
    "shared_library",
    "loadable_module",
    "mac_kernel_extension",
    "windows_driver",
]

# A list of sections that contain links to other targets.
dependency_sections = ["dependencies", "export_dependent_settings"]

# base_path_sections is a list of sections defined by GYP that contain
# pathnames.  The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [
    "destination",
    "files",
    "include_dirs",
    "inputs",
    "libraries",
    "outputs",
    "sources",
]
path_sections = set()

# These per-process dictionaries are used to cache build file data when loading
# in parallel mode.
per_process_data = {}
per_process_aux_data = {}


def IsPathSection(section):
    # If section ends in one of the '=+?!' characters, it's applied to a section
    # without the trailing characters.  '/' is notably absent from this list,
    # because there's no way for a regular expression to be treated as a path.
    while section and section[-1:] in "=+?!":
        section = section[:-1]

    if section in path_sections:
        return True

    # Sections matching the regexp '_(dir|file|path)s?$' are also
    # considered PathSections. Using manual string matching since that
    # is much faster than the regexp and this can be called hundreds of
    # thousands of times so micro performance matters.
    if "_" in section:
        tail = section[-6:]
        if tail[-1] == "s":
            tail = tail[:-1]
        if tail[-5:] in ("_file", "_path"):
            return True
        return tail[-4:] == "_dir"

    return False

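# Illustrative behavior of IsPathSection (added for clarity; not part of the
# original module). Assuming base_path_sections has been merged into
# path_sections (the merge happens elsewhere in this module):
#   IsPathSection("sources")   -> True   (listed in path_sections)
#   IsPathSection("sources+")  -> True   (trailing '+' is stripped first)
#   IsPathSection("foo_dirs")  -> True   (matches the '_(dir|file|path)s?' rule)
#   IsPathSection("cflags")    -> False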

# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations.  It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
    # Sections that must exist inside targets and not configurations.
    "actions",
    "configurations",
    "copies",
    "default_configuration",
    "dependencies",
    "dependencies_original",
    "libraries",
    "postbuilds",
    "product_dir",
    "product_extension",
    "product_name",
    "product_prefix",
    "rules",
    "run_as",
    "sources",
    "standalone_static_library",
    "suppress_wildcard",
    "target_name",
    "toolset",
    "toolsets",
    "type",
    # Sections that can be found inside targets or configurations, but that
    # should not be propagated from targets into their configurations.
    "variables",
]
non_configuration_keys = []

# Keys that do not belong inside a configuration dictionary.
invalid_configuration_keys = [
    "actions",
    "all_dependent_settings",
    "configurations",
    "dependencies",
    "direct_dependent_settings",
    "libraries",
    "link_settings",
    "sources",
    "standalone_static_library",
    "target_name",
    "type",
]

# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False

# Paths for converting filelist paths to output paths: {
#   toplevel,
#   qualified_output_dir,
# }
generator_filelist_paths = None


def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
    """Return a list of all build files included into build_file_path.

  The returned list will contain build_file_path as well as all other files
  that it included, either directly or indirectly.  Note that the list may
  contain files that were included into a conditional section that evaluated
  to false and was not merged into build_file_path's dict.

  aux_data is a dict containing a key for each build file or included build
  file.  Those keys provide access to dicts whose "included" keys contain
  lists of all other files included by the build file.

  included should be left at its default None value by external callers.  It
  is used for recursion.

  The returned list will not contain any duplicate entries.  Each build file
  in the list will be relative to the current directory.
  """

    if included is None:
        included = []

    if build_file_path in included:
        return included

    included.append(build_file_path)

    for included_build_file in aux_data[build_file_path].get("included", []):
        GetIncludedBuildFiles(included_build_file, aux_data, included)

    return included

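# Illustrative aux_data shape for GetIncludedBuildFiles (added for clarity;
# not part of the original module):
#   aux_data = {"a.gyp": {"included": ["common.gypi"]}, "common.gypi": {}}
#   GetIncludedBuildFiles("a.gyp", aux_data)  -> ["a.gyp", "common.gypi"]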

def CheckedEval(file_contents):
    """Return the eval of a gyp file.
  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.
  Note that this is slower than eval() is.
  """

    syntax_tree = ast.parse(file_contents)
    assert isinstance(syntax_tree, ast.Module)
    c1 = syntax_tree.body
    assert len(c1) == 1
    c2 = c1[0]
    assert isinstance(c2, ast.Expr)
    return CheckNode(c2.value, [])


def CheckNode(node, keypath):
    if isinstance(node, ast.Dict):
        dict = {}
        for key, value in zip(node.keys, node.values):
            assert isinstance(key, ast.Str)
            key = key.s
            if key in dict:
                raise GypError(
                    "Key '"
                    + key
                    + "' repeated at level "
                    + repr(len(keypath) + 1)
                    + " with key path '"
                    + ".".join(keypath)
                    + "'"
                )
            kp = list(keypath)  # Make a copy of the list for descending this node.
            kp.append(key)
            dict[key] = CheckNode(value, kp)
        return dict
    elif isinstance(node, ast.List):
        children = []
        for index, child in enumerate(node.elts):
            kp = list(keypath)  # Copy list.
            kp.append(repr(index))
            children.append(CheckNode(child, kp))
        return children
    elif isinstance(node, ast.Str):
        return node.s
    else:
        raise TypeError(
            "Unknown AST node at key path '" + ".".join(keypath) + "': " + repr(node)
        )

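# Illustrative use of CheckedEval (added for clarity; not part of the original
# module). Only dict, list, and string nodes are accepted; bare numbers are
# rejected by CheckNode with a TypeError:
#   CheckedEval("{'targets': [{'target_name': 'foo'}]}")
#       -> {'targets': [{'target_name': 'foo'}]}
#   CheckedEval("{'a': 'x', 'a': 'y'}")  raises GypError (repeated key 'a')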

def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check):
    if build_file_path in data:
        return data[build_file_path]

    if os.path.exists(build_file_path):
        # Open the build file for reading ('r') in universal-newlines mode ('U')
        # so that platform-specific newlines ('\r\n' or '\r') are converted to
        # '\n', which would otherwise make eval() fail.
        if PY3 or sys.platform == "zos":
            # On z/OS, universal-newlines mode treats the file as an ascii file.
            # But since node-gyp produces ebcdic files, do not use that mode.
            build_file_contents = open(build_file_path, "r").read()
        else:
            build_file_contents = open(build_file_path, "rU").read()
    else:
        raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))

    build_file_data = None
    try:
        if check:
            build_file_data = CheckedEval(build_file_contents)
        else:
            build_file_data = eval(build_file_contents, {"__builtins__": {}}, None)
    except SyntaxError as e:
        e.filename = build_file_path
        raise
    except Exception as e:
        gyp.common.ExceptionAppend(e, "while reading " + build_file_path)
        raise

    if type(build_file_data) is not dict:
        raise GypError("%s does not evaluate to a dictionary." % build_file_path)

    data[build_file_path] = build_file_data
    aux_data[build_file_path] = {}

    # Scan for includes and merge them in.
    if "skip_includes" not in build_file_data or not build_file_data["skip_includes"]:
        try:
            if is_target:
                LoadBuildFileIncludesIntoDict(
                    build_file_data, build_file_path, data, aux_data, includes, check
                )
            else:
                LoadBuildFileIncludesIntoDict(
                    build_file_data, build_file_path, data, aux_data, None, check
                )
        except Exception as e:
            gyp.common.ExceptionAppend(
                e, "while reading includes of " + build_file_path
            )
            raise

    return build_file_data


def LoadBuildFileIncludesIntoDict(
    subdict, subdict_path, data, aux_data, includes, check
):
    includes_list = []
    if includes is not None:
        includes_list.extend(includes)
    if "includes" in subdict:
        for include in subdict["includes"]:
            # "include" is specified relative to subdict_path, so compute the real
            # path to include by appending the provided "include" to the directory
            # in which subdict_path resides.
            relative_include = os.path.normpath(
                os.path.join(os.path.dirname(subdict_path), include)
            )
            includes_list.append(relative_include)
        # Unhook the includes list, it's no longer needed.
        del subdict["includes"]

    # Merge in the included files.
    for include in includes_list:
        if "included" not in aux_data[subdict_path]:
            aux_data[subdict_path]["included"] = []
        aux_data[subdict_path]["included"].append(include)

        gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)

        MergeDicts(
            subdict,
            LoadOneBuildFile(include, data, aux_data, None, False, check),
            subdict_path,
            include,
        )

    # Recurse into subdictionaries.
    for k, v in subdict.items():
        if type(v) is dict:
            LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check)
        elif type(v) is list:
            LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, check)


# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
    for item in sublist:
        if type(item) is dict:
            LoadBuildFileIncludesIntoDict(
                item, sublist_path, data, aux_data, None, check
            )
        elif type(item) is list:
            LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)


# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
    if "targets" in data:
        target_list = data["targets"]
        new_target_list = []
        for target in target_list:
            # If this target already has an explicit 'toolset', and no 'toolsets'
            # list, don't modify it further.
            if "toolset" in target and "toolsets" not in target:
                new_target_list.append(target)
                continue
            if multiple_toolsets:
                toolsets = target.get("toolsets", ["target"])
            else:
                toolsets = ["target"]
            # Make sure this 'toolsets' definition is only processed once.
            if "toolsets" in target:
                del target["toolsets"]
            if len(toolsets) > 0:
                # Optimization: only do copies if more than one toolset is specified.
                for build in toolsets[1:]:
                    new_target = gyp.simple_copy.deepcopy(target)
                    new_target["toolset"] = build
                    new_target_list.append(new_target)
                target["toolset"] = toolsets[0]
                new_target_list.append(target)
        data["targets"] = new_target_list
    if "conditions" in data:
        for condition in data["conditions"]:
            if type(condition) is list:
                for condition_dict in condition[1:]:
                    if type(condition_dict) is dict:
                        ProcessToolsetsInDict(condition_dict)

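# Illustrative before/after for ProcessToolsetsInDict (added for clarity; not
# part of the original module). With multiple_toolsets = True:
#   data = {"targets": [{"target_name": "foo", "toolsets": ["target", "host"]}]}
#   ProcessToolsetsInDict(data)
# afterwards data["targets"] holds two dicts: a deep copy with
# 'toolset': 'host' (appended first) and the original with 'toolset': 'target';
# the 'toolsets' key itself is removed.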

# TODO(mark): I don't love this name.  It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(
    build_file_path,
    data,
    aux_data,
    variables,
    includes,
    depth,
    check,
    load_dependencies,
):
    # If depth is set, predefine the DEPTH variable to be a relative path from
    # this build file's directory to the directory identified by depth.
    if depth:
        # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
        # temporary measure. This should really be addressed by keeping all paths
        # in POSIX until actual project generation.
        d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
        if d == "":
            variables["DEPTH"] = "."
        else:
            variables["DEPTH"] = d.replace("\\", "/")

    # The 'target_build_files' key is only set when loading target build files in
    # the non-parallel code path, where LoadTargetBuildFile is called
    # recursively.  In the parallel code path, we don't need to check whether the
    # |build_file_path| has already been loaded, because the 'scheduled' set in
    # ParallelState guarantees that we never load the same |build_file_path|
    # twice.
    if "target_build_files" in data:
        if build_file_path in data["target_build_files"]:
            # Already loaded.
            return False
        data["target_build_files"].add(build_file_path)

    gyp.DebugOutput(
        gyp.DEBUG_INCLUDES, "Loading Target Build File '%s'", build_file_path
    )

    build_file_data = LoadOneBuildFile(
        build_file_path, data, aux_data, includes, True, check
    )

    # Store DEPTH for later use in generators.
    build_file_data["_DEPTH"] = depth

    # Set up the included_files key indicating which .gyp files contributed to
    # this target dict.
    if "included_files" in build_file_data:
        raise GypError(build_file_path + " must not contain included_files key")

    included = GetIncludedBuildFiles(build_file_path, aux_data)
    build_file_data["included_files"] = []
    for included_file in included:
        # included_file is relative to the current directory, but it needs to
        # be made relative to build_file_path's directory.
        included_relative = gyp.common.RelativePath(
            included_file, os.path.dirname(build_file_path)
        )
        build_file_data["included_files"].append(included_relative)

    # Do a first round of toolsets expansion so that conditions can be defined
    # per toolset.
    ProcessToolsetsInDict(build_file_data)

    # Apply "pre"/"early" variable expansions and condition evaluations.
    ProcessVariablesAndConditionsInDict(
        build_file_data, PHASE_EARLY, variables, build_file_path
    )

    # Since some toolsets might have been defined conditionally, perform
    # a second round of toolsets expansion now.
    ProcessToolsetsInDict(build_file_data)

    # Look at each project's target_defaults dict, and merge settings into
    # targets.
    if "target_defaults" in build_file_data:
        if "targets" not in build_file_data:
            raise GypError("Unable to find targets in build file %s" % build_file_path)

        index = 0
        while index < len(build_file_data["targets"]):
            # This procedure needs to give the impression that target_defaults is
            # used as defaults, and the individual targets inherit from that.
            # The individual targets need to be merged into the defaults.  Make
            # a deep copy of the defaults for each target, merge the target dict
            # as found in the input file into that copy, and then hook up the
            # copy with the target-specific data merged into it as the replacement
            # target dict.
            old_target_dict = build_file_data["targets"][index]
            new_target_dict = gyp.simple_copy.deepcopy(
                build_file_data["target_defaults"]
            )
            MergeDicts(
                new_target_dict, old_target_dict, build_file_path, build_file_path
            )
            build_file_data["targets"][index] = new_target_dict
            index += 1

        # No longer needed.
        del build_file_data["target_defaults"]

    # Look for dependencies.  This means that dependency resolution occurs
    # after "pre" conditionals and variable expansion, but before "post" -
    # in other words, you can't put a "dependencies" section inside a "post"
    # conditional within a target.

    dependencies = []
    if "targets" in build_file_data:
        for target_dict in build_file_data["targets"]:
            if "dependencies" not in target_dict:
                continue
            for dependency in target_dict["dependencies"]:
                dependencies.append(
                    gyp.common.ResolveTarget(build_file_path, dependency, None)[0]
                )

    if load_dependencies:
        for dependency in dependencies:
            try:
                LoadTargetBuildFile(
                    dependency,
                    data,
                    aux_data,
                    variables,
                    includes,
                    depth,
                    check,
                    load_dependencies,
                )
            except Exception as e:
                gyp.common.ExceptionAppend(
                    e, "while loading dependencies of %s" % build_file_path
                )
                raise
    else:
        return (build_file_path, dependencies)


def CallLoadTargetBuildFile(
    global_flags,
    build_file_path,
    variables,
    includes,
    depth,
    check,
    generator_input_info,
):
    """Wrapper around LoadTargetBuildFile for parallel processing.

     This wrapper is used when LoadTargetBuildFile is executed in
     a worker process.
  """

    try:
        signal.signal(signal.SIGINT, signal.SIG_IGN)

        # Apply globals so that the worker process behaves the same.
        for key, value in global_flags.items():
            globals()[key] = value

        SetGeneratorGlobals(generator_input_info)
        result = LoadTargetBuildFile(
            build_file_path,
            per_process_data,
            per_process_aux_data,
            variables,
            includes,
            depth,
            check,
            False,
        )
        if not result:
            return result

        (build_file_path, dependencies) = result

        # We can safely pop the build_file_data from per_process_data because it
        # will never be referenced by this process again, so we don't need to keep
        # it in the cache.
        build_file_data = per_process_data.pop(build_file_path)

        # This gets serialized and sent back to the main process via a pipe.
        # It's handled in LoadTargetBuildFileCallback.
        return (build_file_path, build_file_data, dependencies)
    except GypError as e:
        sys.stderr.write("gyp: %s\n" % e)
        return None
    except Exception as e:
        print("Exception:", e, file=sys.stderr)
        print(traceback.format_exc(), file=sys.stderr)
        return None


class ParallelProcessingError(Exception):
    pass


class ParallelState(object):
    """Class to keep track of state when processing input files in parallel.

  If build files are loaded in parallel, use this to keep track of
  state during farming out and processing parallel jobs. It's stored
  in a global so that the callback function can have access to it.
  """

    def __init__(self):
        # The multiprocessing pool.
        self.pool = None
        # The condition variable used to protect this object and notify
        # the main loop when there might be more data to process.
        self.condition = None
        # The "data" dict that was passed to LoadTargetBuildFileParallel
        self.data = None
        # The number of parallel calls outstanding; decremented when a response
        # was received.
        self.pending = 0
        # The set of all build files that have been scheduled, so we don't
        # schedule the same one twice.
        self.scheduled = set()
        # A list of dependency build file paths that haven't been scheduled yet.
        self.dependencies = []
        # Flag to indicate if there was an error in a child process.
        self.error = False

    def LoadTargetBuildFileCallback(self, result):
        """Handle the results of running LoadTargetBuildFile in another process.
    """
        self.condition.acquire()
        if not result:
            self.error = True
            self.condition.notify()
            self.condition.release()
            return
        (build_file_path0, build_file_data0, dependencies0) = result
        self.data[build_file_path0] = build_file_data0
        self.data["target_build_files"].add(build_file_path0)
        for new_dependency in dependencies0:
            if new_dependency not in self.scheduled:
                self.scheduled.add(new_dependency)
                self.dependencies.append(new_dependency)
        self.pending -= 1
        self.condition.notify()
        self.condition.release()


def LoadTargetBuildFilesParallel(
    build_files, data, variables, includes, depth, check, generator_input_info
):
    parallel_state = ParallelState()
    parallel_state.condition = threading.Condition()
    # Make copies of the build_files argument that we can modify while working.
    parallel_state.dependencies = list(build_files)
    parallel_state.scheduled = set(build_files)
    parallel_state.pending = 0
    parallel_state.data = data

    try:
        parallel_state.condition.acquire()
        while parallel_state.dependencies or parallel_state.pending:
            if parallel_state.error:
                break
            if not parallel_state.dependencies:
                parallel_state.condition.wait()
                continue

            dependency = parallel_state.dependencies.pop()

            parallel_state.pending += 1
            global_flags = {
                "path_sections": globals()["path_sections"],
                "non_configuration_keys": globals()["non_configuration_keys"],
                "multiple_toolsets": globals()["multiple_toolsets"],
            }

            if not parallel_state.pool:
                parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
            parallel_state.pool.apply_async(
                CallLoadTargetBuildFile,
                args=(
                    global_flags,
                    dependency,
                    variables,
                    includes,
                    depth,
                    check,
                    generator_input_info,
                ),
                callback=parallel_state.LoadTargetBuildFileCallback,
            )
    except KeyboardInterrupt as e:
        parallel_state.pool.terminate()
        raise e

    parallel_state.condition.release()

    parallel_state.pool.close()
    parallel_state.pool.join()
    parallel_state.pool = None

    if parallel_state.error:
        sys.exit(1)


# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple.  For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS = set("{[(")
BRACKETS = {"}": "{", "]": "[", ")": "("}


def FindEnclosingBracketGroup(input_str):
    stack = []
    start = -1
    for index, char in enumerate(input_str):
        if char in LBRACKETS:
            stack.append(char)
            if start == -1:
                start = index
        elif char in BRACKETS:
            if not stack:
                return (-1, -1)
            if stack.pop() != BRACKETS[char]:
                return (-1, -1)
            if not stack:
                return (start, index + 1)
    return (-1, -1)

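# Illustrative results for FindEnclosingBracketGroup (added for clarity; not
# part of the original module):
#   FindEnclosingBracketGroup("<(foo <(bar)) blah")  -> (1, 13)
#   FindEnclosingBracketGroup("a(b)c")               -> (1, 4)
#   FindEnclosingBracketGroup("no brackets")         -> (-1, -1)
#   FindEnclosingBracketGroup("(mismatched]")        -> (-1, -1)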

def IsStrCanonicalInt(string):
    """Returns True if |string| is in its canonical integer form.

  The canonical form is such that str(int(string)) == string.
  """
    if type(string) is str:
        # This function is called a lot so for maximum performance, avoid
        # involving regexps which would otherwise make the code much
        # shorter. Regexps would need twice the time of this function.
        if string:
            if string == "0":
                return True
            if string[0] == "-":
                string = string[1:]
                if not string:
                    return False
            if "1" <= string[0] <= "9":
                return string.isdigit()

    return False

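# Illustrative behavior of IsStrCanonicalInt (added for clarity; not part of
# the original module):
#   IsStrCanonicalInt("10")   -> True
#   IsStrCanonicalInt("-5")   -> True
#   IsStrCanonicalInt("010")  -> False  (str(int("010")) == "10", not "010")
#   IsStrCanonicalInt("-0")   -> False  (the canonical form of -0 is "0")
#   IsStrCanonicalInt(10)     -> False  (only str instances qualify)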

# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
early_variable_re = re.compile(
    r"(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)"
    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
    r"\((?P<is_array>\s*\[?)"
    r"(?P<content>.*?)(\]?)\))"
)

# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
    r"(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)"
    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
    r"\((?P<is_array>\s*\[?)"
    r"(?P<content>.*?)(\]?)\))"
)

# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
    r"(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)"
    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
    r"\((?P<is_array>\s*\[?)"
    r"(?P<content>.*?)(\]?)\))"
)

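# Illustrative match groups for early_variable_re (added for clarity; not part
# of the original module):
#   "<(foo)"                  -> type '<',  command_string None, content 'foo'
#   "<!(echo hi)"             -> type '<!', command_string None, content 'echo hi'
#   "<!pymod_do_main(m arg)"  -> type '<!', command_string 'pymod_do_main',
#                                content 'm arg'
#   "<@(names)"               -> type '<@', command_string None, content 'names'
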
# Global cache of results from running commands so they don't have to be run
# more than once.
cached_command_results = {}


def FixupPlatformCommand(cmd):
    if sys.platform == "win32":
        if type(cmd) is list:
            cmd = [re.sub("^cat ", "type ", cmd[0])] + cmd[1:]
        else:
            cmd = re.sub("^cat ", "type ", cmd)
    return cmd

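# Illustrative behavior of FixupPlatformCommand (added for clarity; not part
# of the original module). On win32, a leading "cat " is rewritten to the
# native "type " command:
#   FixupPlatformCommand("cat filelist.txt")  -> "type filelist.txt"
# On other platforms the command is returned unchanged.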

PHASE_EARLY = 0
PHASE_LATE = 1
PHASE_LATELATE = 2


def ExpandVariables(input, phase, variables, build_file):
    # Look for the pattern that gets expanded into variables
    if phase == PHASE_EARLY:
        variable_re = early_variable_re
        expansion_symbol = "<"
    elif phase == PHASE_LATE:
        variable_re = late_variable_re
        expansion_symbol = ">"
    elif phase == PHASE_LATELATE:
        variable_re = latelate_variable_re
        expansion_symbol = "^"
    else:
        assert False

    input_str = str(input)
    if IsStrCanonicalInt(input_str):
        return int(input_str)

    # Do a quick scan to determine if an expensive regex search is warranted.
    if expansion_symbol not in input_str:
        return input_str

    # Get the entire list of matches as a list of MatchObject instances.
    # (using findall here would return strings instead of MatchObjects).
    matches = list(variable_re.finditer(input_str))
    if not matches:
        return input_str

    output = input_str
    # Reverse the list of matches so that replacements are done right-to-left.
    # That ensures that earlier replacements won't mess up the string in a
    # way that causes later calls to find the earlier substituted text instead
    # of what's intended for replacement.
    matches.reverse()
    for match_group in matches:
        match = match_group.groupdict()
        gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
        # match['replace'] is the substring to look for, match['type']
        # is the character code for the replacement type (< > <! >! <| >| <@
        # >@ <!@ >!@), match['is_array'] contains a '[' for command
        # arrays, and match['content'] is the name of the variable (< >)
        # or command to run (<! >!). match['command_string'] is an optional
        # command string. Currently, only 'pymod_do_main' is supported.

        # run_command is true if a ! variant is used.
        run_command = "!" in match["type"]
        command_string = match["command_string"]

        # file_list is true if a | variant is used.
        file_list = "|" in match["type"]

        # Capture these now so we can adjust them later.
        replace_start = match_group.start("replace")
        replace_end = match_group.end("replace")

        # Find the ending paren, and re-evaluate the contained string.
        (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])

        # Adjust the replacement range to match the entire command
        # found by FindEnclosingBracketGroup (since the variable_re
        # probably doesn't match the entire command if it contained
        # nested variables).
        replace_end = replace_start + c_end

        # Find the "real" replacement, matching the appropriate closing
        # paren, and adjust the replacement start and end.
        replacement = input_str[replace_start:replace_end]

        # Figure out what the contents of the variable parens are.
        contents_start = replace_start + c_start + 1
        contents_end = replace_end - 1
        contents = input_str[contents_start:contents_end]

        # Do filter substitution now for <|().
        # Admittedly, this is different than the evaluation order in other
        # contexts. However, since filtration has no chance to run on <|(),
        # this seems like the only obvious way to give them access to filters.
        if file_list:
            processed_variables = gyp.simple_copy.deepcopy(variables)
            ProcessListFiltersInDict(contents, processed_variables)
            # Recurse to expand variables in the contents
            contents = ExpandVariables(contents, phase, processed_variables, build_file)
        else:
            # Recurse to expand variables in the contents
            contents = ExpandVariables(contents, phase, variables, build_file)

        # Strip off leading/trailing whitespace so that variable matches are
        # simpler below (and because they are rarely needed).
        contents = contents.strip()

        # expand_to_list is true if an @ variant is used.  In that case,
        # the expansion should result in a list.  Note that the caller
        # is to be expecting a list in return, and not all callers do
        # because not all are working in list context.  Also, for list
        # expansions, there can be no other text besides the variable
        # expansion in the input string.
        expand_to_list = "@" in match["type"] and input_str == replacement

        if run_command or file_list:
            # Find the build file's directory, so commands can be run or file lists
            # generated relative to it.
            build_file_dir = os.path.dirname(build_file)
            if build_file_dir == "" and not file_list:
                # If build_file is just a leaf filename indicating a file in the
                # current directory, build_file_dir might be an empty string.  Set
                # it to None to signal to subprocess.Popen that it should run the
                # command in the current directory.
                build_file_dir = None

        # Support <|(listfile.txt ...) which generates a file
        # containing items from a gyp list, generated at gyp time.
        # This works around actions/rules which have more inputs than will
        # fit on the command line.
        if file_list:
            if type(contents) is list:
                contents_list = contents
            else:
                contents_list = contents.split(" ")
            replacement = contents_list[0]
            if os.path.isabs(replacement):
                raise GypError('| cannot handle absolute paths, got "%s"' % replacement)

            if not generator_filelist_paths:
                path = os.path.join(build_file_dir, replacement)
            else:
                if os.path.isabs(build_file_dir):
                    toplevel = generator_filelist_paths["toplevel"]
                    rel_build_file_dir = gyp.common.RelativePath(
                        build_file_dir, toplevel
                    )
                else:
                    rel_build_file_dir = build_file_dir
                qualified_out_dir = generator_filelist_paths["qualified_out_dir"]
                path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
                gyp.common.EnsureDirExists(path)

            replacement = gyp.common.RelativePath(path, build_file_dir)
            f = gyp.common.WriteOnDiff(path)
            for i in contents_list[1:]:
                f.write("%s\n" % i)
            f.close()

        elif run_command:
            use_shell = True
            if match["is_array"]:
                contents = eval(contents)
                use_shell = False

            # Check for a cached value to avoid executing commands, or generating
            # file lists more than once. The cache key contains the command to be
            # run as well as the directory to run it from, to account for commands
            # that depend on their current directory.
            # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
            # someone could author a set of GYP files where each time the command
            # is invoked it produces different output by design. When the need
            # arises, the syntax should be extended to support disabling the
            # caching of a command's output so it is run every time.
            cache_key = (str(contents), build_file_dir)
            cached_value = cached_command_results.get(cache_key, None)
            if cached_value is None:
                gyp.DebugOutput(
                    gyp.DEBUG_VARIABLES,
                    "Executing command '%s' in directory '%s'",
                    contents,
                    build_file_dir,
                )

                replacement = ""

                if command_string == "pymod_do_main":
                    # <!pymod_do_main(modulename param eters) loads |modulename| as a
                    # python module and then calls that module's DoMain() function,
                    # passing ["param", "eters"] as a single list argument. For modules
                    # that don't load quickly, this can be faster than
                    # <!(python modulename param eters). Do this in |build_file_dir|.
                    oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
                    if build_file_dir:  # build_file_dir may be None (see above).
                        os.chdir(build_file_dir)
                    sys.path.append(os.getcwd())
                    try:

                        parsed_contents = shlex.split(contents)
                        try:
                            py_module = __import__(parsed_contents[0])
                        except ImportError as e:
                            raise GypError(
                                "Error importing pymod_do_main "
                                "module (%s): %s" % (parsed_contents[0], e)
                            )
                        replacement = str(
                            py_module.DoMain(parsed_contents[1:])
                        ).rstrip()
                    finally:
                        sys.path.pop()
                        os.chdir(oldwd)
                    assert replacement is not None
                elif command_string:
                    raise GypError(
                        "Unknown command string '%s' in '%s'."
                        % (command_string, contents)
                    )
                else:
                    # Fix up command with platform specific workarounds.
                    contents = FixupPlatformCommand(contents)
                    try:
                        p = subprocess.Popen(
                            contents,
                            shell=use_shell,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            stdin=subprocess.PIPE,
                            cwd=build_file_dir,
                        )
                    except Exception as e:
                        raise GypError(
                            "%s while executing command '%s' in %s"
                            % (e, contents, build_file)
                        )

                    p_stdout, p_stderr = p.communicate("")
                    if PY3:
                        p_stdout = p_stdout.decode("utf-8")
                        p_stderr = p_stderr.decode("utf-8")

                    if p.wait() != 0 or p_stderr:
                        sys.stderr.write(p_stderr)
                        # Simulate check_call behavior, since check_call only exists
                        # in python 2.5 and later.
                        raise GypError(
                            "Call to '%s' returned exit status %d while in %s."
                            % (contents, p.returncode, build_file)
                        )
                    replacement = p_stdout.rstrip()

                cached_command_results[cache_key] = replacement
            else:
                gyp.DebugOutput(
                    gyp.DEBUG_VARIABLES,
                    "Had cache value for command '%s' in directory '%s'",
                    contents,
                    build_file_dir,
                )
                replacement = cached_value

        else:
            if contents not in variables:
                if contents[-1] in ["!", "/"]:
                    # In order to allow cross-compiles (nacl) to happen more naturally,
                    # we will allow references to >(sources/) etc. to resolve to
                    # an empty list if undefined. This allows actions to:
                    # 'action!': [
                    #   '>@(_sources!)',
                    # ],
                    # 'action/': [
                    #   '>@(_sources/)',
                    # ],
                    replacement = []
                else:
                    raise GypError(
                        "Undefined variable " + contents + " in " + build_file
                    )
            else:
                replacement = variables[contents]

        if isinstance(replacement, bytes) and not isinstance(replacement, str):
            replacement = replacement.decode("utf-8")  # done on Python 3 only
        if type(replacement) is list:
            for item in replacement:
                if isinstance(item, bytes) and not isinstance(item, str):
                    item = item.decode("utf-8")  # done on Python 3 only
                if not contents[-1] == "/" and type(item) not in (str, int):
                    raise GypError(
                        "Variable "
                        + contents
                        + " must expand to a string or list of strings; "
                        + "list contains a "
                        + item.__class__.__name__
                    )
            # Run through the list and handle variable expansions in it.  Since
            # the list is guaranteed not to contain dicts, this won't do anything
            # with conditions sections.
            ProcessVariablesAndConditionsInList(
                replacement, phase, variables, build_file
            )
        elif type(replacement) not in (str, int):
            raise GypError(
                "Variable "
                + contents
                + " must expand to a string or list of strings; "
                + "found a "
                + replacement.__class__.__name__
            )

        if expand_to_list:
            # Expanding in list context.  It's guaranteed that there's only one
            # replacement to do in |input_str| and that it's this replacement.  See
            # above.
            if type(replacement) is list:
                # If it's already a list, make a copy.
                output = replacement[:]
            else:
                # Split it the same way sh would split arguments.
                output = shlex.split(str(replacement))
        else:
            # Expanding in string context.
            encoded_replacement = ""
            if type(replacement) is list:
                # When expanding a list into string context, turn the list items
                # into a string in a way that will work with a subprocess call.
                #
                # TODO(mark): This isn't completely correct.  This should
                # call a generator-provided function that observes the
                # proper list-to-argument quoting rules on a specific
                # platform instead of just calling the POSIX encoding
                # routine.
                encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
            else:
                encoded_replacement = replacement

            output = (
                output[:replace_start] + str(encoded_replacement) + output[replace_end:]
            )
        # Prepare for the next match iteration.
        input_str = output

    if output == input:
        gyp.DebugOutput(
            gyp.DEBUG_VARIABLES,
            "Found only identity matches on %r, avoiding infinite recursion.",
            output,
        )
    else:
        # Look for more matches now that we've replaced some, to deal with
        # expanding local variables (variables defined in the same
        # variables block as this one).
        gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
        if type(output) is list:
            if output and type(output[0]) is list:
                # Leave output alone if it's a list of lists.
                # We don't want such lists to be stringified.
                pass
            else:
                new_output = []
                for item in output:
                    new_output.append(
                        ExpandVariables(item, phase, variables, build_file)
                    )
                output = new_output
        else:
            output = ExpandVariables(output, phase, variables, build_file)

    # Convert all strings that are canonically-represented integers into integers.
    if type(output) is list:
        for index, outstr in enumerate(output):
            if IsStrCanonicalInt(outstr):
                output[index] = int(outstr)
    elif IsStrCanonicalInt(output):
        output = int(output)

    return output

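# Illustrative expansions (added for clarity; not part of the original
# module). With variables = {'foo': 'bar', 'names': ['a', 'b']}:
#   ExpandVariables('<(foo)', PHASE_EARLY, variables, 'x.gyp')     -> 'bar'
#   ExpandVariables('lib<(foo).a', PHASE_EARLY, variables, 'x.gyp')
#       -> 'libbar.a'
#   ExpandVariables('<@(names)', PHASE_EARLY, variables, 'x.gyp')  -> ['a', 'b']
#   ExpandVariables('<!(echo hi)', PHASE_EARLY, variables, 'x.gyp') runs the
#   command, returns its stripped stdout ('hi'), and caches the result in
#   cached_command_results.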

# The same condition is often evaluated over and over again so it
# makes sense to cache as much as possible between evaluations.
cached_conditions_asts = {}


def EvalCondition(condition, conditions_key, phase, variables, build_file):
    """Returns the dict that should be used or None if the result was
  that nothing should be used."""
    if type(condition) is not list:
        raise GypError(conditions_key + " must be a list")
    if len(condition) < 2:
        # It's possible that condition[0] won't work in which case this
        # attempt will raise its own IndexError.  That's probably fine.
        raise GypError(
            conditions_key
            + " "
            + condition[0]
            + " must be at least length 2, not "
            + str(len(condition))
        )

    i = 0
    result = None
    while i < len(condition):
        cond_expr = condition[i]
        true_dict = condition[i + 1]
        if type(true_dict) is not dict:
            raise GypError(
                "{} {} must be followed by a dictionary, not {}".format(
                    conditions_key, cond_expr, type(true_dict)
                )
            )
        if len(condition) > i + 2 and type(condition[i + 2]) is dict:
            false_dict = condition[i + 2]
            i = i + 3
            if i != len(condition):
                raise GypError(
                    "{} {} has {} unexpected trailing items".format(
                        conditions_key, cond_expr, len(condition) - i
                    )
                )
        else:
            false_dict = None
            i = i + 2
        if result is None:
            result = EvalSingleCondition(
                cond_expr, true_dict, false_dict, phase, variables, build_file
            )

    return result


def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file):
    """Returns true_dict if cond_expr evaluates to true, and false_dict
  otherwise."""
    # Do expansions on the condition itself.  Since the condition can naturally
    # contain variable references without needing to resort to GYP expansion
    # syntax, this is of dubious value for variables, but someone might want to
    # use a command expansion directly inside a condition.
    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, build_file)
    if type(cond_expr_expanded) not in (str, int):
        raise ValueError(
            "Variable expansion in this context permits str and int "
            + "only, found "
            + cond_expr_expanded.__class__.__name__
        )

    try:
        if cond_expr_expanded in cached_conditions_asts:
            ast_code = cached_conditions_asts[cond_expr_expanded]
        else:
            ast_code = compile(cond_expr_expanded, "<string>", "eval")
            cached_conditions_asts[cond_expr_expanded] = ast_code
        env = {"__builtins__": {}, "v": StrictVersion}
        if eval(ast_code, env, variables):
            return true_dict
        return false_dict
    except SyntaxError as e:
        syntax_error = SyntaxError(
            "%s while evaluating condition '%s' in %s "
            "at character %d." % (str(e.args[0]), e.text, build_file, e.offset),
            e.filename,
            e.lineno,
            e.offset,
            e.text,
        )
        raise syntax_error
    except NameError as e:
        gyp.common.ExceptionAppend(
            e,
            "while evaluating condition '%s' in %s" % (cond_expr_expanded, build_file),
        )
        raise GypError(e)

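# Illustrative use of EvalSingleCondition (added for clarity; not part of the
# original module). The condition is compiled and evaluated with the GYP
# variables dict as its local scope:
#   EvalSingleCondition('OS=="mac"', {'defines': ['MAC']}, None,
#                       PHASE_EARLY, {'OS': 'mac'}, 'x.gyp')
#       -> {'defines': ['MAC']}
#   With {'OS': 'linux'} the same call returns None (the false_dict).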

def ProcessConditionsInDict(the_dict, phase, variables, build_file):
    # Process a 'conditions' or 'target_conditions' section in the_dict,
    # depending on phase.
    # early -> conditions
    # late -> target_conditions
    # latelate -> no conditions
    #
    # Each item in a conditions list consists of cond_expr, a string expression
    # evaluated as the condition, and true_dict, a dict that will be merged into
    # the_dict if cond_expr evaluates to true.  Optionally, a third item,
    # false_dict, may be present.  false_dict is merged into the_dict if
    # cond_expr evaluates to false.
    #
    # Any dict merged into the_dict will be recursively processed for nested
    # conditionals and other expansions, also according to phase, immediately
    # prior to being merged.

    if phase == PHASE_EARLY:
        conditions_key = "conditions"
    elif phase == PHASE_LATE:
        conditions_key = "target_conditions"
    elif phase == PHASE_LATELATE:
        return
    else:
        assert False

    if conditions_key not in the_dict:
        return

    conditions_list = the_dict[conditions_key]
    # Unhook the conditions list, it's no longer needed.
    del the_dict[conditions_key]

    for condition in conditions_list:
        merge_dict = EvalCondition(
            condition, conditions_key, phase, variables, build_file
        )

        if merge_dict is not None:
            # Expand variables and nested conditionals in the merge_dict before
            # merging it.
            ProcessVariablesAndConditionsInDict(
                merge_dict, phase, variables, build_file
            )

            MergeDicts(the_dict, merge_dict, build_file, build_file)


def LoadAutomaticVariablesFromDict(variables, the_dict):
    # Any keys in the_dict whose values are strings, ints, or lists become
    # automatic variables.  The variable name is the key name with a "_"
    # character prepended.
    for key, value in the_dict.items():
        if type(value) in (str, int, list):
            variables["_" + key] = value

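# Illustrative behavior of LoadAutomaticVariablesFromDict (added for clarity;
# not part of the original module):
#   variables = {}
#   LoadAutomaticVariablesFromDict(
#       variables, {'target_name': 'foo', 'type': 'executable',
#                   'configurations': {}})
#   # variables == {'_target_name': 'foo', '_type': 'executable'}
#   # (dict-valued keys such as 'configurations' are skipped)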

def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
    # Any keys in the_dict's "variables" dict, if it has one, become variables.
    # The variable name is the key name in the "variables" dict.
    # Variables that end with the % character are set only if they are unset in
    # the variables dict.  the_dict_key is the name of the key that accesses
    # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
    # (it could be a list or it could be parentless because it is a root dict),
    # the_dict_key will be None.
    for key, value in the_dict.get("variables", {}).items():
        if type(value) not in (str, int, list):
            continue

        if key.endswith("%"):
            variable_name = key[:-1]
            if variable_name in variables:
                # If the variable is already set, don't set it.
                continue
            if the_dict_key == "variables" and variable_name in the_dict:
                # If the variable is set without a % in the_dict, and the_dict is a
                # variables dict (making |variables| a variables sub-dict of a
                # variables dict), use the_dict's definition.
                value = the_dict[variable_name]
        else:
            variable_name = key

        variables[variable_name] = value

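# Illustrative behavior of the '%' suffix (added for clarity; not part of the
# original module):
#   variables = {'foo': 'override'}
#   LoadVariablesFromVariablesDict(
#       variables, {'variables': {'foo%': 'default', 'bar': 'x'}}, None)
#   # variables == {'foo': 'override', 'bar': 'x'}
#   # 'foo%' is a default and loses to the pre-existing 'foo'; 'bar' is set.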

def ProcessVariablesAndConditionsInDict(
    the_dict, phase, variables_in, build_file, the_dict_key=None
):
    """Handle all variable and command expansion and conditional evaluation.

  This function is the public entry point for all variable expansions and
  conditional evaluations.  The variables_in dictionary will not be modified
  by this function.
  """

    # Make a copy of the variables_in dict that can be modified during the
    # loading of automatics and the loading of the variables dict.
    variables = variables_in.copy()
    LoadAutomaticVariablesFromDict(variables, the_dict)

    if "variables" in the_dict:
        # Make sure all the local variables are added to the variables
        # list before we process them so that you can reference one
        # variable from another.  They will be fully expanded by recursion
        # in ExpandVariables.
        for key, value in the_dict["variables"].items():
            variables[key] = value

        # Handle the associated variables dict first, so that any variable
        # references within can be resolved prior to using them as variables.
        # Pass a copy of the variables dict to avoid having it be tainted.
        # Otherwise, it would have extra automatics added for everything that
        # should just be an ordinary variable in this scope.
        ProcessVariablesAndConditionsInDict(
            the_dict["variables"], phase, variables, build_file, "variables"
        )

    LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)

    for key, value in the_dict.items():
        # Skip "variables", which was already processed if present.
        if key != "variables" and type(value) is str:
            expanded = ExpandVariables(value, phase, variables, build_file)
            if type(expanded) not in (str, int):
                raise ValueError(
                    "Variable expansion in this context permits str and int "
                    + "only, found "
                    + expanded.__class__.__name__
                    + " for "
                    + key
                )
            the_dict[key] = expanded
    # Variable expansion may have resulted in changes to automatics.  Reload.
    # TODO(mark): Optimization: only reload if changes were made.
1361    variables = variables_in.copy()
1362    LoadAutomaticVariablesFromDict(variables, the_dict)
1363    LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1364
1365    # Process conditions in this dict.  This is done after variable expansion
1366    # so that conditions may take advantage of expanded variables.  For example,
1367    # if the_dict contains:
1368    #   {'type':       '<(library_type)',
1369    #    'conditions': [['_type=="static_library"', { ... }]]},
1370    # _type, as used in the condition, will only be set to the value of
1371    # library_type if variable expansion is performed before condition
1372    # processing.  However, condition processing should occur prior to recursion
1373    # so that variables (both automatic and "variables" dict type) may be
1374    # adjusted by conditions sections, merged into the_dict, and have the
1375    # intended impact on contained dicts.
1376    #
1377    # This arrangement means that a "conditions" section containing a "variables"
1378    # section will only have those variables effective in subdicts, not in
1379    # the_dict.  The workaround is to put a "conditions" section within a
1380    # "variables" section.  For example:
1381    #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
1382    #    'defines':    ['<(define)'],
1383    #    'my_subdict': {'defines': ['<(define)']}},
1384    # will not result in "IS_MAC" being appended to the "defines" list in the
1385    # current scope but would result in it being appended to the "defines" list
1386    # within "my_subdict".  By comparison:
1387    #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
1388    #    'defines':    ['<(define)'],
1389    #    'my_subdict': {'defines': ['<(define)']}},
1390    # will append "IS_MAC" to both "defines" lists.
1391
1392    # Evaluate conditions sections, allowing variable expansions within them
1393    # as well as nested conditionals.  This will process a 'conditions' or
1394    # 'target_conditions' section, perform appropriate merging and recursive
1395    # conditional and variable processing, and then remove the conditions section
1396    # from the_dict if it is present.
1397    ProcessConditionsInDict(the_dict, phase, variables, build_file)
1398
1399    # Conditional processing may have resulted in changes to automatics or the
1400    # variables dict.  Reload.
1401    variables = variables_in.copy()
1402    LoadAutomaticVariablesFromDict(variables, the_dict)
1403    LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1404
1405    # Recurse into child dicts, or process child lists which may result in
1406    # further recursion into descendant dicts.
1407    for key, value in the_dict.items():
1408        # Skip "variables" and string values, which were already processed if
1409        # present.
1410        if key == "variables" or type(value) is str:
1411            continue
1412        if type(value) is dict:
1413            # Pass a copy of the variables dict so that subdicts can't influence
1414            # parents.
1415            ProcessVariablesAndConditionsInDict(
1416                value, phase, variables, build_file, key
1417            )
1418        elif type(value) is list:
1419            # The list itself can't influence the variables dict, and
1420            # ProcessVariablesAndConditionsInList will make copies of the variables
1421            # dict if it needs to pass it to something that can influence it.  No
1422            # copy is necessary here.
1423            ProcessVariablesAndConditionsInList(value, phase, variables, build_file)
1424        elif type(value) is not int:
1425            raise TypeError("Unknown type " + value.__class__.__name__ + " for " + key)
1426
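# Illustrative sketch, added for exposition and not part of the original GYP
# source: the two "conditions" placements discussed in the comment inside
# ProcessVariablesAndConditionsInDict above, written out as literal build-file
# fragments.  The data is hypothetical and no GYP processing happens here.
def _ConditionsPlacementExample():
    # A "variables" dict inside a "conditions" section only affects subdicts:
    subdicts_only = {
        "conditions": [['os=="mac"', {"variables": {"define": "IS_MAC"}}]],
        "defines": ["<(define)"],
        "my_subdict": {"defines": ["<(define)"]},
    }
    # The workaround, a "conditions" section inside "variables", affects both
    # the current scope and subdicts:
    both_scopes = {
        "variables": {"conditions": [['os=="mac"', {"define": "IS_MAC"}]]},
        "defines": ["<(define)"],
        "my_subdict": {"defines": ["<(define)"]},
    }
    return subdicts_only, both_scopes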
1427
1428def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
1429    # Iterate using an index so that new values can be assigned into the_list.
1430    index = 0
1431    while index < len(the_list):
1432        item = the_list[index]
1433        if type(item) is dict:
1434            # Make a copy of the variables dict so that it won't influence anything
1435            # outside of its own scope.
1436            ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
1437        elif type(item) is list:
1438            ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
1439        elif type(item) is str:
1440            expanded = ExpandVariables(item, phase, variables, build_file)
1441            if type(expanded) in (str, int):
1442                the_list[index] = expanded
1443            elif type(expanded) is list:
1444                the_list[index : index + 1] = expanded
1445                index += len(expanded)
1446
1447                # index now identifies the next item to examine.  Continue right now
1448                # without falling into the index increment below.
1449                continue
1450            else:
1451                raise ValueError(
1452                    "Variable expansion in this context permits strings, "
1453                    + "ints, and lists only, found "
1454                    + expanded.__class__.__name__
1455                    + " at index "
1456                    + str(index)
1457                )
1458        elif type(item) is not int:
1459            raise TypeError(
1460                "Unknown type " + item.__class__.__name__ + " at index " + str(index)
1461            )
1462        index = index + 1
1463
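# Minimal sketch, added for exposition and not part of the original GYP source:
# how ProcessVariablesAndConditionsInList splices a list-valued expansion into
# the containing list in place, mirroring the
# "the_list[index : index + 1] = expanded" step above.  The data is
# hypothetical.
def _ListSpliceExample():
    the_list = ["a", "<(items)", "z"]
    expanded = ["b", "c"]  # pretend ExpandVariables returned this list
    index = 1
    the_list[index : index + 1] = expanded
    index += len(expanded)  # index now points at "z", the next item to examine
    assert the_list == ["a", "b", "c", "z"]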
1464
1465def BuildTargetsDict(data):
1466    """Builds a dict mapping fully-qualified target names to their target dicts.
1467
1468  |data| is a dict keyed by build file pathnames, relative to the current
1469  directory.  Values in |data| are build file contents.  For each
1470  |data| value with a "targets" key, the value of the "targets" key is taken
1471  as a list containing target dicts.  Each target's fully-qualified name is
1472  constructed from the pathname of the build file (|data| key) and its
1473  "target_name" property.  These fully-qualified names are used as the keys
1474  in the returned dict.  These keys provide access to the target dicts,
1475  the dicts in the "targets" lists.
1476  """
1477
1478    targets = {}
1479    for build_file in data["target_build_files"]:
1480        for target in data[build_file].get("targets", []):
1481            target_name = gyp.common.QualifiedTarget(
1482                build_file, target["target_name"], target["toolset"]
1483            )
1484            if target_name in targets:
1485                raise GypError("Duplicate target definitions for " + target_name)
1486            targets[target_name] = target
1487
1488    return targets
1489
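# Hedged sketch, added for exposition and not part of the original GYP source:
# the shape of the fully-qualified keys produced by BuildTargetsDict, i.e.
# "build_file:target_name#toolset" as built by gyp.common.QualifiedTarget.
# The build file and target names below are hypothetical.
def _BuildTargetsDictExample():
    data = {
        "target_build_files": ["dir/foo.gyp"],
        "dir/foo.gyp": {
            "targets": [{"target_name": "bar", "toolset": "target"}],
        },
    }
    targets = BuildTargetsDict(data)
    assert "dir/foo.gyp:bar#target" in targets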
1490
1491def QualifyDependencies(targets):
1492    """Make dependency links fully-qualified relative to the current directory.
1493
1494  |targets| is a dict mapping fully-qualified target names to their target
1495  dicts.  For each target in this dict, keys known to contain dependency
1496  links are examined, and any dependencies referenced will be rewritten
1497  so that they are fully-qualified and relative to the current directory.
1498  All rewritten dependencies are suitable for use as keys to |targets| or a
1499  similar dict.
1500  """
1501
1502    all_dependency_sections = [
1503        dep + op for dep in dependency_sections for op in ("", "!", "/")
1504    ]
1505
1506    for target, target_dict in targets.items():
1507        target_build_file = gyp.common.BuildFile(target)
1508        toolset = target_dict["toolset"]
1509        for dependency_key in all_dependency_sections:
1510            dependencies = target_dict.get(dependency_key, [])
1511            for index, dep in enumerate(dependencies):
1512                dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
1513                    target_build_file, dep, toolset
1514                )
1515                if not multiple_toolsets:
1516                    # Ignore any toolset specified in the dependency.
1517                    dep_toolset = toolset
1518                dependency = gyp.common.QualifiedTarget(
1519                    dep_file, dep_target, dep_toolset
1520                )
1521                dependencies[index] = dependency
1522
1523                # Make sure anything appearing in a list other than "dependencies" also
1524                # appears in the "dependencies" list.
1525                if (
1526                    dependency_key != "dependencies"
1527                    and dependency not in target_dict["dependencies"]
1528                ):
1529                    raise GypError(
1530                        "Found "
1531                        + dependency
1532                        + " in "
1533                        + dependency_key
1534                        + " of "
1535                        + target
1536                        + ", but not in dependencies"
1537                    )
1538
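# Minimal sketch, added for exposition and not part of the original GYP source:
# the section names QualifyDependencies examines, derived exactly as
# all_dependency_sections is above, covering the "!" and "/" key suffixes.
def _AllDependencySectionsExample():
    sections = [dep + op for dep in dependency_sections for op in ("", "!", "/")]
    assert "dependencies" in sections
    assert "dependencies!" in sections
    assert "export_dependent_settings/" in sections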
1539
1540def ExpandWildcardDependencies(targets, data):
1541    """Expands dependencies specified as build_file:*.
1542
1543  For each target in |targets|, examines sections containing links to other
1544  targets.  If any such section contains a link of the form build_file:*, it
1545  is taken as a wildcard link, and is expanded to list each target in
1546  build_file.  The |data| dict provides access to build file dicts.
1547
1548  Any target that does not wish to be included by wildcard can provide an
1549  optional "suppress_wildcard" key in its target dict.  When present and
1550  true, a wildcard dependency link will not include such targets.
1551
1552  All dependency names, including the keys to |targets| and the values in each
1553  dependency list, must be qualified when this function is called.
1554  """
1555
1556    for target, target_dict in targets.items():
1557        target_build_file = gyp.common.BuildFile(target)
1558        for dependency_key in dependency_sections:
1559            dependencies = target_dict.get(dependency_key, [])
1560
1561            # Loop this way instead of "for dependency in" or "for index in range"
1562            # because the dependencies list will be modified within the loop body.
1563            index = 0
1564            while index < len(dependencies):
1565                (
1566                    dependency_build_file,
1567                    dependency_target,
1568                    dependency_toolset,
1569                ) = gyp.common.ParseQualifiedTarget(dependencies[index])
1570                if dependency_target != "*" and dependency_toolset != "*":
1571                    # Not a wildcard.  Keep it moving.
1572                    index = index + 1
1573                    continue
1574
1575                if dependency_build_file == target_build_file:
1576                    # It's an error for a target to depend on all other targets in
1577                    # the same file, because a target cannot depend on itself.
1578                    raise GypError(
1579                        "Found wildcard in "
1580                        + dependency_key
1581                        + " of "
1582                        + target
1583                        + " referring to same build file"
1584                    )
1585
1586                # Take the wildcard out and adjust the index so that the next
1587                # dependency in the list will be processed the next time through the
1588                # loop.
1589                del dependencies[index]
1590                index = index - 1
1591
1592                # Loop through the targets in the other build file, adding them to
1593                # this target's list of dependencies in place of the removed
1594                # wildcard.
1595                dependency_target_dicts = data[dependency_build_file]["targets"]
1596                for dependency_target_dict in dependency_target_dicts:
1597                    if int(dependency_target_dict.get("suppress_wildcard", False)):
1598                        continue
1599                    dependency_target_name = dependency_target_dict["target_name"]
1600                    if (
1601                        dependency_target != "*"
1602                        and dependency_target != dependency_target_name
1603                    ):
1604                        continue
1605                    dependency_target_toolset = dependency_target_dict["toolset"]
1606                    if (
1607                        dependency_toolset != "*"
1608                        and dependency_toolset != dependency_target_toolset
1609                    ):
1610                        continue
1611                    dependency = gyp.common.QualifiedTarget(
1612                        dependency_build_file,
1613                        dependency_target_name,
1614                        dependency_target_toolset,
1615                    )
1616                    index = index + 1
1617                    dependencies.insert(index, dependency)
1618
1619                index = index + 1
1620
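# Hedged sketch, added for exposition and not part of the original GYP source:
# a build_file:* wildcard expanded into one entry per target in the referenced
# file.  The build files and target names are hypothetical.
def _ExpandWildcardDependenciesExample():
    targets = {
        "a.gyp:app#target": {
            "toolset": "target",
            "dependencies": ["lib.gyp:*#target"],
        }
    }
    data = {
        "lib.gyp": {
            "targets": [
                {"target_name": "x", "toolset": "target"},
                {"target_name": "y", "toolset": "target"},
            ]
        }
    }
    ExpandWildcardDependencies(targets, data)
    assert targets["a.gyp:app#target"]["dependencies"] == [
        "lib.gyp:x#target",
        "lib.gyp:y#target",
    ]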
1621
1622def Unify(items):
1623    """Removes duplicate elements from items, keeping the first occurrence."""
1624    seen = {}
1625    return [seen.setdefault(e, e) for e in items if e not in seen]
1626
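# Minimal sketch, added for exposition and not part of the original GYP source:
# Unify keeps the first occurrence of each element and drops the rest.
def _UnifyExample():
    assert Unify(["a", "b", "a", "c", "b"]) == ["a", "b", "c"]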
1627
1628def RemoveDuplicateDependencies(targets):
1629    """Makes sure every dependency appears only once in all targets' dependency
1630  lists."""
1631    for target_name, target_dict in targets.items():
1632        for dependency_key in dependency_sections:
1633            dependencies = target_dict.get(dependency_key, [])
1634            if dependencies:
1635                target_dict[dependency_key] = Unify(dependencies)
1636
1637
1638def Filter(items, item):
1639    """Removes every occurrence of item from items."""
1640    res = {}
1641    return [res.setdefault(e, e) for e in items if e != item]
1642
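# Minimal sketch, added for exposition and not part of the original GYP source:
# Filter drops every occurrence of the given item.
def _FilterExample():
    assert Filter(["a", "b", "a", "c"], "a") == ["b", "c"]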
1643
1644def RemoveSelfDependencies(targets):
1645    """Remove self dependencies from targets that have the prune_self_dependency
1646  variable set."""
1647    for target_name, target_dict in targets.items():
1648        for dependency_key in dependency_sections:
1649            dependencies = target_dict.get(dependency_key, [])
1650            if dependencies:
1651                for t in dependencies:
1652                    if t == target_name:
1653                        if (
1654                            targets[t]
1655                            .get("variables", {})
1656                            .get("prune_self_dependency", 0)
1657                        ):
1658                            target_dict[dependency_key] = Filter(
1659                                dependencies, target_name
1660                            )
1661
1662
1663def RemoveLinkDependenciesFromNoneTargets(targets):
1664    """Remove dependencies having the 'link_dependency' attribute from the 'none'
1665  targets."""
1666    for target_name, target_dict in targets.items():
1667        for dependency_key in dependency_sections:
1668            dependencies = target_dict.get(dependency_key, [])
1669            if dependencies:
1670                for t in dependencies:
1671                    if target_dict.get("type", None) == "none":
1672                        if targets[t].get("variables", {}).get("link_dependency", 0):
1673                            target_dict[dependency_key] = Filter(
1674                                target_dict[dependency_key], t
1675                            )
1676
1677
1678class DependencyGraphNode(object):
1679    """A node in the dependency graph.
1680
1681  Attributes:
1682    ref: A reference to an object that this DependencyGraphNode represents.
1683    dependencies: List of DependencyGraphNodes on which this one depends.
1684    dependents: List of DependencyGraphNodes that depend on this one.
1685  """
1686
1687    class CircularException(GypError):
1688        pass
1689
1690    def __init__(self, ref):
1691        self.ref = ref
1692        self.dependencies = []
1693        self.dependents = []
1694
1695    def __repr__(self):
1696        return "<DependencyGraphNode: %r>" % self.ref
1697
1698    def FlattenToList(self):
1699        # flat_list is the sorted list of dependencies - actually, the list items
1700        # are the "ref" attributes of DependencyGraphNodes.  Every target will
1701        # appear in flat_list after all of its dependencies, and before all of its
1702        # dependents.
1703        flat_list = OrderedSet()
1704
1705        def ExtractNodeRef(node):
1706            """Extracts the object that the node represents from the given node."""
1707            return node.ref
1708
1709        # in_degree_zeros is the list of DependencyGraphNodes that have no
1710        # dependencies not in flat_list.  Initially, it is a copy of the children
1711        # of this node, because when the graph was built, nodes with no
1712        # dependencies were made implicit dependents of the root node.
1713        in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
1714
1715        while in_degree_zeros:
1716            # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
1717            # can be appended to flat_list.  Take these nodes out of in_degree_zeros
1718            # as work progresses, so that the next node to process from the list can
1719            # always be accessed at a consistent position.
1720            node = in_degree_zeros.pop()
1721            flat_list.add(node.ref)
1722
1723            # Look at dependents of the node just added to flat_list.  Some of them
1724            # may now belong in in_degree_zeros.
1725            for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
1726                is_in_degree_zero = True
1727                # TODO: We want to check through the
1728                # node_dependent.dependencies list but if it's long and we
1729                # always start at the beginning, then we get O(n^2) behaviour.
1730                for node_dependent_dependency in sorted(
1731                    node_dependent.dependencies, key=ExtractNodeRef
1732                ):
1733                    if node_dependent_dependency.ref not in flat_list:
1734                        # The dependent has one or more dependencies not in flat_list.
1735                        # There will be more chances to add it to flat_list
1736                        # when examining it again as a dependent of those other
1737                        # dependencies, provided that there are no cycles.
1738                        is_in_degree_zero = False
1739                        break
1740
1741                if is_in_degree_zero:
1742                    # All of the dependent's dependencies are already in flat_list.  Add
1743                    # it to in_degree_zeros where it will be processed in a future
1744                    # iteration of the outer loop.
1745                    in_degree_zeros += [node_dependent]
1746
1747        return list(flat_list)
1748
1749    def FindCycles(self):
1750        """
1751    Returns a list of cycles in the graph, where each cycle is its own list.
1752    """
1753        results = []
1754        visited = set()
1755
1756        def Visit(node, path):
1757            for child in node.dependents:
1758                if child in path:
1759                    results.append([child] + path[: path.index(child) + 1])
1760                elif child not in visited:
1761                    visited.add(child)
1762                    Visit(child, [child] + path)
1763
1764        visited.add(self)
1765        Visit(self, [self])
1766
1767        return results
1768
1769    def DirectDependencies(self, dependencies=None):
1770        """Returns a list of just direct dependencies."""
1771        if dependencies is None:
1772            dependencies = []
1773
1774        for dependency in self.dependencies:
1775            # Check for None, corresponding to the root node.
1776            if dependency.ref and dependency.ref not in dependencies:
1777                dependencies.append(dependency.ref)
1778
1779        return dependencies
1780
1781    def _AddImportedDependencies(self, targets, dependencies=None):
1782        """Given a list of direct dependencies, adds indirect dependencies that
1783    other dependencies have declared to export their settings.
1784
1785    This method does not operate on self.  Rather, it operates on the list
1786    of dependencies in the |dependencies| argument.  For each dependency in
1787    that list, if any declares that it exports the settings of one of its
1788    own dependencies, those dependencies whose settings are "passed through"
1789    are added to the list.  As new items are added to the list, they too will
1790    be processed, so it is possible to import settings through multiple levels
1791    of dependencies.
1792
1793    This method is not terribly useful on its own, it depends on being
1794    "primed" with a list of direct dependencies such as one provided by
1795    DirectDependencies.  DirectAndImportedDependencies is intended to be the
1796    public entry point.
1797    """
1798
1799        if dependencies is None:
1800            dependencies = []
1801
1802        index = 0
1803        while index < len(dependencies):
1804            dependency = dependencies[index]
1805            dependency_dict = targets[dependency]
1806            # Add any dependencies whose settings should be imported to the list
1807            # if not already present.  Newly-added items will be checked for
1808            # their own imports when the list iteration reaches them.
1809            # Rather than simply appending new items, insert them after the
1810            # dependency that exported them.  This is done to more closely match
1811            # the depth-first method used by DeepDependencies.
1812            add_index = 1
1813            for imported_dependency in dependency_dict.get(
1814                "export_dependent_settings", []
1815            ):
1816                if imported_dependency not in dependencies:
1817                    dependencies.insert(index + add_index, imported_dependency)
1818                    add_index = add_index + 1
1819            index = index + 1
1820
1821        return dependencies
1822
1823    def DirectAndImportedDependencies(self, targets, dependencies=None):
1824        """Returns a list of a target's direct dependencies plus all indirect
1825    dependencies whose settings a direct dependency has advertised should be
1826    exported through it.
1827    """
1828
1829        dependencies = self.DirectDependencies(dependencies)
1830        return self._AddImportedDependencies(targets, dependencies)
1831
1832    def DeepDependencies(self, dependencies=None):
1833        """Returns an OrderedSet of all of a target's dependencies, recursively."""
1834        if dependencies is None:
1835            # Using a list to get ordered output and a set to do fast "is it
1836            # already added" checks.
1837            dependencies = OrderedSet()
1838
1839        for dependency in self.dependencies:
1840            # Check for None, corresponding to the root node.
1841            if dependency.ref is None:
1842                continue
1843            if dependency.ref not in dependencies:
1844                dependency.DeepDependencies(dependencies)
1845                dependencies.add(dependency.ref)
1846
1847        return dependencies
1848
1849    def _LinkDependenciesInternal(
1850        self, targets, include_shared_libraries, dependencies=None, initial=True
1851    ):
1852        """Returns an OrderedSet of dependency targets that are linked
1853    into this target.
1854
1855    This function has a split personality, depending on the setting of
1856    |initial|.  Outside callers should always leave |initial| at its default
1857    setting.
1858
1859    When adding a target to the list of dependencies, this function will
1860    recurse into itself with |initial| set to False, to collect dependencies
1861    that are linked into the linkable target for which the list is being built.
1862
1863    If |include_shared_libraries| is False, the resulting dependencies will not
1864    include shared_library targets that are linked into this target.
1865    """
1866        if dependencies is None:
1867            # Using a list to get ordered output and a set to do fast "is it
1868            # already added" checks.
1869            dependencies = OrderedSet()
1870
1871        # Check for None, corresponding to the root node.
1872        if self.ref is None:
1873            return dependencies
1874
1875        # It's kind of sucky that |targets| has to be passed into this function,
1876        # but that's presently the easiest way to access the target dicts so that
1877        # this function can find target types.
1878
1879        if "target_name" not in targets[self.ref]:
1880            raise GypError("Missing 'target_name' field in target.")
1881
1882        if "type" not in targets[self.ref]:
1883            raise GypError(
1884                "Missing 'type' field in target %s" % targets[self.ref]["target_name"]
1885            )
1886
1887        target_type = targets[self.ref]["type"]
1888
1889        is_linkable = target_type in linkable_types
1890
1891        if initial and not is_linkable:
1892            # If this is the first target being examined and it's not linkable,
1893            # return an empty list of link dependencies, because the link
1894            # dependencies are intended to apply to the target itself (initial is
1895            # True) and this target won't be linked.
1896            return dependencies
1897
1898        # Don't traverse 'none' targets if explicitly excluded.
1899        if target_type == "none" and not targets[self.ref].get(
1900            "dependencies_traverse", True
1901        ):
1902            dependencies.add(self.ref)
1903            return dependencies
1904
1905        # Executables, mac kernel extensions, windows drivers and loadable modules
1906        # are already fully and finally linked. Nothing else can be a link
1907        # dependency of them, there can only be dependencies in the sense that a
1908        # dependent target might run an executable or load the loadable_module.
1909        if not initial and target_type in (
1910            "executable",
1911            "loadable_module",
1912            "mac_kernel_extension",
1913            "windows_driver",
1914        ):
1915            return dependencies
1916
1917        # Shared libraries are already fully linked.  They should only be included
1918        # in |dependencies| when adjusting static library dependencies (in order to
1919        # link against the shared_library's import lib), but should not be included
1920        # in |dependencies| when propagating link_settings.
1921        # The |include_shared_libraries| flag controls which of these two cases we
1922        # are handling.
1923        if (
1924            not initial
1925            and target_type == "shared_library"
1926            and not include_shared_libraries
1927        ):
1928            return dependencies
1929
1930        # The target is linkable, add it to the list of link dependencies.
1931        if self.ref not in dependencies:
1932            dependencies.add(self.ref)
1933            if initial or not is_linkable:
1934                # If this is a subsequent target and it's linkable, don't look any
1935                # further for linkable dependencies, as they'll already be linked into
1936                # this linkable target.  Always look at dependencies of the initial
1937                # target, and always look at dependencies of non-linkables.
1938                for dependency in self.dependencies:
1939                    dependency._LinkDependenciesInternal(
1940                        targets, include_shared_libraries, dependencies, False
1941                    )
1942
1943        return dependencies
1944
1945    def DependenciesForLinkSettings(self, targets):
1946        """
1947    Returns a list of dependency targets whose link_settings should be merged
1948    into this target.
1949    """
1950
1951        # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
1952        # link_settings are propagated.  So for now, we will allow it, unless the
1953        # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
1954        # False.  Once chrome is fixed, we can remove this flag.
1955        include_shared_libraries = targets[self.ref].get(
1956            "allow_sharedlib_linksettings_propagation", True
1957        )
1958        return self._LinkDependenciesInternal(targets, include_shared_libraries)
1959
1960    def DependenciesToLinkAgainst(self, targets):
1961        """
1962    Returns a list of dependency targets that are linked into this target.
1963    """
1964        return self._LinkDependenciesInternal(targets, True)
1965
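# Hedged sketch, added for exposition and not part of the original GYP source:
# a three-node chain a -> b -> c flattened with FlattenToList.  The node
# wiring mirrors what BuildDependencyList does below; the refs are
# hypothetical target names.
def _FlattenToListExample():
    root = DependencyGraphNode(None)
    a = DependencyGraphNode("a")
    b = DependencyGraphNode("b")
    c = DependencyGraphNode("c")
    # a depends on b, and b depends on c.  c has no dependencies of its own,
    # so it is wired to the root node, as BuildDependencyList does.
    a.dependencies.append(b)
    b.dependents.append(a)
    b.dependencies.append(c)
    c.dependents.append(b)
    c.dependencies.append(root)
    root.dependents.append(c)
    # Every ref appears after its dependencies and before its dependents.
    assert root.FlattenToList() == ["c", "b", "a"]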
1966
1967def BuildDependencyList(targets):
1968    # Create a DependencyGraphNode for each target.  Put it into a dict for easy
1969    # access.
1970    dependency_nodes = {}
1971    for target, spec in targets.items():
1972        if target not in dependency_nodes:
1973            dependency_nodes[target] = DependencyGraphNode(target)
1974
1975    # Set up the dependency links.  Targets that have no dependencies are treated
1976    # as dependent on root_node.
1977    root_node = DependencyGraphNode(None)
1978    for target, spec in targets.items():
1979        target_node = dependency_nodes[target]
1980        dependencies = spec.get("dependencies")
1981        if not dependencies:
1982            target_node.dependencies = [root_node]
1983            root_node.dependents.append(target_node)
1984        else:
1985            for dependency in dependencies:
1986                dependency_node = dependency_nodes.get(dependency)
1987                if not dependency_node:
1988                    raise GypError(
1989                        "Dependency '%s' not found while "
1990                        "trying to load target %s" % (dependency, target)
1991                    )
1992                target_node.dependencies.append(dependency_node)
1993                dependency_node.dependents.append(target_node)
1994
1995    flat_list = root_node.FlattenToList()
1996
1997    # If there's anything left unvisited, there must be a circular dependency
1998    # (cycle).
1999    if len(flat_list) != len(targets):
2000        if not root_node.dependents:
2001            # If all targets have dependencies, add the first target as a dependent
2002            # of root_node so that the cycle can be discovered from root_node.
2003            target = next(iter(targets))
2004            target_node = dependency_nodes[target]
2005            target_node.dependencies.append(root_node)
2006            root_node.dependents.append(target_node)
2007
2008        cycles = []
2009        for cycle in root_node.FindCycles():
2010            paths = [node.ref for node in cycle]
2011            cycles.append("Cycle: %s" % " -> ".join(paths))
2012        raise DependencyGraphNode.CircularException(
2013            "Cycles in dependency graph detected:\n" + "\n".join(cycles)
2014        )
2015
2016    return [dependency_nodes, flat_list]
2017
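# Hedged sketch, added for exposition and not part of the original GYP source:
# a two-node cycle surfaced by FindCycles, mirroring the error path at the end
# of BuildDependencyList.  The refs are hypothetical target names.
def _FindCyclesExample():
    root = DependencyGraphNode(None)
    a = DependencyGraphNode("a")
    b = DependencyGraphNode("b")
    # a depends on b and b depends on a.
    a.dependencies.append(b)
    b.dependents.append(a)
    b.dependencies.append(a)
    a.dependents.append(b)
    # Hang one node off the root so the cycle is discoverable from root_node,
    # just as BuildDependencyList does when every target has dependencies.
    a.dependencies.append(root)
    root.dependents.append(a)
    cycle = root.FindCycles()[0]
    assert [node.ref for node in cycle] == ["a", "b", "a"]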
2018
2019def VerifyNoGYPFileCircularDependencies(targets):
2020    # Create a DependencyGraphNode for each gyp file containing a target.  Put
2021    # it into a dict for easy access.
2022    dependency_nodes = {}
2023    for target in targets:
2024        build_file = gyp.common.BuildFile(target)
2025        if build_file not in dependency_nodes:
2026            dependency_nodes[build_file] = DependencyGraphNode(build_file)
2027
2028    # Set up the dependency links.
2029    for target, spec in targets.items():
2030        build_file = gyp.common.BuildFile(target)
2031        build_file_node = dependency_nodes[build_file]
2032        target_dependencies = spec.get("dependencies", [])
2033        for dependency in target_dependencies:
2034            try:
2035                dependency_build_file = gyp.common.BuildFile(dependency)
2036            except GypError as e:
2037                gyp.common.ExceptionAppend(
2038                    e, "while computing dependencies of .gyp file %s" % build_file
2039                )
2040                raise
2041
2042            if dependency_build_file == build_file:
2043                # A .gyp file is allowed to refer back to itself.
2044                continue
2045            dependency_node = dependency_nodes.get(dependency_build_file)
2046            if not dependency_node:
2047                raise GypError("Dependency '%s' not found" % dependency_build_file)
2048            if dependency_node not in build_file_node.dependencies:
2049                build_file_node.dependencies.append(dependency_node)
2050                dependency_node.dependents.append(build_file_node)
2051
2052    # Files that have no dependencies are treated as dependent on root_node.
2053    root_node = DependencyGraphNode(None)
2054    for build_file_node in dependency_nodes.values():
2055        if len(build_file_node.dependencies) == 0:
2056            build_file_node.dependencies.append(root_node)
2057            root_node.dependents.append(build_file_node)
2058
2059    flat_list = root_node.FlattenToList()
2060
2061    # If there's anything left unvisited, there must be a circular dependency
2062    # (cycle).
2063    if len(flat_list) != len(dependency_nodes):
2064        if not root_node.dependents:
2065            # If all files have dependencies, add the first file as a dependent
2066            # of root_node so that the cycle can be discovered from root_node.
2067            file_node = next(iter(dependency_nodes.values()))
2068            file_node.dependencies.append(root_node)
2069            root_node.dependents.append(file_node)
2070        cycles = []
2071        for cycle in root_node.FindCycles():
2072            paths = [node.ref for node in cycle]
2073            cycles.append("Cycle: %s" % " -> ".join(paths))
2074        raise DependencyGraphNode.CircularException(
2075            "Cycles in .gyp file dependency graph detected:\n" + "\n".join(cycles)
2076        )
2077
2078
2079def DoDependentSettings(key, flat_list, targets, dependency_nodes):
2080    # key should be one of all_dependent_settings, direct_dependent_settings,
2081    # or link_settings.
2082
2083    for target in flat_list:
2084        target_dict = targets[target]
2085        build_file = gyp.common.BuildFile(target)
2086
2087        if key == "all_dependent_settings":
2088            dependencies = dependency_nodes[target].DeepDependencies()
2089        elif key == "direct_dependent_settings":
2090            dependencies = dependency_nodes[target].DirectAndImportedDependencies(
2091                targets
2092            )
2093        elif key == "link_settings":
2094            dependencies = dependency_nodes[target].DependenciesForLinkSettings(targets)
2095        else:
2096            raise GypError(
2097                "DoDependentSettings doesn't know how to determine "
2098                "dependencies for " + key
2099            )
2100
2101        for dependency in dependencies:
2102            dependency_dict = targets[dependency]
2103            if key not in dependency_dict:
2104                continue
2105            dependency_build_file = gyp.common.BuildFile(dependency)
2106            MergeDicts(
2107                target_dict, dependency_dict[key], build_file, dependency_build_file
2108            )
2109
2110
2111def AdjustStaticLibraryDependencies(
2112    flat_list, targets, dependency_nodes, sort_dependencies
2113):
2114    # Recompute target "dependencies" properties.  For each static library
2115    # target, remove "dependencies" entries referring to other static libraries,
2116    # unless the dependency has the "hard_dependency" attribute set.  For each
2117    # linkable target, add a "dependencies" entry referring to all of the
2118    # target's computed list of link dependencies (including static libraries)
2119    # if no such entry is already present.
2120    for target in flat_list:
2121        target_dict = targets[target]
2122        target_type = target_dict["type"]
2123
2124        if target_type == "static_library":
2125            if "dependencies" not in target_dict:
2126                continue
2127
2128            target_dict["dependencies_original"] = list(
2129                target_dict.get("dependencies", [])
2130            )
2131
2132            # A static library should not depend on another static library unless
2133            # the dependency relationship is "hard," which should only be done when
2134            # a dependent relies on some side effect other than just the build
2135            # product, like a rule or action output. Further, if a target has a
2136            # non-hard dependency, but that dependency exports a hard dependency,
2137            # the non-hard dependency can safely be removed, but the exported hard
2138            # dependency must be added to the target to keep the same dependency
2139            # ordering.
2140            dependencies = dependency_nodes[target].DirectAndImportedDependencies(
2141                targets
2142            )
2143            index = 0
2144            while index < len(dependencies):
2145                dependency = dependencies[index]
2146                dependency_dict = targets[dependency]
2147
2148                # Remove every non-hard static library dependency and remove every
2149                # non-static library dependency that isn't a direct dependency.
2150                if (
2151                    dependency_dict["type"] == "static_library"
2152                    and not dependency_dict.get("hard_dependency", False)
2153                ) or (
2154                    dependency_dict["type"] != "static_library"
2155                    and dependency not in target_dict["dependencies"]
2156                ):
2157                    # Take the dependency out of the list, and don't increment index
2158                    # because the next dependency to analyze will shift into the index
2159                    # formerly occupied by the one being removed.
2160                    del dependencies[index]
2161                else:
2162                    index = index + 1
2163
2164            # Update the dependencies. If the dependencies list is empty, it's not
2165            # needed, so unhook it.
2166            if len(dependencies) > 0:
2167                target_dict["dependencies"] = dependencies
2168            else:
2169                del target_dict["dependencies"]
2170
2171        elif target_type in linkable_types:
2172            # Get a list of dependency targets that should be linked into this
2173            # target.  Add them to the dependencies list if they're not already
2174            # present.
2175
2176            link_dependencies = dependency_nodes[target].DependenciesToLinkAgainst(
2177                targets
2178            )
2179            for dependency in link_dependencies:
2180                if dependency == target:
2181                    continue
2182                if "dependencies" not in target_dict:
2183                    target_dict["dependencies"] = []
2184                if dependency not in target_dict["dependencies"]:
2185                    target_dict["dependencies"].append(dependency)
2186            # Sort the dependencies list in the order from dependents to dependencies.
2187            # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
2188            # Note: flat_list is already sorted in the order from dependencies to
2189            # dependents.
2190            if sort_dependencies and "dependencies" in target_dict:
2191                target_dict["dependencies"] = [
2192                    dep
2193                    for dep in reversed(flat_list)
2194                    if dep in target_dict["dependencies"]
2195                ]
2196
2197
2198# Initialize this here to speed up MakePathRelative.
2199exception_re = re.compile(r"""["']?[-/$<>^]""")
2200
2201
2202def MakePathRelative(to_file, fro_file, item):
2203    # If item is a relative path, it's relative to the build file dict that it's
2204    # coming from.  Fix it up to make it relative to the build file dict that
2205    # it's going into.
2206    # Exception: any |item| that begins with these special characters is
2207    # returned without modification.
2208    #   /   Used when a path is already absolute (shortcut optimization;
2209    #       such paths would be returned as absolute anyway)
2210    #   $   Used for build environment variables
2211    #   -   Used for some build environment flags (such as -lapr-1 in a
2212    #       "libraries" section)
2213    #   <   Used for our own variable and command expansions (see ExpandVariables)
2214    #   >   Used for our own variable and command expansions (see ExpandVariables)
2215    #   ^   Used for our own variable and command expansions (see ExpandVariables)
2216    #
2217    #   "/' Used when a value is quoted.  If these are present, then we
2218    #       check the second character instead.
2219    #
2220    if to_file == fro_file or exception_re.match(item):
2221        return item
2222    else:
2223        # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
2224        # temporary measure. This should really be addressed by keeping all paths
2225        # in POSIX until actual project generation.
2226        ret = os.path.normpath(
2227            os.path.join(
2228                gyp.common.RelativePath(
2229                    os.path.dirname(fro_file), os.path.dirname(to_file)
2230                ),
2231                item,
2232            )
2233        ).replace("\\", "/")
2234        if item.endswith("/"):
2235            ret += "/"
2236        return ret
2237
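# Minimal sketch, added for exposition and not part of the original GYP source:
# MakePathRelative re-bases a relative path from one build file's directory to
# another's, and passes special-prefixed items through untouched.  The file
# names are hypothetical.
def _MakePathRelativeExample():
    assert MakePathRelative("a/b.gyp", "a/sub/c.gyp", "file.cc") == "sub/file.cc"
    # Items matching exception_re (e.g. "-l" flags) are returned unmodified.
    assert MakePathRelative("a/b.gyp", "a/sub/c.gyp", "-lapr-1") == "-lapr-1"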
2238
2239def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
2240    # The Python documentation recommends that objects which do not support
2241    # hashing set __hash__ to None. Python library objects follow this rule.
2242    def is_hashable(val):
2243        return val.__hash__
2244
2245    # If x is hashable, returns whether x is in s. Else returns whether x is in items.
2246    def is_in_set_or_list(x, s, items):
2247        if is_hashable(x):
2248            return x in s
2249        return x in items
2250
2251    prepend_index = 0
2252
2253    # Make membership testing of hashables in |to| (in particular, strings)
2254    # faster.
2255    hashable_to_set = set(x for x in to if is_hashable(x))
2256    for item in fro:
2257        singleton = False
2258        if type(item) in (str, int):
2259            # The cheap and easy case.
2260            if is_paths:
2261                to_item = MakePathRelative(to_file, fro_file, item)
2262            else:
2263                to_item = item
2264
2265            if not (type(item) is str and item.startswith("-")):
2266                # Any string that doesn't begin with a "-" is a singleton - it can
2267                # only appear once in a list, to be enforced by the list merge append
2268                # or prepend.
2269                singleton = True
2270        elif type(item) is dict:
2271            # Make a copy of the dictionary, continuing to look for paths to fix.
2272            # The other intelligent aspects of merge processing won't apply because
2273            # item is being merged into an empty dict.
2274            to_item = {}
2275            MergeDicts(to_item, item, to_file, fro_file)
2276        elif type(item) is list:
2277            # Recurse, making a copy of the list.  If the list contains any
2278            # descendant dicts, path fixing will occur.  Note that here, custom
2279            # values for is_paths and append are dropped; those are only to be
2280            # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
2281            # matter anyway because the new |to_item| list is empty.
2282            to_item = []
2283            MergeLists(to_item, item, to_file, fro_file)
2284        else:
2285            raise TypeError(
2286                "Attempt to merge list item of unsupported type "
2287                + item.__class__.__name__
2288            )
2289
2290        if append:
2291            # If appending a singleton that's already in the list, don't append.
2292            # This ensures that the earliest occurrence of the item will stay put.
2293            if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
2294                to.append(to_item)
2295                if is_hashable(to_item):
2296                    hashable_to_set.add(to_item)
2297        else:
2298            # If prepending a singleton that's already in the list, remove the
2299            # existing instance and proceed with the prepend.  This ensures that the
2300            # item appears at the earliest possible position in the list.
2301            while singleton and to_item in to:
2302                to.remove(to_item)
2303
2304            # Don't just insert everything at index 0.  That would prepend the new
2305            # items to the list in reverse order, which would be an unwelcome
2306            # surprise.
2307            to.insert(prepend_index, to_item)
2308            if is_hashable(to_item):
2309                hashable_to_set.add(to_item)
2310            prepend_index = prepend_index + 1
2311
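# Minimal sketch, added for exposition and not part of the original GYP source:
# singleton handling in MergeLists.  Appending a string that is already
# present is a no-op; prepending removes the existing copy first so the item
# lands at the front.  The file names are hypothetical.
def _MergeListsExample():
    to = ["a", "b"]
    MergeLists(to, ["b", "c"], "x.gyp", "x.gyp")
    assert to == ["a", "b", "c"]
    to = ["a", "b"]
    MergeLists(to, ["b"], "x.gyp", "x.gyp", append=False)
    assert to == ["b", "a"]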
2312
2313def MergeDicts(to, fro, to_file, fro_file):
2314    # I wanted to name the parameter "from" but it's a Python keyword...
2315    for k, v in fro.items():
2316        # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
2317        # copy semantics.  Something else may want to merge from the |fro| dict
2318        # later, and having the same dict ref pointed to twice in the tree isn't
2319        # what anyone wants considering that the dicts may subsequently be
2320        # modified.
2321        if k in to:
2322            bad_merge = False
2323            if type(v) in (str, int):
2324                if type(to[k]) not in (str, int):
2325                    bad_merge = True
2326            elif not isinstance(v, type(to[k])):
2327                bad_merge = True
2328
2329            if bad_merge:
2330                raise TypeError(
2331                    "Attempt to merge dict value of type "
2332                    + v.__class__.__name__
2333                    + " into incompatible type "
2334                    + to[k].__class__.__name__
2335                    + " for key "
2336                    + k
2337                )
2338        if type(v) in (str, int):
2339            # Overwrite the existing value, if any.  Cheap and easy.
2340            is_path = IsPathSection(k)
2341            if is_path:
2342                to[k] = MakePathRelative(to_file, fro_file, v)
2343            else:
2344                to[k] = v
2345        elif type(v) is dict:
2346            # Recurse, guaranteeing copies will be made of objects that require it.
2347            if k not in to:
2348                to[k] = {}
2349            MergeDicts(to[k], v, to_file, fro_file)
2350        elif type(v) is list:
2351            # Lists in dicts can be merged with different policies, depending on
2352            # how the key in the "from" dict (k, the from-key) is written.
2353            #
2354            # If the from-key has          ...the to-list will have this action
2355            # this character appended:...     applied when receiving the from-list:
2356            #                           =  replace
2357            #                           +  prepend
2358            #                           ?  set, only if to-list does not yet exist
2359            #                      (none)  append
2360            #
2361            # This logic is list-specific, but since it relies on the associated
2362            # dict key, it's checked in this dict-oriented function.
2363            ext = k[-1]
2364            append = True
2365            if ext == "=":
2366                list_base = k[:-1]
2367                lists_incompatible = [list_base, list_base + "?"]
2368                to[list_base] = []
2369            elif ext == "+":
2370                list_base = k[:-1]
2371                lists_incompatible = [list_base + "=", list_base + "?"]
2372                append = False
2373            elif ext == "?":
2374                list_base = k[:-1]
2375                lists_incompatible = [list_base, list_base + "=", list_base + "+"]
2376            else:
2377                list_base = k
2378                lists_incompatible = [list_base + "=", list_base + "?"]
2379
2380            # Some combinations of merge policies appearing together are meaningless.
2381            # It's stupid to replace and append simultaneously, for example.  Append
2382            # and prepend are the only policies that can coexist.
2383            for list_incompatible in lists_incompatible:
2384                if list_incompatible in fro:
2385                    raise GypError(
2386                        "Incompatible list policies " + k + " and " + list_incompatible
2387                    )
2388
2389            if list_base in to:
2390                if ext == "?":
2391                    # If the key ends in "?", the list will only be merged if it doesn't
2392                    # already exist.
2393                    continue
2394                elif type(to[list_base]) is not list:
2395                    # This may not have been checked above if merging in a list with an
2396                    # extension character.
2397                    raise TypeError(
2398                        "Attempt to merge dict value of type "
2399                        + v.__class__.__name__
2400                        + " into incompatible type "
2401                        + to[list_base].__class__.__name__
2402                        + " for key "
2403                        + list_base
2404                        + " ("
2405                        + k
2406                        + ")"
2407                    )
2408            else:
2409                to[list_base] = []
2410
2411            # Call MergeLists, which will make copies of objects that require it.
2412            # MergeLists can recurse back into MergeDicts, although this will be
2413            # to make copies of dicts (with paths fixed), there will be no
2414            # subsequent dict "merging" once entering a list because lists are
2415            # always replaced, appended to, or prepended to.
2416            is_paths = IsPathSection(list_base)
2417            MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
2418        else:
2419            raise TypeError(
2420                "Attempt to merge dict value of unsupported type "
2421                + v.__class__.__name__
2422                + " for key "
2423                + k
2424            )
2425
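# Minimal sketch, added for exposition and not part of the original GYP source:
# the four list-merge policies MergeDicts dispatches on via the from-key's
# trailing character.  The key names and file names are hypothetical.
def _MergeDictsListPolicyExample():
    to = {"defines": ["A"]}
    MergeDicts(to, {"defines": ["B"]}, "x.gyp", "x.gyp")  # (none): append
    assert to["defines"] == ["A", "B"]
    MergeDicts(to, {"defines+": ["C"]}, "x.gyp", "x.gyp")  # "+": prepend
    assert to["defines"] == ["C", "A", "B"]
    MergeDicts(to, {"defines=": ["D"]}, "x.gyp", "x.gyp")  # "=": replace
    assert to["defines"] == ["D"]
    MergeDicts(to, {"defines?": ["E"]}, "x.gyp", "x.gyp")  # "?": set if absent
    assert to["defines"] == ["D"]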
2426
2427def MergeConfigWithInheritance(
2428    new_configuration_dict, build_file, target_dict, configuration, visited
2429):
2430    # Skip if previously visited.
2431    if configuration in visited:
2432        return
2433
2434    # Look at this configuration.
2435    configuration_dict = target_dict["configurations"][configuration]
2436
2437    # Merge in parents.
2438    for parent in configuration_dict.get("inherit_from", []):
2439        MergeConfigWithInheritance(
2440            new_configuration_dict,
2441            build_file,
2442            target_dict,
2443            parent,
2444            visited + [configuration],
2445        )
2446
2447    # Merge it into the new config.
2448    MergeDicts(new_configuration_dict, configuration_dict, build_file, build_file)
2449
2450    # Drop abstract.
2451    if "abstract" in new_configuration_dict:
2452        del new_configuration_dict["abstract"]
2453
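# Hedged sketch, added for exposition and not part of the original GYP source:
# configuration inheritance resolved by MergeConfigWithInheritance.  The
# configuration names and values are hypothetical.
def _MergeConfigWithInheritanceExample():
    target_dict = {
        "configurations": {
            "Common": {"abstract": 1, "defines": ["COMMON"]},
            "Debug": {"inherit_from": ["Common"], "defines": ["DEBUG"]},
        }
    }
    merged = {}
    MergeConfigWithInheritance(merged, "x.gyp", target_dict, "Debug", [])
    # Parent settings land first, and "abstract" is dropped from the result.
    assert merged["defines"] == ["COMMON", "DEBUG"]
    assert "abstract" not in merged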
2454
2455def SetUpConfigurations(target, target_dict):
2456    # key_suffixes is a list of key suffixes that might appear on key names.
2457    # These suffixes are handled in conditional evaluations (for =, +, and ?)
2458    # and rules/exclude processing (for ! and /).  Keys with these suffixes
2459    # should be treated the same as keys without.
2460    key_suffixes = ["=", "+", "?", "!", "/"]
2461
2462    build_file = gyp.common.BuildFile(target)
2463
2464    # Provide a single configuration by default if none exists.
2465    # TODO(mark): Signal an error if default_configurations exists but
2466    # configurations does not.
2467    if "configurations" not in target_dict:
2468        target_dict["configurations"] = {"Default": {}}
2469    if "default_configuration" not in target_dict:
2470        concrete = [
2471            i
2472            for (i, config) in target_dict["configurations"].items()
2473            if not config.get("abstract")
2474        ]
2475        target_dict["default_configuration"] = sorted(concrete)[0]
2476
2477    merged_configurations = {}
2478    configs = target_dict["configurations"]
2479    for (configuration, old_configuration_dict) in configs.items():
2480        # Skip abstract configurations (saves work only).
2481        if old_configuration_dict.get("abstract"):
2482            continue
2483        # Configurations inherit (most) settings from the enclosing target scope.
2484        # Get the inheritance relationship right by making a copy of the target
2485        # dict.
2486        new_configuration_dict = {}
2487        for (key, target_val) in target_dict.items():
2488            key_ext = key[-1:]
2489            if key_ext in key_suffixes:
2490                key_base = key[:-1]
2491            else:
2492                key_base = key
2493            if key_base not in non_configuration_keys:
2494                new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
2495
2496        # Merge in configuration (with all its parents first).
2497        MergeConfigWithInheritance(
2498            new_configuration_dict, build_file, target_dict, configuration, []
2499        )
2500
2501        merged_configurations[configuration] = new_configuration_dict
2502
2503    # Put the merged configurations back into the target dict's "configurations".
2504    for configuration in merged_configurations.keys():
2505        target_dict["configurations"][configuration] = merged_configurations[
2506            configuration
2507        ]
2508
2509    # Now drop all the abstract ones.
2510    configs = target_dict["configurations"]
2511    target_dict["configurations"] = {
2512        k: v for k, v in configs.items() if not v.get("abstract")
2513    }
2514
2515    # Now that all of the target's configurations have been built, go through
2516    # the target dict's keys and remove everything that's been moved into a
2517    # "configurations" section.
2518    delete_keys = []
2519    for key in target_dict:
2520        key_ext = key[-1:]
2521        if key_ext in key_suffixes:
2522            key_base = key[:-1]
2523        else:
2524            key_base = key
2525        if key_base not in non_configuration_keys:
2526            delete_keys.append(key)
2527    for key in delete_keys:
2528        del target_dict[key]
2529
2530    # Check the configurations to see if they contain invalid keys.
2531    for configuration in target_dict["configurations"].keys():
2532        configuration_dict = target_dict["configurations"][configuration]
2533        for key in configuration_dict.keys():
2534            if key in invalid_configuration_keys:
2535                raise GypError(
2536                    "%s not allowed in the %s configuration, found in "
2537                    "target %s" % (key, configuration, target)
2538                )
2539
2540
def ProcessListFiltersInDict(name, the_dict):
    """Process regular expression and exclusion-based filters on lists.

  An exclusion list is in a dict key named with a trailing "!", like
  "sources!".  Every item in such a list is removed from the associated
  main list, which, in this example, would be "sources".  Removed items are
  placed into a "sources_excluded" list in the dict.

  Regular expression (regex) filters are contained in dict keys named with a
  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
  filters in a dict take the form:
    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
                  ['include', '_mac\\.cc$'] ],
  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
  _win.cc.  The second filter then includes all files ending in _mac.cc that
  are now or were once in the "sources" list.  Items matching an "exclude"
  filter are subject to the same processing as would occur if they were listed
  by name in an exclusion list (ending in "!").  Items matching an "include"
  filter are brought back into the main list if previously excluded by an
  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
  patterns can still cause items to be excluded after matching an "include".
  """

    # Look through the dictionary for any lists whose keys end in "!" or "/".
    # These are lists that will be treated as exclude lists and regular
    # expression-based exclude/include lists.  Collect the lists that are
    # needed first, looking for the lists that they operate on, and assemble
    # them into |lists|.  This is done in a separate loop up front, because
    # the _included and _excluded keys need to be added to the_dict, and that
    # can't be done while iterating through it.

    lists = []
    del_lists = []
    for key, value in the_dict.items():
        operation = key[-1]
        if operation != "!" and operation != "/":
            continue

        if type(value) is not list:
            raise ValueError(
                name + " key " + key + " must be list, not " + value.__class__.__name__
            )

        list_key = key[:-1]
        if list_key not in the_dict:
            # This happens when there's a list like "sources!" but no corresponding
            # "sources" list.  Since there's nothing for it to operate on, queue up
            # the "sources!" list for deletion now.
            del_lists.append(key)
            continue

        if type(the_dict[list_key]) is not list:
            value = the_dict[list_key]
            raise ValueError(
                name
                + " key "
                + list_key
                + " must be list, not "
                + value.__class__.__name__
                + " when applying "
                + {"!": "exclusion", "/": "regex"}[operation]
            )

        if list_key not in lists:
            lists.append(list_key)

    # Delete the lists that are known to be unneeded at this point.
    for del_list in del_lists:
        del the_dict[del_list]

    for list_key in lists:
        the_list = the_dict[list_key]

        # Initialize the list_actions list, which is parallel to the_list.  Each
        # item in list_actions identifies whether the corresponding item in
        # the_list should be excluded, unconditionally preserved (included), or
        # whether no exclusion or inclusion has been applied.  Items for which
        # no exclusion or inclusion has been applied (yet) have value -1, items
        # excluded have value 0, and items included have value 1.  Includes and
        # excludes override previous actions.  All items in list_actions are
        # initialized to -1 because no excludes or includes have been processed
        # yet.
        list_actions = [-1] * len(the_list)

        exclude_key = list_key + "!"
        if exclude_key in the_dict:
            for exclude_item in the_dict[exclude_key]:
                for index, list_item in enumerate(the_list):
                    if exclude_item == list_item:
                        # This item matches the exclude_item, so set its action to 0
                        # (exclude).
                        list_actions[index] = 0

            # The "whatever!" list is no longer needed, dump it.
            del the_dict[exclude_key]

        regex_key = list_key + "/"
        if regex_key in the_dict:
            for regex_item in the_dict[regex_key]:
                action, pattern = regex_item
                pattern_re = re.compile(pattern)

                if action == "exclude":
                    # Items matching this regex will be excluded (action value 0).
                    action_value = 0
                elif action == "include":
                    # Items matching this regex will be included (action value 1).
                    action_value = 1
                else:
                    # This is an action that doesn't make any sense.
                    raise ValueError(
                        "Unrecognized action "
                        + action
                        + " in "
                        + name
                        + " key "
                        + regex_key
                    )

                for index, list_item in enumerate(the_list):
                    if list_actions[index] == action_value:
                        # Even if the regex matches, nothing will change so continue
                        # (regex searches are expensive).
                        continue
                    if pattern_re.search(list_item):
                        # Regular expression match.
                        list_actions[index] = action_value

            # The "whatever/" list is no longer needed, dump it.
            del the_dict[regex_key]

        # Add excluded items to the excluded list.
        #
        # Note that exclude_key ("sources!") is different from excluded_key
        # ("sources_excluded").  The exclude_key list is input and it was already
        # processed and deleted; the excluded_key list is output and it's about
        # to be created.
        excluded_key = list_key + "_excluded"
        if excluded_key in the_dict:
            raise GypError(
                name + " key " + excluded_key + " must not be present prior"
                " to applying exclusion/regex filters for " + list_key
            )

        excluded_list = []

        # Go backwards through the list_actions list so that as items are deleted,
        # the indices of items that haven't been seen yet don't shift.  That means
        # that things need to be prepended to excluded_list to maintain them in the
        # same order that they existed in the_list.
        for index in range(len(list_actions) - 1, -1, -1):
            if list_actions[index] == 0:
                # Dump anything with action 0 (exclude).  Keep anything with action 1
                # (include) or -1 (no include or exclude seen for the item).
                excluded_list.insert(0, the_list[index])
                del the_list[index]

        # If anything was excluded, put the excluded list into the_dict at
        # excluded_key.
        if len(excluded_list) > 0:
            the_dict[excluded_key] = excluded_list

    # Now recurse into subdicts and lists that may contain dicts.
    for key, value in the_dict.items():
        if type(value) is dict:
            ProcessListFiltersInDict(key, value)
        elif type(value) is list:
            ProcessListFiltersInList(key, value)


def ProcessListFiltersInList(name, the_list):
    for item in the_list:
        if type(item) is dict:
            ProcessListFiltersInDict(name, item)
        elif type(item) is list:
            ProcessListFiltersInList(name, item)


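# Illustrative sketch (hypothetical data, never called): the combined effect
# of a regex exclude followed by a regex include on a "sources" list.
def _ExampleProcessListFilters():
    example = {
        "sources": ["a.cc", "a_linux.cc", "a_mac.cc", "a_win.cc"],
        "sources/": [
            ["exclude", "_(linux|mac|win)\\.cc$"],
            ["include", "_mac\\.cc$"],
        ],
    }
    ProcessListFiltersInDict("example", example)
    # example["sources"] is now ["a.cc", "a_mac.cc"]; the other two files
    # land in example["sources_excluded"], preserving their original order.
    return example

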
def ValidateTargetType(target, target_dict):
    """Ensures the 'type' field on the target is one of the known types.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec.

  Raises an exception on error.
  """
    VALID_TARGET_TYPES = (
        "executable",
        "loadable_module",
        "static_library",
        "shared_library",
        "mac_kernel_extension",
        "none",
        "windows_driver",
    )
    target_type = target_dict.get("type", None)
    if target_type not in VALID_TARGET_TYPES:
        raise GypError(
            "Target %s has an invalid target type '%s'.  "
            "Must be one of %s." % (target, target_type, "/".join(VALID_TARGET_TYPES))
        )
    if (
        target_dict.get("standalone_static_library", 0)
        and target_type != "static_library"
    ):
        raise GypError(
            "Target %s has type %s but standalone_static_library flag is"
            " only valid for static_library type." % (target, target_type)
        )


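# Illustrative sketch (hypothetical target name, never called): a
# "static_library" target may carry the standalone_static_library flag;
# the same flag on any other target type raises GypError.
def _ExampleValidateTargetType():
    ValidateTargetType(
        "demo.gyp:demo#target",
        {"type": "static_library", "standalone_static_library": 1},
    )

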
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
    """Ensures that the rules sections in target_dict are valid and consistent,
  and determines which sources they apply to.

  Arguments:
    target: string, name of target.
    target_dict: dict, target spec containing "rules" and "sources" lists.
    extra_sources_for_rules: a list of keys to scan for rule matches in
        addition to 'sources'.
  """

    # Dicts to map between values found in rules' 'rule_name' and 'extension'
    # keys and the rule dicts themselves.
    rule_names = {}
    rule_extensions = {}

    rules = target_dict.get("rules", [])
    for rule in rules:
        # Make sure that there's no conflict among rule names and extensions.
        rule_name = rule["rule_name"]
        if rule_name in rule_names:
            raise GypError(
                "rule %s exists in duplicate, target %s" % (rule_name, target)
            )
        rule_names[rule_name] = rule

        rule_extension = rule["extension"]
        if rule_extension.startswith("."):
            rule_extension = rule_extension[1:]
        if rule_extension in rule_extensions:
            raise GypError(
                (
                    "extension %s associated with multiple rules, "
                    + "target %s rules %s and %s"
                )
                % (
                    rule_extension,
                    target,
                    rule_extensions[rule_extension]["rule_name"],
                    rule_name,
                )
            )
        rule_extensions[rule_extension] = rule

        # Make sure rule_sources isn't already there.  It's going to be
        # created below if needed.
        if "rule_sources" in rule:
            raise GypError(
                "rule_sources must not exist in input, target %s rule %s"
                % (target, rule_name)
            )

        rule_sources = []
        source_keys = ["sources"]
        source_keys.extend(extra_sources_for_rules)
        for source_key in source_keys:
            for source in target_dict.get(source_key, []):
                (source_root, source_extension) = os.path.splitext(source)
                if source_extension.startswith("."):
                    source_extension = source_extension[1:]
                if source_extension == rule_extension:
                    rule_sources.append(source)

        if len(rule_sources) > 0:
            rule["rule_sources"] = rule_sources


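# Illustrative sketch (hypothetical data, never called): a rule keyed on the
# "idl" extension collects the matching entries from "sources" into its
# "rule_sources" list.
def _ExampleValidateRules():
    target_dict = {
        "sources": ["api.idl", "main.cc"],
        "rules": [{"rule_name": "idl_compile", "extension": "idl"}],
    }
    ValidateRulesInTarget("demo.gyp:demo#target", target_dict, [])
    # target_dict["rules"][0]["rule_sources"] == ["api.idl"]
    return target_dict

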
def ValidateRunAsInTarget(target, target_dict, build_file):
    target_name = target_dict.get("target_name")
    run_as = target_dict.get("run_as")
    if not run_as:
        return
    if type(run_as) is not dict:
        raise GypError(
            "The 'run_as' in target %s from file %s should be a "
            "dictionary." % (target_name, build_file)
        )
    action = run_as.get("action")
    if not action:
        raise GypError(
            "The 'run_as' in target %s from file %s must have an "
            "'action' section." % (target_name, build_file)
        )
    if type(action) is not list:
        raise GypError(
            "The 'action' for 'run_as' in target %s from file %s "
            "must be a list." % (target_name, build_file)
        )
    working_directory = run_as.get("working_directory")
    if working_directory and type(working_directory) is not str:
        raise GypError(
            "The 'working_directory' for 'run_as' in target %s "
            "in file %s should be a string." % (target_name, build_file)
        )
    environment = run_as.get("environment")
    if environment and type(environment) is not dict:
        raise GypError(
            "The 'environment' for 'run_as' in target %s "
            "in file %s should be a dictionary." % (target_name, build_file)
        )


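# Illustrative sketch (hypothetical values): the shape of a 'run_as' section
# that ValidateRunAsInTarget accepts -- 'action' is required and must be a
# list; 'working_directory' and 'environment' are optional.
_EXAMPLE_RUN_AS = {
    "action": ["python", "run_tests.py"],
    "working_directory": ".",
    "environment": {"LANG": "C"},
}

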
def ValidateActionsInTarget(target, target_dict, build_file):
    """Validates the inputs to the actions in a target."""
    target_name = target_dict.get("target_name")
    actions = target_dict.get("actions", [])
    for action in actions:
        action_name = action.get("action_name")
        if not action_name:
            raise GypError(
                "Anonymous action in target %s.  "
                "An action must have an 'action_name' field." % target_name
            )
        inputs = action.get("inputs", None)
        if inputs is None:
            raise GypError("Action in target %s has no inputs." % target_name)
        action_command = action.get("action")
        if action_command and not action_command[0]:
            raise GypError("Empty action as command in target %s." % target_name)


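# Illustrative sketch (hypothetical values): a minimal action dict that
# passes ValidateActionsInTarget -- it names itself, declares an "inputs"
# list (which may be empty but must be present), and has a non-empty command.
_EXAMPLE_ACTION = {
    "action_name": "generate_version",
    "inputs": ["version.in"],
    "outputs": ["version.h"],
    "action": ["python", "make_version.py"],
}

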
def TurnIntIntoStrInDict(the_dict):
    """Given dict the_dict, recursively converts all integers into strings."""
    # Iterate over a snapshot of the items: integer keys are deleted and
    # reinserted as strings below, and mutating a dict while iterating over
    # it raises a RuntimeError in Python 3.
    for k, v in list(the_dict.items()):
        if type(v) is int:
            v = str(v)
            the_dict[k] = v
        elif type(v) is dict:
            TurnIntIntoStrInDict(v)
        elif type(v) is list:
            TurnIntIntoStrInList(v)

        if type(k) is int:
            del the_dict[k]
            the_dict[str(k)] = v


def TurnIntIntoStrInList(the_list):
    """Given list the_list, recursively converts all integers into strings."""
    for index, item in enumerate(the_list):
        if type(item) is int:
            the_list[index] = str(item)
        elif type(item) is dict:
            TurnIntIntoStrInDict(item)
        elif type(item) is list:
            TurnIntIntoStrInList(item)


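# Illustrative sketch (hypothetical data, never called): both values and
# keys are normalized, recursively.
def _ExampleTurnIntIntoStr():
    d = {"version": 2, 3: ["a", 4], "nested": {"x": 5}}
    TurnIntIntoStrInDict(d)
    # d == {"version": "2", "3": ["a", "4"], "nested": {"x": "5"}}
    return d

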
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data):
    """Return only the targets that are deep dependencies of |root_targets|."""
    qualified_root_targets = []
    for target in root_targets:
        target = target.strip()
        qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
        if not qualified_targets:
            raise GypError("Could not find target %s" % target)
        qualified_root_targets.extend(qualified_targets)

    wanted_targets = {}
    for target in qualified_root_targets:
        wanted_targets[target] = targets[target]
        for dependency in dependency_nodes[target].DeepDependencies():
            wanted_targets[dependency] = targets[dependency]

    wanted_flat_list = [t for t in flat_list if t in wanted_targets]

    # Prune unwanted targets from each build_file's data dict.
    for build_file in data["target_build_files"]:
        if "targets" not in data[build_file]:
            continue
        new_targets = []
        for target in data[build_file]["targets"]:
            qualified_name = gyp.common.QualifiedTarget(
                build_file, target["target_name"], target["toolset"]
            )
            if qualified_name in wanted_targets:
                new_targets.append(target)
        data[build_file]["targets"] = new_targets

    return wanted_targets, wanted_flat_list


def VerifyNoCollidingTargets(targets):
    """Verify that no two targets in the same directory share the same name.

  Arguments:
    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
  """
    # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
    used = {}
    for target in targets:
        # Separate out 'path/to/file.gyp', 'target_name' from
        # 'path/to/file.gyp:target_name'.
        path, name = target.rsplit(":", 1)
        # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
        subdir, gyp = os.path.split(path)
        # Use '.' for the current directory '', so that the error messages make
        # more sense.
        if not subdir:
            subdir = "."
        # Prepare a key like 'path/to:target_name'.
        key = subdir + ":" + name
        if key in used:
            # Complain if this target is already used.
            raise GypError(
                'Duplicate target name "%s" in directory "%s" used both '
                'in "%s" and "%s".' % (name, subdir, gyp, used[key])
            )
        used[key] = gyp


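# Illustrative sketch (hypothetical paths, never called): these two targets
# collide because they share a directory ("chrome") and a name ("common"),
# even though they come from different .gyp files.
def _ExampleCollidingTargets():
    # Raises GypError: duplicate target name "common" in directory "chrome".
    VerifyNoCollidingTargets(["chrome/a.gyp:common", "chrome/b.gyp:common"])

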
def SetGeneratorGlobals(generator_input_info):
    # Set up path_sections and non_configuration_keys with the default data plus
    # the generator-specific data.
    global path_sections
    path_sections = set(base_path_sections)
    path_sections.update(generator_input_info["path_sections"])

    global non_configuration_keys
    non_configuration_keys = base_non_configuration_keys[:]
    non_configuration_keys.extend(generator_input_info["non_configuration_keys"])

    global multiple_toolsets
    multiple_toolsets = generator_input_info["generator_supports_multiple_toolsets"]

    global generator_filelist_paths
    generator_filelist_paths = generator_input_info["generator_filelist_paths"]


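# Illustrative sketch (hypothetical values): the generator_input_info keys
# consumed by SetGeneratorGlobals.  Real generators supply more keys (e.g.
# "extra_sources_for_rules"), which Load() reads directly.
_EXAMPLE_GENERATOR_INPUT_INFO = {
    "path_sections": [],
    "non_configuration_keys": [],
    "generator_supports_multiple_toolsets": False,
    "generator_filelist_paths": None,
}

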
def Load(
    build_files,
    variables,
    includes,
    depth,
    generator_input_info,
    check,
    circular_check,
    parallel,
    root_targets,
):
    SetGeneratorGlobals(generator_input_info)
    # A generator can specify lists in addition to "sources" that should be
    # processed for rules.
    extra_sources_for_rules = generator_input_info["extra_sources_for_rules"]

    # Load build files.  This loads every target-containing build file into
    # the |data| dictionary such that the keys to |data| are build file names,
    # and the values are the entire build file contents after "early" or "pre"
    # processing has been done and includes have been resolved.
    # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
    # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
    # track of the keys corresponding to "target" files.
    data = {"target_build_files": set()}
    # Normalize paths everywhere.  This is important because paths will be
    # used as keys to the data dict and for references between input files.
    build_files = set(map(os.path.normpath, build_files))
    if parallel:
        LoadTargetBuildFilesParallel(
            build_files, data, variables, includes, depth, check, generator_input_info
        )
    else:
        aux_data = {}
        for build_file in build_files:
            try:
                LoadTargetBuildFile(
                    build_file, data, aux_data, variables, includes, depth, check, True
                )
            except Exception as e:
                gyp.common.ExceptionAppend(e, "while trying to load %s" % build_file)
                raise

    # Build a dict to access each target's subdict by qualified name.
    targets = BuildTargetsDict(data)

    # Fully qualify all dependency links.
    QualifyDependencies(targets)

    # Remove self-dependencies from targets that have 'prune_self_dependencies'
    # set to 1.
    RemoveSelfDependencies(targets)

    # Expand dependencies specified as build_file:*.
    ExpandWildcardDependencies(targets, data)

    # Remove all dependencies marked as 'link_dependency' from the targets of
    # type 'none'.
    RemoveLinkDependenciesFromNoneTargets(targets)

    # Apply exclude (!) and regex (/) list filters only for dependency_sections.
    for target_name, target_dict in targets.items():
        tmp_dict = {}
        for key_base in dependency_sections:
            for op in ("", "!", "/"):
                key = key_base + op
                if key in target_dict:
                    tmp_dict[key] = target_dict[key]
                    del target_dict[key]
        ProcessListFiltersInDict(target_name, tmp_dict)
        # Write the results back to |target_dict|.
        for key in tmp_dict:
            target_dict[key] = tmp_dict[key]

    # Make sure every dependency appears at most once.
    RemoveDuplicateDependencies(targets)

    if circular_check:
        # Make sure that any targets in a.gyp don't contain dependencies in other
        # .gyp files that further depend on a.gyp.
        VerifyNoGYPFileCircularDependencies(targets)

    dependency_nodes, flat_list = BuildDependencyList(targets)

    if root_targets:
        # Remove, from |targets| and |flat_list|, the targets that are not deep
        # dependencies of the targets specified in |root_targets|.
        targets, flat_list = PruneUnwantedTargets(
            targets, flat_list, dependency_nodes, root_targets, data
        )

    # Check that no two targets in the same directory have the same name.
    VerifyNoCollidingTargets(flat_list)

    # Handle dependent settings of various types.
    for settings_type in [
        "all_dependent_settings",
        "direct_dependent_settings",
        "link_settings",
    ]:
        DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)

        # Take out the dependent settings now that they've been published to all
        # of the targets that require them.
        for target in flat_list:
            if settings_type in targets[target]:
                del targets[target][settings_type]

    # Make sure static libraries don't declare dependencies on other static
    # libraries, but that linkables depend on all unlinked static libraries
    # that they need so that their link steps will be correct.
    gii = generator_input_info
    if gii["generator_wants_static_library_dependencies_adjusted"]:
        AdjustStaticLibraryDependencies(
            flat_list,
            targets,
            dependency_nodes,
            gii["generator_wants_sorted_dependencies"],
        )

    # Apply "post"/"late"/"target" variable expansions and condition evaluations.
    for target in flat_list:
        target_dict = targets[target]
        build_file = gyp.common.BuildFile(target)
        ProcessVariablesAndConditionsInDict(
            target_dict, PHASE_LATE, variables, build_file
        )

    # Move everything that can go into a "configurations" section into one.
    for target in flat_list:
        target_dict = targets[target]
        SetUpConfigurations(target, target_dict)

    # Apply exclude (!) and regex (/) list filters.
    for target in flat_list:
        target_dict = targets[target]
        ProcessListFiltersInDict(target, target_dict)

    # Apply "latelate" variable expansions and condition evaluations.
    for target in flat_list:
        target_dict = targets[target]
        build_file = gyp.common.BuildFile(target)
        ProcessVariablesAndConditionsInDict(
            target_dict, PHASE_LATELATE, variables, build_file
        )

    # Make sure that the rules make sense, and build up rule_sources lists as
    # needed.  Not all generators will need to use the rule_sources lists, but
    # some may, and it seems best to build the list in a common spot.
    # Also validate actions and run_as elements in targets.
    for target in flat_list:
        target_dict = targets[target]
        build_file = gyp.common.BuildFile(target)
        ValidateTargetType(target, target_dict)
        ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
        ValidateRunAsInTarget(target, target_dict, build_file)
        ValidateActionsInTarget(target, target_dict, build_file)

    # Generators might not expect ints.  Turn them into strs.
    TurnIntIntoStrInDict(data)

    # TODO(mark): Return |data| for now because the generator needs a list of
    # build files that came in.  In the future, maybe it should just accept
    # a list, and not the whole data dict.
    return [flat_list, targets, data]

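
# Illustrative sketch (hypothetical arguments, never called): how a generator
# driver might invoke Load.  The variables dict and generator_input_info
# normally come from gyp's main driver and the generator module.
def _ExampleLoad(generator_input_info):
    flat_list, targets, data = Load(
        ["all.gyp"],
        {"OS": "linux"},
        [],
        ".",
        generator_input_info,
        False,  # check
        True,   # circular_check
        False,  # parallel
        None,   # root_targets
    )
    return flat_list, targets, data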