Diffstat (limited to 'deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator')
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py                  0
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py                741
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py                1095
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py                  1221
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py     99
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py                 424
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py                     94
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py                    56
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py                   2220
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py                   3494
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py                37
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py                  2410
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py               47
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py                  1300
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py               23
15 files changed, 13261 insertions, 0 deletions
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
new file mode 100644
index 0000000000..921c1a6b71
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -0,0 +1,741 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
+the generator flag config_path) the path of a json file that dictates the files
+and targets to search for. The following keys are supported:
+files: list of paths (relative) of the files to search for.
+test_targets: unqualified target names to search for. Any target in this list
+that depends upon a file in |files| is output regardless of the type of target
+or chain of dependencies.
+additional_compile_targets: Unqualified targets to search for in addition to
+test_targets. Targets in the combined list that depend upon a file in |files|
+are not necessarily output. For example, if the target is of type none then the
+target is not output (but one of the descendants of the target will be).
+
+The following is output:
+error: only supplied if there is an error.
+compile_targets: minimal set of targets that directly or indirectly (for
+  targets of type none) depend on the files in |files| and are among the
+  supplied targets or targets that one of the supplied targets depends on.
+  The expectation is that this set of targets is passed into a build step.
+  This list always contains the output of test_targets as well.
+test_targets: set of targets from the supplied |test_targets| that either
+  directly or indirectly depend upon a file in |files|. This list is useful
+  if additional processing needs to be done for certain targets after the
+  build, such as running tests.
+status: one of three values: none of the supplied files were found; one of
+  the include files changed, so it should be assumed everything changed (in
+  this case test_targets and compile_targets are not output); or at least
+  one file was found.
+invalid_targets: list of supplied targets that were not found.
+
+Example:
+Consider a graph like the following:
+  A     D
+ / \
+B   C
+A depends upon both B and C, A is of type none and B and C are executables.
+D is an executable, has no dependencies and nothing depends on it.
+If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
+files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
+the following is output:
+|compile_targets| = ["B"]. B must be built as it depends upon the changed
+file b.cc and the supplied target A depends upon it. A is not output as a
+build_target as it is of type none with no rules and actions.
+|test_targets| = ["B"]. B directly depends upon the changed file b.cc.
+
+Even though the file d.cc, which D depends upon, has changed, D is not output
+as it was not supplied by way of |additional_compile_targets| or |test_targets|.
+
+If the generator flag analyzer_output_path is specified, output is written
+there. Otherwise output is written to stdout.
+
+In Gyp the "all" target is shorthand for the root targets in the files passed
+to gyp. For example, if file "a.gyp" contains targets "a1" and
+"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
+on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
+Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
+directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
+then the "all" target includes "b1" and "b2".
+"""
+
+import gyp.common
+import gyp.ninja_syntax as ninja_syntax
+import json
+import os
+import posixpath
+import sys
+
+debug = False
+
+found_dependency_string = 'Found dependency'
+no_dependency_string = 'No dependencies'
+# Status when it should be assumed that everything has changed.
+all_changed_string = 'Found dependency (all)'
+
+# MatchStatus is used to indicate if and how a target depends upon the
+# supplied sources.
+# The target's sources contain one of the supplied paths.
+MATCH_STATUS_MATCHES = 1
+# The target has a dependency on another target that contains one of the
+# supplied paths.
+MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
+# The target's sources weren't in the supplied paths and none of the target's
+# dependencies depend upon a target that matched.
+MATCH_STATUS_DOESNT_MATCH = 3
+# The target doesn't contain the source, but the dependent targets have not
+# yet been visited to determine a more specific status.
+MATCH_STATUS_TBD = 4
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {
+}
+for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
+ 'LIB_DIR', 'SHARED_LIB_DIR']:
+ generator_default_variables[dirname] = '!!!'
+
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+
+def _ToGypPath(path):
+ """Converts a path to the format used by gyp."""
+ if os.sep == '\\' and os.altsep == '/':
+ return path.replace('\\', '/')
+ return path
+
+
+def _ResolveParent(path, base_path_components):
+ """Resolves |path|, which starts with at least one '../'. Returns an empty
+ string if the path shouldn't be considered. See _AddSources() for a
+ description of |base_path_components|."""
+ depth = 0
+ while path.startswith('../'):
+ depth += 1
+ path = path[3:]
+ # Relative includes may go outside the source tree. For example, an action may
+ # have inputs in /usr/include, which are not in the source tree.
+ if depth > len(base_path_components):
+ return ''
+ if depth == len(base_path_components):
+ return path
+ return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
+ '/' + path
+
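+# Illustrative examples (assuming base_path_components == ['foo', 'bar']):
+#   _ResolveParent('../baz.cc', ['foo', 'bar'])        -> 'foo/baz.cc'
+#   _ResolveParent('../../baz.cc', ['foo', 'bar'])     -> 'baz.cc'
+#   _ResolveParent('../../../baz.cc', ['foo', 'bar'])  -> ''  (leaves the tree)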
+
+def _AddSources(sources, base_path, base_path_components, result):
+ """Extracts valid sources from |sources| and adds them to |result|. Each
+ source file is relative to |base_path|, but may contain '..'. To make
+ resolving '..' easier |base_path_components| contains each of the
+ directories in |base_path|. Additionally each source may contain variables.
+ Such sources are ignored as it is assumed dependencies on them are expressed
+  and tracked by some other means."""
+ # NOTE: gyp paths are always posix style.
+ for source in sources:
+ if not len(source) or source.startswith('!!!') or source.startswith('$'):
+ continue
+ # variable expansion may lead to //.
+ org_source = source
+ source = source[0] + source[1:].replace('//', '/')
+ if source.startswith('../'):
+ source = _ResolveParent(source, base_path_components)
+ if len(source):
+ result.append(source)
+ continue
+ result.append(base_path + source)
+ if debug:
+ print 'AddSource', org_source, result[len(result) - 1]
+
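+# For example (illustrative), with base_path 'foo/' and base_path_components
+# ['foo'], sources ['a.cc', '../b.cc', '$(SOME_VAR)/c.cc'] append 'foo/a.cc'
+# and 'b.cc' to |result|; the variable-based source is skipped.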
+
+def _ExtractSourcesFromAction(action, base_path, base_path_components,
+ results):
+ if 'inputs' in action:
+ _AddSources(action['inputs'], base_path, base_path_components, results)
+
+
+def _ToLocalPath(toplevel_dir, path):
+ """Converts |path| to a path relative to |toplevel_dir|."""
+ if path == toplevel_dir:
+ return ''
+ if path.startswith(toplevel_dir + '/'):
+ return path[len(toplevel_dir) + len('/'):]
+ return path
+
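+# For example: _ToLocalPath('/src/tree', '/src/tree/a/b.cc') -> 'a/b.cc';
+# paths outside |toplevel_dir| are returned unchanged.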
+
+def _ExtractSources(target, target_dict, toplevel_dir):
+ # |target| is either absolute or relative and in the format of the OS. Gyp
+ # source paths are always posix. Convert |target| to a posix path relative to
+  # |toplevel_dir|. This is done to make it easy to build source paths.
+ base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
+ base_path_components = base_path.split('/')
+
+ # Add a trailing '/' so that _AddSources() can easily build paths.
+ if len(base_path):
+ base_path += '/'
+
+ if debug:
+ print 'ExtractSources', target, base_path
+
+ results = []
+ if 'sources' in target_dict:
+ _AddSources(target_dict['sources'], base_path, base_path_components,
+ results)
+ # Include the inputs from any actions. Any changes to these affect the
+ # resulting output.
+ if 'actions' in target_dict:
+ for action in target_dict['actions']:
+ _ExtractSourcesFromAction(action, base_path, base_path_components,
+ results)
+ if 'rules' in target_dict:
+ for rule in target_dict['rules']:
+ _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
+
+ return results
+
+
+class Target(object):
+ """Holds information about a particular target:
+  deps: set of Targets this Target depends upon. This is not recursive; only
+  the direct dependencies are included.
+ match_status: one of the MatchStatus values.
+ back_deps: set of Targets that have a dependency on this Target.
+ visited: used during iteration to indicate whether we've visited this target.
+ This is used for two iterations, once in building the set of Targets and
+ again in _GetBuildTargets().
+ name: fully qualified name of the target.
+ requires_build: True if the target type is such that it needs to be built.
+ See _DoesTargetTypeRequireBuild for details.
+ added_to_compile_targets: used when determining if the target was added to the
+ set of targets that needs to be built.
+ in_roots: true if this target is a descendant of one of the root nodes.
+ is_executable: true if the type of target is executable.
+ is_static_library: true if the type of target is static_library.
+ is_or_has_linked_ancestor: true if the target does a link (eg executable), or
+ if there is a target in back_deps that does a link."""
+ def __init__(self, name):
+ self.deps = set()
+ self.match_status = MATCH_STATUS_TBD
+ self.back_deps = set()
+ self.name = name
+ # TODO(sky): I don't like hanging this off Target. This state is specific
+ # to certain functions and should be isolated there.
+ self.visited = False
+ self.requires_build = False
+ self.added_to_compile_targets = False
+ self.in_roots = False
+ self.is_executable = False
+ self.is_static_library = False
+ self.is_or_has_linked_ancestor = False
+
+
+class Config(object):
+ """Details what we're looking for
+ files: set of files to search for
+ targets: see file description for details."""
+ def __init__(self):
+ self.files = []
+ self.targets = set()
+ self.additional_compile_target_names = set()
+ self.test_target_names = set()
+
+ def Init(self, params):
+ """Initializes Config. This is a separate method as it raises an exception
+ if there is a parse error."""
+ generator_flags = params.get('generator_flags', {})
+ config_path = generator_flags.get('config_path', None)
+ if not config_path:
+ return
+ try:
+ f = open(config_path, 'r')
+ config = json.load(f)
+ f.close()
+ except IOError:
+ raise Exception('Unable to open file ' + config_path)
+ except ValueError as e:
+      raise Exception('Unable to parse config file ' + config_path + ': ' +
+                      str(e))
+ if not isinstance(config, dict):
+ raise Exception('config_path must be a JSON file containing a dictionary')
+ self.files = config.get('files', [])
+ self.additional_compile_target_names = set(
+ config.get('additional_compile_targets', []))
+ self.test_target_names = set(config.get('test_targets', []))
+
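+# A typical invocation (a sketch; the exact command depends on how gyp is
+# driven in your build) selects this generator and points it at the config
+# file, optionally redirecting the JSON output:
+#
+#   gyp -f analyzer -G config_path=analyzer_config.json \
+#       -G analyzer_output_path=analyzer_output.json foo.gyp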
+
+def _WasBuildFileModified(build_file, data, files, toplevel_dir):
+ """Returns true if the build file |build_file| is either in |files| or
+ one of the files included by |build_file| is in |files|. |toplevel_dir| is
+ the root of the source tree."""
+ if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
+ if debug:
+ print 'gyp file modified', build_file
+ return True
+
+ # First element of included_files is the file itself.
+ if len(data[build_file]['included_files']) <= 1:
+ return False
+
+ for include_file in data[build_file]['included_files'][1:]:
+ # |included_files| are relative to the directory of the |build_file|.
+ rel_include_file = \
+ _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
+ if _ToLocalPath(toplevel_dir, rel_include_file) in files:
+ if debug:
+ print 'included gyp file modified, gyp_file=', build_file, \
+ 'included file=', rel_include_file
+ return True
+ return False
+
+
+def _GetOrCreateTargetByName(targets, target_name):
+ """Creates or returns the Target at targets[target_name]. If there is no
+ Target for |target_name| one is created. Returns a tuple of whether a new
+ Target was created and the Target."""
+ if target_name in targets:
+ return False, targets[target_name]
+ target = Target(target_name)
+ targets[target_name] = target
+ return True, target
+
+
+def _DoesTargetTypeRequireBuild(target_dict):
+ """Returns true if the target type is such that it needs to be built."""
+ # If a 'none' target has rules or actions we assume it requires a build.
+ return bool(target_dict['type'] != 'none' or
+ target_dict.get('actions') or target_dict.get('rules'))
+
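+# For example (illustrative):
+#   _DoesTargetTypeRequireBuild({'type': 'none'})                     -> False
+#   _DoesTargetTypeRequireBuild({'type': 'none', 'actions': ['...']}) -> True
+#   _DoesTargetTypeRequireBuild({'type': 'executable'})               -> True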
+
+def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
+ build_files):
+ """Returns a tuple of the following:
+ . A dictionary mapping from fully qualified name to Target.
+ . A list of the targets that have a source file in |files|.
+ . Targets that constitute the 'all' target. See description at top of file
+ for details on the 'all' target.
+ This sets the |match_status| of the targets that contain any of the source
+ files in |files| to MATCH_STATUS_MATCHES.
+ |toplevel_dir| is the root of the source tree."""
+ # Maps from target name to Target.
+ name_to_target = {}
+
+ # Targets that matched.
+ matching_targets = []
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+ # Maps from build file to a boolean indicating whether the build file is in
+ # |files|.
+ build_file_in_files = {}
+
+ # Root targets across all files.
+ roots = set()
+
+ # Set of Targets in |build_files|.
+ build_file_targets = set()
+
+ while len(targets_to_visit) > 0:
+ target_name = targets_to_visit.pop()
+ created_target, target = _GetOrCreateTargetByName(name_to_target,
+ target_name)
+ if created_target:
+ roots.add(target)
+ elif target.visited:
+ continue
+
+ target.visited = True
+ target.requires_build = _DoesTargetTypeRequireBuild(
+ target_dicts[target_name])
+ target_type = target_dicts[target_name]['type']
+ target.is_executable = target_type == 'executable'
+ target.is_static_library = target_type == 'static_library'
+ target.is_or_has_linked_ancestor = (target_type == 'executable' or
+ target_type == 'shared_library')
+
+ build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
+ if not build_file in build_file_in_files:
+ build_file_in_files[build_file] = \
+ _WasBuildFileModified(build_file, data, files, toplevel_dir)
+
+ if build_file in build_files:
+ build_file_targets.add(target)
+
+ # If a build file (or any of its included files) is modified we assume all
+ # targets in the file are modified.
+ if build_file_in_files[build_file]:
+ print 'matching target from modified build file', target_name
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ else:
+ sources = _ExtractSources(target_name, target_dicts[target_name],
+ toplevel_dir)
+ for source in sources:
+ if _ToGypPath(os.path.normpath(source)) in files:
+ print 'target', target_name, 'matches', source
+ target.match_status = MATCH_STATUS_MATCHES
+ matching_targets.append(target)
+ break
+
+ # Add dependencies to visit as well as updating back pointers for deps.
+ for dep in target_dicts[target_name].get('dependencies', []):
+ targets_to_visit.append(dep)
+
+ created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
+ dep)
+ if not created_dep_target:
+ roots.discard(dep_target)
+
+ target.deps.add(dep_target)
+ dep_target.back_deps.add(target)
+
+ return name_to_target, matching_targets, roots & build_file_targets
+
+
+def _GetUnqualifiedToTargetMapping(all_targets, to_find):
+ """Returns a tuple of the following:
+ . mapping (dictionary) from unqualified name to Target for all the
+ Targets in |to_find|.
+ . any target names not found. If this is empty all targets were found."""
+ result = {}
+ if not to_find:
+ return {}, []
+ to_find = set(to_find)
+ for target_name in all_targets.keys():
+ extracted = gyp.common.ParseQualifiedTarget(target_name)
+ if len(extracted) > 1 and extracted[1] in to_find:
+ to_find.remove(extracted[1])
+ result[extracted[1]] = all_targets[target_name]
+ if not to_find:
+ return result, []
+ return result, [x for x in to_find]
+
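+# Qualified names have the form 'path/to/foo.gyp:target#toolset'; the
+# unqualified name is the middle component. E.g. (illustrative) looking up
+# ['b2'] in a mapping containing 'b/b.gyp:b2#target' returns that Target and
+# an empty not-found list.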
+
+def _DoesTargetDependOnMatchingTargets(target):
+ """Returns true if |target| or any of its dependencies is one of the
+ targets containing the files supplied as input to analyzer. This updates
+ |matches| of the Targets as it recurses.
+ target: the Target to look for."""
+ if target.match_status == MATCH_STATUS_DOESNT_MATCH:
+ return False
+ if target.match_status == MATCH_STATUS_MATCHES or \
+ target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
+ return True
+ for dep in target.deps:
+ if _DoesTargetDependOnMatchingTargets(dep):
+ target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
+ print '\t', target.name, 'matches by dep', dep.name
+ return True
+ target.match_status = MATCH_STATUS_DOESNT_MATCH
+ return False
+
+
+def _GetTargetsDependingOnMatchingTargets(possible_targets):
+ """Returns the list of Targets in |possible_targets| that depend (either
+ directly on indirectly) on at least one of the targets containing the files
+ supplied as input to analyzer.
+ possible_targets: targets to search from."""
+ found = []
+ print 'Targets that matched by dependency:'
+ for target in possible_targets:
+ if _DoesTargetDependOnMatchingTargets(target):
+ found.append(target)
+ return found
+
+
+def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
+ """Recurses through all targets that depend on |target|, adding all targets
+ that need to be built (and are in |roots|) to |result|.
+ roots: set of root targets.
+ add_if_no_ancestor: If true and there are no ancestors of |target| then add
+ |target| to |result|. |target| must still be in |roots|.
+ result: targets that need to be built are added here."""
+ if target.visited:
+ return
+
+ target.visited = True
+ target.in_roots = target in roots
+
+ for back_dep_target in target.back_deps:
+ _AddCompileTargets(back_dep_target, roots, False, result)
+ target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
+ target.in_roots |= back_dep_target.in_roots
+ target.is_or_has_linked_ancestor |= (
+ back_dep_target.is_or_has_linked_ancestor)
+
+ # Always add 'executable' targets. Even though they may be built by other
+ # targets that depend upon them it makes detection of what is going to be
+ # built easier.
+ # And always add static_libraries that have no dependencies on them from
+ # linkables. This is necessary as the other dependencies on them may be
+ # static libraries themselves, which are not compile time dependencies.
+ if target.in_roots and \
+ (target.is_executable or
+ (not target.added_to_compile_targets and
+ (add_if_no_ancestor or target.requires_build)) or
+ (target.is_static_library and add_if_no_ancestor and
+ not target.is_or_has_linked_ancestor)):
+ print '\t\tadding to compile targets', target.name, 'executable', \
+ target.is_executable, 'added_to_compile_targets', \
+ target.added_to_compile_targets, 'add_if_no_ancestor', \
+ add_if_no_ancestor, 'requires_build', target.requires_build, \
+ 'is_static_library', target.is_static_library, \
+ 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
+ result.add(target)
+ target.added_to_compile_targets = True
+
+
+def _GetCompileTargets(matching_targets, supplied_targets):
+ """Returns the set of Targets that require a build.
+ matching_targets: targets that changed and need to be built.
+ supplied_targets: set of targets supplied to analyzer to search from."""
+ result = set()
+ for target in matching_targets:
+ print 'finding compile targets for match', target.name
+ _AddCompileTargets(target, supplied_targets, True, result)
+ return result
+
+
+def _WriteOutput(params, **values):
+ """Writes the output, either to stdout or a file is specified."""
+ if 'error' in values:
+ print 'Error:', values['error']
+ if 'status' in values:
+ print values['status']
+ if 'targets' in values:
+ values['targets'].sort()
+ print 'Supplied targets that depend on changed files:'
+ for target in values['targets']:
+ print '\t', target
+ if 'invalid_targets' in values:
+ values['invalid_targets'].sort()
+ print 'The following targets were not found:'
+ for target in values['invalid_targets']:
+ print '\t', target
+ if 'build_targets' in values:
+ values['build_targets'].sort()
+ print 'Targets that require a build:'
+ for target in values['build_targets']:
+ print '\t', target
+ if 'compile_targets' in values:
+ values['compile_targets'].sort()
+ print 'Targets that need to be built:'
+ for target in values['compile_targets']:
+ print '\t', target
+ if 'test_targets' in values:
+ values['test_targets'].sort()
+ print 'Test targets:'
+ for target in values['test_targets']:
+ print '\t', target
+
+ output_path = params.get('generator_flags', {}).get(
+ 'analyzer_output_path', None)
+ if not output_path:
+ print json.dumps(values)
+ return
+ try:
+ f = open(output_path, 'w')
+ f.write(json.dumps(values) + '\n')
+ f.close()
+ except IOError as e:
+ print 'Error writing to output file', output_path, str(e)
+
+
+def _WasGypIncludeFileModified(params, files):
+ """Returns true if one of the files in |files| is in the set of included
+ files."""
+ if params['options'].includes:
+ for include in params['options'].includes:
+ if _ToGypPath(os.path.normpath(include)) in files:
+ print 'Include file modified, assuming all changed', include
+ return True
+ return False
+
+
+def _NamesNotIn(names, mapping):
+ """Returns a list of the values in |names| that are not in |mapping|."""
+ return [name for name in names if name not in mapping]
+
+
+def _LookupTargets(names, mapping):
+ """Returns a list of the mapping[name] for each value in |names| that is in
+ |mapping|."""
+ return [mapping[name] for name in names if name in mapping]
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ elif flavor == 'win':
+ default_variables.setdefault('OS', 'win')
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+
+
+class TargetCalculator(object):
+ """Calculates the matching test_targets and matching compile_targets."""
+ def __init__(self, files, additional_compile_target_names, test_target_names,
+ data, target_list, target_dicts, toplevel_dir, build_files):
+ self._additional_compile_target_names = set(additional_compile_target_names)
+ self._test_target_names = set(test_target_names)
+ self._name_to_target, self._changed_targets, self._root_targets = (
+ _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
+ frozenset(files), build_files))
+ self._unqualified_mapping, self.invalid_targets = (
+ _GetUnqualifiedToTargetMapping(self._name_to_target,
+ self._supplied_target_names_no_all()))
+
+ def _supplied_target_names(self):
+ return self._additional_compile_target_names | self._test_target_names
+
+ def _supplied_target_names_no_all(self):
+ """Returns the supplied test targets without 'all'."""
+    result = self._supplied_target_names()
+ result.discard('all')
+ return result
+
+ def is_build_impacted(self):
+ """Returns true if the supplied files impact the build at all."""
+ return self._changed_targets
+
+ def find_matching_test_target_names(self):
+ """Returns the set of output test targets."""
+ assert self.is_build_impacted()
+    # Find the test targets first. 'all' is special cased to mean all the
+    # root targets. To deal with 'all', the supplied |test_targets| are
+    # expanded to include the root targets during lookup. If any of the root
+    # targets match, we remove it and replace it with 'all'.
+ test_target_names_no_all = set(self._test_target_names)
+ test_target_names_no_all.discard('all')
+ test_targets_no_all = _LookupTargets(test_target_names_no_all,
+ self._unqualified_mapping)
+ test_target_names_contains_all = 'all' in self._test_target_names
+ if test_target_names_contains_all:
+ test_targets = [x for x in (set(test_targets_no_all) |
+ set(self._root_targets))]
+ else:
+ test_targets = [x for x in test_targets_no_all]
+ print 'supplied test_targets'
+ for target_name in self._test_target_names:
+ print '\t', target_name
+ print 'found test_targets'
+ for target in test_targets:
+ print '\t', target.name
+ print 'searching for matching test targets'
+ matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+ matching_test_targets_contains_all = (test_target_names_contains_all and
+ set(matching_test_targets) &
+ set(self._root_targets))
+ if matching_test_targets_contains_all:
+      # Remove any of the targets for 'all' that were not explicitly supplied;
+      # 'all' is subsequently added to the matching names below.
+ matching_test_targets = [x for x in (set(matching_test_targets) &
+ set(test_targets_no_all))]
+ print 'matched test_targets'
+ for target in matching_test_targets:
+ print '\t', target.name
+ matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in matching_test_targets]
+ if matching_test_targets_contains_all:
+ matching_target_names.append('all')
+ print '\tall'
+ return matching_target_names
+
+ def find_matching_compile_target_names(self):
+ """Returns the set of output compile targets."""
+    assert self.is_build_impacted()
+ # Compile targets are found by searching up from changed targets.
+ # Reset the visited status for _GetBuildTargets.
+ for target in self._name_to_target.itervalues():
+ target.visited = False
+
+ supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
+ self._unqualified_mapping)
+ if 'all' in self._supplied_target_names():
+ supplied_targets = [x for x in (set(supplied_targets) |
+ set(self._root_targets))]
+ print 'Supplied test_targets & compile_targets'
+ for target in supplied_targets:
+ print '\t', target.name
+ print 'Finding compile targets'
+ compile_targets = _GetCompileTargets(self._changed_targets,
+ supplied_targets)
+ return [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in compile_targets]
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Called by gyp as the final stage. Outputs results."""
+ config = Config()
+ try:
+ config.Init(params)
+
+ if not config.files:
+ raise Exception('Must specify files to analyze via config_path generator '
+ 'flag')
+
+ toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
+ if debug:
+ print 'toplevel_dir', toplevel_dir
+
+ if _WasGypIncludeFileModified(params, config.files):
+ result_dict = { 'status': all_changed_string,
+ 'test_targets': list(config.test_target_names),
+ 'compile_targets': list(
+ config.additional_compile_target_names |
+ config.test_target_names) }
+ _WriteOutput(params, **result_dict)
+ return
+
+ calculator = TargetCalculator(config.files,
+ config.additional_compile_target_names,
+ config.test_target_names, data,
+ target_list, target_dicts, toplevel_dir,
+ params['build_files'])
+ if not calculator.is_build_impacted():
+ result_dict = { 'status': no_dependency_string,
+ 'test_targets': [],
+ 'compile_targets': [] }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+ return
+
+ test_target_names = calculator.find_matching_test_target_names()
+ compile_target_names = calculator.find_matching_compile_target_names()
+ found_at_least_one_target = compile_target_names or test_target_names
+ result_dict = { 'test_targets': test_target_names,
+ 'status': found_dependency_string if
+ found_at_least_one_target else no_dependency_string,
+ 'compile_targets': list(
+ set(compile_target_names) |
+ set(test_target_names)) }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+
+ except Exception as e:
+ _WriteOutput(params, error=str(e))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
new file mode 100644
index 0000000000..5b26cc785a
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -0,0 +1,1095 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Notes:
+#
+# This generates makefiles suitable for inclusion into the Android build system
+# via an Android.mk file. It is based on make.py, the standard makefile
+# generator.
+#
+# The code below generates a separate .mk file for each target, but
+# all are sourced by the top-level GypAndroid.mk. This means that all
+# variables in .mk-files clobber one another, and furthermore that any
+# variables set potentially clash with other Android build system variables.
+# Try to avoid setting global variables where possible.
+
+import gyp
+import gyp.common
+import gyp.generator.make as make # Reuse global functions from make backend.
+import os
+import re
+import subprocess
+
+generator_default_variables = {
+ 'OS': 'android',
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_SUFFIX': '.so',
+ 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
+ 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
+ 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
+ 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
+ 'LIB_DIR': '$(obj).$(TOOLSET)',
+ 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
+ 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
+ 'RULE_INPUT_PATH': '$(RULE_SOURCES)',
+ 'RULE_INPUT_EXT': '$(suffix $<)',
+ 'RULE_INPUT_NAME': '$(notdir $<)',
+ 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
+}
+
+# Make supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+
+# Generator-specific gyp specs.
+generator_additional_non_configuration_keys = [
+ # Boolean to declare that this target does not want its name mangled.
+ 'android_unmangled_name',
+ # Map of android build system variables to set.
+ 'aosp_build_settings',
+]
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+
+
+ALL_MODULES_FOOTER = """\
+# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
+# all the included sub-makefiles. This is just here to clarify.
+gyp_all_modules:
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+# Map gyp target types to Android module classes.
+MODULE_CLASSES = {
+ 'static_library': 'STATIC_LIBRARIES',
+ 'shared_library': 'SHARED_LIBRARIES',
+ 'executable': 'EXECUTABLES',
+}
+
+
+def IsCPPExtension(ext):
+ return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
+
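+# For example, IsCPPExtension('.cc') is True while IsCPPExtension('.c') is
+# False, since make.COMPILABLE_EXTENSIONS maps '.cc' to 'cxx' and '.c' to 'cc'.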
+
+def Sourceify(path):
+ """Convert a path to its source directory form. The Android backend does not
+ support options.generator_output, so this function is a noop."""
+ return path
+
+
+# Map from qualified target to path to output.
+# For Android, the target of these maps is a tuple ('static', 'modulename'),
+# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
+# since we link by module.
+target_outputs = {}
+# Map from qualified target to any linkable output. A subset
+# of target_outputs. E.g. when mybinary depends on liba, we want to
+# include liba in the linker line; when otherbinary depends on
+# mybinary, we just want to build mybinary first.
+target_link_deps = {}
+
+
+class AndroidMkWriter(object):
+ """AndroidMkWriter packages up the writing of one target-specific Android.mk.
+
+  Its only real entry point is Write(); the class is mostly used for
+  namespacing.
+ """
+
+ def __init__(self, android_top_dir):
+ self.android_top_dir = android_top_dir
+
+ def Write(self, qualified_target, relative_target, base_path, output_filename,
+ spec, configs, part_of_all, write_alias_target, sdk_version):
+ """The main entry point: writes a .mk file for a single target.
+
+ Arguments:
+ qualified_target: target we're generating
+ relative_target: qualified target name relative to the root
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for
+ this target
+ sdk_version: what to emit for LOCAL_SDK_VERSION in output
+ """
+ gyp.common.EnsureDirExists(output_filename)
+
+ self.fp = open(output_filename, 'w')
+
+ self.fp.write(header)
+
+ self.qualified_target = qualified_target
+ self.relative_target = relative_target
+ self.path = base_path
+ self.target = spec['target_name']
+ self.type = spec['type']
+ self.toolset = spec['toolset']
+
+ deps, link_deps = self.ComputeDeps(spec)
+
+ # Some of the generation below can add extra output, sources, or
+ # link dependencies. All of the out params of the functions that
+ # follow use names like extra_foo.
+ extra_outputs = []
+ extra_sources = []
+
+ self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
+ self.android_module = self.ComputeAndroidModule(spec)
+ (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
+ self.output = self.output_binary = self.ComputeOutput(spec)
+
+ # Standard header.
+ self.WriteLn('include $(CLEAR_VARS)\n')
+
+ # Module class and name.
+ self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
+ self.WriteLn('LOCAL_MODULE := ' + self.android_module)
+ # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE.
+ # The library module classes fail if the stem is set. ComputeOutputParts
+ # makes sure that stem == modulename in these cases.
+ if self.android_stem != self.android_module:
+ self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
+ self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
+ if self.toolset == 'host':
+ self.WriteLn('LOCAL_IS_HOST_MODULE := true')
+ self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
+ elif sdk_version > 0:
+ self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
+ '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
+ self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)
+
+ # Grab output directories; needed for Actions and Rules.
+ if self.toolset == 'host':
+ self.WriteLn('gyp_intermediate_dir := '
+ '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
+ else:
+ self.WriteLn('gyp_intermediate_dir := '
+ '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
+ self.WriteLn('gyp_shared_intermediate_dir := '
+ '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
+ self.WriteLn()
+
+ # List files this target depends on so that actions/rules/copies/sources
+ # can depend on the list.
+ # TODO: doesn't pull in things through transitive link deps; needed?
+ target_dependencies = [x[1] for x in deps if x[0] == 'path']
+ self.WriteLn('# Make sure our deps are built first.')
+ self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
+ local_pathify=True)
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if 'actions' in spec:
+ self.WriteActions(spec['actions'], extra_sources, extra_outputs)
+
+ # Rules must be early like actions.
+ if 'rules' in spec:
+ self.WriteRules(spec['rules'], extra_sources, extra_outputs)
+
+ if 'copies' in spec:
+ self.WriteCopies(spec['copies'], extra_outputs)
+
+ # GYP generated outputs.
+ self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
+
+ # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
+ # on both our dependency targets and our generated files.
+ self.WriteLn('# Make sure our deps and generated files are built first.')
+ self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
+ '$(GYP_GENERATED_OUTPUTS)')
+ self.WriteLn()
+
+ # Sources.
+ if spec.get('sources', []) or extra_sources:
+ self.WriteSources(spec, configs, extra_sources)
+
+ self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
+ write_alias_target)
+
+ # Update global list of target outputs, used in dependency tracking.
+ target_outputs[qualified_target] = ('path', self.output_binary)
+
+ # Update global list of link dependencies.
+ if self.type == 'static_library':
+ target_link_deps[qualified_target] = ('static', self.android_module)
+ elif self.type == 'shared_library':
+ target_link_deps[qualified_target] = ('shared', self.android_module)
+
+ self.fp.close()
+ return self.android_module
+
+
+ def WriteActions(self, actions, extra_sources, extra_outputs):
+ """Write Makefile code for any 'actions' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ """
+ for action in actions:
+ name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
+ action['action_name']))
+ self.WriteLn('### Rules for action "%s":' % action['action_name'])
+ inputs = action['inputs']
+ outputs = action['outputs']
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set()
+ for out in outputs:
+ if not out.startswith('$'):
+ print ('WARNING: Action for target "%s" writes output to local path '
+ '"%s".' % (self.target, out))
+ dir = os.path.split(out)[0]
+ if dir:
+ dirs.add(dir)
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+
+ # Prepare the actual command.
+ command = gyp.common.EncodePOSIXShellList(action['action'])
+ if 'message' in action:
+ quiet_cmd = 'Gyp action: %s ($@)' % action['message']
+ else:
+ quiet_cmd = 'Gyp action: %s ($@)' % name
+ if len(dirs) > 0:
+ command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
+
+ cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
+ command = cd_action + command
+
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the gyp_*
+ # variables for the action rule with an absolute version so that the
+ # output goes in the right place.
+ # Only write the gyp_* rules for the "primary" output (:1);
+ # it's superfluous for the "extra outputs", and this avoids accidentally
+ # writing duplicate dummy rules for those outputs.
+ main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
+ self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
+ self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
+ self.WriteLn('%s: gyp_intermediate_dir := '
+ '$(abspath $(gyp_intermediate_dir))' % main_output)
+ self.WriteLn('%s: gyp_shared_intermediate_dir := '
+ '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
+
+ # Android's envsetup.sh adds a number of directories to the path including
+ # the built host binary directory. This causes actions/rules invoked by
+ # gyp to sometimes use these instead of system versions, e.g. bison.
+ # The built host binaries may not be suitable, and can cause errors.
+ # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
+ # set by envsetup.
+ self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
+ % main_output)
+
+ # Don't allow spaces in input/output filenames, but make an exception for
+ # filenames which start with '$(' since it's okay for there to be spaces
+ # inside of make function/macro invocations.
+ for input in inputs:
+ if not input.startswith('$(') and ' ' in input:
+ raise gyp.common.GypError(
+ 'Action input filename "%s" in target %s contains a space' %
+ (input, self.target))
+ for output in outputs:
+ if not output.startswith('$(') and ' ' in output:
+ raise gyp.common.GypError(
+ 'Action output filename "%s" in target %s contains a space' %
+ (output, self.target))
+
+ self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
+ (main_output, ' '.join(map(self.LocalPathify, inputs))))
+ self.WriteLn('\t@echo "%s"' % quiet_cmd)
+ self.WriteLn('\t$(hide)%s\n' % command)
+ for output in outputs[1:]:
+ # Make each output depend on the main output, with an empty command
+ # to force make to notice that the mtime has changed.
+ self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
+
+ extra_outputs += outputs
+ self.WriteLn()
+
+ self.WriteLn()
+
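+  # Illustrative Android.mk fragment (a sketch; the file names are
+  # hypothetical) that WriteActions emits for an action with one input and
+  # one output:
+  #
+  #   $(gyp_intermediate_dir)/gen.cc: gyp_local_path := $(LOCAL_PATH)
+  #   ...
+  #   $(gyp_intermediate_dir)/gen.cc: input.txt $(GYP_TARGET_DEPENDENCIES)
+  #           @echo "Gyp action: <message> ($@)"
+  #           $(hide)cd $(gyp_local_path)/<path>; mkdir -p <dirs>; <action>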
+
+ def WriteRules(self, rules, extra_sources, extra_outputs):
+ """Write Makefile code for any 'rules' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ """
+ if len(rules) == 0:
+ return
+
+ for rule in rules:
+ if len(rule.get('rule_sources', [])) == 0:
+ continue
+ name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
+ rule['rule_name']))
+ self.WriteLn('\n### Generated for rule "%s":' % name)
+ self.WriteLn('# "%s":' % rule)
+
+ inputs = rule.get('inputs')
+ for rule_source in rule.get('rule_sources', []):
+ (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ os.path.splitext(rule_source_basename)
+
+ outputs = [self.ExpandInputRoot(out, rule_source_root,
+ rule_source_dirname)
+ for out in rule['outputs']]
+
+ dirs = set()
+ for out in outputs:
+ if not out.startswith('$'):
+ print ('WARNING: Rule for target %s writes output to local path %s'
+ % (self.target, out))
+ dir = os.path.dirname(out)
+ if dir:
+ dirs.add(dir)
+ extra_outputs += outputs
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources.extend(outputs)
+
+ components = []
+ for component in rule['action']:
+ component = self.ExpandInputRoot(component, rule_source_root,
+ rule_source_dirname)
+ if '$(RULE_SOURCES)' in component:
+ component = component.replace('$(RULE_SOURCES)',
+ rule_source)
+ components.append(component)
+
+ command = gyp.common.EncodePOSIXShellList(components)
+ cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
+ command = cd_action + command
+ if dirs:
+ command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
+
+ # We set up a rule to build the first output, and then set up
+ # a rule for each additional output to depend on the first.
+ outputs = map(self.LocalPathify, outputs)
+ main_output = outputs[0]
+ self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
+ self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
+ self.WriteLn('%s: gyp_intermediate_dir := '
+ '$(abspath $(gyp_intermediate_dir))' % main_output)
+ self.WriteLn('%s: gyp_shared_intermediate_dir := '
+ '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
+
+ # See explanation in WriteActions.
+ self.WriteLn('%s: export PATH := '
+ '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
+
+ main_output_deps = self.LocalPathify(rule_source)
+ if inputs:
+ main_output_deps += ' '
+ main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
+
+ self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
+ (main_output, main_output_deps))
+ self.WriteLn('\t%s\n' % command)
+ for output in outputs[1:]:
+ # Make each output depend on the main output, with an empty command
+ # to force make to notice that the mtime has changed.
+ self.WriteLn('%s: %s ;' % (output, main_output))
+ self.WriteLn()
+
+ self.WriteLn()
+
+
+ def WriteCopies(self, copies, extra_outputs):
+ """Write Makefile code for any 'copies' from the gyp input.
+
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ """
+ self.WriteLn('### Generated for copy rule.')
+
+ variable = make.StringToMakefileVariable(self.relative_target + '_copies')
+ outputs = []
+ for copy in copies:
+ for path in copy['files']:
+ # The Android build system does not allow generation of files into the
+ # source tree. The destination should start with a variable, which will
+ # typically be $(gyp_intermediate_dir) or
+ # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
+ # because some of the gyp tests depend on this.
+ if not copy['destination'].startswith('$'):
+ print ('WARNING: Copy rule for target %s writes output to '
+ 'local path %s' % (self.target, copy['destination']))
+
+ # LocalPathify() calls normpath, stripping trailing slashes.
+ path = Sourceify(self.LocalPathify(path))
+ filename = os.path.split(path)[1]
+ output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
+ filename)))
+
+ self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
+ (output, path))
+ self.WriteLn('\t@echo Copying: $@')
+ self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
+ self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
+ self.WriteLn()
+ outputs.append(output)
+ self.WriteLn('%s = %s' % (variable,
+ ' '.join(map(make.QuoteSpaces, outputs))))
+ extra_outputs.append('$(%s)' % variable)
+ self.WriteLn()
+
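+  # Each copied file gets a rule of this shape (a sketch with a hypothetical
+  # file f.txt):
+  #
+  #   <destination>/f.txt: $(LOCAL_PATH)/f.txt $(GYP_TARGET_DEPENDENCIES) | $(ACP)
+  #           @echo Copying: $@
+  #           $(hide) mkdir -p $(dir $@)
+  #           $(hide) $(ACP) -rpf $< $@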
+
+ def WriteSourceFlags(self, spec, configs):
+ """Write out the flags and include paths used to compile source files for
+ the current target.
+
+ Args:
+ spec, configs: input from gyp.
+ """
+ for configname, config in sorted(configs.iteritems()):
+ extracted_includes = []
+
+ self.WriteLn('\n# Flags passed to both C and C++ files.')
+ cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
+ config.get('cflags', []) + config.get('cflags_c', []))
+ extracted_includes.extend(includes_from_cflags)
+ self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
+
+ self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
+ prefix='-D', quoter=make.EscapeCppDefine)
+
+ self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
+ includes = list(config.get('include_dirs', []))
+ includes.extend(extracted_includes)
+ includes = map(Sourceify, map(self.LocalPathify, includes))
+ includes = self.NormalizeIncludePaths(includes)
+ self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
+
+ self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
+ self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
+
+ self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
+ '$(MY_DEFS_$(GYP_CONFIGURATION))')
+ # Undefine ANDROID for host modules
+ # TODO: the source code should not use macro ANDROID to tell if it's host
+ # or target module.
+ if self.toolset == 'host':
+ self.WriteLn('# Undefine ANDROID for host modules')
+ self.WriteLn('LOCAL_CFLAGS += -UANDROID')
+ self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
+ '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
+ self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
+ # Android uses separate flags for assembly file invocations, but gyp expects
+ # the same CFLAGS to be applied:
+ self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
+
+
+ def WriteSources(self, spec, configs, extra_sources):
+ """Write Makefile code for any 'sources' from the gyp input.
+ These are source files necessary to build the current target.
+ We need to handle shared_intermediate directory source files as
+ a special case by copying them to the intermediate directory and
+    treating them as generated sources. Otherwise the Android build
+ rules won't pick them up.
+
+ Args:
+ spec, configs: input from gyp.
+ extra_sources: Sources generated from Actions or Rules.
+ """
+ sources = filter(make.Compilable, spec.get('sources', []))
+ generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
+ extra_sources = filter(make.Compilable, extra_sources)
+
+ # Determine and output the C++ extension used by these sources.
+ # We simply find the first C++ file and use that extension.
+ all_sources = sources + extra_sources
+ local_cpp_extension = '.cpp'
+ for source in all_sources:
+ (root, ext) = os.path.splitext(source)
+ if IsCPPExtension(ext):
+ local_cpp_extension = ext
+ break
+ if local_cpp_extension != '.cpp':
+ self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)
+
+ # We need to move any non-generated sources that are coming from the
+ # shared intermediate directory out of LOCAL_SRC_FILES and put them
+ # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
+ # that don't match our local_cpp_extension, since Android will only
+ # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
+ local_files = []
+ for source in sources:
+ (root, ext) = os.path.splitext(source)
+ if '$(gyp_shared_intermediate_dir)' in source:
+ extra_sources.append(source)
+ elif '$(gyp_intermediate_dir)' in source:
+ extra_sources.append(source)
+ elif IsCPPExtension(ext) and ext != local_cpp_extension:
+ extra_sources.append(source)
+ else:
+ local_files.append(os.path.normpath(os.path.join(self.path, source)))
+
+ # For any generated source, if it is coming from the shared intermediate
+ # directory then we add a Make rule to copy them to the local intermediate
+ # directory first. This is because the Android LOCAL_GENERATED_SOURCES
+ # must be in the local module intermediate directory for the compile rules
+ # to work properly. If the file has the wrong C++ extension, then we add
+ # a rule to copy that to intermediates and use the new version.
+ final_generated_sources = []
+    # If a source file gets copied, we still need to add the original source
+    # directory as a header search path, since GCC searches for headers in
+    # the directory that contains the source file by default.
+ origin_src_dirs = []
+ for source in extra_sources:
+ local_file = source
+ if not '$(gyp_intermediate_dir)/' in local_file:
+ basename = os.path.basename(local_file)
+ local_file = '$(gyp_intermediate_dir)/' + basename
+ (root, ext) = os.path.splitext(local_file)
+ if IsCPPExtension(ext) and ext != local_cpp_extension:
+ local_file = root + local_cpp_extension
+ if local_file != source:
+ self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
+ self.WriteLn('\tmkdir -p $(@D); cp $< $@')
+ origin_src_dirs.append(os.path.dirname(source))
+ final_generated_sources.append(local_file)
+
+ # We add back in all of the non-compilable stuff to make sure that the
+ # make rules have dependencies on them.
+ final_generated_sources.extend(generated_not_sources)
+ self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')
+
+ origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
+ origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
+ self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')
+
+ self.WriteList(local_files, 'LOCAL_SRC_FILES')
+
+ # Write out the flags used to compile the source; this must be done last
+ # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
+ self.WriteSourceFlags(spec, configs)
+
+
+ def ComputeAndroidModule(self, spec):
+ """Return the Android module name used for a gyp spec.
+
+ We use the complete qualified target name to avoid collisions between
+ duplicate targets in different directories. We also add a suffix to
+ distinguish gyp-generated module names.
+ """
+
+ if int(spec.get('android_unmangled_name', 0)):
+ assert self.type != 'shared_library' or self.target.startswith('lib')
+ return self.target
+
+ if self.type == 'shared_library':
+ # For reasons of convention, the Android build system requires that all
+ # shared library modules are named 'libfoo' when generating -l flags.
+ prefix = 'lib_'
+ else:
+ prefix = ''
+
+ if spec['toolset'] == 'host':
+ suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
+ else:
+ suffix = '_gyp'
+
+ if self.path:
+ middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
+ else:
+ middle = make.StringToMakefileVariable(self.target)
+
+ return ''.join([prefix, middle, suffix])
+
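+  # For example (illustrative): a shared_library target 'foo' in path 'a/b'
+  # built for the target toolset becomes module 'lib_a_b_foo_gyp'.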
+
+ def ComputeOutputParts(self, spec):
+ """Return the 'output basename' of a gyp spec, split into filename + ext.
+
+ Android libraries must be named the same thing as their module name,
+ otherwise the linker can't find them, so product_name and so on must be
+ ignored if we are building a library, and the "lib" prepending is
+ not done for Android.
+ """
+ assert self.type != 'loadable_module' # TODO: not supported?
+
+ target = spec['target_name']
+ target_prefix = ''
+ target_ext = ''
+ if self.type == 'static_library':
+ target = self.ComputeAndroidModule(spec)
+ target_ext = '.a'
+ elif self.type == 'shared_library':
+ target = self.ComputeAndroidModule(spec)
+ target_ext = '.so'
+ elif self.type == 'none':
+ target_ext = '.stamp'
+ elif self.type != 'executable':
+ print ("ERROR: What output file should be generated?",
+ "type", self.type, "target", target)
+
+ if self.type != 'static_library' and self.type != 'shared_library':
+ target_prefix = spec.get('product_prefix', target_prefix)
+ target = spec.get('product_name', target)
+ product_ext = spec.get('product_extension')
+ if product_ext:
+ target_ext = '.' + product_ext
+
+ target_stem = target_prefix + target
+ return (target_stem, target_ext)
+
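+  # For example (illustrative): a static_library target 'foo' in path 'a/b'
+  # yields ('a_b_foo_gyp', '.a'), while an executable with product_name 'baz'
+  # yields ('baz', '').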
+
+ def ComputeOutputBasename(self, spec):
+ """Return the 'output basename' of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ 'libfoobar.so'
+ """
+ return ''.join(self.ComputeOutputParts(spec))
+
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
+ if self.type == 'executable':
+ # We install host executables into shared_intermediate_dir so they can be
+ # run by gyp rules that refer to PRODUCT_DIR.
+ path = '$(gyp_shared_intermediate_dir)'
+ elif self.type == 'shared_library':
+ if self.toolset == 'host':
+ path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
+ else:
+ path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
+ else:
+ # Other targets just get built into their intermediate dir.
+ if self.toolset == 'host':
+ path = ('$(call intermediates-dir-for,%s,%s,true,,'
+ '$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
+ self.android_module))
+ else:
+ path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
+ % (self.android_class, self.android_module))
+
+ assert spec.get('product_dir') is None # TODO: not supported?
+ return os.path.join(path, self.ComputeOutputBasename(spec))
+
+ def NormalizeIncludePaths(self, include_paths):
+ """ Normalize include_paths.
+ Convert absolute paths to relative to the Android top directory.
+
+ Args:
+ include_paths: A list of unprocessed include paths.
+ Returns:
+ A list of normalized include paths.
+ """
+ normalized = []
+ for path in include_paths:
+ if path[0] == '/':
+ path = gyp.common.RelativePath(path, self.android_top_dir)
+ normalized.append(path)
+ return normalized
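+
+  # A hedged illustration (paths assumed): with android_top_dir
+  # '/android/src', NormalizeIncludePaths(['/android/src/external/zlib',
+  # 'local/include']) would return ['external/zlib', 'local/include'].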
+
+ def ExtractIncludesFromCFlags(self, cflags):
+ """Extract includes "-I..." out from cflags
+
+ Args:
+ cflags: A list of compiler flags, which may be mixed with "-I.."
+ Returns:
+ A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
+ """
+ clean_cflags = []
+ include_paths = []
+ for flag in cflags:
+ if flag.startswith('-I'):
+ include_paths.append(flag[2:])
+ else:
+ clean_cflags.append(flag)
+
+ return (clean_cflags, include_paths)
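+
+  # A hedged illustration (flags assumed):
+  #   self.ExtractIncludesFromCFlags(['-O2', '-Iinclude', '-Wall'])
+  #   => (['-O2', '-Wall'], ['include'])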
+
+ def FilterLibraries(self, libraries):
+ """Filter the 'libraries' key to separate things that shouldn't be ldflags.
+
+ Library entries that look like filenames should be converted to android
+ module names instead of being passed to the linker as flags.
+
+ Args:
+ libraries: the value of spec.get('libraries')
+ Returns:
+ A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
+ """
+ static_lib_modules = []
+ dynamic_lib_modules = []
+ ldflags = []
+ for libs in libraries:
+ # Libs can have multiple words.
+ for lib in libs.split():
+ # Filter the system libraries, which are added by default by the Android
+ # build system.
+ if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
+ lib.endswith('libgcc.a')):
+ continue
+ match = re.search(r'([^/]+)\.a$', lib)
+ if match:
+ static_lib_modules.append(match.group(1))
+ continue
+ match = re.search(r'([^/]+)\.so$', lib)
+ if match:
+ dynamic_lib_modules.append(match.group(1))
+ continue
+ if lib.startswith('-l'):
+ ldflags.append(lib)
+ return (static_lib_modules, dynamic_lib_modules, ldflags)
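+
+  # A hedged illustration (library entries assumed):
+  #   self.FilterLibraries(['-lm -lfoo', 'out/libbar.a', 'libbaz.so'])
+  #   => (['libbar'], ['libbaz'], ['-lfoo'])
+  # '-lm' is dropped because the Android build adds system libraries itself.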
+
+
+ def ComputeDeps(self, spec):
+ """Compute the dependencies of a gyp spec.
+
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
+ deps = []
+ link_deps = []
+ if 'dependencies' in spec:
+ deps.extend([target_outputs[dep] for dep in spec['dependencies']
+ if target_outputs[dep]])
+ for dep in spec['dependencies']:
+ if dep in target_link_deps:
+ link_deps.append(target_link_deps[dep])
+ deps.extend(link_deps)
+ return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
+
+
+ def WriteTargetFlags(self, spec, configs, link_deps):
+ """Write Makefile code to specify the link flags and library dependencies.
+
+ spec, configs: input from gyp.
+ link_deps: link dependency list; see ComputeDeps()
+ """
+ # Libraries (i.e. -lfoo)
+ # These must be included even for static libraries as some of them provide
+ # implicit include paths through the build system.
+ libraries = gyp.common.uniquer(spec.get('libraries', []))
+ static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
+
+ if self.type != 'static_library':
+ for configname, config in sorted(configs.iteritems()):
+ ldflags = list(config.get('ldflags', []))
+ self.WriteLn('')
+ self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
+ self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
+ self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
+ '$(LOCAL_GYP_LIBS)')
+
+ # Link dependencies (i.e. other gyp targets this target depends on)
+ # These need not be included for static libraries as within the gyp build
+ # we do not use the implicit include path mechanism.
+ if self.type != 'static_library':
+ static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
+ shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
+ else:
+ static_link_deps = []
+ shared_link_deps = []
+
+ # Only write the lists if they are non-empty.
+ if static_libs or static_link_deps:
+ self.WriteLn('')
+ self.WriteList(static_libs + static_link_deps,
+ 'LOCAL_STATIC_LIBRARIES')
+ self.WriteLn('# Enable grouping to fix circular references')
+ self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
+ if dynamic_libs or shared_link_deps:
+ self.WriteLn('')
+ self.WriteList(dynamic_libs + shared_link_deps,
+ 'LOCAL_SHARED_LIBRARIES')
+
+
+ def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
+ write_alias_target):
+ """Write Makefile code to produce the final target of the gyp spec.
+
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ part_of_all: flag indicating this target is part of 'all'
+ write_alias_target: flag indicating whether to create short aliases for this
+ target
+ """
+ self.WriteLn('### Rules for final target.')
+
+ if self.type != 'none':
+ self.WriteTargetFlags(spec, configs, link_deps)
+
+ settings = spec.get('aosp_build_settings', {})
+ if settings:
+ self.WriteLn('### Set directly by aosp_build_settings.')
+ for k, v in settings.iteritems():
+ if isinstance(v, list):
+ self.WriteList(v, k)
+ else:
+ self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
+ self.WriteLn('')
+
+    # Add to the set of targets which represent the gyp 'all' target. We use
+    # the name 'gyp_all_modules' because the Android build system doesn't allow
+    # the Make target 'all', and because 'all_modules' is the Android
+    # equivalent of the Make target 'all'.
+ if part_of_all and write_alias_target:
+ self.WriteLn('# Add target alias to "gyp_all_modules" target.')
+ self.WriteLn('.PHONY: gyp_all_modules')
+ self.WriteLn('gyp_all_modules: %s' % self.android_module)
+ self.WriteLn('')
+
+ # Add an alias from the gyp target name to the Android module name. This
+ # simplifies manual builds of the target, and is required by the test
+ # framework.
+ if self.target != self.android_module and write_alias_target:
+ self.WriteLn('# Alias gyp target name.')
+ self.WriteLn('.PHONY: %s' % self.target)
+ self.WriteLn('%s: %s' % (self.target, self.android_module))
+ self.WriteLn('')
+
+ # Add the command to trigger build of the target type depending
+ # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
+ # NOTE: This has to come last!
+ modifier = ''
+ if self.toolset == 'host':
+ modifier = 'HOST_'
+ if self.type == 'static_library':
+ self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
+ elif self.type == 'shared_library':
+ self.WriteLn('LOCAL_PRELINK_MODULE := false')
+ self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
+ elif self.type == 'executable':
+ self.WriteLn('LOCAL_CXX_STL := libc++_static')
+ # Executables are for build and test purposes only, so they're installed
+ # to a directory that doesn't get included in the system image.
+ self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
+ self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
+ else:
+ self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
+ self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
+ if self.toolset == 'target':
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
+ else:
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
+ self.WriteLn()
+ self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
+ self.WriteLn()
+ self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
+ self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
+ self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
+ self.WriteLn('\t$(hide) touch $@')
+ self.WriteLn()
+ self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
+
+
+ def WriteList(self, value_list, variable=None, prefix='',
+ quoter=make.QuoteIfNecessary, local_pathify=False):
+ """Write a variable definition that is a list of values.
+
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+    foo := blaha blahb
+ but in a pretty-printed style.
+ """
+ values = ''
+ if value_list:
+ value_list = [quoter(prefix + l) for l in value_list]
+ if local_pathify:
+ value_list = [self.LocalPathify(l) for l in value_list]
+ values = ' \\\n\t' + ' \\\n\t'.join(value_list)
+ self.fp.write('%s :=%s\n\n' % (variable, values))
+
+
+ def WriteLn(self, text=''):
+ self.fp.write(text + '\n')
+
+
+ def LocalPathify(self, path):
+ """Convert a subdirectory-relative path into a normalized path which starts
+ with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
+ Absolute paths, or paths that contain variables, are just normalized."""
+ if '$(' in path or os.path.isabs(path):
+ # path is not a file in the project tree in this case, but calling
+ # normpath is still important for trimming trailing slashes.
+ return os.path.normpath(path)
+ local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
+ local_path = os.path.normpath(local_path)
+ # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
+ # - i.e. that the resulting path is still inside the project tree. The
+ # path may legitimately have ended up containing just $(LOCAL_PATH), though,
+ # so we don't look for a slash.
+    assert local_path.startswith('$(LOCAL_PATH)'), (
+        'Path %s attempts to escape from gyp path %s!' % (path, self.path))
+ return local_path
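+
+  # A hedged illustration (paths assumed): with self.path 'foo',
+  # LocalPathify('sub/file.c') returns '$(LOCAL_PATH)/foo/sub/file.c', while
+  # LocalPathify('$(gyp_shared_intermediate_dir)/x.c') is merely normalized.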
+
+
+ def ExpandInputRoot(self, template, expansion, dirname):
+ if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
+ return template
+ path = template % {
+ 'INPUT_ROOT': expansion,
+ 'INPUT_DIRNAME': dirname,
+ }
+ return os.path.normpath(path)
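+
+  # A hedged illustration (template and values assumed):
+  #   self.ExpandInputRoot('%(INPUT_DIRNAME)s/%(INPUT_ROOT)s.pb.cc',
+  #                        'msg', 'protos')
+  #   => 'protos/msg.pb.cc'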
+
+
+def PerformBuild(data, configurations, params):
+ # The android backend only supports the default configuration.
+ options = params['options']
+ makefile = os.path.abspath(os.path.join(options.toplevel_dir,
+ 'GypAndroid.mk'))
+ env = dict(os.environ)
+ env['ONE_SHOT_MAKEFILE'] = makefile
+ arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
+ print 'Building: %s' % arguments
+ subprocess.check_call(arguments, env=env)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+ builddir_name = generator_flags.get('output_dir', 'out')
+ limit_to_target_all = generator_flags.get('limit_to_target_all', False)
+ write_alias_targets = generator_flags.get('write_alias_targets', True)
+ sdk_version = generator_flags.get('aosp_sdk_version', 0)
+ android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
+ assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
+
+ def CalculateMakefilePath(build_file, base_name):
+ """Determine where to write a Makefile for a given gyp file."""
+ # Paths in gyp files are relative to the .gyp file, but we want
+ # paths relative to the source root for the master makefile. Grab
+ # the path of the .gyp file as the base to relativize against.
+ # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.depth)
+ # We write the file in the base_path directory.
+ output_file = os.path.join(options.depth, base_path, base_name)
+ assert not options.generator_output, (
+ 'The Android backend does not support options.generator_output.')
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.toplevel_dir)
+ return base_path, output_file
+
+ # TODO: search for the first non-'Default' target. This can go
+ # away when we add verification that all targets have the
+ # necessary configurations.
+ default_configuration = None
+ toolsets = set([target_dicts[target]['toolset'] for target in target_list])
+ for target in target_list:
+ spec = target_dicts[target]
+ if spec['default_configuration'] != 'Default':
+ default_configuration = spec['default_configuration']
+ break
+ if not default_configuration:
+ default_configuration = 'Default'
+
+ srcdir = '.'
+ makefile_name = 'GypAndroid' + options.suffix + '.mk'
+ makefile_path = os.path.join(options.toplevel_dir, makefile_name)
+ assert not options.generator_output, (
+ 'The Android backend does not support options.generator_output.')
+ gyp.common.EnsureDirExists(makefile_path)
+ root_makefile = open(makefile_path, 'w')
+
+ root_makefile.write(header)
+
+ # We set LOCAL_PATH just once, here, to the top of the project tree. This
+ # allows all the other paths we use to be relative to the Android.mk file,
+ # as the Android build system expects.
+ root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')
+
+ # Find the list of targets that derive from the gyp file(s) being built.
+ needed_targets = set()
+ for build_file in params['build_files']:
+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
+ needed_targets.add(target)
+
+ build_files = set()
+ include_list = set()
+ android_modules = {}
+ for qualified_target in target_list:
+ build_file, target, toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target)
+ relative_build_file = gyp.common.RelativePath(build_file,
+ options.toplevel_dir)
+ build_files.add(relative_build_file)
+ included_files = data[build_file]['included_files']
+ for included_file in included_files:
+ # The included_files entries are relative to the dir of the build file
+ # that included them, so we have to undo that and then make them relative
+ # to the root dir.
+ relative_include_file = gyp.common.RelativePath(
+ gyp.common.UnrelativePath(included_file, build_file),
+ options.toplevel_dir)
+ abs_include_file = os.path.abspath(relative_include_file)
+ # If the include file is from the ~/.gyp dir, we should use absolute path
+ # so that relocating the src dir doesn't break the path.
+ if (params['home_dot_gyp'] and
+ abs_include_file.startswith(params['home_dot_gyp'])):
+ build_files.add(abs_include_file)
+ else:
+ build_files.add(relative_include_file)
+
+ base_path, output_file = CalculateMakefilePath(build_file,
+ target + '.' + toolset + options.suffix + '.mk')
+
+ spec = target_dicts[qualified_target]
+ configs = spec['configurations']
+
+ part_of_all = qualified_target in needed_targets
+ if limit_to_target_all and not part_of_all:
+ continue
+
+ relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
+ toolset)
+ writer = AndroidMkWriter(android_top_dir)
+ android_module = writer.Write(qualified_target, relative_target, base_path,
+ output_file, spec, configs,
+ part_of_all=part_of_all,
+ write_alias_target=write_alias_targets,
+ sdk_version=sdk_version)
+ if android_module in android_modules:
+ print ('ERROR: Android module names must be unique. The following '
+ 'targets both generate Android module name %s.\n %s\n %s' %
+ (android_module, android_modules[android_module],
+ qualified_target))
+ return
+ android_modules[android_module] = qualified_target
+
+ # Our root_makefile lives at the source root. Compute the relative path
+ # from there to the output_file for including.
+ mkfile_rel_path = gyp.common.RelativePath(output_file,
+ os.path.dirname(makefile_path))
+ include_list.add(mkfile_rel_path)
+
+ root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
+ root_makefile.write('GYP_VAR_PREFIX ?=\n')
+ root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
+ root_makefile.write('GYP_HOST_MULTILIB ?= first\n')
+
+ # Write out the sorted list of includes.
+ root_makefile.write('\n')
+ for include_file in sorted(include_list):
+ root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
+ root_makefile.write('\n')
+
+ if write_alias_targets:
+ root_makefile.write(ALL_MODULES_FOOTER)
+
+ root_makefile.close()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
new file mode 100644
index 0000000000..17f5e6396c
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -0,0 +1,1221 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cmake output module
+
+This module is under development and should be considered experimental.
+
+This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
+created for each configuration.
+
+This module's original purpose was to support editing in IDEs like KDevelop
+which use CMake for project management. It is also possible to use CMake to
+generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
+will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
+but build using CMake. As a result QtCreator editor is unaware of compiler
+defines. The generated CMakeLists.txt can also be used to build on Linux. There
+is currently no support for building on platforms other than Linux.
+
+The generated CMakeLists.txt should properly compile all projects. However,
+there is a mismatch between gyp and cmake with regard to linking. All attempts
+are made to work around this, but CMake sometimes sees -Wl,--start-group as a
+library and incorrectly repeats it. As a result the output of this generator
+should not be relied on for building.
+
+When used with kdevelop, use version 4.4+. Previous versions of kdevelop will
+not be able to find the header file directories described in the generated
+CMakeLists.txt file.
+"""
+
+import multiprocessing
+import os
+import signal
+import string
+import subprocess
+import gyp.common
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_SUFFIX': '.so',
+ 'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
+ 'LIB_DIR': '${obj}.${TOOLSET}',
+ 'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
+ 'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
+ 'PRODUCT_DIR': '${builddir}',
+ 'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
+ 'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
+ 'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
+ 'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
+ 'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
+ 'CONFIGURATION_NAME': '${configuration}',
+}
+
+FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
+
+generator_supports_multiple_toolsets = True
+generator_wants_static_library_dependencies_adjusted = True
+
+COMPILABLE_EXTENSIONS = {
+ '.c': 'cc',
+ '.cc': 'cxx',
+ '.cpp': 'cxx',
+ '.cxx': 'cxx',
+ '.s': 's', # cc
+ '.S': 's', # cc
+}
+
+
+def RemovePrefix(a, prefix):
+ """Returns 'a' without 'prefix' if it starts with 'prefix'."""
+ return a[len(prefix):] if a.startswith(prefix) else a
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+
+def Compilable(filename):
+ """Return true if the file is compilable (should be in OBJS)."""
+ return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith('.o')
+
+
+def NormjoinPathForceCMakeSource(base_path, rel_path):
+ """Resolves rel_path against base_path and returns the result.
+
+ If rel_path is an absolute path it is returned unchanged.
+ Otherwise it is resolved against base_path and normalized.
+ If the result is a relative path, it is forced to be relative to the
+ CMakeLists.txt.
+ """
+ if os.path.isabs(rel_path):
+ return rel_path
+ if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
+ return rel_path
+ # TODO: do we need to check base_path for absolute variables as well?
+ return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
+ os.path.normpath(os.path.join(base_path, rel_path)))
+
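+# A hedged illustration (paths assumed):
+#   NormjoinPathForceCMakeSource('../../chrome', 'app/main.cc')
+#   => '${CMAKE_CURRENT_LIST_DIR}/../../chrome/app/main.cc'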
+
+def NormjoinPath(base_path, rel_path):
+ """Resolves rel_path against base_path and returns the result.
+ TODO: what is this really used for?
+ If rel_path begins with '$' it is returned unchanged.
+ Otherwise it is resolved against base_path if relative, then normalized.
+ """
+ if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
+ return rel_path
+ return os.path.normpath(os.path.join(base_path, rel_path))
+
+
+def CMakeStringEscape(a):
+ """Escapes the string 'a' for use inside a CMake string.
+
+ This means escaping
+ '\' otherwise it may be seen as modifying the next character
+ '"' otherwise it will end the string
+ ';' otherwise the string becomes a list
+
+ The following do not need to be escaped
+ '#' when the lexer is in string state, this does not start a comment
+
+ The following are yet unknown
+ '$' generator variables (like ${obj}) must not be escaped,
+      but text $ should be escaped;
+      what is wanted is to know which $ come from generator variables.
+ """
+ return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
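+
+# A hedged illustration (input string assumed):
+#   CMakeStringEscape('say "hi"; bye')  =>  'say \"hi\"\; bye'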
+
+
+def SetFileProperty(output, source_name, property_name, values, sep):
+ """Given a set of source file, sets the given property on them."""
+ output.write('set_source_files_properties(')
+ output.write(source_name)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetFilesProperty(output, variable, property_name, values, sep):
+ """Given a set of source files, sets the given property on them."""
+ output.write('set_source_files_properties(')
+ WriteVariable(output, variable)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetTargetProperty(output, target_name, property_name, values, sep=''):
+ """Given a target, sets the given property."""
+ output.write('set_target_properties(')
+ output.write(target_name)
+ output.write(' PROPERTIES ')
+ output.write(property_name)
+ output.write(' "')
+ for value in values:
+ output.write(CMakeStringEscape(value))
+ output.write(sep)
+ output.write('")\n')
+
+
+def SetVariable(output, variable_name, value):
+ """Sets a CMake variable."""
+ output.write('set(')
+ output.write(variable_name)
+ output.write(' "')
+ output.write(CMakeStringEscape(value))
+ output.write('")\n')
+
+
+def SetVariableList(output, variable_name, values):
+ """Sets a CMake variable to a list."""
+ if not values:
+ return SetVariable(output, variable_name, "")
+ if len(values) == 1:
+ return SetVariable(output, variable_name, values[0])
+ output.write('list(APPEND ')
+ output.write(variable_name)
+ output.write('\n "')
+ output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
+ output.write('")\n')
+
+
+def UnsetVariable(output, variable_name):
+ """Unsets a CMake variable."""
+ output.write('unset(')
+ output.write(variable_name)
+ output.write(')\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+ if prepend:
+ output.write(prepend)
+ output.write('${')
+ output.write(variable_name)
+ output.write('}')
+
+
+class CMakeTargetType(object):
+ def __init__(self, command, modifier, property_modifier):
+ self.command = command
+ self.modifier = modifier
+ self.property_modifier = property_modifier
+
+
+cmake_target_type_from_gyp_target_type = {
+ 'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
+ 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
+ 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
+ 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
+ 'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
+}
+
+
+def StringToCMakeTargetName(a):
+ """Converts the given string 'a' to a valid CMake target name.
+
+ All invalid characters are replaced by '_'.
+ Invalid for cmake: ' ', '/', '(', ')', '"'
+ Invalid for make: ':'
+ Invalid for unknown reasons but cause failures: '.'
+ """
+ return a.translate(string.maketrans(' /():."', '_______'))
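+
+# A hedged illustration (name assumed):
+#   StringToCMakeTargetName('chrome/chrome.gyp:browser')
+#   => 'chrome_chrome_gyp_browser'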
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps,
+ path_to_gyp, output):
+ """Write CMake for the 'actions' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+ actions: the Gyp 'actions' dict for this target.
+ extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+ extra_deps: [<cmake_taget>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ for action in actions:
+ action_name = StringToCMakeTargetName(action['action_name'])
+ action_target_name = '%s__%s' % (target_name, action_name)
+
+ inputs = action['inputs']
+ inputs_name = action_target_name + '__input'
+ SetVariableList(output, inputs_name,
+ [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+
+ outputs = action['outputs']
+ cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
+ for out in outputs]
+ outputs_name = action_target_name + '__output'
+ SetVariableList(output, outputs_name, cmake_outputs)
+
+    # Collect the output dirs we'll need.
+ dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources.extend(zip(cmake_outputs, outputs))
+
+ # add_custom_command
+ output.write('add_custom_command(OUTPUT ')
+ WriteVariable(output, outputs_name)
+ output.write('\n')
+
+ if len(dirs) > 0:
+ for directory in dirs:
+ output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
+ output.write(directory)
+ output.write('\n')
+
+ output.write(' COMMAND ')
+ output.write(gyp.common.EncodePOSIXShellList(action['action']))
+ output.write('\n')
+
+ output.write(' DEPENDS ')
+ WriteVariable(output, inputs_name)
+ output.write('\n')
+
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
+ output.write(path_to_gyp)
+ output.write('\n')
+
+ output.write(' COMMENT ')
+ if 'message' in action:
+ output.write(action['message'])
+ else:
+ output.write(action_target_name)
+ output.write('\n')
+
+ output.write(' VERBATIM\n')
+ output.write(')\n')
+
+ # add_custom_target
+ output.write('add_custom_target(')
+ output.write(action_target_name)
+ output.write('\n DEPENDS ')
+ WriteVariable(output, outputs_name)
+ output.write('\n SOURCES ')
+ WriteVariable(output, inputs_name)
+ output.write('\n)\n')
+
+ extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+ if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
+ if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
+ return rel_path
+ return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps,
+ path_to_gyp, output):
+ """Write CMake for the 'rules' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+    rules: the Gyp 'rules' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_target>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ for rule in rules:
+ rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
+
+ inputs = rule.get('inputs', [])
+ inputs_name = rule_name + '__input'
+ SetVariableList(output, inputs_name,
+ [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+ outputs = rule['outputs']
+ var_outputs = []
+
+ for count, rule_source in enumerate(rule.get('rule_sources', [])):
+ action_name = rule_name + '_' + str(count)
+
+ rule_source_dirname, rule_source_basename = os.path.split(rule_source)
+ rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
+
+ SetVariable(output, 'RULE_INPUT_PATH', rule_source)
+ SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
+ SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
+ SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
+ SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
+
+      # Collect the output dirs we'll need.
+ dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+ # Create variables for the output, as 'local' variable will be unset.
+ these_outputs = []
+ for output_index, out in enumerate(outputs):
+ output_name = action_name + '_' + str(output_index)
+ SetVariable(output, output_name,
+ NormjoinRulePathForceCMakeSource(path_to_gyp, out,
+ rule_source))
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources.append(('${' + output_name + '}', out))
+ these_outputs.append('${' + output_name + '}')
+ var_outputs.append('${' + output_name + '}')
+
+ # add_custom_command
+ output.write('add_custom_command(OUTPUT\n')
+ for out in these_outputs:
+ output.write(' ')
+ output.write(out)
+ output.write('\n')
+
+ for directory in dirs:
+ output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
+ output.write(directory)
+ output.write('\n')
+
+ output.write(' COMMAND ')
+ output.write(gyp.common.EncodePOSIXShellList(rule['action']))
+ output.write('\n')
+
+ output.write(' DEPENDS ')
+ WriteVariable(output, inputs_name)
+ output.write(' ')
+ output.write(NormjoinPath(path_to_gyp, rule_source))
+ output.write('\n')
+
+ # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
+ # The cwd is the current build directory.
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
+ output.write(path_to_gyp)
+ output.write('\n')
+
+ output.write(' COMMENT ')
+ if 'message' in rule:
+ output.write(rule['message'])
+ else:
+ output.write(action_name)
+ output.write('\n')
+
+ output.write(' VERBATIM\n')
+ output.write(')\n')
+
+ UnsetVariable(output, 'RULE_INPUT_PATH')
+ UnsetVariable(output, 'RULE_INPUT_DIRNAME')
+ UnsetVariable(output, 'RULE_INPUT_NAME')
+ UnsetVariable(output, 'RULE_INPUT_ROOT')
+ UnsetVariable(output, 'RULE_INPUT_EXT')
+
+ # add_custom_target
+ output.write('add_custom_target(')
+ output.write(rule_name)
+ output.write(' DEPENDS\n')
+ for out in var_outputs:
+ output.write(' ')
+ output.write(out)
+ output.write('\n')
+ output.write('SOURCES ')
+ WriteVariable(output, inputs_name)
+ output.write('\n')
+ for rule_source in rule.get('rule_sources', []):
+ output.write(' ')
+ output.write(NormjoinPath(path_to_gyp, rule_source))
+ output.write('\n')
+ output.write(')\n')
+
+ extra_deps.append(rule_name)
+
+
+def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
+ """Write CMake for the 'copies' in the target.
+
+ Args:
+ target_name: the name of the CMake target being generated.
+    copies: the Gyp 'copies' dict for this target.
+    extra_deps: [<cmake_target>] to append with generated targets.
+ path_to_gyp: relative path from CMakeLists.txt being generated to
+ the Gyp file in which the target being generated is defined.
+ """
+ copy_name = target_name + '__copies'
+
+  # CMake gets upset with custom targets whose OUTPUT specifies no output.
+ have_copies = any(copy['files'] for copy in copies)
+ if not have_copies:
+ output.write('add_custom_target(')
+ output.write(copy_name)
+ output.write(')\n')
+ extra_deps.append(copy_name)
+ return
+
+ class Copy(object):
+ def __init__(self, ext, command):
+ self.cmake_inputs = []
+ self.cmake_outputs = []
+ self.gyp_inputs = []
+ self.gyp_outputs = []
+ self.ext = ext
+ self.inputs_name = None
+ self.outputs_name = None
+ self.command = command
+
+ file_copy = Copy('', 'copy')
+ dir_copy = Copy('_dirs', 'copy_directory')
+
+ for copy in copies:
+ files = copy['files']
+ destination = copy['destination']
+ for src in files:
+ path = os.path.normpath(src)
+ basename = os.path.split(path)[1]
+ dst = os.path.join(destination, basename)
+
+ copy = file_copy if os.path.basename(src) else dir_copy
+
+ copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
+ copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+ copy.gyp_inputs.append(src)
+ copy.gyp_outputs.append(dst)
+
+ for copy in (file_copy, dir_copy):
+ if copy.cmake_inputs:
+ copy.inputs_name = copy_name + '__input' + copy.ext
+ SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+ copy.outputs_name = copy_name + '__output' + copy.ext
+ SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+ # add_custom_command
+ output.write('add_custom_command(\n')
+
+ output.write('OUTPUT')
+ for copy in (file_copy, dir_copy):
+ if copy.outputs_name:
+ WriteVariable(output, copy.outputs_name, ' ')
+ output.write('\n')
+
+ for copy in (file_copy, dir_copy):
+ for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+ # 'cmake -E copy src dst' will create the 'dst' directory if needed.
+ output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
+ output.write(src)
+ output.write(' ')
+ output.write(dst)
+ output.write("\n")
+
+ output.write('DEPENDS')
+ for copy in (file_copy, dir_copy):
+ if copy.inputs_name:
+ WriteVariable(output, copy.inputs_name, ' ')
+ output.write('\n')
+
+ output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
+ output.write(path_to_gyp)
+ output.write('\n')
+
+ output.write('COMMENT Copying for ')
+ output.write(target_name)
+ output.write('\n')
+
+ output.write('VERBATIM\n')
+ output.write(')\n')
+
+ # add_custom_target
+ output.write('add_custom_target(')
+ output.write(copy_name)
+ output.write('\n DEPENDS')
+ for copy in (file_copy, dir_copy):
+ if copy.outputs_name:
+ WriteVariable(output, copy.outputs_name, ' ')
+ output.write('\n SOURCES')
+ if file_copy.inputs_name:
+ WriteVariable(output, file_copy.inputs_name, ' ')
+ output.write('\n)\n')
+
+ extra_deps.append(copy_name)
+
+
+def CreateCMakeTargetBaseName(qualified_target):
+ """This is the name we would like the target to have."""
+ _, gyp_target_name, gyp_target_toolset = (
+ gyp.common.ParseQualifiedTarget(qualified_target))
+ cmake_target_base_name = gyp_target_name
+ if gyp_target_toolset and gyp_target_toolset != 'target':
+ cmake_target_base_name += '_' + gyp_target_toolset
+ return StringToCMakeTargetName(cmake_target_base_name)
+
+
+def CreateCMakeTargetFullName(qualified_target):
+ """An unambiguous name for the target."""
+ gyp_file, gyp_target_name, gyp_target_toolset = (
+ gyp.common.ParseQualifiedTarget(qualified_target))
+ cmake_target_full_name = gyp_file + ':' + gyp_target_name
+ if gyp_target_toolset and gyp_target_toolset != 'target':
+ cmake_target_full_name += '_' + gyp_target_toolset
+ return StringToCMakeTargetName(cmake_target_full_name)
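+
+# A hedged illustration, assuming gyp's 'file.gyp:target#toolset' qualified
+# form: CreateCMakeTargetFullName('foo/bar.gyp:baz#host')
+# => 'foo_bar_gyp_baz_host'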
+
+
+class CMakeNamer(object):
+ """Converts Gyp target names into CMake target names.
+
+ CMake requires that target names be globally unique. One way to ensure
+  this is to fully qualify the names of the targets. Unfortunately, this
+ ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+ of just "chrome". If this generator were only interested in building, it
+ would be possible to fully qualify all target names, then create
+ unqualified target names which depend on all qualified targets which
+ should have had that name. This is more or less what the 'make' generator
+ does with aliases. However, one goal of this generator is to create CMake
+ files for use with IDEs, and fully qualified names are not as user
+ friendly.
+
+ Since target name collision is rare, we do the above only when required.
+
+ Toolset variants are always qualified from the base, as this is required for
+ building. However, it also makes sense for an IDE, as it is possible for
+ defines to be different.
+ """
+ def __init__(self, target_list):
+    self.cmake_target_base_names_conflicting = set()
+
+ cmake_target_base_names_seen = set()
+ for qualified_target in target_list:
+ cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
+
+ if cmake_target_base_name not in cmake_target_base_names_seen:
+ cmake_target_base_names_seen.add(cmake_target_base_name)
+ else:
+        self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
+
+ def CreateCMakeTargetName(self, qualified_target):
+ base_name = CreateCMakeTargetBaseName(qualified_target)
+    if base_name in self.cmake_target_base_names_conflicting:
+ return CreateCMakeTargetFullName(qualified_target)
+ return base_name
+
+
+def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+ options, generator_flags, all_qualified_targets, output):
+
+  # The make generator always does this.
+ # TODO: It would be nice to be able to tell CMake all dependencies.
+ circular_libs = generator_flags.get('circular', True)
+
+ if not generator_flags.get('standalone', False):
+ output.write('\n#')
+ output.write(qualified_target)
+ output.write('\n')
+
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
+ rel_gyp_dir = os.path.dirname(rel_gyp_file)
+
+ # Relative path from build dir to top dir.
+ build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+ # Relative path from build dir to gyp dir.
+ build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
+
+ path_from_cmakelists_to_gyp = build_to_gyp
+
+ spec = target_dicts.get(qualified_target, {})
+ config = spec.get('configurations', {}).get(config_to_use, {})
+
+ target_name = spec.get('target_name', '<missing target name>')
+ target_type = spec.get('type', '<missing target type>')
+ target_toolset = spec.get('toolset')
+
+ cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+ if cmake_target_type is None:
+    print ('Target %s has unknown target type %s, skipping.' %
+           (target_name, target_type))
+ return
+
+ SetVariable(output, 'TARGET', target_name)
+ SetVariable(output, 'TOOLSET', target_toolset)
+
+ cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
+
+ extra_sources = []
+ extra_deps = []
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if 'actions' in spec:
+ WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
+ path_from_cmakelists_to_gyp, output)
+
+ # Rules must be early like actions.
+ if 'rules' in spec:
+ WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
+ path_from_cmakelists_to_gyp, output)
+
+ # Copies
+ if 'copies' in spec:
+ WriteCopies(cmake_target_name, spec['copies'], extra_deps,
+ path_from_cmakelists_to_gyp, output)
+
+ # Target and sources
+ srcs = spec.get('sources', [])
+
+ # Gyp separates the sheep from the goats based on file extensions.
+  # A full separation is done here because of flag handling (see below).
+ s_sources = []
+ c_sources = []
+ cxx_sources = []
+ linkable_sources = []
+ other_sources = []
+ for src in srcs:
+ _, ext = os.path.splitext(src)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
+
+ if src_type == 's':
+ s_sources.append(src_norm_path)
+ elif src_type == 'cc':
+ c_sources.append(src_norm_path)
+ elif src_type == 'cxx':
+ cxx_sources.append(src_norm_path)
+ elif Linkable(ext):
+ linkable_sources.append(src_norm_path)
+ else:
+ other_sources.append(src_norm_path)
+
+ for extra_source in extra_sources:
+ src, real_source = extra_source
+ _, ext = os.path.splitext(real_source)
+ src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+ if src_type == 's':
+ s_sources.append(src)
+ elif src_type == 'cc':
+ c_sources.append(src)
+ elif src_type == 'cxx':
+ cxx_sources.append(src)
+ elif Linkable(ext):
+ linkable_sources.append(src)
+ else:
+ other_sources.append(src)
+
+ s_sources_name = None
+ if s_sources:
+ s_sources_name = cmake_target_name + '__asm_srcs'
+ SetVariableList(output, s_sources_name, s_sources)
+
+ c_sources_name = None
+ if c_sources:
+ c_sources_name = cmake_target_name + '__c_srcs'
+ SetVariableList(output, c_sources_name, c_sources)
+
+ cxx_sources_name = None
+ if cxx_sources:
+ cxx_sources_name = cmake_target_name + '__cxx_srcs'
+ SetVariableList(output, cxx_sources_name, cxx_sources)
+
+ linkable_sources_name = None
+ if linkable_sources:
+ linkable_sources_name = cmake_target_name + '__linkable_srcs'
+ SetVariableList(output, linkable_sources_name, linkable_sources)
+
+ other_sources_name = None
+ if other_sources:
+ other_sources_name = cmake_target_name + '__other_srcs'
+ SetVariableList(output, other_sources_name, other_sources)
+
+ # CMake gets upset when executable targets provide no sources.
+ # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
+ dummy_sources_name = None
+ has_sources = (s_sources_name or
+ c_sources_name or
+ cxx_sources_name or
+ linkable_sources_name or
+ other_sources_name)
+ if target_type == 'executable' and not has_sources:
+ dummy_sources_name = cmake_target_name + '__dummy_srcs'
+ SetVariable(output, dummy_sources_name,
+ "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
+ output.write('if(NOT EXISTS "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('")\n')
+ output.write(' file(WRITE "')
+ WriteVariable(output, dummy_sources_name)
+ output.write('" "")\n')
+ output.write("endif()\n")
+
+
+ # CMake is opposed to setting linker directories and considers the practice
+ # of setting linker directories dangerous. Instead, it favors the use of
+ # find_library and passing absolute paths to target_link_libraries.
+ # However, CMake does provide the command link_directories, which adds
+ # link directories to targets defined after it is called.
+ # As a result, link_directories must come before the target definition.
+ # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
+ library_dirs = config.get('library_dirs')
+ if library_dirs is not None:
+ output.write('link_directories(')
+ for library_dir in library_dirs:
+ output.write(' ')
+ output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
+ output.write('\n')
+ output.write(')\n')
+
+ output.write(cmake_target_type.command)
+ output.write('(')
+ output.write(cmake_target_name)
+
+ if cmake_target_type.modifier is not None:
+ output.write(' ')
+ output.write(cmake_target_type.modifier)
+
+ if s_sources_name:
+ WriteVariable(output, s_sources_name, ' ')
+ if c_sources_name:
+ WriteVariable(output, c_sources_name, ' ')
+ if cxx_sources_name:
+ WriteVariable(output, cxx_sources_name, ' ')
+ if linkable_sources_name:
+ WriteVariable(output, linkable_sources_name, ' ')
+ if other_sources_name:
+ WriteVariable(output, other_sources_name, ' ')
+ if dummy_sources_name:
+ WriteVariable(output, dummy_sources_name, ' ')
+
+ output.write(')\n')
+
+ # Let CMake know if the 'all' target should depend on this target.
+ exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+ else 'FALSE')
+ SetTargetProperty(output, cmake_target_name,
+ 'EXCLUDE_FROM_ALL', exclude_from_all)
+ for extra_target_name in extra_deps:
+ SetTargetProperty(output, extra_target_name,
+ 'EXCLUDE_FROM_ALL', exclude_from_all)
+
+ # Output name and location.
+ if target_type != 'none':
+ # Link as 'C' if there are no other files
+ if not c_sources and not cxx_sources:
+ SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
+ # Mark uncompiled sources as uncompiled.
+ if other_sources_name:
+ output.write('set_source_files_properties(')
+ WriteVariable(output, other_sources_name, '')
+ output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+
+ # Mark object sources as linkable.
+ if linkable_sources_name:
+ output.write('set_source_files_properties(')
+      WriteVariable(output, linkable_sources_name, '')
+ output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
+
+ # Output directory
+ target_output_directory = spec.get('product_dir')
+ if target_output_directory is None:
+ if target_type in ('executable', 'loadable_module'):
+ target_output_directory = generator_default_variables['PRODUCT_DIR']
+ elif target_type == 'shared_library':
+ target_output_directory = '${builddir}/lib.${TOOLSET}'
+ elif spec.get('standalone_static_library', False):
+ target_output_directory = generator_default_variables['PRODUCT_DIR']
+ else:
+ base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
+ options.toplevel_dir)
+ target_output_directory = '${obj}.${TOOLSET}'
+ target_output_directory = (
+ os.path.join(target_output_directory, base_path))
+
+ cmake_target_output_directory = NormjoinPathForceCMakeSource(
+ path_from_cmakelists_to_gyp,
+ target_output_directory)
+ SetTargetProperty(output,
+ cmake_target_name,
+ cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
+ cmake_target_output_directory)
+
+ # Output name
+ default_product_prefix = ''
+ default_product_name = target_name
+ default_product_ext = ''
+ if target_type == 'static_library':
+ static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
+ default_product_name = RemovePrefix(default_product_name,
+ static_library_prefix)
+ default_product_prefix = static_library_prefix
+ default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
+
+ elif target_type in ('loadable_module', 'shared_library'):
+ shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
+ default_product_name = RemovePrefix(default_product_name,
+ shared_library_prefix)
+ default_product_prefix = shared_library_prefix
+ default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
+
+ elif target_type != 'executable':
+    print ('ERROR: What output file should be generated? '
+           'type %s target %s' % (target_type, target_name))
+
+ product_prefix = spec.get('product_prefix', default_product_prefix)
+ product_name = spec.get('product_name', default_product_name)
+ product_ext = spec.get('product_extension')
+ if product_ext:
+ product_ext = '.' + product_ext
+ else:
+ product_ext = default_product_ext
+
+ SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
+ SetTargetProperty(output, cmake_target_name,
+ cmake_target_type.property_modifier + '_OUTPUT_NAME',
+ product_name)
+ SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
+
+ # Make the output of this target referenceable as a source.
+ cmake_target_output_basename = product_prefix + product_name + product_ext
+ cmake_target_output = os.path.join(cmake_target_output_directory,
+ cmake_target_output_basename)
+ SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
+
+ # Includes
+ includes = config.get('include_dirs')
+ if includes:
+ # This (target include directories) is what requires CMake 2.8.8
+ includes_name = cmake_target_name + '__include_dirs'
+ SetVariableList(output, includes_name,
+ [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+ for include in includes])
+ output.write('set_property(TARGET ')
+ output.write(cmake_target_name)
+ output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
+ WriteVariable(output, includes_name, '')
+ output.write(')\n')
+
+ # Defines
+ defines = config.get('defines')
+ if defines is not None:
+ SetTargetProperty(output,
+ cmake_target_name,
+ 'COMPILE_DEFINITIONS',
+ defines,
+ ';')
+
+ # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+ # CMake currently does not have target C and CXX flags.
+ # So, instead of doing...
+
+ # cflags_c = config.get('cflags_c')
+ # if cflags_c is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'C_COMPILE_FLAGS', cflags_c, ' ')
+
+ # cflags_cc = config.get('cflags_cc')
+ # if cflags_cc is not None:
+ # SetTargetProperty(output, cmake_target_name,
+ # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+ # Instead we must...
+ cflags = config.get('cflags', [])
+ cflags_c = config.get('cflags_c', [])
+ cflags_cxx = config.get('cflags_cc', [])
+ if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
+
+ elif c_sources and not (s_sources or cxx_sources):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+ elif cxx_sources and not (s_sources or c_sources):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+ else:
+    # TODO: This is broken; one cannot generally set properties on files,
+ # as other targets may require different properties on the same files.
+ if s_sources and cflags:
+ SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
+
+ if c_sources and (cflags or cflags_c):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_c)
+ SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+ if cxx_sources and (cflags or cflags_cxx):
+ flags = []
+ flags.extend(cflags)
+ flags.extend(cflags_cxx)
+ SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
+
+ # Linker flags
+ ldflags = config.get('ldflags')
+ if ldflags is not None:
+ SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+
+ # Note on Dependencies and Libraries:
+ # CMake wants to handle link order, resolving the link line up front.
+ # Gyp does not retain or enforce specifying enough information to do so.
+ # So do as other gyp generators and use --start-group and --end-group.
+ # Give CMake as little information as possible so that it doesn't mess it up.
+
+ # Dependencies
+ rawDeps = spec.get('dependencies', [])
+
+ static_deps = []
+ shared_deps = []
+ other_deps = []
+ for rawDep in rawDeps:
+ dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+ dep_spec = target_dicts.get(rawDep, {})
+ dep_target_type = dep_spec.get('type', None)
+
+ if dep_target_type == 'static_library':
+ static_deps.append(dep_cmake_name)
+ elif dep_target_type == 'shared_library':
+ shared_deps.append(dep_cmake_name)
+ else:
+ other_deps.append(dep_cmake_name)
+
+  # Ensure all external dependencies are complete before internal dependencies;
+  # extra_deps currently only depend on their own deps, so they would otherwise
+  # run too early.
+ if static_deps or shared_deps or other_deps:
+ for extra_dep in extra_deps:
+ output.write('add_dependencies(')
+ output.write(extra_dep)
+ output.write('\n')
+ for deps in (static_deps, shared_deps, other_deps):
+ for dep in gyp.common.uniquer(deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ output.write(')\n')
+
+ linkable = target_type in ('executable', 'loadable_module', 'shared_library')
+ other_deps.extend(extra_deps)
+ if other_deps or (not linkable and (static_deps or shared_deps)):
+ output.write('add_dependencies(')
+ output.write(cmake_target_name)
+ output.write('\n')
+ for dep in gyp.common.uniquer(other_deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ if not linkable:
+ for deps in (static_deps, shared_deps):
+ for lib_dep in gyp.common.uniquer(deps):
+ output.write(' ')
+ output.write(lib_dep)
+ output.write('\n')
+ output.write(')\n')
+
+ # Libraries
+ if linkable:
+ external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
+ if external_libs or static_deps or shared_deps:
+ output.write('target_link_libraries(')
+ output.write(cmake_target_name)
+ output.write('\n')
+ if static_deps:
+ write_group = circular_libs and len(static_deps) > 1
+ if write_group:
+ output.write('-Wl,--start-group\n')
+ for dep in gyp.common.uniquer(static_deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ if write_group:
+ output.write('-Wl,--end-group\n')
+ if shared_deps:
+ for dep in gyp.common.uniquer(shared_deps):
+ output.write(' ')
+ output.write(dep)
+ output.write('\n')
+ if external_libs:
+ for lib in gyp.common.uniquer(external_libs):
+ output.write(' ')
+ output.write(lib)
+ output.write('\n')
+
+ output.write(')\n')
+
+ UnsetVariable(output, 'TOOLSET')
+ UnsetVariable(output, 'TARGET')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data,
+ params, config_to_use):
+ options = params['options']
+ generator_flags = params['generator_flags']
+
+ # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+ # Each Gyp configuration creates a different CMakeLists.txt file
+ # to avoid incompatibilities between Gyp and CMake configurations.
+ generator_dir = os.path.relpath(options.generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(os.path.join(generator_dir,
+ output_dir,
+ config_to_use))
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+ output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
+ gyp.common.EnsureDirExists(output_file)
+
+ output = open(output_file, 'w')
+ output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+ output.write('cmake_policy(VERSION 2.8.8)\n')
+
+ gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+ output.write('project(')
+ output.write(project_target)
+ output.write(')\n')
+
+ SetVariable(output, 'configuration', config_to_use)
+
+ ar = None
+ cc = None
+ cxx = None
+
+ make_global_settings = data[gyp_file].get('make_global_settings', [])
+ build_to_top = gyp.common.InvertRelativePath(build_dir,
+ options.toplevel_dir)
+ for key, value in make_global_settings:
+ if key == 'AR':
+ ar = os.path.join(build_to_top, value)
+ if key == 'CC':
+ cc = os.path.join(build_to_top, value)
+ if key == 'CXX':
+ cxx = os.path.join(build_to_top, value)
+
+ ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
+ cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
+ cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
+
+ if ar:
+ SetVariable(output, 'CMAKE_AR', ar)
+ if cc:
+ SetVariable(output, 'CMAKE_C_COMPILER', cc)
+ if cxx:
+ SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
+
+ # The following appears to be as-yet undocumented.
+ # http://public.kitware.com/Bug/view.php?id=8392
+ output.write('enable_language(ASM)\n')
+ # ASM-ATT does not support .S files.
+ # output.write('enable_language(ASM-ATT)\n')
+
+ if cc:
+ SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
+
+ SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
+ SetVariable(output, 'obj', '${builddir}/obj')
+ output.write('\n')
+
+ # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+ # CMake by default names the object resulting from foo.c to be foo.c.o.
+ # Gyp traditionally names the object resulting from foo.c foo.o.
+ # This should be irrelevant, but some targets extract .o files from .a
+ # and depend on the name of the extracted .o files.
+ output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
+ output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
+ output.write('\n')
+
+ # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
+ # resulting in 'Argument list too long' errors.
+ output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
+ output.write('\n')
+
+ namer = CMakeNamer(target_list)
+
+ # The list of targets upon which the 'all' target should depend.
+  # CMake has its own implicit 'all' target; one is not created explicitly.
+ all_qualified_targets = set()
+ for build_file in params['build_files']:
+ for qualified_target in gyp.common.AllTargets(target_list,
+ target_dicts,
+ os.path.normpath(build_file)):
+ all_qualified_targets.add(qualified_target)
+
+ for qualified_target in target_list:
+ WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+ options, generator_flags, all_qualified_targets, output)
+
+ output.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ generator_flags = params['generator_flags']
+
+ # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
+ generator_dir = os.path.relpath(options.generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+
+ for config_name in configurations:
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(os.path.join(generator_dir,
+ output_dir,
+ config_name))
+ arguments = ['cmake', '-G', 'Ninja']
+ print 'Generating [%s]: %s' % (config_name, arguments)
+ subprocess.check_call(arguments, cwd=build_dir)
+
+ arguments = ['ninja', '-C', build_dir]
+ print 'Building [%s]: %s' % (config_name, arguments)
+ subprocess.check_call(arguments)
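+
+# As an illustration (assuming the default 'out' output_dir and a config
+# named 'Debug'), each loop iteration above amounts to running:
+#   (cd out/Debug && cmake -G Ninja)
+#   ninja -C out/Debug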
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ target_list, target_dicts, data, params, config_name = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ user_config = params.get('generator_flags', {}).get('config', None)
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data,
+ params, user_config)
+ else:
+ config_names = target_dicts[target_list[0]]['configurations'].keys()
+ if params['parallel']:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append((target_list, target_dicts, data,
+ params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+      except KeyboardInterrupt as e:
+ pool.terminate()
+ raise e
+ else:
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data,
+ params, config_name)
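+
+
+# As an illustration (the invocation below is hypothetical, not taken from
+# this file): the 'config' generator flag checked above restricts generation
+# to a single configuration, e.g.:
+#   gyp -f cmake -G config=Debug foo.gyp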
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
new file mode 100644
index 0000000000..160eafe2ef
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -0,0 +1,99 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import os
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import json
+import sys
+
+generator_supports_multiple_toolsets = True
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_filelist_paths = {
+}
+
+generator_default_variables = {
+}
+for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
+ 'LIB_DIR', 'SHARED_LIB_DIR']:
+ # Some gyp steps fail if these are empty(!).
+ generator_default_variables[dirname] = 'dir'
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+
+def CalculateVariables(default_variables, params):
+ generator_flags = params.get('generator_flags', {})
+ for key, val in generator_flags.items():
+ default_variables.setdefault(key, val)
+ default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+ flavor = gyp.common.GetFlavor(params)
+  if flavor == 'win':
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get('generator_flags', {})
+ if generator_flags.get('adjust_static_libraries', False):
+ global generator_wants_static_library_dependencies_adjusted
+ generator_wants_static_library_dependencies_adjusted = True
+
+ toplevel = params['options'].toplevel_dir
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = generator_flags.get('output_dir', 'out')
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, generator_dir, output_dir, 'gypfiles'))
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Map of target -> list of targets it depends on.
+ edges = {}
+
+ # Queue of targets to visit.
+ targets_to_visit = target_list[:]
+
+  while targets_to_visit:
+ target = targets_to_visit.pop()
+ if target in edges:
+ continue
+ edges[target] = []
+
+ for dep in target_dicts[target].get('dependencies', []):
+ edges[target].append(dep)
+ targets_to_visit.append(dep)
+
+ try:
+ filepath = params['generator_flags']['output_dir']
+ except KeyError:
+ filepath = '.'
+ filename = os.path.join(filepath, 'dump.json')
+ f = open(filename, 'w')
+ json.dump(edges, f)
+ f.close()
+ print 'Wrote json to %s.' % filename
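+
+
+# A minimal consumer sketch (illustrative, not part of the generator; the
+# example target name is hypothetical). dump.json maps each fully-qualified
+# target to its direct dependencies, so the transitive closure is one walk:
+def _example_transitive_deps(edges, target, seen=None):
+  """Return the set of all transitive dependencies of |target| in |edges|."""
+  if seen is None:
+    seen = set()
+  for dep in edges.get(target, []):
+    if dep not in seen:
+      seen.add(dep)
+      _example_transitive_deps(edges, dep, seen)
+  return seen
+# e.g.: _example_transitive_deps(json.load(open('dump.json')),
+#                                'src/app.gyp:app#target')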
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
new file mode 100644
index 0000000000..f4c7c12f59
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -0,0 +1,424 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""GYP backend that generates Eclipse CDT settings files.
+
+This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
+files that can be imported into an Eclipse CDT project. The XML file contains a
+list of include paths and symbols (i.e. defines).
+
+Because a full .cproject definition is not created by this generator, it's not
+possible to properly define the include dirs and symbols for each file
+individually. Instead, one set of includes/symbols is generated for the entire
+project. This works fairly well (and is a vast improvement in general), but may
+still result in a few indexer issues here and there.
+
+This generator has no automated tests, so expect it to be broken.
+"""
+
+from xml.sax.saxutils import escape
+import os.path
+import subprocess
+import gyp
+import gyp.common
+import gyp.msvs_emulation
+import shlex
+import xml.etree.cElementTree as ET
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {
+}
+
+for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
+ # Some gyp steps fail if these are empty(!), so we convert them to variables
+ generator_default_variables[dirname] = '$' + dirname
+
+for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
+ 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
+ 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
+ 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
+ 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
+ 'CONFIGURATION_NAME']:
+ generator_default_variables[unused] = ''
+
+# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
+# part of the path when dealing with generated headers. This value will be
+# replaced dynamically for each configuration.
+generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
+ '$SHARED_INTERMEDIATE_DIR'
+
+
+def CalculateVariables(default_variables, params):
+ generator_flags = params.get('generator_flags', {})
+ for key, val in generator_flags.items():
+ default_variables.setdefault(key, val)
+ flavor = gyp.common.GetFlavor(params)
+ default_variables.setdefault('OS', flavor)
+ if flavor == 'win':
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Eclipse generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get('generator_flags', {})
+ if generator_flags.get('adjust_static_libraries', False):
+ global generator_wants_static_library_dependencies_adjusted
+ generator_wants_static_library_dependencies_adjusted = True
+
+
+def GetAllIncludeDirectories(target_list, target_dicts,
+ shared_intermediate_dirs, config_name, params,
+ compiler_path):
+ """Calculate the set of include directories to be used.
+
+ Returns:
+    A list including all the include_dirs specified for every target, followed
+ by any include directories that were added as cflag compiler options.
+ """
+
+ gyp_includes_set = set()
+ compiler_includes_list = []
+
+ # Find compiler's default include dirs.
+ if compiler_path:
+ command = shlex.split(compiler_path)
+ command.extend(['-E', '-xc++', '-v', '-'])
+ proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output = proc.communicate()[1]
+ # Extract the list of include dirs from the output, which has this format:
+ # ...
+ # #include "..." search starts here:
+ # #include <...> search starts here:
+ # /usr/include/c++/4.6
+ # /usr/local/include
+ # End of search list.
+ # ...
+ in_include_list = False
+ for line in output.splitlines():
+ if line.startswith('#include'):
+ in_include_list = True
+ continue
+ if line.startswith('End of search list.'):
+ break
+ if in_include_list:
+ include_dir = line.strip()
+ if include_dir not in compiler_includes_list:
+ compiler_includes_list.append(include_dir)
+
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'win':
+ generator_flags = params.get('generator_flags', {})
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ if config_name in target['configurations']:
+ config = target['configurations'][config_name]
+
+ # Look for any include dirs that were explicitly added via cflags. This
+ # may be done in gyp files to force certain includes to come at the end.
+ # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
+ # remove this.
+ if flavor == 'win':
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ cflags = msvs_settings.GetCflags(config_name)
+ else:
+ cflags = config['cflags']
+ for cflag in cflags:
+ if cflag.startswith('-I'):
+ include_dir = cflag[2:]
+ if include_dir not in compiler_includes_list:
+ compiler_includes_list.append(include_dir)
+
+ # Find standard gyp include dirs.
+      if 'include_dirs' in config:
+ include_dirs = config['include_dirs']
+ for shared_intermediate_dir in shared_intermediate_dirs:
+ for include_dir in include_dirs:
+ include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
+ shared_intermediate_dir)
+ if not os.path.isabs(include_dir):
+ base_dir = os.path.dirname(target_name)
+
+ include_dir = base_dir + '/' + include_dir
+ include_dir = os.path.abspath(include_dir)
+
+ gyp_includes_set.add(include_dir)
+
+ # Generate a list that has all the include dirs.
+ all_includes_list = list(gyp_includes_set)
+ all_includes_list.sort()
+ for compiler_include in compiler_includes_list:
+    if compiler_include not in gyp_includes_set:
+ all_includes_list.append(compiler_include)
+
+ # All done.
+ return all_includes_list
+
+
+def GetCompilerPath(target_list, data, options):
+ """Determine a command that can be used to invoke the compiler.
+
+ Returns:
+ If this is a gyp project that has explicit make settings, try to determine
+ the compiler from that. Otherwise, see if a compiler was specified via the
+ CC_target environment variable.
+ """
+ # First, see if the compiler is configured in make's settings.
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings_dict = data[build_file].get('make_global_settings', {})
+ for key, value in make_global_settings_dict:
+ if key in ['CC', 'CXX']:
+ return os.path.join(options.toplevel_dir, value)
+
+ # Check to see if the compiler was specified as an environment variable.
+ for key in ['CC_target', 'CC', 'CXX']:
+ compiler = os.environ.get(key)
+ if compiler:
+ return compiler
+
+ return 'gcc'
+
+
+def GetAllDefines(target_list, target_dicts, data, config_name, params,
+ compiler_path):
+ """Calculate the defines for a project.
+
+ Returns:
+    A dict that includes explicit defines declared in gyp files along with
+ the default defines that the compiler uses.
+ """
+
+ # Get defines declared in the gyp files.
+ all_defines = {}
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'win':
+ generator_flags = params.get('generator_flags', {})
+ for target_name in target_list:
+ target = target_dicts[target_name]
+
+ if flavor == 'win':
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ extra_defines = msvs_settings.GetComputedDefines(config_name)
+ else:
+ extra_defines = []
+ if config_name in target['configurations']:
+ config = target['configurations'][config_name]
+ target_defines = config['defines']
+ else:
+ target_defines = []
+ for define in target_defines + extra_defines:
+ split_define = define.split('=', 1)
+ if len(split_define) == 1:
+ split_define.append('1')
+ if split_define[0].strip() in all_defines:
+ # Already defined
+ continue
+ all_defines[split_define[0].strip()] = split_define[1].strip()
+ # Get default compiler defines (if possible).
+ if flavor == 'win':
+ return all_defines # Default defines already processed in the loop above.
+ if compiler_path:
+ command = shlex.split(compiler_path)
+ command.extend(['-E', '-dM', '-'])
+ cpp_proc = subprocess.Popen(args=command, cwd='.',
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ cpp_output = cpp_proc.communicate()[0]
+ cpp_lines = cpp_output.split('\n')
+ for cpp_line in cpp_lines:
+ if not cpp_line.strip():
+ continue
+ cpp_line_parts = cpp_line.split(' ', 2)
+ key = cpp_line_parts[1]
+ if len(cpp_line_parts) >= 3:
+ val = cpp_line_parts[2]
+ else:
+ val = '1'
+ all_defines[key] = val
+
+ return all_defines
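+
+# For reference: each line of the '-dM' output parsed above has the form
+# '#define KEY VALUE'. A hypothetical line '#define __STDC__ 1' splits into
+# ['#define', '__STDC__', '1'] and is stored as all_defines['__STDC__'] = '1';
+# a bare '#define FOO' line falls back to the value '1'.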
+
+
+def WriteIncludePaths(out, eclipse_langs, include_dirs):
+ """Write the includes section of a CDT settings export file."""
+
+ out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
+ 'settingswizards.IncludePaths">\n')
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+ for include_dir in include_dirs:
+ out.write(' <includepath workspace_path="false">%s</includepath>\n' %
+ include_dir)
+ out.write(' </language>\n')
+ out.write(' </section>\n')
+
+
+def WriteMacros(out, eclipse_langs, defines):
+ """Write the macros section of a CDT settings export file."""
+
+ out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
+ 'settingswizards.Macros">\n')
+ out.write(' <language name="holder for library settings"></language>\n')
+ for lang in eclipse_langs:
+ out.write(' <language name="%s">\n' % lang)
+ for key in sorted(defines.iterkeys()):
+ out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
+ (escape(key), escape(defines[key])))
+ out.write(' </language>\n')
+ out.write(' </section>\n')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name):
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
+ config_name)
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+ # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
+ # SHARED_INTERMEDIATE_DIR. Include both possible locations.
+ shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
+ os.path.join(toplevel_build, 'gen')]
+
+ GenerateCdtSettingsFile(target_list,
+ target_dicts,
+ data,
+ params,
+ config_name,
+ os.path.join(toplevel_build,
+ 'eclipse-cdt-settings.xml'),
+ options,
+ shared_intermediate_dirs)
+ GenerateClasspathFile(target_list,
+ target_dicts,
+ options.toplevel_dir,
+ toplevel_build,
+ os.path.join(toplevel_build,
+ 'eclipse-classpath.xml'))
+
+
+def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
+ config_name, out_name, options,
+ shared_intermediate_dirs):
+ gyp.common.EnsureDirExists(out_name)
+ with open(out_name, 'w') as out:
+ out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
+ out.write('<cdtprojectproperties>\n')
+
+ eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
+ 'GNU C++', 'GNU C', 'Assembly']
+ compiler_path = GetCompilerPath(target_list, data, options)
+ include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
+ shared_intermediate_dirs,
+ config_name, params, compiler_path)
+ WriteIncludePaths(out, eclipse_langs, include_dirs)
+ defines = GetAllDefines(target_list, target_dicts, data, config_name,
+ params, compiler_path)
+ WriteMacros(out, eclipse_langs, defines)
+
+ out.write('</cdtprojectproperties>\n')
+
+
+def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
+ toplevel_build, out_name):
+ '''Generates a classpath file suitable for symbol navigation and code
+ completion of Java code (such as in Android projects) by finding all
+ .java and .jar files used as action inputs.'''
+ gyp.common.EnsureDirExists(out_name)
+ result = ET.Element('classpath')
+
+ def AddElements(kind, paths):
+ # First, we need to normalize the paths so they are all relative to the
+ # toplevel dir.
+ rel_paths = set()
+ for path in paths:
+ if os.path.isabs(path):
+ rel_paths.add(os.path.relpath(path, toplevel_dir))
+ else:
+ rel_paths.add(path)
+
+ for path in sorted(rel_paths):
+ entry_element = ET.SubElement(result, 'classpathentry')
+ entry_element.set('kind', kind)
+ entry_element.set('path', path)
+
+ AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
+ AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
+  # Include the standard JRE container.
+ AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
+ # Include a dummy out folder so that Eclipse doesn't use the default /bin
+ # folder in the root of the project.
+ AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
+
+ ET.ElementTree(result).write(out_name)
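+
+# A sketch of the resulting XML (paths hypothetical; the entry kinds are the
+# 'lib', 'src', 'con' and 'output' kinds added above):
+#   <classpath>
+#     <classpathentry kind="lib" path="third_party/foo.jar" />
+#     <classpathentry kind="src" path="java/src" />
+#     <classpathentry kind="con"
+#                     path="org.eclipse.jdt.launching.JRE_CONTAINER" />
+#     <classpathentry kind="output" path="out/Debug/.eclipse-java-build" />
+#   </classpath>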
+
+
+def GetJavaJars(target_list, target_dicts, toplevel_dir):
+ '''Generates a sequence of all .jars used as inputs.'''
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get('actions', []):
+ for input_ in action['inputs']:
+ if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
+ if os.path.isabs(input_):
+ yield input_
+ else:
+ yield os.path.join(os.path.dirname(target_name), input_)
+
+
+def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
+ '''Generates a sequence of all likely java package root directories.'''
+ for target_name in target_list:
+ target = target_dicts[target_name]
+ for action in target.get('actions', []):
+ for input_ in action['inputs']:
+ if (os.path.splitext(input_)[1] == '.java' and
+ not input_.startswith('$')):
+ dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
+ input_))
+ # If there is a parent 'src' or 'java' folder, navigate up to it -
+ # these are canonical package root names in Chromium. This will
+ # break if 'src' or 'java' exists in the package structure. This
+ # could be further improved by inspecting the java file for the
+ # package name if this proves to be too fragile in practice.
+ parent_search = dir_
+ while os.path.basename(parent_search) not in ['src', 'java']:
+ parent_search, _ = os.path.split(parent_search)
+ if not parent_search or parent_search == toplevel_dir:
+ # Didn't find a known root, just return the original path
+ yield dir_
+ break
+ else:
+ yield parent_search
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate an XML settings file that can be imported into a CDT project."""
+
+ if params['options'].generator_output:
+ raise NotImplementedError("--generator_output not implemented for eclipse")
+
+ user_config = params.get('generator_flags', {}).get('config', None)
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ user_config)
+ else:
+ config_names = target_dicts[target_list[0]]['configurations'].keys()
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
new file mode 100644
index 0000000000..3efdb9966a
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypd output module
+
+This module produces gyp input as its output. Output files are given the
+.gypd extension to avoid overwriting the .gyp files that they are generated
+from. Internal references to .gyp files (such as those found in
+"dependencies" sections) are not adjusted to point to .gypd files instead;
+unlike other paths, which are relative to the .gyp or .gypd file, such paths
+are relative to the directory from which gyp was run to create the .gypd file.
+
+This generator module is intended to be a sample and a debugging aid, hence
+the "d" for "debug" in .gypd. It is useful to inspect the results of the
+various merges, expansions, and conditional evaluations performed by gyp
+and to see a representation of what would be fed to a generator module.
+
+It's not advisable to rename .gypd files produced by this module to .gyp,
+because they will have all merges, expansions, and evaluations already
+performed and the relevant constructs not present in the output; paths to
+dependencies may be wrong; and various sections that do not belong in .gyp
+files, such as "included_files" and "*_excluded", will be present.
+Output will also be stripped of comments. This is not intended to be a
+general-purpose gyp pretty-printer; for that, you probably just want to
+run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
+comments but won't do all of the other things done to this module's output.
+
+The specific formatting of the output generated by this module is subject
+to change.
+"""
+
+
+import gyp.common
+import errno
+import os
+import pprint
+
+
+# These variables should just be spit back out as variable references.
+_generator_identity_variables = [
+ 'CONFIGURATION_NAME',
+ 'EXECUTABLE_PREFIX',
+ 'EXECUTABLE_SUFFIX',
+ 'INTERMEDIATE_DIR',
+ 'LIB_DIR',
+ 'PRODUCT_DIR',
+ 'RULE_INPUT_ROOT',
+ 'RULE_INPUT_DIRNAME',
+ 'RULE_INPUT_EXT',
+ 'RULE_INPUT_NAME',
+ 'RULE_INPUT_PATH',
+ 'SHARED_INTERMEDIATE_DIR',
+ 'SHARED_LIB_DIR',
+ 'SHARED_LIB_PREFIX',
+ 'SHARED_LIB_SUFFIX',
+ 'STATIC_LIB_PREFIX',
+ 'STATIC_LIB_SUFFIX',
+]
+
+# gypd doesn't define a default value for OS like many other generator
+# modules. Specify "-D OS=whatever" on the command line to provide a value.
+generator_default_variables = {
+}
+
+# gypd supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# TODO(mark): This always uses <, which isn't right. The input module should
+# notify the generator to tell it which phase it is operating in, and this
+# module should use < for the early phase and then switch to > for the late
+# phase. Bonus points for carrying @ back into the output too.
+for v in _generator_identity_variables:
+ generator_default_variables[v] = '<(%s)' % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ output_files = {}
+ for qualified_target in target_list:
+ [input_file, target] = \
+ gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+
+ if input_file[-4:] != '.gyp':
+ continue
+ input_file_stem = input_file[:-4]
+ output_file = input_file_stem + params['options'].suffix + '.gypd'
+
+    if output_file not in output_files:
+ output_files[output_file] = input_file
+
+ for output_file, input_file in output_files.iteritems():
+ output = open(output_file, 'w')
+ pprint.pprint(data[input_file], output)
+ output.close()
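+
+
+# For example (hypothetical path): a target defined in 'foo/bar.gyp' and
+# processed with an empty --suffix has its fully merged, expanded dict
+# pretty-printed to 'foo/bar.gypd'.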
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
new file mode 100644
index 0000000000..bd405f43a9
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -0,0 +1,56 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypsh output module
+
+gypsh is a GYP shell. It's not really a generator per se. All it does is
+fire up an interactive Python session with a few local variables set to the
+variables passed to the generator. Like gypd, it's intended as a debugging
+aid, to facilitate the exploration of .gyp structures after being processed
+by the input module.
+
+The expected usage is "gyp -f gypsh -D OS=desired_os".
+"""
+
+
+import code
+import sys
+
+
+# All of this stuff about generator variables was lovingly ripped from gypd.py.
+# That module has a much better description of what's going on and why.
+_generator_identity_variables = [
+ 'EXECUTABLE_PREFIX',
+ 'EXECUTABLE_SUFFIX',
+ 'INTERMEDIATE_DIR',
+ 'PRODUCT_DIR',
+ 'RULE_INPUT_ROOT',
+ 'RULE_INPUT_DIRNAME',
+ 'RULE_INPUT_EXT',
+ 'RULE_INPUT_NAME',
+ 'RULE_INPUT_PATH',
+ 'SHARED_INTERMEDIATE_DIR',
+]
+
+generator_default_variables = {
+}
+
+for v in _generator_identity_variables:
+ generator_default_variables[v] = '<(%s)' % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ locals = {
+ 'target_list': target_list,
+ 'target_dicts': target_dicts,
+ 'data': data,
+ }
+
+ # Use a banner that looks like the stock Python one and like what
+ # code.interact uses by default, but tack on something to indicate what
+ # locals are available, and identify gypsh.
+  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % \
+           (sys.version, sys.platform, repr(sorted(locals.keys())))
+
+ code.interact(banner, local=locals)
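+
+
+# A hypothetical session (version and platform strings will vary):
+#   $ gyp -f gypsh -D OS=linux foo.gyp
+#   Python 2.7.15 (...) on linux2
+#   locals.keys() = ['data', 'target_dicts', 'target_list']
+#   gypsh
+#   >>> len(target_list)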
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
new file mode 100644
index 0000000000..64b9dd267b
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -0,0 +1,2220 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Notes:
+#
+# This is all roughly based on the Makefile system used by the Linux
+# kernel, but is a non-recursive make -- we put the entire dependency
+# graph in front of make and let it figure it out.
+#
+# The code below generates a separate .mk file for each target, but
+# all are sourced by the top-level Makefile. This means that all
+# variables in .mk-files clobber one another. Be careful to use :=
+# where appropriate for immediate evaluation, and similarly to watch
+# that you're not relying on a variable value to last between different
+# .mk files.
+#
+# TODOs:
+#
+# Global settings and utility functions are currently stuffed in the
+# toplevel Makefile. It may make sense to generate some .mk files on
+# the side to keep the files readable.
+
+import os
+import re
+import sys
+import subprocess
+import gyp
+import gyp.common
+import gyp.xcode_emulation
+from gyp.common import GetEnvironFallback
+from gyp.common import GypError
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
+ 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
+ 'PRODUCT_DIR': '$(builddir)',
+ 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
+ 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
+ 'RULE_INPUT_PATH': '$(abspath $<)',
+ 'RULE_INPUT_EXT': '$(suffix $<)',
+ 'RULE_INPUT_NAME': '$(notdir $<)',
+ 'CONFIGURATION_NAME': '$(BUILDTYPE)',
+}
+
+# Make supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# Request sorted dependencies in the order from dependents to dependencies.
+generator_wants_sorted_dependencies = False
+
+# Placates pylint.
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
+ default_variables.setdefault('SHARED_LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+ default_variables.setdefault('LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+
+ # Copy additional generator configuration data from Xcode, which is shared
+ # by the Mac Make generator.
+ import gyp.generator.xcode as xcode_generator
+ global generator_additional_non_configuration_keys
+ generator_additional_non_configuration_keys = getattr(xcode_generator,
+ 'generator_additional_non_configuration_keys', [])
+ global generator_additional_path_sections
+ generator_additional_path_sections = getattr(xcode_generator,
+ 'generator_additional_path_sections', [])
+ global generator_extra_sources_for_rules
+ generator_extra_sources_for_rules = getattr(xcode_generator,
+ 'generator_extra_sources_for_rules', [])
+    COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm': 'objcxx'})
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
+    default_variables.setdefault('SHARED_LIB_DIR',
+                                 '$(builddir)/lib.$(TOOLSET)')
+ default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
+
+
+def CalculateGeneratorInputInfo(params):
+ """Calculate the generator specific info that gets fed to input (called by
+ gyp)."""
+ generator_flags = params.get('generator_flags', {})
+ android_ndk_version = generator_flags.get('android_ndk_version', None)
+ # Android NDK requires a strict link order.
+ if android_ndk_version:
+ global generator_wants_sorted_dependencies
+ generator_wants_sorted_dependencies = True
+
+ output_dir = params['options'].generator_output or \
+ params['options'].toplevel_dir
+ builddir_name = generator_flags.get('output_dir', 'out')
+ qualified_out_dir = os.path.normpath(os.path.join(
+ output_dir, builddir_name, 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': params['options'].toplevel_dir,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+# The .d checking code below uses these functions:
+# wildcard, sort, foreach, shell, wordlist
+# wildcard can handle spaces, the rest can't.
+# Since I could find no way to make foreach work with spaces in filenames
+# correctly, the .d files have spaces replaced with another character. The .d
+# file for
+# Chromium\ Framework.framework/foo
+# is for example
+# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
+# This is the replacement character.
+SPACE_REPLACEMENT = '?'
+
+
+LINK_COMMANDS_LINUX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+
+# We support two kinds of shared objects (.so):
+# 1) shared_library, which is just bundling together many dependent libraries
+# into a link line.
+# 2) loadable_module, which is generating a module intended for dlopen().
+#
+# They differ only slightly:
+# In the former case, we want to package all dependent code into the .so.
+# In the latter case, we want to package just the API exposed by the
+# outermost module.
+# This means shared_library uses --whole-archive, while loadable_module doesn't.
+# (Note that --whole-archive is incompatible with the --start-group used in
+# normal linking.)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+"""
+
+LINK_COMMANDS_MAC = """\
+quiet_cmd_alink = LIBTOOL-STATIC $@
+cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+LINK_COMMANDS_ANDROID = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
+
+# Due to circular dependencies between libraries :(, we wrap the
+# special "figure out circular dependencies" flags around the entire
+# input list during linking.
+quiet_cmd_link = LINK($(TOOLSET)) $@
+quiet_cmd_link_host = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
+cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+# Other shared-object link notes:
+# - Set SONAME to the library filename so our binaries don't reference
+# the local, absolute paths used on the link command-line.
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
+quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+
+LINK_COMMANDS_AIX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+
+# Header of toplevel Makefile.
+# This should go into the build tree, but it's easier to keep it here for now.
+SHARED_HEADER = ("""\
+# We borrow heavily from the kernel build setup, though we are simpler since
+# we don't have Kconfig tweaking settings on us.
+
+# The implicit make rules have it looking for RCS files, among other things.
+# We instead explicitly write all the rules we care about.
+# It's even quicker (saves ~200ms) to pass -r on the command line.
+MAKEFLAGS=-r
+
+# The source directory tree.
+srcdir := %(srcdir)s
+abs_srcdir := $(abspath $(srcdir))
+
+# The name of the builddir.
+builddir_name ?= %(builddir)s
+
+# The V=1 flag on command line makes us verbosely print command lines.
+ifdef V
+ quiet=
+else
+ quiet=quiet_
+endif
+
+# Specify BUILDTYPE=Release on the command line for a release build.
+BUILDTYPE ?= %(default_configuration)s
+
+# Directory all our build output goes into.
+# Note that this must be two directories beneath src/ for unit tests to pass,
+# as they reach into the src/ directory for data with relative paths.
+builddir ?= $(builddir_name)/$(BUILDTYPE)
+abs_builddir := $(abspath $(builddir))
+depsdir := $(builddir)/.deps
+
+# Object output directory.
+obj := $(builddir)/obj
+abs_obj := $(abspath $(obj))
+
+# We build up a list of every single one of the targets so we can slurp in the
+# generated dependency rule Makefiles in one pass.
+all_deps :=
+
+%(make_global_settings)s
+
+CC.target ?= %(CC.target)s
+CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
+CXX.target ?= %(CXX.target)s
+CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
+LINK.target ?= %(LINK.target)s
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+
+# C++ apps need to be linked with g++.
+LINK ?= $(CXX.target)
+
+# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
+# to replicate this environment fallback in make as well.
+CC.host ?= %(CC.host)s
+CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
+CXX.host ?= %(CXX.host)s
+CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
+LINK.host ?= %(LINK.host)s
+LDFLAGS.host ?=
+AR.host ?= %(AR.host)s
+
+# Define a dir function that can handle spaces.
+# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
+# "leading spaces cannot appear in the text of the first argument as written.
+# These characters can be put into the argument value by variable substitution."
+empty :=
+space := $(empty) $(empty)
+
+# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
+replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
+unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
+dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
+
+# Flags to make gcc output dependency info. Note that you need to be
+# careful here to use the flags that ccache and distcc can understand.
+# We write to a dep file on the side first and then rename at the end
+# so we can't end up with a broken dep file.
+depfile = $(depsdir)/$(call replace_spaces,$@).d
+DEPFLAGS = -MMD -MF $(depfile).raw
+
+# We have to fixup the deps output in a few ways.
+# (1) the file output should mention the proper .o file.
+# ccache or distcc lose the path to the target, so we convert a rule of
+# the form:
+# foobar.o: DEP1 DEP2
+# into
+# path/to/foobar.o: DEP1 DEP2
+# (2) we want missing files not to cause us to fail to build.
+# We want to rewrite
+# foobar.o: DEP1 DEP2 \\
+# DEP3
+# to
+# DEP1:
+# DEP2:
+# DEP3:
+# so if the files are missing, they're just considered phony rules.
+# We have to do some pretty insane escaping to get those backslashes
+# and dollar signs past make, the shell, and sed at the same time.
+# Doesn't work with spaces, but that's fine: .d files have spaces in
+# their names replaced with other characters."""
+r"""
+define fixup_dep
+# The depfile may not exist if the input file didn't have any #includes.
+touch $(depfile).raw
+# Fixup path as in (1).
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
+# Add extra rules as in (2).
+# We remove slashes and replace spaces with new lines;
+# remove blank lines;
+# delete the first line and append a colon to the remaining lines.
+sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
+ grep -v '^$$' |\
+ sed -e 1d -e 's|$$|:|' \
+ >> $(depfile)
+rm $(depfile).raw
+endef
+"""
+"""
+# Command definitions:
+# - cmd_foo is the actual command to run;
+# - quiet_cmd_foo is the brief-output summary of the command.
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+%(extra_commands)s
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@"
+
+%(link_commands)s
+"""
+
+r"""
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always use
+# single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
+"""
+"""
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command. Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives when one string merely reorders the other's arguments.
+# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
+# $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain """ + SPACE_REPLACEMENT + \
+ """ instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
+ $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+# $? -- new prerequisites
+# $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds
+ @E=0;\\
+ for p in $(POSTBUILDS); do\\
+ eval $$p;\\
+ E=$$?;\\
+ if [ $$E -ne 0 ]; then\\
+ break;\\
+ fi;\\
+ done;\\
+ if [ $$E -ne 0 ]; then\\
+ rm -rf "$@";\\
+ exit $$E;\\
+ fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
+ SPACE_REPLACEMENT + """ for
+# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
+ """ characters.
+define do_cmd
+$(if $(or $(command_changed),$(prereq_changed)),
+ @$(call exact_echo, $($(quiet)cmd_$(1)))
+ @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
+ $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
+ @$(cmd_$(1))
+ @echo " $(quiet_cmd_$(1)): Finished",
+ @$(cmd_$(1))
+ )
+ @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
+ @$(if $(2),$(fixup_dep))
+ $(if $(and $(3), $(POSTBUILDS)),
+ $(call do_postbuilds)
+ )
+)
+endef
+
+# Declare the "%(default_target)s" target first so it is the default,
+# even though we don't have the deps yet.
+.PHONY: %(default_target)s
+%(default_target)s:
+
+# make looks for ways to re-generate included makefiles, but in our case, we
+# don't have a direct way. Explicitly telling make that it has nothing to do
+# for them makes it go faster.
+%%.d: ;
+
+# Use FORCE_DO_CMD to force a target to run. Should be coupled with
+# do_cmd.
+.PHONY: FORCE_DO_CMD
+FORCE_DO_CMD:
+
+""")
+
+SHARED_HEADER_MAC_COMMANDS = """
+quiet_cmd_objc = CXX($(TOOLSET)) $@
+cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+quiet_cmd_objcxx = CXX($(TOOLSET)) $@
+cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# Commands for precompiled header files.
+quiet_cmd_pch_c = CXX($(TOOLSET)) $@
+cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
+cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
+quiet_cmd_pch_m = CXX($(TOOLSET)) $@
+cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
+quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
+cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
+
+# gyp-mac-tool is written next to the root Makefile by gyp.
+# Use $(4) for the command, since $(2) and $(3) are already used as flags by
+# do_cmd.
+quiet_cmd_mac_tool = MACTOOL $(4) $<
+cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
+
+quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
+cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
+
+quiet_cmd_infoplist = INFOPLIST $@
+cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
+"""
+
+
+def WriteRootHeaderSuffixRules(writer):
+ extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
+
+ writer.write('# Suffix rules, putting all outputs into $(obj).\n')
+ for ext in extensions:
+ writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext)
+ writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
+
+ writer.write('\n# Try building from generated source, too.\n')
+ for ext in extensions:
+ writer.write(
+ '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext)
+ writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
+ writer.write('\n')
+ for ext in extensions:
+ writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext)
+ writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
+ writer.write('\n')
+
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
+# Suffix rules, putting all outputs into $(obj).
+""")
+
+
+SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
+# Try building from generated source, too.
+""")
+
+
+SHARED_FOOTER = """\
+# "all" is a concatenation of the "all" targets from all the included
+# sub-makefiles. This is just here to clarify.
+all:
+
+# Add in dependency-tracking rules. $(all_deps) is the list of every single
+# target in our tree. Only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+ include $(d_files)
+endif
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+# Maps every compilable file extension to the do_cmd that compiles it.
+COMPILABLE_EXTENSIONS = {
+ '.c': 'cc',
+ '.cc': 'cxx',
+ '.cpp': 'cxx',
+ '.cxx': 'cxx',
+ '.s': 'cc',
+ '.S': 'cc',
+}
+
+def Compilable(filename):
+ """Return true if the file is compilable (should be in OBJS)."""
+ for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS):
+ if res:
+ return True
+ return False
+
+
+def Linkable(filename):
+ """Return true if the file is linkable (should be on the link line)."""
+ return filename.endswith('.o')
+
+
+def Target(filename):
+ """Translate a compilable filename to its .o target."""
+ return os.path.splitext(filename)[0] + '.o'
+
+
+def EscapeShellArgument(s):
+ """Quotes an argument so that it will be interpreted literally by a POSIX
+ shell. Taken from
+ http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+ """
+ return "'" + s.replace("'", "'\\''") + "'"
+
+
+def EscapeMakeVariableExpansion(s):
+ """Make has its own variable expansion syntax using $. We must escape it for
+  the string to be interpreted literally."""
+ return s.replace('$', '$$')
+
+
+def EscapeCppDefine(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = EscapeShellArgument(s)
+ s = EscapeMakeVariableExpansion(s)
+  # '#' characters must be escaped, even when embedded in a string, else Make
+  # will treat them as the start of a comment.
+ return s.replace('#', r'\#')
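+
+
+# Worked examples for the escaping helpers above (inputs hypothetical):
+#   EscapeShellArgument("it's")          -> 'it'\''s'
+#   EscapeMakeVariableExpansion('$HOME') -> $$HOME
+#   EscapeCppDefine('A#B$C')             -> 'A\#B$$C'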
+
+
+def QuoteIfNecessary(string):
+ """TODO: Should this ideally be replaced with one or more of the above
+ functions?"""
+ if '"' in string:
+ string = '"' + string.replace('"', '\\"') + '"'
+ return string
+
+
+def StringToMakefileVariable(string):
+ """Convert a string to a value that is acceptable as a make variable name."""
+ return re.sub('[^a-zA-Z0-9_]', '_', string)
+
+
+srcdir_prefix = ''
+def Sourceify(path):
+ """Convert a path to its source directory form."""
+ if '$(' in path:
+ return path
+ if os.path.isabs(path):
+ return path
+ return srcdir_prefix + path
+
+
+def QuoteSpaces(s, quote=r'\ '):
+ return s.replace(' ', quote)
+
+
+# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
+def _ValidateSourcesForOSX(spec, all_sources):
+ """Makes sure if duplicate basenames are not specified in the source list.
+
+ Arguments:
+    spec: The target dictionary containing the properties of the target.
+    all_sources: A list of the target's source file paths.
+  """
+ if spec.get('type', None) != 'static_library':
+ return
+
+ basenames = {}
+ for source in all_sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.iteritems():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ spec['target_name'] + error + 'libtool on OS X will generate' +
+ ' warnings for them.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
+# Map from qualified target to path to output.
+target_outputs = {}
+# Map from qualified target to any linkable output. A subset
+# of target_outputs. E.g. when mybinary depends on liba, we want to
+# include liba in the linker line; when otherbinary depends on
+# mybinary, we just want to build mybinary first.
+target_link_deps = {}
+
+
+class MakefileWriter(object):
+ """MakefileWriter packages up the writing of one target-specific foobar.mk.
+
+  Its only real entry point is Write(); the class mostly provides namespacing.
+ """
+
+ def __init__(self, generator_flags, flavor):
+ self.generator_flags = generator_flags
+ self.flavor = flavor
+
+ self.suffix_rules_srcdir = {}
+ self.suffix_rules_objdir1 = {}
+ self.suffix_rules_objdir2 = {}
+
+ # Generate suffix rules for all compilable extensions.
+ for ext in COMPILABLE_EXTENSIONS.keys():
+ # Suffix rules for source folder.
+ self.suffix_rules_srcdir.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+
+ # Suffix rules for generated source files.
+ self.suffix_rules_objdir1.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+ self.suffix_rules_objdir2.update({ext: ("""\
+$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
+ @$(call do_cmd,%s,1)
+""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
+
+
+ def Write(self, qualified_target, base_path, output_filename, spec, configs,
+ part_of_all):
+ """The main entry point: writes a .mk file for a single target.
+
+ Arguments:
+ qualified_target: target we're generating
+ base_path: path relative to source root we're building in, used to resolve
+ target-relative paths
+ output_filename: output .mk file name to write
+ spec, configs: gyp info
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ gyp.common.EnsureDirExists(output_filename)
+
+ self.fp = open(output_filename, 'w')
+
+ self.fp.write(header)
+
+ self.qualified_target = qualified_target
+ self.path = base_path
+ self.target = spec['target_name']
+ self.type = spec['type']
+ self.toolset = spec['toolset']
+
+ self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+ if self.flavor == 'mac':
+ self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ else:
+ self.xcode_settings = None
+
+ deps, link_deps = self.ComputeDeps(spec)
+
+ # Some of the generation below can add extra output, sources, or
+ # link dependencies. All of the out params of the functions that
+ # follow use names like extra_foo.
+ extra_outputs = []
+ extra_sources = []
+ extra_link_deps = []
+ extra_mac_bundle_resources = []
+ mac_bundle_deps = []
+
+ if self.is_mac_bundle:
+ self.output = self.ComputeMacBundleOutput(spec)
+ self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
+ else:
+ self.output = self.output_binary = self.ComputeOutput(spec)
+
+ self.is_standalone_static_library = bool(
+ spec.get('standalone_static_library', 0))
+ self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
+ 'shared_library')
+ if (self.is_standalone_static_library or
+ self.type in self._INSTALLABLE_TARGETS):
+ self.alias = os.path.basename(self.output)
+ install_path = self._InstallableTargetInstallPath()
+ else:
+ self.alias = self.output
+ install_path = self.output
+
+ self.WriteLn("TOOLSET := " + self.toolset)
+ self.WriteLn("TARGET := " + self.target)
+
+ # Actions must come first, since they can generate more OBJs for use below.
+ if 'actions' in spec:
+ self.WriteActions(spec['actions'], extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all)
+
+    # Rules must come early, like actions.
+ if 'rules' in spec:
+ self.WriteRules(spec['rules'], extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all)
+
+ if 'copies' in spec:
+ self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
+
+ # Bundle resources.
+ if self.is_mac_bundle:
+ all_mac_bundle_resources = (
+ spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
+ self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
+ self.WriteMacInfoPlist(mac_bundle_deps)
+
+ # Sources.
+ all_sources = spec.get('sources', []) + extra_sources
+ if all_sources:
+ if self.flavor == 'mac':
+ # libtool on OS X generates warnings for duplicate basenames in the same
+ # target.
+ _ValidateSourcesForOSX(spec, all_sources)
+ self.WriteSources(
+ configs, deps, all_sources, extra_outputs,
+ extra_link_deps, part_of_all,
+ gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
+ self.Pchify))
+ sources = filter(Compilable, all_sources)
+ if sources:
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
+ extensions = set([os.path.splitext(s)[1] for s in sources])
+ for ext in extensions:
+ if ext in self.suffix_rules_srcdir:
+ self.WriteLn(self.suffix_rules_srcdir[ext])
+ self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
+ for ext in extensions:
+ if ext in self.suffix_rules_objdir1:
+ self.WriteLn(self.suffix_rules_objdir1[ext])
+ for ext in extensions:
+ if ext in self.suffix_rules_objdir2:
+ self.WriteLn(self.suffix_rules_objdir2[ext])
+ self.WriteLn('# End of this set of suffix rules')
+
+ # Add dependency from bundle to bundle binary.
+ if self.is_mac_bundle:
+ mac_bundle_deps.append(self.output_binary)
+
+ self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
+ mac_bundle_deps, extra_outputs, part_of_all)
+
+ # Update global list of target outputs, used in dependency tracking.
+ target_outputs[qualified_target] = install_path
+
+ # Update global list of link dependencies.
+ if self.type in ('static_library', 'shared_library'):
+ target_link_deps[qualified_target] = self.output_binary
+
+ # Currently all versions have the same effect, but in the future the
+ # behavior could differ.
+ if self.generator_flags.get('android_ndk_version', None):
+ self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
+
+ self.fp.close()
+
+
+ def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
+ """Write a "sub-project" Makefile.
+
+ This is a small wrapper Makefile that calls the top-level Makefile to build
+ the targets from a single gyp file (i.e. a sub-project).
+
+ Arguments:
+ output_filename: sub-project Makefile name to write
+ makefile_path: path to the top-level Makefile
+ targets: list of "all" targets for this sub-project
+ build_dir: build output directory, relative to the sub-project
+ """
+ gyp.common.EnsureDirExists(output_filename)
+ self.fp = open(output_filename, 'w')
+ self.fp.write(header)
+ # For consistency with other builders, put sub-project build output in the
+ # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
+ self.WriteLn('export builddir_name ?= %s' %
+ os.path.join(os.path.dirname(output_filename), build_dir))
+ self.WriteLn('.PHONY: all')
+ self.WriteLn('all:')
+ if makefile_path:
+ makefile_path = ' -C ' + makefile_path
+ self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
+ self.fp.close()
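+ # Illustrative output for a hypothetical call with makefile_path '../..',
+ # targets ['foo'], and build_dir 'out' under 'sub/' (names made up):
+ #   export builddir_name ?= sub/out
+ #   .PHONY: all
+ #   all:
+ #   	$(MAKE) -C ../.. foo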
+
+
+ def WriteActions(self, actions, extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all):
+ """Write Makefile code for any 'actions' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ actions (used to make other pieces dependent on these
+ actions)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ env = self.GetSortedXcodeEnv()
+ for action in actions:
+ name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
+ action['action_name']))
+ self.WriteLn('### Rules for action "%s":' % action['action_name'])
+ inputs = action['inputs']
+ outputs = action['outputs']
+
+ # Build up a list of outputs.
+ # Collect the output dirs we'll need.
+ dirs = set()
+ for out in outputs:
+ dir = os.path.split(out)[0]
+ if dir:
+ dirs.add(dir)
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += outputs
+
+ # Write the actual command.
+ action_commands = action['action']
+ if self.flavor == 'mac':
+ action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
+ for command in action_commands]
+ command = gyp.common.EncodePOSIXShellList(action_commands)
+ if 'message' in action:
+ self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
+ else:
+ self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
+ if dirs:
+ command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
+
+ cd_action = 'cd %s; ' % Sourceify(self.path or '.')
+
+ # command and cd_action get written to a toplevel variable called
+ # cmd_foo. Toplevel variables can't handle things that change per
+ # makefile like $(TARGET), so hardcode the target.
+ command = command.replace('$(TARGET)', self.target)
+ cd_action = cd_action.replace('$(TARGET)', self.target)
+
+ # Set LD_LIBRARY_PATH in case the action runs an executable from this
+ # build which links to shared libs from this build.
+ # actions run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
+ '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
+ 'export LD_LIBRARY_PATH; '
+ '%s%s'
+ % (name, cd_action, command))
+ self.WriteLn()
+ outputs = map(self.Absolutify, outputs)
+ # The makefile rules are all relative to the top dir, but the gyp actions
+ # are defined relative to their containing dir. This replaces the obj
+ # variable for the action rule with an absolute version so that the output
+ # goes in the right place.
+ # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
+ # it's superfluous for the "extra outputs", and this avoids accidentally
+ # writing duplicate dummy rules for those outputs.
+ # Same for environment.
+ self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
+ self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
+ self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
+
+ for input in inputs:
+ assert ' ' not in input, (
+ "Spaces in action input filenames not supported (%s)" % input)
+ for output in outputs:
+ assert ' ' not in output, (
+ "Spaces in action output filenames not supported (%s)" % output)
+
+ # See the comment in WriteCopies about expanding env vars.
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+ self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
+ part_of_all=part_of_all, command=name)
+
+ # Stuff the outputs in a variable so we can refer to them later.
+ outputs_variable = 'action_%s_outputs' % name
+ self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
+ extra_outputs.append('$(%s)' % outputs_variable)
+ self.WriteLn()
+
+ self.WriteLn()
+
+
+ def WriteRules(self, rules, extra_sources, extra_outputs,
+ extra_mac_bundle_resources, part_of_all):
+ """Write Makefile code for any 'rules' from the gyp input.
+
+ extra_sources: a list that will be filled in with newly generated source
+ files, if any
+ extra_outputs: a list that will be filled in with any outputs of these
+ rules (used to make other pieces dependent on these rules)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ env = self.GetSortedXcodeEnv()
+ for rule in rules:
+ name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
+ rule['rule_name']))
+ count = 0
+ self.WriteLn('### Generated for rule %s:' % name)
+
+ all_outputs = []
+
+ for rule_source in rule.get('rule_sources', []):
+ dirs = set()
+ (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ os.path.splitext(rule_source_basename)
+
+ outputs = [self.ExpandInputRoot(out, rule_source_root,
+ rule_source_dirname)
+ for out in rule['outputs']]
+
+ for out in outputs:
+ dir = os.path.dirname(out)
+ if dir:
+ dirs.add(dir)
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+ if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += outputs
+ inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
+ rule.get('inputs', [])))
+ actions = ['$(call do_cmd,%s_%d)' % (name, count)]
+
+ if name == 'resources_grit':
+ # HACK: This is ugly. Grit intentionally doesn't touch the
+ # timestamp of its output file when the file doesn't change,
+ # which is fine in hash-based dependency systems like scons
+ # and forge, but not kosher in the make world. After some
+ # discussion, hacking around it here seems like the least
+ # amount of pain.
+ actions += ['@touch --no-create $@']
+
+ # See the comment in WriteCopies about expanding env vars.
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
+ inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
+
+ outputs = map(self.Absolutify, outputs)
+ all_outputs += outputs
+ # Only write the 'obj' and 'builddir' rules for the "primary" output
+ # (:1); it's superfluous for the "extra outputs", and this avoids
+ # accidentally writing duplicate dummy rules for those outputs.
+ self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
+ self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
+ self.WriteMakeRule(outputs, inputs, actions,
+ command="%s_%d" % (name, count))
+ # Spaces in rule filenames are not supported, but rule variables have
+ # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
+ # The spaces within the variables are valid, so remove the variables
+ # before checking.
+ variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
+ for output in outputs:
+ output = re.sub(variables_with_spaces, '', output)
+ assert ' ' not in output, (
+ "Spaces in rule filenames not yet supported (%s)" % output)
+ self.WriteLn('all_deps += %s' % ' '.join(outputs))
+
+ action = [self.ExpandInputRoot(ac, rule_source_root,
+ rule_source_dirname)
+ for ac in rule['action']]
+ mkdirs = ''
+ if dirs:
+ mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
+ cd_action = 'cd %s; ' % Sourceify(self.path or '.')
+
+ # action, cd_action, and mkdirs get written to a toplevel variable
+ # called cmd_foo. Toplevel variables can't handle things that change
+ # per makefile like $(TARGET), so hardcode the target.
+ if self.flavor == 'mac':
+ action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
+ for command in action]
+ action = gyp.common.EncodePOSIXShellList(action)
+ action = action.replace('$(TARGET)', self.target)
+ cd_action = cd_action.replace('$(TARGET)', self.target)
+ mkdirs = mkdirs.replace('$(TARGET)', self.target)
+
+ # Set LD_LIBRARY_PATH in case the rule runs an executable from this
+ # build which links to shared libs from this build.
+ # rules run on the host, so they should in theory only use host
+ # libraries, but until everything is made cross-compile safe, also use
+ # target libraries.
+ # TODO(piman): when everything is cross-compile safe, remove lib.target
+ self.WriteLn(
+ "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
+ "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
+ "export LD_LIBRARY_PATH; "
+ "%(cd_action)s%(mkdirs)s%(action)s" % {
+ 'action': action,
+ 'cd_action': cd_action,
+ 'count': count,
+ 'mkdirs': mkdirs,
+ 'name': name,
+ })
+ self.WriteLn(
+ 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
+ 'count': count,
+ 'name': name,
+ })
+ self.WriteLn()
+ count += 1
+
+ outputs_variable = 'rule_%s_outputs' % name
+ self.WriteList(all_outputs, outputs_variable)
+ extra_outputs.append('$(%s)' % outputs_variable)
+
+ self.WriteLn('### Finished generating for rule: %s' % name)
+ self.WriteLn()
+ self.WriteLn('### Finished generating for all rules')
+ self.WriteLn('')
+
+
+ def WriteCopies(self, copies, extra_outputs, part_of_all):
+ """Write Makefile code for any 'copies' from the gyp input.
+
+ extra_outputs: a list that will be filled in with any outputs of this action
+ (used to make other pieces dependent on this action)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+ self.WriteLn('### Generated for copy rule.')
+
+ variable = StringToMakefileVariable(self.qualified_target + '_copies')
+ outputs = []
+ for copy in copies:
+ for path in copy['files']:
+ # Absolutify() may call normpath, and will strip trailing slashes.
+ path = Sourceify(self.Absolutify(path))
+ filename = os.path.split(path)[1]
+ output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
+ filename)))
+
+ # If the output path has variables in it, which happens in practice for
+ # 'copies', writing the environment as target-local doesn't work,
+ # because the variables are already needed for the target name.
+ # Copying the environment variables into global make variables doesn't
+ # work either, because then the .d files will potentially contain spaces
+ # after variable expansion, and .d file handling cannot handle spaces.
+ # As a workaround, manually expand variables at gyp time. Since 'copies'
+ # can't run scripts, there's no need to write the env then.
+ # WriteDoCmd() will escape spaces for .d files.
+ env = self.GetSortedXcodeEnv()
+ output = gyp.xcode_emulation.ExpandEnvVars(output, env)
+ path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+ self.WriteDoCmd([output], [path], 'copy', part_of_all)
+ outputs.append(output)
+ self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
+ extra_outputs.append('$(%s)' % variable)
+ self.WriteLn()
+
+
+ def WriteMacBundleResources(self, resources, bundle_deps):
+ """Writes Makefile code for 'mac_bundle_resources'."""
+ self.WriteLn('### Generated for mac_bundle_resources')
+
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
+ map(Sourceify, map(self.Absolutify, resources))):
+ _, ext = os.path.splitext(output)
+ if ext != '.xcassets':
+ # Make does not support '.xcassets' emulation.
+ self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
+ part_of_all=True)
+ bundle_deps.append(output)
+
+
+ def WriteMacInfoPlist(self, bundle_deps):
+ """Write Makefile code for bundle Info.plist files."""
+ info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+ generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
+ lambda p: Sourceify(self.Absolutify(p)))
+ if not info_plist:
+ return
+ if defines:
+ # Create an intermediate file to store preprocessed results.
+ intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
+ os.path.basename(info_plist))
+ self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
+ quoter=EscapeCppDefine)
+ self.WriteMakeRule([intermediate_plist], [info_plist],
+ ['$(call do_cmd,infoplist)',
+ # "Convert" the plist so that any weird whitespace changes from the
+ # preprocessor do not affect the XML parser in mac_tool.
+ '@plutil -convert xml1 $@ $@'])
+ info_plist = intermediate_plist
+ # plists can contain envvars and substitute them into the file.
+ self.WriteSortedXcodeEnv(
+ out, self.GetSortedXcodeEnv(additional_settings=extra_env))
+ self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
+ part_of_all=True)
+ bundle_deps.append(out)
+
+
+ def WriteSources(self, configs, deps, sources,
+ extra_outputs, extra_link_deps,
+ part_of_all, precompiled_header):
+ """Write Makefile code for any 'sources' from the gyp input.
+ These are source files necessary to build the current target.
+
+ configs, deps, sources: input from gyp.
+ extra_outputs: a list of extra outputs this action should be dependent on;
+ used to serialize action/rules before compilation
+ extra_link_deps: a list that will be filled in with any outputs of
+ compilation (to be used in link lines)
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ # Write configuration-specific variables for CFLAGS, etc.
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
+ quoter=EscapeCppDefine)
+
+ if self.flavor == 'mac':
+ cflags = self.xcode_settings.GetCflags(configname)
+ cflags_c = self.xcode_settings.GetCflagsC(configname)
+ cflags_cc = self.xcode_settings.GetCflagsCC(configname)
+ cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
+ cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
+ else:
+ cflags = config.get('cflags')
+ cflags_c = config.get('cflags_c')
+ cflags_cc = config.get('cflags_cc')
+
+ self.WriteLn("# Flags passed to all source files.");
+ self.WriteList(cflags, 'CFLAGS_%s' % configname)
+ self.WriteLn("# Flags passed to only C files.");
+ self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
+ self.WriteLn("# Flags passed to only C++ files.");
+ self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
+ if self.flavor == 'mac':
+ self.WriteLn("# Flags passed to only ObjC files.");
+ self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
+ self.WriteLn("# Flags passed to only ObjC++ files.");
+ self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
+ includes = config.get('include_dirs')
+ if includes:
+ includes = map(Sourceify, map(self.Absolutify, includes))
+ self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
+
+ compilable = filter(Compilable, sources)
+ objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
+ self.WriteList(objs, 'OBJS')
+
+ for obj in objs:
+ assert ' ' not in obj, (
+ "Spaces in object filenames not supported (%s)" % obj)
+ self.WriteLn('# Add to the list of files we specially track '
+ 'dependencies for.')
+ self.WriteLn('all_deps += $(OBJS)')
+ self.WriteLn()
+
+ # Make sure our dependencies are built first.
+ if deps:
+ self.WriteMakeRule(['$(OBJS)'], deps,
+ comment = 'Make sure our dependencies are built '
+ 'before any of us.',
+ order_only = True)
+
+ # Make sure the actions and rules run first.
+ # If they generate any extra headers etc., the per-.o file dep tracking
+ # will catch the proper rebuilds, so order only is still ok here.
+ if extra_outputs:
+ self.WriteMakeRule(['$(OBJS)'], extra_outputs,
+ comment = 'Make sure our actions/rules run '
+ 'before any of us.',
+ order_only = True)
+
+ pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
+ if pchdeps:
+ self.WriteLn('# Dependencies from obj files to their precompiled headers')
+ for source, obj, gch in pchdeps:
+ self.WriteLn('%s: %s' % (obj, gch))
+ self.WriteLn('# End precompiled header dependencies')
+
+ if objs:
+ extra_link_deps.append('$(OBJS)')
+ self.WriteLn("""\
+# CFLAGS et al overrides must be target-local.
+# See "Target-specific Variable Values" in the GNU Make manual.""")
+ self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
+ self.WriteLn("$(OBJS): GYP_CFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('c') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE))")
+ self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('cc') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE))")
+ if self.flavor == 'mac':
+ self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('m') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_C_$(BUILDTYPE)) "
+ "$(CFLAGS_OBJC_$(BUILDTYPE))")
+ self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "%s " % precompiled_header.GetInclude('mm') +
+ "$(CFLAGS_$(BUILDTYPE)) "
+ "$(CFLAGS_CC_$(BUILDTYPE)) "
+ "$(CFLAGS_OBJCC_$(BUILDTYPE))")
+
+ self.WritePchTargets(precompiled_header.GetPchBuildCommands())
+
+ # If there are any object files in our input file list, link them into our
+ # output.
+ extra_link_deps += filter(Linkable, sources)
+
+ self.WriteLn()
+
+ def WritePchTargets(self, pch_commands):
+ """Writes make rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ extra_flags = {
+ 'c': '$(CFLAGS_C_$(BUILDTYPE))',
+ 'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
+ 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
+ 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
+ }[lang]
+ var_name = {
+ 'c': 'GYP_PCH_CFLAGS',
+ 'cc': 'GYP_PCH_CXXFLAGS',
+ 'm': 'GYP_PCH_OBJCFLAGS',
+ 'mm': 'GYP_PCH_OBJCXXFLAGS',
+ }[lang]
+ self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
+ "$(DEFS_$(BUILDTYPE)) "
+ "$(INCS_$(BUILDTYPE)) "
+ "$(CFLAGS_$(BUILDTYPE)) " +
+ extra_flags)
+
+ self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
+ self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
+ self.WriteLn('')
+ assert ' ' not in gch, (
+ "Spaces in gch filenames not supported (%s)" % gch)
+ self.WriteLn('all_deps += %s' % gch)
+ self.WriteLn('')
+
+
+ def ComputeOutputBasename(self, spec):
+ """Return the 'output basename' of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ 'libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ if self.flavor == 'mac' and self.type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module'):
+ return self.xcode_settings.GetExecutablePath()
+
+ target = spec['target_name']
+ target_prefix = ''
+ target_ext = ''
+ if self.type == 'static_library':
+ if target[:3] == 'lib':
+ target = target[3:]
+ target_prefix = 'lib'
+ target_ext = '.a'
+ elif self.type in ('loadable_module', 'shared_library'):
+ if target[:3] == 'lib':
+ target = target[3:]
+ target_prefix = 'lib'
+ target_ext = '.so'
+ elif self.type == 'none':
+ target = '%s.stamp' % target
+ elif self.type != 'executable':
+ print ("ERROR: What output file should be generated?",
+ "type", self.type, "target", target)
+
+ target_prefix = spec.get('product_prefix', target_prefix)
+ target = spec.get('product_name', target)
+ product_ext = spec.get('product_extension')
+ if product_ext:
+ target_ext = '.' + product_ext
+
+ return target_prefix + target + target_ext
+
+
+ def _InstallImmediately(self):
+ return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module')
+
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ path = os.path.join('$(obj).' + self.toolset, self.path)
+ if self.type == 'executable' or self._InstallImmediately():
+ path = '$(builddir)'
+ path = spec.get('product_dir', path)
+ return os.path.join(path, self.ComputeOutputBasename(spec))
+
+
+ def ComputeMacBundleOutput(self, spec):
+ """Return the 'output' (full output path) to a bundle output directory."""
+ assert self.is_mac_bundle
+ path = generator_default_variables['PRODUCT_DIR']
+ return os.path.join(path, self.xcode_settings.GetWrapperName())
+
+
+ def ComputeMacBundleBinaryOutput(self, spec):
+ """Return the 'output' (full output path) to the binary in a bundle."""
+ path = generator_default_variables['PRODUCT_DIR']
+ return os.path.join(path, self.xcode_settings.GetExecutablePath())
+
+
+ def ComputeDeps(self, spec):
+ """Compute the dependencies of a gyp spec.
+
+ Returns a tuple (deps, link_deps), where each is a list of
+ filenames that will need to be put in front of make for either
+ building (deps) or linking (link_deps).
+ """
+ deps = []
+ link_deps = []
+ if 'dependencies' in spec:
+ deps.extend([target_outputs[dep] for dep in spec['dependencies']
+ if target_outputs[dep]])
+ for dep in spec['dependencies']:
+ if dep in target_link_deps:
+ link_deps.append(target_link_deps[dep])
+ deps.extend(link_deps)
+ # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
+ # This hack makes it work:
+ # link_deps.extend(spec.get('libraries', []))
+ return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
+
+
+ def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
+ self.WriteMakeRule([self.output_binary], extra_outputs,
+ comment = 'Build our special outputs first.',
+ order_only = True)
+
+
+ def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
+ extra_outputs, part_of_all):
+ """Write Makefile code to produce the final target of the gyp spec.
+
+ spec, configs: input from gyp.
+ deps, link_deps: dependency lists; see ComputeDeps()
+ extra_outputs: any extra outputs that our target should depend on
+ part_of_all: flag indicating this target is part of 'all'
+ """
+
+ self.WriteLn('### Rules for final target.')
+
+ if extra_outputs:
+ self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
+ self.WriteMakeRule(extra_outputs, deps,
+ comment=('Preserve order dependency of '
+ 'special output on deps.'),
+ order_only = True)
+
+ target_postbuilds = {}
+ if self.type != 'none':
+ for configname in sorted(configs.keys()):
+ config = configs[configname]
+ if self.flavor == 'mac':
+ ldflags = self.xcode_settings.GetLdflags(configname,
+ generator_default_variables['PRODUCT_DIR'],
+ lambda p: Sourceify(self.Absolutify(p)))
+
+ # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
+ gyp_to_build = gyp.common.InvertRelativePath(self.path)
+ target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
+ configname,
+ QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
+ self.output))),
+ QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
+ self.output_binary))))
+ if target_postbuild:
+ target_postbuilds[configname] = target_postbuild
+ else:
+ ldflags = config.get('ldflags', [])
+ # Compute an rpath for this output if needed.
+ if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
+ # We want to get the literal string "$ORIGIN" into the link command,
+ # so we need lots of escaping.
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
+ ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
+ self.toolset)
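+ # Illustration (assuming toolset 'target'): the Makefile receives
+ # -Wl,-rpath=\$$ORIGIN/lib.target/. Make expands $$ to $, the shell
+ # strips the backslash, and the linker is finally handed the literal
+ # -Wl,-rpath=$ORIGIN/lib.target/.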
+ library_dirs = config.get('library_dirs', [])
+ ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
+ self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
+ if self.flavor == 'mac':
+ self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
+ 'LIBTOOLFLAGS_%s' % configname)
+ libraries = spec.get('libraries')
+ if libraries:
+ # Remove duplicate entries
+ libraries = gyp.common.uniquer(libraries)
+ if self.flavor == 'mac':
+ libraries = self.xcode_settings.AdjustLibraries(libraries)
+ self.WriteList(libraries, 'LIBS')
+ self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
+ QuoteSpaces(self.output_binary))
+ self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
+
+ if self.flavor == 'mac':
+ self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
+ QuoteSpaces(self.output_binary))
+
+ # Postbuild actions. Like actions, but implicitly depend on the target's
+ # output.
+ postbuilds = []
+ if self.flavor == 'mac':
+ if target_postbuilds:
+ postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
+ postbuilds.extend(
+ gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
+
+ if postbuilds:
+ # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
+ # so we must output its definition first, since we declare variables
+ # using ":=".
+ self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
+
+ for configname in target_postbuilds:
+ self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
+ (QuoteSpaces(self.output),
+ configname,
+ gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
+
+ # Postbuilds expect to be run in the gyp file's directory, so insert an
+ # implicit postbuild to cd to there.
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
+ for i in xrange(len(postbuilds)):
+ if not postbuilds[i].startswith('$'):
+ postbuilds[i] = EscapeShellArgument(postbuilds[i])
+ self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
+ self.WriteLn('%s: POSTBUILDS := %s' % (
+ QuoteSpaces(self.output), ' '.join(postbuilds)))
+
+ # A bundle directory depends on its dependencies such as bundle resources
+ # and bundle binary. When all dependencies have been built, the bundle
+ # needs to be packaged.
+ if self.is_mac_bundle:
+ # If the framework doesn't contain a binary, then nothing depends
+ # on the actions -- make the framework depend on them directly too.
+ self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
+
+ # Bundle dependencies. Note that the code below adds actions to this
+ # target, so if you move these two lines, move the lines below as well.
+ self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
+ self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
+
+ # After the framework is built, package it. Needs to happen before
+ # postbuilds, since postbuilds depend on this.
+ if self.type in ('shared_library', 'loadable_module'):
+ self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
+ self.xcode_settings.GetFrameworkVersion())
+
+ # Bundle postbuilds can depend on the whole bundle, so run them after
+ # the bundle is packaged, not merely after the bundle binary is done.
+ if postbuilds:
+ self.WriteLn('\t@$(call do_postbuilds)')
+ postbuilds = [] # Don't write postbuilds for target's output.
+
+ # Needed by test/mac/gyptest-rebuild.py.
+ self.WriteLn('\t@true # No-op, used by tests')
+
+ # Since this target depends on binary and resources which are in
+ # nested subfolders, the framework directory will usually be older
+ # than its dependencies. To prevent this rule from executing on
+ # every build (expensive, especially with postbuilds), explicitly
+ # update the time on the framework directory.
+ self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
+
+ if postbuilds:
+ assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
+ 'on the bundle, not the binary (target \'%s\')' % self.target)
+ assert 'product_dir' not in spec, ('Postbuilds do not work with '
+ 'custom product_dir')
+
+ if self.type == 'executable':
+ self.WriteLn('%s: LD_INPUTS := %s' % (
+ QuoteSpaces(self.output_binary),
+ ' '.join(map(QuoteSpaces, link_deps))))
+ if self.toolset == 'host' and self.flavor == 'android':
+ self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
+ part_of_all, postbuilds=postbuilds)
+ else:
+ self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
+ postbuilds=postbuilds)
+
+ elif self.type == 'static_library':
+ for link_dep in link_deps:
+ assert ' ' not in link_dep, (
+ "Spaces in alink input filenames not supported (%s)" % link_dep)
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
+ self.is_standalone_static_library):
+ self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
+ part_of_all, postbuilds=postbuilds)
+ else:
+ self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
+ postbuilds=postbuilds)
+ elif self.type == 'shared_library':
+ self.WriteLn('%s: LD_INPUTS := %s' % (
+ QuoteSpaces(self.output_binary),
+ ' '.join(map(QuoteSpaces, link_deps))))
+ self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
+ postbuilds=postbuilds)
+ elif self.type == 'loadable_module':
+ for link_dep in link_deps:
+ assert ' ' not in link_dep, (
+ "Spaces in module input filenames not supported (%s)" % link_dep)
+ if self.toolset == 'host' and self.flavor == 'android':
+ self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
+ part_of_all, postbuilds=postbuilds)
+ else:
+ self.WriteDoCmd(
+ [self.output_binary], link_deps, 'solink_module', part_of_all,
+ postbuilds=postbuilds)
+ elif self.type == 'none':
+ # Write a stamp line.
+ self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
+ postbuilds=postbuilds)
+ else:
+ print "WARNING: no output for", self.type, target
+
+ # Add an alias for each target (if there are any outputs).
+ # Installable target aliases are created below.
+ if ((self.output and self.output != self.target) and
+ (self.type not in self._INSTALLABLE_TARGETS)):
+ self.WriteMakeRule([self.target], [self.output],
+ comment='Add target alias', phony = True)
+ if part_of_all:
+ self.WriteMakeRule(['all'], [self.target],
+ comment = 'Add target alias to "all" target.',
+ phony = True)
+
+ # Add special-case rules for our installable targets.
+ # 1) They need to install to the build dir or "product" dir.
+ # 2) They get shortcuts for building (e.g. "make chrome").
+ # 3) They are part of "make all".
+ if (self.type in self._INSTALLABLE_TARGETS or
+ self.is_standalone_static_library):
+ if self.type == 'shared_library':
+ file_desc = 'shared library'
+ elif self.type == 'static_library':
+ file_desc = 'static library'
+ else:
+ file_desc = 'executable'
+ install_path = self._InstallableTargetInstallPath()
+ installable_deps = [self.output]
+ if (self.flavor == 'mac' and 'product_dir' not in spec and
+ self.toolset == 'target'):
+ # On mac, products are created in install_path immediately.
+ assert install_path == self.output, '%s != %s' % (
+ install_path, self.output)
+
+ # Point the target alias to the final binary output.
+ self.WriteMakeRule([self.target], [install_path],
+ comment='Add target alias', phony = True)
+ if install_path != self.output:
+ assert not self.is_mac_bundle # See comment a few lines above.
+ self.WriteDoCmd([install_path], [self.output], 'copy',
+ comment = 'Copy this to the %s output path.' %
+ file_desc, part_of_all=part_of_all)
+ installable_deps.append(install_path)
+ if self.output != self.alias and self.alias != self.target:
+ self.WriteMakeRule([self.alias], installable_deps,
+ comment = 'Short alias for building this %s.' %
+ file_desc, phony = True)
+ if part_of_all:
+ self.WriteMakeRule(['all'], [install_path],
+ comment = 'Add %s to "all" target.' % file_desc,
+ phony = True)
+
+
+ def WriteList(self, value_list, variable=None, prefix='',
+ quoter=QuoteIfNecessary):
+ """Write a variable definition that is a list of values.
+
+ E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
+ foo = blaha blahb
+ but in a pretty-printed style.
+ """
+ values = ''
+ if value_list:
+ value_list = [quoter(prefix + l) for l in value_list]
+ values = ' \\\n\t' + ' \\\n\t'.join(value_list)
+ self.fp.write('%s :=%s\n\n' % (variable, values))
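+ # Illustrative emitted text for WriteList(['a', 'b'], 'foo', prefix='blah'),
+ # assuming the quoter leaves the values untouched:
+ #   foo := \
+ #   	blaha \
+ #   	blahb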
+
+
+ def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
+ postbuilds=False):
+ """Write a Makefile rule that uses do_cmd.
+
+ This makes the outputs dependent on the command line that was run,
+ and also supports the V= make command line flag.
+ """
+ suffix = ''
+ if postbuilds:
+ assert ',' not in command
+ suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
+ self.WriteMakeRule(outputs, inputs,
+ actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
+ comment = comment,
+ command = command,
+ force = True)
+ # Add our outputs to the list of targets we read depfiles from.
+ # all_deps is only used for deps file reading, and for deps files we replace
+ # spaces with ? because escaping doesn't work with make's $(sort) and
+ # other functions.
+ outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
+ self.WriteLn('all_deps += %s' % ' '.join(outputs))
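+ # Sketch of the output for a hypothetical single-output call such as
+ # WriteDoCmd(['out'], ['in'], 'copy', part_of_all=True):
+ #   out: TOOLSET := $(TOOLSET)
+ #   out: in FORCE_DO_CMD
+ #   	$(call do_cmd,copy)
+ #   all_deps += out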
+
+
+ def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
+ order_only=False, force=False, phony=False, command=None):
+ """Write a Makefile rule, with some extra tricks.
+
+ outputs: a list of outputs for the rule (note: this is not directly
+ supported by make; see comments below)
+ inputs: a list of inputs for the rule
+ actions: a list of shell commands to run for the rule
+ comment: a comment to put in the Makefile above the rule (also useful
+ for making this Python script's code self-documenting)
+ order_only: if true, makes the dependency order-only
+ force: if true, include FORCE_DO_CMD as an order-only dep
+ phony: if true, the rule does not actually generate the named output;
+ the output is just a name used to run the rule
+ command: (optional) command name to generate unambiguous labels
+ """
+ outputs = map(QuoteSpaces, outputs)
+ inputs = map(QuoteSpaces, inputs)
+
+ if comment:
+ self.WriteLn('# ' + comment)
+ if phony:
+ self.WriteLn('.PHONY: ' + ' '.join(outputs))
+ if actions:
+ self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
+ force_append = ' FORCE_DO_CMD' if force else ''
+
+ if order_only:
+ # Order only rule: Just write a simple rule.
+ # TODO(evanm): just make order_only a list of deps instead of this hack.
+ self.WriteLn('%s: | %s%s' %
+ (' '.join(outputs), ' '.join(inputs), force_append))
+ elif len(outputs) == 1:
+ # Regular rule, one output: Just write a simple rule.
+ self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
+ else:
+ # Regular rule, more than one output: Multiple outputs are tricky in
+ # make. We will write three rules:
+ # - All outputs depend on an intermediate file, via a do-nothing
+ # recipe.
+ # - .INTERMEDIATE depends on the intermediate file, so make knows it
+ # may be deleted once the real outputs exist.
+ # - The intermediate file depends on the inputs, runs the actual
+ # command, and 'touch'es the intermediate file.
+ intermediate = "%s.intermediate" % (command if command else self.target)
+ self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
+ self.WriteLn('\t@:')
+ self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
+ self.WriteLn('%s: %s%s' %
+ (intermediate, ' '.join(inputs), force_append))
+ actions.insert(0, '$(call do_cmd,touch)')
+
+ if actions:
+ for action in actions:
+ self.WriteLn('\t%s' % action)
+ self.WriteLn()
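+ # Sketch of the multi-output case for hypothetical arguments
+ # outputs=['a.h', 'a.cc'], inputs=['a.y'], actions=['bison a.y'],
+ # command='bison':
+ #   a.h: TOOLSET := $(TOOLSET)
+ #   a.h a.cc: bison.intermediate
+ #   	@:
+ #   .INTERMEDIATE: bison.intermediate
+ #   bison.intermediate: a.y
+ #   	$(call do_cmd,touch)
+ #   	bison a.y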
+
+
+ def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
+ """Write a set of LOCAL_XXX definitions for Android NDK.
+
+ These variable definitions will be used by Android NDK but do nothing for
+ non-Android applications.
+
+ Arguments:
+ module_name: Android NDK module name, which must be unique among all
+ module names.
+ all_sources: A list of source files (will be filtered by Compilable).
+ link_deps: A list of link dependencies, which must be sorted in
+ the order from dependencies to dependents.
+ """
+ if self.type not in ('executable', 'shared_library', 'static_library'):
+ return
+
+ self.WriteLn('# Variable definitions for Android applications')
+ self.WriteLn('include $(CLEAR_VARS)')
+ self.WriteLn('LOCAL_MODULE := ' + module_name)
+ self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
+ '$(DEFS_$(BUILDTYPE)) '
+ # LOCAL_CFLAGS is applied to both C and C++ files. There is
+ # no way to specify $(CFLAGS_C_$(BUILDTYPE)) for C sources
+ # only.
+ '$(CFLAGS_C_$(BUILDTYPE)) '
+ # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
+ # LOCAL_C_INCLUDES does not expect it. So put it in
+ # LOCAL_CFLAGS.
+ '$(INCS_$(BUILDTYPE))')
+ # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
+ self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
+ self.WriteLn('LOCAL_C_INCLUDES :=')
+ self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
+
+ # Detect the C++ extension.
+ cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
+ default_cpp_ext = '.cpp'
+ for filename in all_sources:
+ ext = os.path.splitext(filename)[1]
+ if ext in cpp_ext:
+ cpp_ext[ext] += 1
+ if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
+ default_cpp_ext = ext
+ self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
+
+ self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
+ 'LOCAL_SRC_FILES')
+
+ # Filter out those which do not match prefix and suffix and produce
+ # the resulting list without prefix and suffix.
+ def DepsToModules(deps, prefix, suffix):
+ modules = []
+ for filepath in deps:
+ filename = os.path.basename(filepath)
+ if filename.startswith(prefix) and filename.endswith(suffix):
+ modules.append(filename[len(prefix):-len(suffix)])
+ return modules
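+ # For example (illustrative paths):
+ # DepsToModules(['out/libfoo.so', 'out/bar.a'], 'lib', '.so') == ['foo']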
+
+ # Retrieve the default value of 'SHARED_LIB_SUFFIX'
+ params = {'flavor': 'linux'}
+ default_variables = {}
+ CalculateVariables(default_variables, params)
+
+ self.WriteList(
+ DepsToModules(link_deps,
+ generator_default_variables['SHARED_LIB_PREFIX'],
+ default_variables['SHARED_LIB_SUFFIX']),
+ 'LOCAL_SHARED_LIBRARIES')
+ self.WriteList(
+ DepsToModules(link_deps,
+ generator_default_variables['STATIC_LIB_PREFIX'],
+ generator_default_variables['STATIC_LIB_SUFFIX']),
+ 'LOCAL_STATIC_LIBRARIES')
+
+ if self.type == 'executable':
+ self.WriteLn('include $(BUILD_EXECUTABLE)')
+ elif self.type == 'shared_library':
+ self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
+ elif self.type == 'static_library':
+ self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
+ self.WriteLn()
+
+
+ def WriteLn(self, text=''):
+ self.fp.write(text + '\n')
+
+
+ def GetSortedXcodeEnv(self, additional_settings=None):
+ return gyp.xcode_emulation.GetSortedXcodeEnv(
+ self.xcode_settings, "$(abs_builddir)",
+ os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
+ additional_settings)
+
+
+ def GetSortedXcodePostbuildEnv(self):
+ # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
+ # TODO(thakis): It would be nice to have some general mechanism instead.
+ strip_save_file = self.xcode_settings.GetPerTargetSetting(
+ 'CHROMIUM_STRIP_SAVE_FILE', '')
+ # Even if strip_save_file is empty, explicitly write it. Else a postbuild
+ # might pick up an export from an earlier target.
+ return self.GetSortedXcodeEnv(
+ additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
+
+
+ def WriteSortedXcodeEnv(self, target, env):
+ for k, v in env:
+ # For
+ # foo := a\ b
+ # the escaped space does the right thing. For
+ # export foo := a\ b
+ # it does not -- the backslash is written to the env as literal character.
+ # So don't escape spaces in |env[k]|.
+ self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
+
+
+ def Objectify(self, path):
+ """Convert a path to its output directory form."""
+ if '$(' in path:
+ path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
+ if '$(obj)' not in path:
+ path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
+ return path
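+ # For example, assuming self.toolset == 'target':
+ #   Objectify('dir/foo.o') -> '$(obj).target/$(TARGET)/dir/foo.o'
+ #   Objectify('$(obj)/gen/foo.o') -> '$(obj).target/$(TARGET)/gen/foo.o'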
+
+
+ def Pchify(self, path, lang):
+ """Convert a prefix header path to its output directory form."""
+ path = self.Absolutify(path)
+ if '$(' in path:
+ path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
+ (self.toolset, lang))
+ return path
+ return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
+
+
+ def Absolutify(self, path):
+ """Convert a subdirectory-relative path into a base-relative path.
+ Skips over paths that contain variables."""
+ if '$(' in path:
+ # Don't call normpath in this case, as it might collapse the
+ # path too aggressively if it features '..'. However it's still
+ # important to strip trailing slashes.
+ return path.rstrip('/')
+ return os.path.normpath(os.path.join(self.path, path))
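+ # For example, assuming self.path == 'foo/bar':
+ #   Absolutify('baz.c') -> 'foo/bar/baz.c'
+ #   Absolutify('$(builddir)/gen/') -> '$(builddir)/gen' (variable kept,
+ #   only the trailing slash is stripped)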
+
+
+ def ExpandInputRoot(self, template, expansion, dirname):
+ if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
+ return template
+ path = template % {
+ 'INPUT_ROOT': expansion,
+ 'INPUT_DIRNAME': dirname,
+ }
+ return path
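+ # For example (hypothetical templates):
+ #   ExpandInputRoot('%(INPUT_ROOT)s.pb.cc', 'foo', 'protos')
+ #     -> 'foo.pb.cc'
+ #   ExpandInputRoot('%(INPUT_DIRNAME)s/gen.cc', 'foo', 'protos')
+ #     -> 'protos/gen.cc'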
+
+
+ def _InstallableTargetInstallPath(self):
+ """Returns the location of the final output for an installable target."""
+ # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
+ # rely on this. Emulate this behavior for mac.
+
+ # XXX(TooTallNate): disabling this code since we don't want this behavior...
+ #if (self.type == 'shared_library' and
+ # (self.flavor != 'mac' or self.toolset != 'target')):
+ # # Install all shared libs into a common directory (per toolset) for
+ # # convenient access with LD_LIBRARY_PATH.
+ # return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
+ return '$(builddir)/' + self.alias
+
+
+def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
+ build_files):
+ """Write the target to regenerate the Makefile."""
+ options = params['options']
+ build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
+ for filename in params['build_files_arg']]
+
+ gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
+ options.toplevel_dir)
+ if not gyp_binary.startswith(os.sep):
+ gyp_binary = os.path.join('.', gyp_binary)
+
+ root_makefile.write(
+ "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
+ "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
+ "%(makefile_name)s: %(deps)s\n"
+ "\t$(call do_cmd,regen_makefile)\n\n" % {
+ 'makefile_name': makefile_name,
+ 'deps': ' '.join(map(Sourceify, build_files)),
+ 'cmd': gyp.common.EncodePOSIXShellList(
+ [gyp_binary, '-fmake'] +
+ gyp.RegenerateFlags(options) +
+ build_files_args)})
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ for config in configurations:
+ arguments = ['make']
+ if options.toplevel_dir and options.toplevel_dir != '.':
+ arguments += '-C', options.toplevel_dir
+ arguments.append('BUILDTYPE=' + config)
+ print 'Building [%s]: %s' % (config, arguments)
+ subprocess.check_call(arguments)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ options = params['options']
+ flavor = gyp.common.GetFlavor(params)
+ generator_flags = params.get('generator_flags', {})
+ builddir_name = generator_flags.get('output_dir', 'out')
+ android_ndk_version = generator_flags.get('android_ndk_version', None)
+ default_target = generator_flags.get('default_target', 'all')
+
+ def CalculateMakefilePath(build_file, base_name):
+ """Determine where to write a Makefile for a given gyp file."""
+ # Paths in gyp files are relative to the .gyp file, but we want
+ # paths relative to the source root for the master makefile. Grab
+ # the path of the .gyp file as the base to relativize against.
+ # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.depth)
+ # We write the file in the base_path directory.
+ output_file = os.path.join(options.depth, base_path, base_name)
+ if options.generator_output:
+ output_file = os.path.join(
+ options.depth, options.generator_output, base_path, base_name)
+ base_path = gyp.common.RelativePath(os.path.dirname(build_file),
+ options.toplevel_dir)
+ return base_path, output_file
+
+ # TODO: search for the first non-'Default' target. This can go
+ # away when we add verification that all targets have the
+ # necessary configurations.
+ default_configuration = None
+ toolsets = set([target_dicts[target]['toolset'] for target in target_list])
+ for target in target_list:
+ spec = target_dicts[target]
+ if spec['default_configuration'] != 'Default':
+ default_configuration = spec['default_configuration']
+ break
+ if not default_configuration:
+ default_configuration = 'Default'
+
+ srcdir = '.'
+ makefile_name = 'Makefile' + options.suffix
+ makefile_path = os.path.join(options.toplevel_dir, makefile_name)
+ if options.generator_output:
+ global srcdir_prefix
+ makefile_path = os.path.join(
+ options.toplevel_dir, options.generator_output, makefile_name)
+ srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
+ srcdir_prefix = '$(srcdir)/'
+
+ flock_command = 'flock'
+ copy_archive_arguments = '-af'
+ header_params = {
+ 'default_target': default_target,
+ 'builddir': builddir_name,
+ 'default_configuration': default_configuration,
+ 'flock': flock_command,
+ 'flock_index': 1,
+ 'link_commands': LINK_COMMANDS_LINUX,
+ 'extra_commands': '',
+ 'srcdir': srcdir,
+ 'copy_archive_args': copy_archive_arguments,
+ }
+ if flavor == 'mac':
+ flock_command = './gyp-mac-tool flock'
+ header_params.update({
+ 'flock': flock_command,
+ 'flock_index': 2,
+ 'link_commands': LINK_COMMANDS_MAC,
+ 'extra_commands': SHARED_HEADER_MAC_COMMANDS,
+ })
+ elif flavor == 'android':
+ header_params.update({
+ 'link_commands': LINK_COMMANDS_ANDROID,
+ })
+ elif flavor == 'solaris':
+ header_params.update({
+ 'flock': './gyp-flock-tool flock',
+ 'flock_index': 2,
+ })
+ elif flavor == 'freebsd':
+ # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
+ header_params.update({
+ 'flock': 'lockf',
+ })
+ elif flavor == 'openbsd':
+ copy_archive_arguments = '-pPRf'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ })
+ elif flavor == 'aix':
+ copy_archive_arguments = '-pPRf'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ 'link_commands': LINK_COMMANDS_AIX,
+ 'flock': './gyp-flock-tool flock',
+ 'flock_index': 2,
+ })
+
+ header_params.update({
+ 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
+ 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
+ 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
+ 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
+ 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'),
+ 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'),
+ 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'),
+ 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'),
+ })
+
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings_array = data[build_file].get('make_global_settings', [])
+ wrappers = {}
+ for key, value in make_global_settings_array:
+ if key.endswith('_wrapper'):
+ wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
+ make_global_settings = ''
+ for key, value in make_global_settings_array:
+ if re.match('.*_wrapper', key):
+ continue
+ if value[0] != '$':
+ value = '$(abspath %s)' % value
+ wrapper = wrappers.get(key)
+ if wrapper:
+ value = '%s %s' % (wrapper, value)
+ del wrappers[key]
+ if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
+ make_global_settings += (
+ 'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
+ # Let gyp-time envvars win over global settings.
+ env_key = key.replace('.', '_') # CC.host -> CC_host
+ if env_key in os.environ:
+ value = os.environ[env_key]
+ make_global_settings += ' %s = %s\n' % (key, value)
+ make_global_settings += 'endif\n'
+ else:
+ make_global_settings += '%s ?= %s\n' % (key, value)
+ # TODO(ukai): define cmd when only wrapper is specified in
+ # make_global_settings.
+
+ header_params['make_global_settings'] = make_global_settings
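+ # Sketch of the text built above for a hypothetical make_global_settings
+ # of [['CC', '/usr/bin/clang'], ['LD', 'gold']], assuming no CC
+ # environment override:
+ #   ifneq (,$(filter $(origin CC), undefined default))
+ #     CC = $(abspath /usr/bin/clang)
+ #   endif
+ #   LD ?= $(abspath gold)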
+
+ gyp.common.EnsureDirExists(makefile_path)
+ root_makefile = open(makefile_path, 'w')
+ root_makefile.write(SHARED_HEADER % header_params)
+ # Currently all versions have the same effect, but in the future the
+ # behavior could differ.
+ if android_ndk_version:
+ root_makefile.write(
+ '# Define LOCAL_PATH for build of Android applications.\n'
+ 'LOCAL_PATH := $(call my-dir)\n'
+ '\n')
+ for toolset in toolsets:
+ root_makefile.write('TOOLSET := %s\n' % toolset)
+ WriteRootHeaderSuffixRules(root_makefile)
+
+ # Put build-time support tools next to the root Makefile.
+ dest_path = os.path.dirname(makefile_path)
+ gyp.common.CopyTool(flavor, dest_path)
+
+ # Find the list of targets that derive from the gyp file(s) being built.
+ needed_targets = set()
+ for build_file in params['build_files']:
+ for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
+ needed_targets.add(target)
+
+ build_files = set()
+ include_list = set()
+ for qualified_target in target_list:
+ build_file, target, toolset = gyp.common.ParseQualifiedTarget(
+ qualified_target)
+
+ this_make_global_settings = data[build_file].get('make_global_settings', [])
+ assert make_global_settings_array == this_make_global_settings, (
+ "make_global_settings needs to be the same for all targets. %s vs. %s" %
+ (this_make_global_settings, make_global_settings_array))
+
+ build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
+ included_files = data[build_file]['included_files']
+ for included_file in included_files:
+ # The included_files entries are relative to the dir of the build file
+ # that included them, so we have to undo that and then make them relative
+ # to the root dir.
+ relative_include_file = gyp.common.RelativePath(
+ gyp.common.UnrelativePath(included_file, build_file),
+ options.toplevel_dir)
+ abs_include_file = os.path.abspath(relative_include_file)
+ # If the include file is from the ~/.gyp dir, we should use absolute path
+ # so that relocating the src dir doesn't break the path.
+ if (params['home_dot_gyp'] and
+ abs_include_file.startswith(params['home_dot_gyp'])):
+ build_files.add(abs_include_file)
+ else:
+ build_files.add(relative_include_file)
+
+ base_path, output_file = CalculateMakefilePath(build_file,
+ target + '.' + toolset + options.suffix + '.mk')
+
+ spec = target_dicts[qualified_target]
+ configs = spec['configurations']
+
+ if flavor == 'mac':
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
+
+ writer = MakefileWriter(generator_flags, flavor)
+ writer.Write(qualified_target, base_path, output_file, spec, configs,
+ part_of_all=qualified_target in needed_targets)
+
+ # Our root_makefile lives at the source root. Compute the relative path
+ # from there to the output_file for including.
+ mkfile_rel_path = gyp.common.RelativePath(output_file,
+ os.path.dirname(makefile_path))
+ include_list.add(mkfile_rel_path)
+
+ # Write out per-gyp (sub-project) Makefiles.
+ depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
+ for build_file in build_files:
+ # The paths in build_files were relativized above, so undo that before
+ # testing against the non-relativized items in target_list and before
+ # calculating the Makefile path.
+ build_file = os.path.join(depth_rel_path, build_file)
+ gyp_targets = [target_dicts[target]['target_name'] for target in target_list
+ if target.startswith(build_file) and
+ target in needed_targets]
+ # Only generate Makefiles for gyp files with targets.
+ if not gyp_targets:
+ continue
+ base_path, output_file = CalculateMakefilePath(build_file,
+ os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
+ makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
+ os.path.dirname(output_file))
+ writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
+ builddir_name)
+
+
+ # Write out the sorted list of includes.
+ root_makefile.write('\n')
+ for include_file in sorted(include_list):
+ # We wrap each .mk include in an if statement so users can tell make to
+ # not load a file by setting NO_LOAD. The make code below only loads
+ # the .mk file if the .mk filename doesn't start with a token in
+ # NO_LOAD.
+ root_makefile.write(
+ "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
+ " $(findstring $(join ^,$(prefix)),\\\n"
+ " $(join ^," + include_file + ")))),)\n")
+ root_makefile.write(" include " + include_file + "\n")
+ root_makefile.write("endif\n")
+ root_makefile.write('\n')
+
+ if (not generator_flags.get('standalone')
+ and generator_flags.get('auto_regeneration', True)):
+ WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
+
+ root_makefile.write(SHARED_FOOTER)
+
+ root_makefile.close()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
new file mode 100644
index 0000000000..6bfad0f3bd
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -0,0 +1,3494 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import copy
+import ntpath
+import os
+import posixpath
+import re
+import subprocess
+import sys
+
+import gyp.common
+import gyp.easy_xml as easy_xml
+import gyp.generator.ninja as ninja_generator
+import gyp.MSVSNew as MSVSNew
+import gyp.MSVSProject as MSVSProject
+import gyp.MSVSSettings as MSVSSettings
+import gyp.MSVSToolFile as MSVSToolFile
+import gyp.MSVSUserFile as MSVSUserFile
+import gyp.MSVSUtil as MSVSUtil
+import gyp.MSVSVersion as MSVSVersion
+from gyp.common import GypError
+from gyp.common import OrderedSet
+
+# TODO: Remove once bots are on 2.7, http://crbug.com/241769
+def _import_OrderedDict():
+ import collections
+ try:
+ return collections.OrderedDict
+ except AttributeError:
+ import gyp.ordered_dict
+ return gyp.ordered_dict.OrderedDict
+OrderedDict = _import_OrderedDict()
+
+
+# Regular expression for validating Visual Studio GUIDs. If the GUID
+# contains lowercase hex letters, MSVS will be fine. However,
+# IncrediBuild BuildConsole will parse the solution file, but then
+ # silently skip building the target, causing hard-to-track-down errors.
+# Note that this only happens with the BuildConsole, and does not occur
+# if IncrediBuild is executed from inside Visual Studio. This regex
+# validates that the string looks like a GUID with all uppercase hex
+# letters.
+VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
+
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '.exe',
+ 'STATIC_LIB_PREFIX': '',
+ 'SHARED_LIB_PREFIX': '',
+ 'STATIC_LIB_SUFFIX': '.lib',
+ 'SHARED_LIB_SUFFIX': '.dll',
+ 'INTERMEDIATE_DIR': '$(IntDir)',
+ 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
+ 'OS': 'win',
+ 'PRODUCT_DIR': '$(OutDir)',
+ 'LIB_DIR': '$(OutDir)lib',
+ 'RULE_INPUT_ROOT': '$(InputName)',
+ 'RULE_INPUT_DIRNAME': '$(InputDir)',
+ 'RULE_INPUT_EXT': '$(InputExt)',
+ 'RULE_INPUT_NAME': '$(InputFileName)',
+ 'RULE_INPUT_PATH': '$(InputPath)',
+ 'CONFIGURATION_NAME': '$(ConfigurationName)',
+}
+
+
+# The msvs specific sections that hold paths
+generator_additional_path_sections = [
+ 'msvs_cygwin_dirs',
+ 'msvs_props',
+]
+
+
+generator_additional_non_configuration_keys = [
+ 'msvs_cygwin_dirs',
+ 'msvs_cygwin_shell',
+ 'msvs_large_pdb',
+ 'msvs_shard',
+ 'msvs_external_builder',
+ 'msvs_external_builder_out_dir',
+ 'msvs_external_builder_build_cmd',
+ 'msvs_external_builder_clean_cmd',
+ 'msvs_external_builder_clcompile_cmd',
+ 'msvs_enable_winrt',
+ 'msvs_requires_importlibrary',
+ 'msvs_enable_winphone',
+ 'msvs_application_type_revision',
+ 'msvs_target_platform_version',
+ 'msvs_target_platform_minversion',
+]
+
+
+# List of precompiled header related keys.
+precomp_keys = [
+ 'msvs_precompiled_header',
+ 'msvs_precompiled_source',
+]
+
+
+cached_username = None
+
+
+cached_domain = None
+
+
+# TODO(gspencer): Switch the os.environ calls to be
+# win32api.GetDomainName() and win32api.GetUserName() once the
+# python version in depot_tools has been updated to work on Vista
+# 64-bit.
+def _GetDomainAndUserName():
+ if sys.platform not in ('win32', 'cygwin'):
+ return ('DOMAIN', 'USERNAME')
+ global cached_username
+ global cached_domain
+ if not cached_domain or not cached_username:
+ domain = os.environ.get('USERDOMAIN')
+ username = os.environ.get('USERNAME')
+ if not domain or not username:
+ call = subprocess.Popen(['net', 'config', 'Workstation'],
+ stdout=subprocess.PIPE)
+ config = call.communicate()[0]
+ username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
+ username_match = username_re.search(config)
+ if username_match:
+ username = username_match.group(1)
+ domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
+ domain_match = domain_re.search(config)
+ if domain_match:
+ domain = domain_match.group(1)
+ cached_domain = domain
+ cached_username = username
+ return (cached_domain, cached_username)
+
+fixpath_prefix = None
+
+
+def _NormalizedSource(source):
+ """Normalize the path.
+
+ But not if that gets rid of a variable, as this may expand to something
+ larger than one directory.
+
+ Arguments:
+ source: The path to be normalized.
+
+ Returns:
+ The normalized path.
+ """
+ normalized = os.path.normpath(source)
+ if source.count('$') == normalized.count('$'):
+ source = normalized
+ return source
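+# A small illustration (assuming a Windows host, where os.path.normpath
+# emits backslashes): _NormalizedSource('a/./b') returns 'a\\b', while
+# _NormalizedSource('$(IntDir)/../b') is returned unchanged, because
+# normpath would collapse away the '$(IntDir)' variable reference.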
+
+
+def _FixPath(path):
+ """Convert paths to a form that will make sense in a vcproj file.
+
+ Arguments:
+ path: The path to convert, may contain / etc.
+ Returns:
+ The path with all slashes made into backslashes.
+ """
+ if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
+ path = os.path.join(fixpath_prefix, path)
+ path = path.replace('/', '\\')
+ path = _NormalizedSource(path)
+ if path and path[-1] == '\\':
+ path = path[:-1]
+ return path
+
+
+def _FixPaths(paths):
+ """Fix each of the paths of the list."""
+ return [_FixPath(i) for i in paths]
+
+
+def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
+ list_excluded=True, msvs_version=None):
+ """Converts a list split source file paths into a vcproj folder hierarchy.
+
+ Arguments:
+ sources: A list of source file paths split.
+ prefix: A list of source file path layers meant to apply to each of the
+ sources.
+ excluded: A set of excluded files.
+ msvs_version: A MSVSVersion object.
+
+ Returns:
+ A hierarchy of filenames and MSVSProject.Filter objects that matches the
+ layout of the source tree.
+ For example:
+ _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+ prefix=['joe'])
+ -->
+ [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+ MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+ """
+ if not prefix: prefix = []
+ result = []
+ excluded_result = []
+ folders = OrderedDict()
+ # Gather files into the final result, excluded, or folders.
+ for s in sources:
+ if len(s) == 1:
+ filename = _NormalizedSource('\\'.join(prefix + s))
+ if filename in excluded:
+ excluded_result.append(filename)
+ else:
+ result.append(filename)
+ elif msvs_version and not msvs_version.UsesVcxproj():
+ # For MSVS 2008 and earlier, we need to process all files before walking
+ # the sub folders.
+ if not folders.get(s[0]):
+ folders[s[0]] = []
+ folders[s[0]].append(s[1:])
+ else:
+ contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
+ excluded=excluded,
+ list_excluded=list_excluded,
+ msvs_version=msvs_version)
+ contents = MSVSProject.Filter(s[0], contents=contents)
+ result.append(contents)
+ # Add a folder for excluded files.
+ if excluded_result and list_excluded:
+ excluded_folder = MSVSProject.Filter('_excluded_files',
+ contents=excluded_result)
+ result.append(excluded_folder)
+
+ if msvs_version and msvs_version.UsesVcxproj():
+ return result
+
+ # Populate all the folders.
+ for f in folders:
+ contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
+ excluded=excluded,
+ list_excluded=list_excluded,
+ msvs_version=msvs_version)
+ contents = MSVSProject.Filter(f, contents=contents)
+ result.append(contents)
+ return result
+
+
+def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
+ if not value: return
+ _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
+
+
+def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
+ # TODO(bradnelson): ugly hack, fix this more generally!!!
+ if 'Directories' in setting or 'Dependencies' in setting:
+ if type(value) == str:
+ value = value.replace('/', '\\')
+ else:
+ value = [i.replace('/', '\\') for i in value]
+ if not tools.get(tool_name):
+ tools[tool_name] = dict()
+ tool = tools[tool_name]
+ if tool.get(setting):
+ if only_if_unset: return
+ if type(tool[setting]) == list and type(value) == list:
+ tool[setting] += value
+ else:
+ raise TypeError(
+ 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
+ 'not allowed, previous value: %s' % (
+ value, setting, tool_name, str(tool[setting])))
+ else:
+ tool[setting] = value
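+# For example, starting from an empty tools dict:
+#   tools = {}
+#   _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['a.lib'])
+#   _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['b.lib'])
+# leaves tools == {'VCLinkerTool': {'AdditionalDependencies': ['a.lib',
+# 'b.lib']}}; mixing a list with an existing non-list value raises TypeError.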
+
+
+def _ConfigPlatform(config_data):
+ return config_data.get('msvs_configuration_platform', 'Win32')
+
+
+def _ConfigBaseName(config_name, platform_name):
+ if config_name.endswith('_' + platform_name):
+ return config_name[0:-len(platform_name) - 1]
+ else:
+ return config_name
+
+
+def _ConfigFullName(config_name, config_data):
+ platform_name = _ConfigPlatform(config_data)
+ return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
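+# For example, with config_data of {'msvs_configuration_platform': 'x64'},
+# _ConfigFullName('Debug_x64', config_data) returns 'Debug|x64': the
+# trailing '_x64' is stripped off by _ConfigBaseName and re-added as the
+# platform half of the 'name|platform' pair.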
+
+
+def _ConfigWindowsTargetPlatformVersion(config_data):
+ ver = config_data.get('msvs_windows_target_platform_version')
+ if not ver or re.match(r'^\d+', ver):
+ return ver
+ for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
+ r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
+ sdkdir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
+ if not sdkdir:
+ continue
+ version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
+ # find a matching entry in sdkdir\include
+ names = sorted([x for x in os.listdir(r'%s\include' % sdkdir)
+ if x.startswith(version)], reverse=True)
+ return names[0]
+
+
+def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
+ quote_cmd, do_setup_env):
+
+ if [x for x in cmd if '$(InputDir)' in x]:
+ input_dir_preamble = (
+ 'set INPUTDIR=$(InputDir)\n'
+ 'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
+ 'set INPUTDIR=%INPUTDIR:~0,-1%\n'
+ )
+ else:
+ input_dir_preamble = ''
+
+ if cygwin_shell:
+ # Find path to cygwin.
+ cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
+ # Prepare command.
+ direct_cmd = cmd
+ direct_cmd = [i.replace('$(IntDir)',
+ '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
+ direct_cmd = [i.replace('$(OutDir)',
+ '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
+ direct_cmd = [i.replace('$(InputDir)',
+ '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
+ if has_input_path:
+ direct_cmd = [i.replace('$(InputPath)',
+ '`cygpath -m "${INPUTPATH}"`')
+ for i in direct_cmd]
+ direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
+ # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
+ direct_cmd = ' '.join(direct_cmd)
+ # TODO(quote): regularize quoting path names throughout the module
+ cmd = ''
+ if do_setup_env:
+ cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
+ cmd += 'set CYGWIN=nontsec&& '
+ if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
+ cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
+ if direct_cmd.find('INTDIR') >= 0:
+ cmd += 'set INTDIR=$(IntDir)&& '
+ if direct_cmd.find('OUTDIR') >= 0:
+ cmd += 'set OUTDIR=$(OutDir)&& '
+ if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
+ cmd += 'set INPUTPATH=$(InputPath) && '
+ cmd += 'bash -c "%(cmd)s"'
+ cmd = cmd % {'cygwin_dir': cygwin_dir,
+ 'cmd': direct_cmd}
+ return input_dir_preamble + cmd
+ else:
+ # Convert cat --> type to mimic unix.
+ if cmd[0] == 'cat':
+ command = ['type']
+ else:
+ command = [cmd[0].replace('/', '\\')]
+ # Add call before command to ensure that commands can be tied together one
+ # after the other without aborting in Incredibuild, since IB makes a bat
+ # file out of the raw command string, and some commands (like python) are
+ # actually batch files themselves.
+ command.insert(0, 'call')
+ # Fix the paths
+ # TODO(quote): This is a really ugly heuristic, and will miss path fixing
+ # for arguments like "--arg=path" or "/opt:path".
+ # If the argument starts with a slash or dash, it's probably a command line
+ # switch
+ arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
+ arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
+ arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
+ if quote_cmd:
+ # Support a mode for using cmd directly.
+ # Convert any paths to native form (first element is used directly).
+ # TODO(quote): regularize quoting path names throughout the module
+ arguments = ['"%s"' % i for i in arguments]
+ # Collapse into a single command.
+ return input_dir_preamble + ' '.join(command + arguments)
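+# A sketch of the non-cygwin branch (assuming no fixpath_prefix is set, so
+# relative paths only get their slashes flipped): with quote_cmd,
+# cmd = ['python', 'tool.py', '--out', 'a/b.txt'] turns into:
+#   call python "tool.py" "--out" "a\b.txt"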
+
+
+def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
+ # Currently this weird argument munging is used to duplicate the way a
+ # python script would need to be run as part of the chrome tree.
+ # Eventually we should add some sort of rule_default option to set this
+ # per project. For now the behavior chrome needs is the default.
+ mcs = rule.get('msvs_cygwin_shell')
+ if mcs is None:
+ mcs = int(spec.get('msvs_cygwin_shell', 1))
+ elif isinstance(mcs, str):
+ mcs = int(mcs)
+ quote_cmd = int(rule.get('msvs_quote_cmd', 1))
+ return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
+ quote_cmd, do_setup_env=do_setup_env)
+
+
+def _AddActionStep(actions_dict, inputs, outputs, description, command):
+ """Merge action into an existing list of actions.
+
+ Care must be taken so that actions which have overlapping inputs either don't
+ get assigned to the same input, or get collapsed into one.
+
+ Arguments:
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ command: command line to execute
+ """
+ # Require there to be at least one input (call sites will ensure this).
+ assert inputs
+
+ action = {
+ 'inputs': inputs,
+ 'outputs': outputs,
+ 'description': description,
+ 'command': command,
+ }
+
+ # Pick where to stick this action.
+ # While less than optimal in terms of build time, attach them to the first
+ # input for now.
+ chosen_input = inputs[0]
+
+ # Add it there.
+ if chosen_input not in actions_dict:
+ actions_dict[chosen_input] = []
+ actions_dict[chosen_input].append(action)
+
+
+def _AddCustomBuildToolForMSVS(p, spec, primary_input,
+ inputs, outputs, description, cmd):
+ """Add a custom build tool to execute something.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ primary_input: input file to attach the build tool to
+ inputs: list of inputs
+ outputs: list of outputs
+ description: description of the action
+ cmd: command line to execute
+ """
+ inputs = _FixPaths(inputs)
+ outputs = _FixPaths(outputs)
+ tool = MSVSProject.Tool(
+ 'VCCustomBuildTool',
+ {'Description': description,
+ 'AdditionalDependencies': ';'.join(inputs),
+ 'Outputs': ';'.join(outputs),
+ 'CommandLine': cmd,
+ })
+ # Add to the properties of primary input for each config.
+ for config_name, c_data in spec['configurations'].iteritems():
+ p.AddFileConfig(_FixPath(primary_input),
+ _ConfigFullName(config_name, c_data), tools=[tool])
+
+
+def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
+ """Add actions accumulated into an actions_dict, merging as needed.
+
+ Arguments:
+ p: the target project
+ spec: the target project dict
+ actions_dict: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+ """
+ for primary_input in actions_dict:
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+ commands = []
+ for action in actions_dict[primary_input]:
+ inputs.update(OrderedSet(action['inputs']))
+ outputs.update(OrderedSet(action['outputs']))
+ descriptions.append(action['description'])
+ commands.append(action['command'])
+ # Add the custom build step for one input file.
+ description = ', and also '.join(descriptions)
+ command = '\r\n'.join(commands)
+ _AddCustomBuildToolForMSVS(p, spec,
+ primary_input=primary_input,
+ inputs=inputs,
+ outputs=outputs,
+ description=description,
+ cmd=command)
+
+
+def _RuleExpandPath(path, input_file):
+ """Given the input file to which a rule applied, string substitute a path.
+
+ Arguments:
+ path: a path to string expand
+ input_file: the file to which the rule applied.
+ Returns:
+ The string substituted path.
+ """
+ path = path.replace('$(InputName)',
+ os.path.splitext(os.path.split(input_file)[1])[0])
+ path = path.replace('$(InputDir)', os.path.dirname(input_file))
+ path = path.replace('$(InputExt)',
+ os.path.splitext(os.path.split(input_file)[1])[1])
+ path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
+ path = path.replace('$(InputPath)', input_file)
+ return path
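+# For example, with input_file 'dir/file.idl':
+#   _RuleExpandPath('$(InputName)_p.c', 'dir/file.idl')  => 'file_p.c'
+#   _RuleExpandPath('$(InputDir)/out/$(InputFileName)', 'dir/file.idl')
+#                                                        => 'dir/out/file.idl'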
+
+
+def _FindRuleTriggerFiles(rule, sources):
+ """Find the list of files which a particular rule applies to.
+
+ Arguments:
+ rule: the rule in question
+ sources: the set of all known source files for this project
+ Returns:
+ The list of sources that trigger a particular rule.
+ """
+ return rule.get('rule_sources', [])
+
+
+def _RuleInputsAndOutputs(rule, trigger_file):
+ """Find the inputs and outputs generated by a rule.
+
+ Arguments:
+ rule: the rule in question.
+ trigger_file: the main trigger for this rule.
+ Returns:
+ The pair of (inputs, outputs) involved in this rule.
+ """
+ raw_inputs = _FixPaths(rule.get('inputs', []))
+ raw_outputs = _FixPaths(rule.get('outputs', []))
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ inputs.add(trigger_file)
+ for i in raw_inputs:
+ inputs.add(_RuleExpandPath(i, trigger_file))
+ for o in raw_outputs:
+ outputs.add(_RuleExpandPath(o, trigger_file))
+ return (inputs, outputs)
+
+
+def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
+ """Generate a native rules file.
+
+ Arguments:
+ p: the target project
+ rules: the set of rules to include
+ output_dir: the directory in which the project/gyp resides
+ spec: the project dict
+ options: global generator options
+ """
+ rules_filename = '%s%s.rules' % (spec['target_name'],
+ options.suffix)
+ rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
+ spec['target_name'])
+ # Add each rule.
+ for r in rules:
+ rule_name = r['rule_name']
+ rule_ext = r['extension']
+ inputs = _FixPaths(r.get('inputs', []))
+ outputs = _FixPaths(r.get('outputs', []))
+ # Skip a rule with no action and no inputs.
+ if 'action' not in r and not r.get('rule_sources', []):
+ continue
+ cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
+ do_setup_env=True)
+ rules_file.AddCustomBuildRule(name=rule_name,
+ description=r.get('message', rule_name),
+ extensions=[rule_ext],
+ additional_dependencies=inputs,
+ outputs=outputs,
+ cmd=cmd)
+ # Write out rules file.
+ rules_file.WriteIfChanged()
+
+ # Add rules file to project.
+ p.AddToolFile(rules_filename)
+
+
+def _Cygwinify(path):
+ path = path.replace('$(OutDir)', '$(OutDirCygwin)')
+ path = path.replace('$(IntDir)', '$(IntDirCygwin)')
+ return path
+
+
+def _GenerateExternalRules(rules, output_dir, spec,
+ sources, options, actions_to_add):
+ """Generate an external makefile to do a set of rules.
+
+ Arguments:
+ rules: the list of rules to include
+ output_dir: path containing project and gyp files
+ spec: project specification data
+ sources: set of sources known
+ options: global generator options
+ actions_to_add: The list of actions we will add to.
+ """
+ filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
+ mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
+ # Find cygwin style versions of some paths.
+ mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
+ mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
+ # Gather stuff needed to emit all: target.
+ all_inputs = OrderedSet()
+ all_outputs = OrderedSet()
+ all_output_dirs = OrderedSet()
+ first_outputs = []
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ all_inputs.update(OrderedSet(inputs))
+ all_outputs.update(OrderedSet(outputs))
+ # Only use one target from each rule as the dependency for
+ # 'all' so we don't try to build each rule multiple times.
+ first_outputs.append(list(outputs)[0])
+ # Get the unique output directories for this rule.
+ output_dirs = [os.path.split(i)[0] for i in outputs]
+ for od in output_dirs:
+ all_output_dirs.add(od)
+ first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
+ # Write out all: target, including mkdir for each output directory.
+ mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
+ for od in all_output_dirs:
+ if od:
+ mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
+ mk_file.write('\n')
+ # Define how each output is generated.
+ for rule in rules:
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for tf in trigger_files:
+ # Get all the inputs and outputs for this rule for this trigger file.
+ inputs, outputs = _RuleInputsAndOutputs(rule, tf)
+ inputs = [_Cygwinify(i) for i in inputs]
+ outputs = [_Cygwinify(i) for i in outputs]
+ # Prepare the command line for this rule.
+ cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
+ cmd = ['"%s"' % i for i in cmd]
+ cmd = ' '.join(cmd)
+ # Add it to the makefile.
+ mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
+ mk_file.write('\t%s\n\n' % cmd)
+ # Close up the file.
+ mk_file.close()
+
+ # Add makefile to list of sources.
+ sources.add(filename)
+ # Add a build action to call makefile.
+ cmd = ['make',
+ 'OutDir=$(OutDir)',
+ 'IntDir=$(IntDir)',
+ '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
+ '-f', filename]
+ cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
+ # Insert makefile as 0'th input, so it gets the action attached there,
+ # as this is easier to understand in the IDE.
+ all_inputs = list(all_inputs)
+ all_inputs.insert(0, filename)
+ _AddActionStep(actions_to_add,
+ inputs=_FixPaths(all_inputs),
+ outputs=_FixPaths(all_outputs),
+ description='Running external rules for %s' %
+ spec['target_name'],
+ command=cmd)
+
+
+def _EscapeEnvironmentVariableExpansion(s):
+ """Escapes % characters.
+
+ Escapes any % characters so that Windows-style environment variable
+ expansions will leave them alone.
+ See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
+ to understand why we have to do this.
+
+ Args:
+ s: The string to be escaped.
+
+ Returns:
+ The escaped string.
+ """
+ s = s.replace('%', '%%')
+ return s
+
+
+quote_replacer_regex = re.compile(r'(\\*)"')
+
+
+def _EscapeCommandLineArgumentForMSVS(s):
+ """Escapes a Windows command-line argument.
+
+ So that the Win32 CommandLineToArgv function will turn the escaped result back
+ into the original string.
+ See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+ ("Parsing C++ Command-Line Arguments") to understand why we have to do
+ this.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
+
+ def _Replace(match):
+ # For a literal quote, CommandLineToArgv requires an odd number of
+ # backslashes preceding it, and it produces half as many literal backslashes
+ # (rounded down). So we need to produce 2n+1 backslashes.
+ return 2 * match.group(1) + '\\"'
+
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex.sub(_Replace, s)
+ # Now add unescaped quotes so that any whitespace is interpreted literally.
+ s = '"' + s + '"'
+ return s
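+# For example, _EscapeCommandLineArgumentForMSVS('say "hi"') produces the
+# characters "say \"hi\"" (outer quotes included), which CommandLineToArgv
+# parses back into the single argument: say "hi"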
+
+
+delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
+
+
+def _EscapeVCProjCommandLineArgListItem(s):
+ """Escapes command line arguments for MSVS.
+
+ The VCProj format stores string lists in a single string using commas and
+ semi-colons as separators, which must be quoted if they are to be
+ interpreted literally. However, command-line arguments may already have
+ quotes, and the VCProj parser is ignorant of the backslash escaping
+ convention used by CommandLineToArgv, so the command-line quotes and the
+ VCProj quotes may not be the same quotes. So to store a general
+ command-line argument in a VCProj list, we need to parse the existing
+ quoting according to VCProj's convention and quote any delimiters that are
+ not already quoted by that convention. The quotes that we add will also be
+ seen by CommandLineToArgv, so if backslashes precede them then we also have
+ to escape those backslashes according to the CommandLineToArgv
+ convention.
+
+ Args:
+ s: the string to be escaped.
+ Returns:
+ the escaped string.
+ """
+
+ def _Replace(match):
+ # For a non-literal quote, CommandLineToArgv requires an even number of
+ # backslashes preceding it, and it produces half as many literal
+ # backslashes. So we need to produce 2n backslashes.
+ return 2 * match.group(1) + '"' + match.group(2) + '"'
+
+ segments = s.split('"')
+ # The unquoted segments are at the even-numbered indices.
+ for i in range(0, len(segments), 2):
+ segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
+ # Concatenate back into a single string
+ s = '"'.join(segments)
+ if len(segments) % 2 == 0:
+ # String ends while still quoted according to VCProj's convention. This
+ # means the delimiter and the next list item that follow this one in the
+ # .vcproj file will be misinterpreted as part of this item. There is nothing
+ # we can do about this. Adding an extra quote would correct the problem in
+ # the VCProj but cause the same problem on the final command-line. Moving
+ # the item to the end of the list does works, but that's only possible if
+ # there's only one such item. Let's just warn the user.
+ print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
+ 'quotes in ' + s)
+ return s
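+# For example, _EscapeVCProjCommandLineArgListItem('a,b') returns 'a","b':
+# the comma is wrapped in quotes so the VCProj parser does not treat it as a
+# list delimiter, and CommandLineToArgv strips those quotes back out.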
+
+
+def _EscapeCppDefineForMSVS(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSVS(s)
+ s = _EscapeVCProjCommandLineArgListItem(s)
+ # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ s = s.replace('#', '\\%03o' % ord('#'))
+ return s
+
+
+quote_replacer_regex2 = re.compile(r'(\\+)"')
+
+
+def _EscapeCommandLineArgumentForMSBuild(s):
+ """Escapes a Windows command-line argument for use by MSBuild."""
+
+ def _Replace(match):
+ return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
+
+ # Escape all quotes so that they are interpreted literally.
+ s = quote_replacer_regex2.sub(_Replace, s)
+ return s
+
+
+def _EscapeMSBuildSpecialCharacters(s):
+ escape_dictionary = {
+ '%': '%25',
+ '$': '%24',
+ '@': '%40',
+ "'": '%27',
+ ';': '%3B',
+ '?': '%3F',
+ '*': '%2A'
+ }
+ result = ''.join([escape_dictionary.get(c, c) for c in s])
+ return result
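+# For example, _EscapeMSBuildSpecialCharacters('$(Var);x') returns
+# '%24(Var)%3Bx', so MSBuild sees the characters literally instead of
+# expanding them.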
+
+
+def _EscapeCppDefineForMSBuild(s):
+ """Escapes a CPP define so that it will reach the compiler unaltered."""
+ s = _EscapeEnvironmentVariableExpansion(s)
+ s = _EscapeCommandLineArgumentForMSBuild(s)
+ s = _EscapeMSBuildSpecialCharacters(s)
+ # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ s = s.replace('#', '\\%03o' % ord('#'))
+ return s
+
+
+def _GenerateRulesForMSVS(p, output_dir, options, spec,
+ sources, excluded_sources,
+ actions_to_add):
+ """Generate all the rules for a particular project.
+
+ Arguments:
+ p: the project
+ output_dir: directory to emit rules to
+ options: global options passed to the generator
+ spec: the specification for this project
+ sources: the set of all known source files in this project
+ excluded_sources: the set of sources excluded from normal processing
+ actions_to_add: deferred list of actions to add in
+ """
+ rules = spec.get('rules', [])
+ rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
+ rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
+
+ # Handle rules that use a native rules file.
+ if rules_native:
+ _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
+
+ # Handle external rules (non-native rules).
+ if rules_external:
+ _GenerateExternalRules(rules_external, output_dir, spec,
+ sources, options, actions_to_add)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, False)
+
+
+def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
+ # Add outputs generated by each rule (if applicable).
+ for rule in rules:
+ # Add in the outputs from this rule.
+ trigger_files = _FindRuleTriggerFiles(rule, sources)
+ for trigger_file in trigger_files:
+ # Remove trigger_file from excluded_sources to let the rule be triggered
+ # (e.g. rule trigger ax_enums.idl is added to excluded_sources
+ # because it's also in an action's inputs in the same project)
+ excluded_sources.discard(_FixPath(trigger_file))
+ # Done if not processing outputs as sources.
+ if int(rule.get('process_outputs_as_sources', False)):
+ inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
+ inputs = OrderedSet(_FixPaths(inputs))
+ outputs = OrderedSet(_FixPaths(outputs))
+ inputs.remove(_FixPath(trigger_file))
+ sources.update(inputs)
+ if not is_msbuild:
+ excluded_sources.update(inputs)
+ sources.update(outputs)
+
+
+def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
+ """Take inputs with actions attached out of the list of exclusions.
+
+ Arguments:
+ excluded_sources: list of source files not to be built.
+ actions_to_add: dict of actions keyed on source file they're attached to.
+ Returns:
+ excluded_sources with files that have actions attached removed.
+ """
+ must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
+ return [s for s in excluded_sources if s not in must_keep]
+
+
+def _GetDefaultConfiguration(spec):
+ return spec['configurations'][spec['default_configuration']]
+
+
+def _GetGuidOfProject(proj_path, spec):
+ """Get the guid for the project.
+
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ the guid.
+ Raises:
+ ValueError: if the specified GUID is invalid.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ # Decide the guid of the project.
+ guid = default_config.get('msvs_guid')
+ if guid:
+ if VALID_MSVS_GUID_CHARS.match(guid) is None:
+ raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
+ (guid, VALID_MSVS_GUID_CHARS.pattern))
+ guid = '{%s}' % guid
+ guid = guid or MSVSNew.MakeGuid(proj_path)
+ return guid
+
+
+def _GetMsbuildToolsetOfProject(proj_path, spec, version):
+ """Get the platform toolset for the project.
+
+ Arguments:
+ proj_path: Path of the vcproj or vcxproj file to generate.
+ spec: The target dictionary containing the properties of the target.
+ version: The MSVSVersion object.
+ Returns:
+ the platform toolset string or None.
+ """
+ # Pluck out the default configuration.
+ default_config = _GetDefaultConfiguration(spec)
+ toolset = default_config.get('msbuild_toolset')
+ if not toolset and version.DefaultToolset():
+ toolset = version.DefaultToolset()
+ return toolset
+
+
+def _GenerateProject(project, options, version, generator_flags):
+ """Generates a vcproj file.
+
+ Arguments:
+ project: the MSVSProject object.
+ options: global generator options.
+ version: the MSVSVersion object.
+ generator_flags: dict of generator-specific flags.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
+ default_config = _GetDefaultConfiguration(project.spec)
+
+ # Skip emitting anything if told to with msvs_existing_vcproj option.
+ if default_config.get('msvs_existing_vcproj'):
+ return []
+
+ if version.UsesVcxproj():
+ return _GenerateMSBuildProject(project, options, version, generator_flags)
+ else:
+ return _GenerateMSVSProject(project, options, version, generator_flags)
+
+
+# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
+def _ValidateSourcesForMSVSProject(spec, version):
+ """Makes sure if duplicate basenames are not specified in the source list.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ version: The VisualStudioVersion object.
+ """
+ # This validation should not be applied to MSVC2010 and later.
+ assert not version.UsesVcxproj()
+
+ # TODO: Check if MSVC allows this for loadable_module targets.
+ if spec.get('type', None) not in ('static_library', 'shared_library'):
+ return
+ sources = spec.get('sources', [])
+ basenames = {}
+ for source in sources:
+ name, ext = os.path.splitext(source)
+ is_compiled_file = ext in [
+ '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
+ if not is_compiled_file:
+ continue
+ basename = os.path.basename(name) # Don't include extension.
+ basenames.setdefault(basename, []).append(source)
+
+ error = ''
+ for basename, files in basenames.iteritems():
+ if len(files) > 1:
+ error += ' %s: %s\n' % (basename, ' '.join(files))
+
+ if error:
+ print('static library %s has several files with the same basename:\n' %
+ spec['target_name'] + error + 'MSVC08 cannot handle that.')
+ raise GypError('Duplicate basenames in sources section, see list above')
+
+
+def _GenerateMSVSProject(project, options, version, generator_flags):
+ """Generates a .vcproj file. It may create .rules and .user files too.
+
+ Arguments:
+ project: The project object we will generate the file for.
+ options: Global options passed to the generator.
+ version: The VisualStudioVersion object.
+ generator_flags: dict of generator-specific flags.
+ """
+ spec = project.spec
+ gyp.common.EnsureDirExists(project.path)
+
+ platforms = _GetUniquePlatforms(spec)
+ p = MSVSProject.Writer(project.path, version, spec['target_name'],
+ project.guid, platforms)
+
+ # Get directory project file is in.
+ project_dir = os.path.split(project.path)[0]
+ gyp_path = _NormalizedSource(project.build_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ config_type = _GetMSVSConfigurationType(spec, project.build_file)
+ for config_name, config in spec['configurations'].iteritems():
+ _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
+
+ # MSVC08 and prior versions cannot handle duplicate basenames in the same
+ # target.
+ # TODO: Take excluded sources into consideration if possible.
+ _ValidateSourcesForMSVSProject(spec, version)
+
+ # Prepare list of sources and excluded sources.
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
+ gyp_file)
+
+ # Add rules.
+ actions_to_add = {}
+ _GenerateRulesForMSVS(p, project_dir, options, spec,
+ sources, excluded_sources,
+ actions_to_add)
+ list_excluded = generator_flags.get('msvs_list_excluded_files', True)
+ sources, excluded_sources, excluded_idl = (
+ _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
+ sources, excluded_sources,
+ list_excluded, version))
+
+ # Add in files.
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+ p.AddFiles(sources)
+
+ _AddToolFilesToMSVS(p, spec)
+ _HandlePreCompiledHeaders(p, sources, spec)
+ _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
+ _AddCopies(actions_to_add, spec)
+ _WriteMSVSUserFile(project.path, version, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+ # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(
+ excluded_sources, actions_to_add)
+ _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
+ list_excluded)
+ _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
+
+ # Write it out.
+ p.WriteIfChanged()
+
+ return missing_sources
+
+
+def _GetUniquePlatforms(spec):
+ """Returns the list of unique platforms for this spec, e.g ['win32', ...].
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The list of unique platform names.
+ """
+ # Gather list of unique platforms.
+ platforms = OrderedSet()
+ for configuration in spec['configurations']:
+ platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
+ platforms = list(platforms)
+ return platforms
+
+
+def _CreateMSVSUserFile(proj_path, version, spec):
+ """Generates a .user file for the user running this Gyp program.
+
+ Arguments:
+ proj_path: The path of the project file being created. The .user file
+ shares the same path (with an appropriate suffix).
+ version: The VisualStudioVersion object.
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The MSVSUserFile object created.
+ """
+ (domain, username) = _GetDomainAndUserName()
+ vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
+ user_file = MSVSUserFile.Writer(vcuser_filename, version,
+ spec['target_name'])
+ return user_file
+
+
+def _GetMSVSConfigurationType(spec, build_file):
+ """Returns the configuration type for this project.
+
+ It's a number defined by Microsoft. May raise an exception.
+
+ Args:
+ spec: The target dictionary containing the properties of the target.
+ build_file: The path of the gyp file.
+ Returns:
+ An integer, the configuration type.
+ """
+ try:
+ config_type = {
+ 'executable': '1', # .exe
+ 'shared_library': '2', # .dll
+ 'loadable_module': '2', # .dll
+ 'static_library': '4', # .lib
+ 'none': '10', # Utility type
+ }[spec['type']]
+ except KeyError:
+ if spec.get('type'):
+ raise GypError('Target type %s is not a valid target type for '
+ 'target %s in %s.' %
+ (spec['type'], spec['target_name'], build_file))
+ else:
+ raise GypError('Missing type field for target %s in %s.' %
+ (spec['target_name'], build_file))
+ return config_type
+
+
+def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
+ """Adds a configuration to the MSVS project.
+
+ Many settings in a vcproj file are specific to a configuration. This
+ function generates the main part of the vcproj file that's configuration-
+ specific.
+
+ Arguments:
+ p: The target project being generated.
+ spec: The target dictionary containing the properties of the target.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ """
+ # Get the information for this configuration
+ include_dirs, midl_include_dirs, resource_include_dirs = \
+ _GetIncludeDirs(config)
+ libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(config)
+ out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
+ defines = _GetDefines(config)
+ defines = [_EscapeCppDefineForMSVS(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(config)
+ prebuild = config.get('msvs_prebuild')
+ postbuild = config.get('msvs_postbuild')
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = config.get('msvs_precompiled_header')
+
+ # Prepare the list of tools as a dictionary.
+ tools = dict()
+ # Add in user specified msvs_settings.
+ msvs_settings = config.get('msvs_settings', {})
+ MSVSSettings.ValidateMSVSSettings(msvs_settings)
+
+ # Prevent default library inheritance from the environment.
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
+
+ for tool in msvs_settings:
+ settings = config['msvs_settings'][tool]
+ for setting in settings:
+ _ToolAppend(tools, tool, setting, settings[setting])
+ # Add the information to the appropriate tool
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(tools, 'VCMIDLTool',
+ 'AdditionalIncludeDirectories', midl_include_dirs)
+ _ToolAppend(tools, 'VCResourceCompilerTool',
+ 'AdditionalIncludeDirectories', resource_include_dirs)
+ # Add in libraries.
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
+ library_dirs)
+ if out_file:
+ _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
+ # Add defines.
+ _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
+ _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
+ defines)
+ # Change program database directory to prevent collisions.
+ _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
+ '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
+ # Add disabled warnings.
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'DisableSpecificWarnings', disabled_warnings)
+ # Add Pre-build.
+ _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
+ # Add Post-build.
+ _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'PrecompiledHeaderThrough', precompiled_header)
+ _ToolAppend(tools, 'VCCLCompilerTool',
+ 'ForcedIncludeFiles', precompiled_header)
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec['type'] == 'loadable_module':
+ _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
+
+ _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
+
+
+def _GetIncludeDirs(config):
+ """Returns the list of directories to be used for #include directives.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+ # TODO(bradnelson): include_dirs should really be flexible enough not to
+ # require this sort of thing.
+ include_dirs = (
+ config.get('include_dirs', []) +
+ config.get('msvs_system_include_dirs', []))
+ midl_include_dirs = (
+ config.get('midl_include_dirs', []) +
+ config.get('msvs_system_include_dirs', []))
+ resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+ include_dirs = _FixPaths(include_dirs)
+ midl_include_dirs = _FixPaths(midl_include_dirs)
+ resource_include_dirs = _FixPaths(resource_include_dirs)
+ return include_dirs, midl_include_dirs, resource_include_dirs
+
+
+def _GetLibraryDirs(config):
+ """Returns the list of directories to be used for library search paths.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+
+ library_dirs = config.get('library_dirs', [])
+ library_dirs = _FixPaths(library_dirs)
+ return library_dirs
+
+
+def _GetLibraries(spec):
+ """Returns the list of libraries for this configuration.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ The list of library filenames.
+ """
+ libraries = spec.get('libraries', [])
+ # Strip out -l, as it is not used on windows (but is needed so we can pass
+ # in libraries that are assumed to be in the default library path).
+ # Also remove duplicate entries, leaving only the last duplicate, while
+ # preserving order.
+ found = OrderedSet()
+ unique_libraries_list = []
+ for entry in reversed(libraries):
+ library = re.sub(r'^\-l', '', entry)
+ if not os.path.splitext(library)[1]:
+ library += '.lib'
+ if library not in found:
+ found.add(library)
+ unique_libraries_list.append(library)
+ unique_libraries_list.reverse()
+ return unique_libraries_list
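+# For example, _GetLibraries({'libraries': ['-lfoo', 'bar.lib', 'foo']})
+# returns ['bar.lib', 'foo.lib']: '-l' prefixes are stripped, '.lib' is
+# appended where an extension is missing, and only the last occurrence of a
+# duplicate is kept.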
+
+
+def _GetOutputFilePathAndTool(spec, msbuild):
+ """Returns the path and tool to use for this target.
+
+ Figures out the path of the file this spec will create and the name of
+ the VC tool that will create it.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ msbuild: True when generating for MSBuild (.vcxproj); affects the default
+ suffix used for the output file.
+ Returns:
+ A triple of (file path, name of the vc tool, name of the msbuild tool)
+ """
+ # Select a name for the output file.
+ out_file = ''
+ vc_tool = ''
+ msbuild_tool = ''
+ output_file_map = {
+ 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
+ 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
+ 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
+ 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
+ }
+ output_file_props = output_file_map.get(spec['type'])
+ if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
+ vc_tool, msbuild_tool, out_dir, suffix = output_file_props
+ if spec.get('standalone_static_library', 0):
+ out_dir = '$(OutDir)'
+ out_dir = spec.get('product_dir', out_dir)
+ product_extension = spec.get('product_extension')
+ if product_extension:
+ suffix = '.' + product_extension
+ elif msbuild:
+ suffix = '$(TargetExt)'
+ prefix = spec.get('product_prefix', '')
+ product_name = spec.get('product_name', '$(ProjectName)')
+ out_file = ntpath.join(out_dir, prefix + product_name + suffix)
+ return out_file, vc_tool, msbuild_tool
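+# For example, for a spec of type 'executable' with no product_* overrides,
+# _GetOutputFilePathAndTool(spec, msbuild=False) returns
+# ('$(OutDir)\$(ProjectName).exe', 'VCLinkerTool', 'Link').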
+
+
+def _GetOutputTargetExt(spec):
+ """Returns the extension for this target, including the dot
+
+ If product_extension is specified, set target_extension to this to avoid
+ MSB8012, returns None otherwise. Ignores any target_extension settings in
+ the input files.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A string with the extension, or None
+ """
+ target_extension = spec.get('product_extension')
+ if target_extension:
+ return '.' + target_extension
+ return None
+
+
+def _GetDefines(config):
+ """Returns the list of preprocessor definitions for this configuation.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of preprocessor definitions.
+ """
+ defines = []
+ for d in config.get('defines', []):
+ if type(d) == list:
+ fd = '='.join([str(dpart) for dpart in d])
+ else:
+ fd = str(d)
+ defines.append(fd)
+ return defines
+
+
+def _GetDisabledWarnings(config):
+ return [str(i) for i in config.get('msvs_disabled_warnings', [])]
+
+
+def _GetModuleDefinition(spec):
+ def_file = ''
+ if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
+ def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
+ if len(def_files) == 1:
+ def_file = _FixPath(def_files[0])
+ elif def_files:
+ raise ValueError(
+ 'Multiple module definition files in one target, target %s lists '
+ 'multiple .def files: %s' % (
+ spec['target_name'], ' '.join(def_files)))
+ return def_file
+
+
+def _ConvertToolsToExpectedForm(tools):
+ """Convert tools to a form expected by Visual Studio.
+
+ Arguments:
+ tools: A dictionary of settings; the tool name is the key.
+ Returns:
+ A list of Tool objects.
+ """
+ tool_list = []
+ for tool, settings in tools.iteritems():
+ # Collapse settings with lists.
+ settings_fixed = {}
+ for setting, value in settings.iteritems():
+ if type(value) == list:
+ if ((tool == 'VCLinkerTool' and
+ setting == 'AdditionalDependencies') or
+ setting == 'AdditionalOptions'):
+ settings_fixed[setting] = ' '.join(value)
+ else:
+ settings_fixed[setting] = ';'.join(value)
+ else:
+ settings_fixed[setting] = value
+ # Add in this tool.
+ tool_list.append(MSVSProject.Tool(tool, settings_fixed))
+ return tool_list
+
+
+def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
+ """Add to the project file the configuration specified by config.
+
+ Arguments:
+ p: The target project being generated.
+ spec: the target project dict.
+ tools: A dictionary of settings; the tool name is the key.
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ config_type: The configuration type, a number as defined by Microsoft.
+ config_name: The name of the configuration.
+ """
+ attributes = _GetMSVSAttributes(spec, config, config_type)
+ # Add in this configuration.
+ tool_list = _ConvertToolsToExpectedForm(tools)
+ p.AddConfig(_ConfigFullName(config_name, config),
+ attrs=attributes, tools=tool_list)
+
+
+def _GetMSVSAttributes(spec, config, config_type):
+ # Prepare configuration attributes.
+ prepared_attrs = {}
+ source_attrs = config.get('msvs_configuration_attributes', {})
+ for a in source_attrs:
+ prepared_attrs[a] = source_attrs[a]
+ # Add props files.
+ vsprops_dirs = config.get('msvs_props', [])
+ vsprops_dirs = _FixPaths(vsprops_dirs)
+ if vsprops_dirs:
+ prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
+ # Set configuration type.
+ prepared_attrs['ConfigurationType'] = config_type
+ output_dir = prepared_attrs.get('OutputDirectory',
+ '$(SolutionDir)$(ConfigurationName)')
+ prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
+ if 'IntermediateDirectory' not in prepared_attrs:
+ intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
+ prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
+ else:
+ intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
+ intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
+ prepared_attrs['IntermediateDirectory'] = intermediate
+ return prepared_attrs
+
+
+def _AddNormalizedSources(sources_set, sources_array):
+ sources_set.update(_NormalizedSource(s) for s in sources_array)
+
+
+def _PrepareListOfSources(spec, generator_flags, gyp_file):
+ """Prepare list of sources and excluded sources.
+
+ Besides the sources specified directly in the spec, adds the gyp file so
+ that a change to it will cause a re-compile. Also adds appropriate sources
+ for actions and copies. Assumes later stage will un-exclude files which
+ have custom build steps attached.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ gyp_file: The name of the gyp file.
+ Returns:
+ A pair of (list of sources, list of excluded sources).
+ The sources will be relative to the gyp file.
+ """
+ sources = OrderedSet()
+ _AddNormalizedSources(sources, spec.get('sources', []))
+ excluded_sources = OrderedSet()
+ # Add in the gyp file.
+ if not generator_flags.get('standalone'):
+ sources.add(gyp_file)
+
+ # Add in 'action' inputs and outputs.
+ for a in spec.get('actions', []):
+ inputs = a['inputs']
+ inputs = [_NormalizedSource(i) for i in inputs]
+ # Add all inputs to sources and excluded sources.
+ inputs = OrderedSet(inputs)
+ sources.update(inputs)
+ if not spec.get('msvs_external_builder'):
+ excluded_sources.update(inputs)
+ if int(a.get('process_outputs_as_sources', False)):
+ _AddNormalizedSources(sources, a.get('outputs', []))
+ # Add in 'copies' inputs and outputs.
+ for cpy in spec.get('copies', []):
+ _AddNormalizedSources(sources, cpy.get('files', []))
+ return (sources, excluded_sources)
+
+
+def _AdjustSourcesAndConvertToFilterHierarchy(
+ spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
+ """Adjusts the list of sources and excluded sources.
+
+ Also converts the sets to lists.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ options: Global generator options.
+ gyp_dir: The path of the directory containing the gyp file.
+ sources: A set of sources to be included for this project.
+ excluded_sources: A set of sources to be excluded for this project.
+ version: A MSVSVersion object.
+ Returns:
+ A trio of (list of sources, list of excluded sources,
+ path of excluded IDL file)
+ """
+ # Exclude excluded sources coming into the generator.
+ excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
+ # Add excluded sources into sources for good measure.
+ sources.update(excluded_sources)
+ # Convert to proper windows form.
+ # NOTE: sources goes from being a set to a list here.
+ # NOTE: excluded_sources goes from being a set to a list here.
+ sources = _FixPaths(sources)
+ # Convert to proper windows form.
+ excluded_sources = _FixPaths(excluded_sources)
+
+ excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
+
+ precompiled_related = _GetPrecompileRelatedFiles(spec)
+ # Find the excluded ones, minus the precompiled header related ones.
+ fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
+
+ # Convert to folders and the right slashes.
+ sources = [i.split('\\') for i in sources]
+ sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
+ list_excluded=list_excluded,
+ msvs_version=version)
+
+ # Prune filters with a single child to flatten ugly directory structures
+ # such as ../../src/modules/module1 etc.
+ if version.UsesVcxproj():
+ while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
+ and len(set([s.name for s in sources])) == 1:
+ assert all([len(s.contents) == 1 for s in sources])
+ sources = [s.contents[0] for s in sources]
+ else:
+ while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+ sources = sources[0].contents
+
+ return sources, excluded_sources, excluded_idl
+
+
+def _IdlFilesHandledNonNatively(spec, sources):
+ # If any non-native rules use 'idl' as an extension exclude idl files.
+ # Gather a list here to use later.
+ using_idl = False
+ for rule in spec.get('rules', []):
+ if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
+ using_idl = True
+ break
+ if using_idl:
+ excluded_idl = [i for i in sources if i.endswith('.idl')]
+ else:
+ excluded_idl = []
+ return excluded_idl
+
+
+def _GetPrecompileRelatedFiles(spec):
+ # Gather a list of precompiled header related sources.
+ precompiled_related = []
+ for _, config in spec['configurations'].iteritems():
+ for k in precomp_keys:
+ f = config.get(k)
+ if f:
+ precompiled_related.append(_FixPath(f))
+ return precompiled_related
+
+
+def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
+ list_excluded):
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ for file_name, excluded_configs in exclusions.iteritems():
+ if (not list_excluded and
+ len(excluded_configs) == len(spec['configurations'])):
+ # If we're not listing excluded files, then they won't appear in the
+ # project, so don't try to configure them to be excluded.
+ pass
+ else:
+ for config_name, config in excluded_configs:
+ p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
+ {'ExcludedFromBuild': 'true'})
+
+
+def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
+ exclusions = {}
+ # Exclude excluded sources from being built.
+ for f in excluded_sources:
+ excluded_configs = []
+ for config_name, config in spec['configurations'].iteritems():
+ precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
+ # Don't do this for ones that are precompiled header related.
+ if f not in precomped:
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ # If any non-native rules use 'idl' as an extension exclude idl files.
+ # Exclude them now.
+ for f in excluded_idl:
+ excluded_configs = []
+ for config_name, config in spec['configurations'].iteritems():
+ excluded_configs.append((config_name, config))
+ exclusions[f] = excluded_configs
+ return exclusions
+
+
+def _AddToolFilesToMSVS(p, spec):
+ # Add in tool files (rules).
+ tool_files = OrderedSet()
+ for _, config in spec['configurations'].iteritems():
+ for f in config.get('msvs_tool_files', []):
+ tool_files.add(f)
+ for f in tool_files:
+ p.AddToolFile(f)
+
+
+def _HandlePreCompiledHeaders(p, sources, spec):
+ # Pre-compiled header source stubs need a different compiler flag
+ # (generate precompiled header) and any source file not of the same
+ # kind (i.e. C vs. C++) as the precompiled header source stub needs
+ # to have use of precompiled headers disabled.
+ extensions_excluded_from_precompile = []
+ for config_name, config in spec['configurations'].iteritems():
+ source = config.get('msvs_precompiled_source')
+ if source:
+ source = _FixPath(source)
+ # UsePrecompiledHeader=1 means create the precompiled header (/Yc).
+ tool = MSVSProject.Tool('VCCLCompilerTool',
+ {'UsePrecompiledHeader': '1'})
+ p.AddFileConfig(source, _ConfigFullName(config_name, config),
+ {}, tools=[tool])
+ basename, extension = os.path.splitext(source)
+ if extension == '.c':
+ extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
+ else:
+ extensions_excluded_from_precompile = ['.c']
+ def DisableForSourceTree(source_tree):
+ for source in source_tree:
+ if isinstance(source, MSVSProject.Filter):
+ DisableForSourceTree(source.contents)
+ else:
+ basename, extension = os.path.splitext(source)
+ if extension in extensions_excluded_from_precompile:
+ for config_name, config in spec['configurations'].iteritems():
+ tool = MSVSProject.Tool('VCCLCompilerTool',
+ {'UsePrecompiledHeader': '0',
+ 'ForcedIncludeFiles': '$(NOINHERIT)'})
+ p.AddFileConfig(_FixPath(source),
+ _ConfigFullName(config_name, config),
+ {}, tools=[tool])
+ # Do nothing if there was no precompiled source.
+ if extensions_excluded_from_precompile:
+ DisableForSourceTree(sources)
+
+
+def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
+ # Add actions.
+ actions = spec.get('actions', [])
+ # Don't setup_env every time. When all the actions are run together in one
+ # batch file in VS, the PATH will grow too long.
+ # Membership in this set means that the cygwin environment has been set up,
+ # and does not need to be set up again.
+ have_setup_env = set()
+ for a in actions:
+ # Attach actions to the gyp file if nothing else is there.
+ inputs = a.get('inputs') or [relative_path_of_gyp_file]
+ attached_to = inputs[0]
+ need_setup_env = attached_to not in have_setup_env
+ cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
+ do_setup_env=need_setup_env)
+ have_setup_env.add(attached_to)
+ # Add the action.
+ _AddActionStep(actions_to_add,
+ inputs=inputs,
+ outputs=a.get('outputs', []),
+ description=a.get('message', a['action_name']),
+ command=cmd)
+
+
+def _WriteMSVSUserFile(project_path, version, spec):
+ # Add run_as and test targets.
+ if 'run_as' in spec:
+ run_as = spec['run_as']
+ action = run_as.get('action', [])
+ environment = run_as.get('environment', [])
+ working_directory = run_as.get('working_directory', '.')
+ elif int(spec.get('test', 0)):
+ action = ['$(TargetPath)', '--gtest_print_time']
+ environment = []
+ working_directory = '.'
+ else:
+ return # Nothing to add
+ # Write out the user file.
+ user_file = _CreateMSVSUserFile(project_path, version, spec)
+ for config_name, c_data in spec['configurations'].iteritems():
+ user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
+ action, environment, working_directory)
+ user_file.WriteIfChanged()
+
+
+def _AddCopies(actions_to_add, spec):
+ copies = _GetCopies(spec)
+ for inputs, outputs, cmd, description in copies:
+ _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
+ description=description, command=cmd)
+
+
+def _GetCopies(spec):
+ copies = []
+ # Add copies.
+ for cpy in spec.get('copies', []):
+ for src in cpy.get('files', []):
+ dst = os.path.join(cpy['destination'], os.path.basename(src))
+ # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
+ # outputs, so do the same for our generated command line.
+ if src.endswith('/'):
+ src_bare = src[:-1]
+ base_dir = posixpath.split(src_bare)[0]
+ outer_dir = posixpath.split(src_bare)[1]
+ cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
+ _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
+ copies.append(([src], ['dummy_copies', dst], cmd,
+ 'Copying %s to %s' % (src, dst)))
+ else:
+ cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
+ _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
+ copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
+ return copies
+
+
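+# Example (for illustration; _FixPath converts separators to backslashes):
+# a copy step such as
+#
+#   {'destination': 'out', 'files': ['src/foo.txt']}
+#
+# yields one entry whose command line is roughly
+#
+#   mkdir "out" 2>nul & set ERRORLEVEL=0 & copy /Y "src\foo.txt" "out\foo.txt"
+#
+# while a trailing-slash source such as 'src/data/' takes the xcopy branch
+# and recursively copies the 'data' directory under the destination.
+
+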
+def _GetPathDict(root, path):
+ # |path| will eventually be empty (in the recursive calls) if it was initially
+ # relative; otherwise it will eventually end up as '\', 'D:\', etc.
+ if not path or path.endswith(os.sep):
+ return root
+ parent, folder = os.path.split(path)
+ parent_dict = _GetPathDict(root, parent)
+ if folder not in parent_dict:
+ parent_dict[folder] = dict()
+ return parent_dict[folder]
+
+
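+# Doctest-style example (illustrative only):
+#
+#   >>> root = {}
+#   >>> _GetPathDict(root, os.path.join('a', 'b'))
+#   {}
+#   >>> root
+#   {'a': {'b': {}}}
+#
+# Each path component gets its own nested dict, and the innermost dict is
+# returned so callers can hang project entries off of it.
+
+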
+def _DictsToFolders(base_path, bucket, flat):
+ # Convert to folders recursively.
+ children = []
+ for folder, contents in bucket.iteritems():
+ if type(contents) == dict:
+ folder_children = _DictsToFolders(os.path.join(base_path, folder),
+ contents, flat)
+ if flat:
+ children += folder_children
+ else:
+ folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
+ name='(' + folder + ')',
+ entries=folder_children)
+ children.append(folder_children)
+ else:
+ children.append(contents)
+ return children
+
+
+def _CollapseSingles(parent, node):
+  # Recursively explore the tree of dicts looking for projects that are the
+  # sole item in a folder with the same name as the project. Bring
+ # such projects up one level.
+ if (type(node) == dict and
+ len(node) == 1 and
+ node.keys()[0] == parent + '.vcproj'):
+ return node[node.keys()[0]]
+ if type(node) != dict:
+ return node
+ for child in node:
+ node[child] = _CollapseSingles(child, node[child])
+ return node
+
+
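+# Doctest-style example (illustrative only): a project that is the sole
+# entry in a folder with the same base name is hoisted up one level.
+#
+#   >>> _CollapseSingles('', {'base': {'base.vcproj': 'project-object'}})
+#   {'base': 'project-object'}
+
+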
+def _GatherSolutionFolders(sln_projects, project_objects, flat):
+ root = {}
+ # Convert into a tree of dicts on path.
+ for p in sln_projects:
+ gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
+ gyp_dir = os.path.dirname(gyp_file)
+ path_dict = _GetPathDict(root, gyp_dir)
+ path_dict[target + '.vcproj'] = project_objects[p]
+ # Walk down from the top until we hit a folder that has more than one entry.
+ # In practice, this strips the top-level "src/" dir from the hierarchy in
+ # the solution.
+ while len(root) == 1 and type(root[root.keys()[0]]) == dict:
+ root = root[root.keys()[0]]
+ # Collapse singles.
+ root = _CollapseSingles('', root)
+ # Merge buckets until everything is a root entry.
+ return _DictsToFolders('', root, flat)
+
+
+def _GetPathOfProject(qualified_target, spec, options, msvs_version):
+ default_config = _GetDefaultConfiguration(spec)
+ proj_filename = default_config.get('msvs_existing_vcproj')
+ if not proj_filename:
+ proj_filename = (spec['target_name'] + options.suffix +
+ msvs_version.ProjectExtension())
+
+ build_file = gyp.common.BuildFile(qualified_target)
+ proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
+ fix_prefix = None
+ if options.generator_output:
+ project_dir_path = os.path.dirname(os.path.abspath(proj_path))
+ proj_path = os.path.join(options.generator_output, proj_path)
+ fix_prefix = gyp.common.RelativePath(project_dir_path,
+ os.path.dirname(proj_path))
+ return proj_path, fix_prefix
+
+
+def _GetPlatformOverridesOfProject(spec):
+ # Prepare a dict indicating which project configurations are used for which
+ # solution configurations for this target.
+ config_platform_overrides = {}
+ for config_name, c in spec['configurations'].iteritems():
+ config_fullname = _ConfigFullName(config_name, c)
+ platform = c.get('msvs_target_platform', _ConfigPlatform(c))
+ fixed_config_fullname = '%s|%s' % (
+ _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
+ config_platform_overrides[config_fullname] = fixed_config_fullname
+ return config_platform_overrides
+
+
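+# Illustrative example: a 'Debug' configuration built as Win32 but marked
+# with 'msvs_target_platform': 'x64' would, assuming _ConfigFullName yields
+# 'Debug|Win32' and _ConfigBaseName yields 'Debug', produce
+#
+#   {'Debug|Win32': 'Debug|x64'}
+#
+# i.e. the solution's Debug|Win32 entry maps to the project's x64 flavor.
+
+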
+def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
+ """Create a MSVSProject object for the targets found in target list.
+
+ Arguments:
+ target_list: the list of targets to generate project objects for.
+ target_dicts: the dictionary of specifications.
+ options: global generator options.
+ msvs_version: the MSVSVersion object.
+ Returns:
+    A dictionary of created projects, keyed by qualified target.
+ """
+ global fixpath_prefix
+ # Generate each project.
+ projects = {}
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise GypError(
+ 'Multiple toolsets not supported in msvs build (target %s)' %
+ qualified_target)
+ proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
+ options, msvs_version)
+ guid = _GetGuidOfProject(proj_path, spec)
+ overrides = _GetPlatformOverridesOfProject(spec)
+ build_file = gyp.common.BuildFile(qualified_target)
+ # Create object for this project.
+ obj = MSVSNew.MSVSProject(
+ proj_path,
+ name=spec['target_name'],
+ guid=guid,
+ spec=spec,
+ build_file=build_file,
+ config_platform_overrides=overrides,
+ fixpath_prefix=fixpath_prefix)
+ # Set project toolset if any (MS build only)
+ if msvs_version.UsesVcxproj():
+ obj.set_msbuild_toolset(
+ _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
+ projects[qualified_target] = obj
+ # Set all the dependencies, but not if we are using an external builder like
+ # ninja
+ for project in projects.values():
+ if not project.spec.get('msvs_external_builder'):
+ deps = project.spec.get('dependencies', [])
+ deps = [projects[d] for d in deps]
+ project.set_dependencies(deps)
+ return projects
+
+
+def _InitNinjaFlavor(params, target_list, target_dicts):
+ """Initialize targets for the ninja flavor.
+
+ This sets up the necessary variables in the targets to generate msvs projects
+ that use ninja as an external builder. The variables in the spec are only set
+ if they have not been set. This allows individual specs to override the
+ default values initialized here.
+
+  Arguments:
+ params: Params provided to the generator.
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ """
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec.get('msvs_external_builder'):
+ # The spec explicitly defined an external builder, so don't change it.
+ continue
+
+ path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
+
+ spec['msvs_external_builder'] = 'ninja'
+ if not spec.get('msvs_external_builder_out_dir'):
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ gyp_dir = os.path.dirname(gyp_file)
+ configuration = '$(Configuration)'
+ if params.get('target_arch') == 'x64':
+ configuration += '_x64'
+ spec['msvs_external_builder_out_dir'] = os.path.join(
+ gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
+ ninja_generator.ComputeOutputDir(params),
+ configuration)
+ if not spec.get('msvs_external_builder_build_cmd'):
+ spec['msvs_external_builder_build_cmd'] = [
+ path_to_ninja,
+ '-C',
+ '$(OutDir)',
+ '$(ProjectName)',
+ ]
+ if not spec.get('msvs_external_builder_clean_cmd'):
+ spec['msvs_external_builder_clean_cmd'] = [
+ path_to_ninja,
+ '-C',
+ '$(OutDir)',
+ '-tclean',
+ '$(ProjectName)',
+ ]
+
+
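+# Sketch of the defaults set above (illustrative): a target in
+# 'base/base.gyp' with no explicit msvs_external_builder_* settings and a
+# non-x64 target_arch ends up with roughly
+#
+#   spec['msvs_external_builder'] = 'ninja'
+#   spec['msvs_external_builder_build_cmd'] = [
+#       'ninja.exe', '-C', '$(OutDir)', '$(ProjectName)']
+#
+# and an out dir derived from the toplevel dir and ComputeOutputDir(params).
+
+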
+def CalculateVariables(default_variables, params):
+ """Generated variables that require params to be known."""
+
+ generator_flags = params.get('generator_flags', {})
+
+ # Select project file format version (if unset, default to auto detecting).
+ msvs_version = MSVSVersion.SelectVisualStudioVersion(
+ generator_flags.get('msvs_version', 'auto'))
+ # Stash msvs_version for later (so we don't have to probe the system twice).
+ params['msvs_version'] = msvs_version
+
+ # Set a variable so conditions can be based on msvs_version.
+ default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+  # contains the actual word size of the system when running through WOW64).
+ if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
+ os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
+ default_variables['MSVS_OS_BITS'] = 64
+ else:
+ default_variables['MSVS_OS_BITS'] = 32
+
+ if gyp.common.GetFlavor(params) == 'ninja':
+ default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
+
+
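+# Illustrative example of the word-size probe above: a 32-bit Python running
+# under WOW64 on 64-bit Windows sees
+#
+#   PROCESSOR_ARCHITECTURE=x86
+#   PROCESSOR_ARCHITEW6432=AMD64
+#
+# so checking PROCESSOR_ARCHITECTURE alone would misreport MSVS_OS_BITS as
+# 32; the second variable is what makes the probe yield 64 in that case.
+
+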
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ msvs_version = params['msvs_version']
+ devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
+
+ for build_file, build_file_dict in data.iteritems():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ sln_path = build_file_root + options.suffix + '.sln'
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+
+ for config in configurations:
+ arguments = [devenv, sln_path, '/Build', config]
+ print 'Building [%s]: %s' % (config, arguments)
+      subprocess.check_call(arguments)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ """Generate .sln and .vcproj files.
+
+ This is the entry point for this generator.
+
+  Arguments:
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+    data: Dictionary containing per .gyp data.
+    params: Generator parameters, including options and generator flags.
+ """
+ global fixpath_prefix
+
+ options = params['options']
+
+ # Get the project file format version back out of where we stashed it in
+ # GeneratorCalculatedVariables.
+ msvs_version = params['msvs_version']
+
+ generator_flags = params.get('generator_flags', {})
+
+ # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
+ (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
+
+ # Optionally use the large PDB workaround for targets marked with
+ # 'msvs_large_pdb': 1.
+ (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
+ target_list, target_dicts, generator_default_variables)
+
+ # Optionally configure each spec to use ninja as the external builder.
+ if params.get('flavor') == 'ninja':
+ _InitNinjaFlavor(params, target_list, target_dicts)
+
+ # Prepare the set of configurations.
+ configs = set()
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ for config_name, config in spec['configurations'].iteritems():
+ configs.add(_ConfigFullName(config_name, config))
+ configs = list(configs)
+
+ # Figure out all the projects that will be generated and their guids
+ project_objects = _CreateProjectObjects(target_list, target_dicts, options,
+ msvs_version)
+
+ # Generate each project.
+ missing_sources = []
+ for project in project_objects.values():
+ fixpath_prefix = project.fixpath_prefix
+ missing_sources.extend(_GenerateProject(project, options, msvs_version,
+ generator_flags))
+ fixpath_prefix = None
+
+ for build_file in data:
+ # Validate build_file extension
+ if not build_file.endswith('.gyp'):
+ continue
+ sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
+ if options.generator_output:
+ sln_path = os.path.join(options.generator_output, sln_path)
+ # Get projects in the solution, and their dependents.
+ sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
+ sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
+ # Create folder hierarchy.
+ root_entries = _GatherSolutionFolders(
+ sln_projects, project_objects, flat=msvs_version.FlatSolution())
+ # Create solution.
+ sln = MSVSNew.MSVSSolution(sln_path,
+ entries=root_entries,
+ variants=configs,
+ websiteProperties=False,
+ version=msvs_version)
+ sln.Write()
+
+ if missing_sources:
+ error_message = "Missing input files:\n" + \
+ '\n'.join(set(missing_sources))
+ if generator_flags.get('msvs_error_on_missing_sources', False):
+ raise GypError(error_message)
+ else:
+ print >> sys.stdout, "Warning: " + error_message
+
+
+def _GenerateMSBuildFiltersFile(filters_path, source_files,
+ rule_dependencies, extension_to_rule_name):
+ """Generate the filters file.
+
+ This file is used by Visual Studio to organize the presentation of source
+ files into folders.
+
+ Arguments:
+ filters_path: The path of the file to be created.
+    source_files: The hierarchical structure of all the sources.
+    rule_dependencies: A set of files that are dependencies of rules.
+    extension_to_rule_name: A dictionary mapping file extensions to rules.
+ """
+ filter_group = []
+ source_group = []
+ _AppendFiltersForMSBuild('', source_files, rule_dependencies,
+ extension_to_rule_name, filter_group, source_group)
+ if filter_group:
+ content = ['Project',
+ {'ToolsVersion': '4.0',
+ 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ },
+ ['ItemGroup'] + filter_group,
+ ['ItemGroup'] + source_group
+ ]
+ easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
+ elif os.path.exists(filters_path):
+ # We don't need this filter anymore. Delete the old filter file.
+ os.unlink(filters_path)
+
+
+def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
+ extension_to_rule_name,
+ filter_group, source_group):
+ """Creates the list of filters and sources to be added in the filter file.
+
+ Args:
+ parent_filter_name: The name of the filter under which the sources are
+ found.
+    sources: The hierarchy of filters and sources to process.
+    rule_dependencies: A set of files that are dependencies of rules.
+    extension_to_rule_name: A dictionary mapping file extensions to rules.
+    filter_group: The list to which filter entries will be appended.
+    source_group: The list to which source entries will be appended.
+ """
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ # We have a sub-filter. Create the name of that sub-filter.
+ if not parent_filter_name:
+ filter_name = source.name
+ else:
+ filter_name = '%s\\%s' % (parent_filter_name, source.name)
+ # Add the filter to the group.
+ filter_group.append(
+ ['Filter', {'Include': filter_name},
+ ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
+ # Recurse and add its dependents.
+ _AppendFiltersForMSBuild(filter_name, source.contents,
+ rule_dependencies, extension_to_rule_name,
+ filter_group, source_group)
+ else:
+ # It's a source. Create a source entry.
+ _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name)
+ source_entry = [element, {'Include': source}]
+ # Specify the filter it is part of, if any.
+ if parent_filter_name:
+ source_entry.append(['Filter', parent_filter_name])
+ source_group.append(source_entry)
+
+
+def _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name):
+ """Returns the group and element type of the source file.
+
+ Arguments:
+ source: The source file name.
+    rule_dependencies: A set of files that are dependencies of rules.
+    extension_to_rule_name: A dictionary mapping file extensions to rules.
+
+ Returns:
+ A pair of (group this file should be part of, the label of element)
+ """
+ _, ext = os.path.splitext(source)
+ if ext in extension_to_rule_name:
+ group = 'rule'
+ element = extension_to_rule_name[ext]
+ elif ext in ['.cc', '.cpp', '.c', '.cxx']:
+ group = 'compile'
+ element = 'ClCompile'
+ elif ext in ['.h', '.hxx']:
+ group = 'include'
+ element = 'ClInclude'
+ elif ext == '.rc':
+ group = 'resource'
+ element = 'ResourceCompile'
+ elif ext == '.asm':
+ group = 'masm'
+ element = 'MASM'
+ elif ext == '.idl':
+ group = 'midl'
+ element = 'Midl'
+ elif source in rule_dependencies:
+ group = 'rule_dependency'
+ element = 'CustomBuild'
+ else:
+ group = 'none'
+ element = 'None'
+ return (group, element)
+
+
+def _GenerateRulesForMSBuild(output_dir, options, spec,
+ sources, excluded_sources,
+ props_files_of_rules, targets_files_of_rules,
+ actions_to_add, rule_dependencies,
+ extension_to_rule_name):
+ # MSBuild rules are implemented using three files: an XML file, a .targets
+ # file and a .props file.
+ # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
+ # for more details.
+ rules = spec.get('rules', [])
+ rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
+ rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
+
+ msbuild_rules = []
+ for rule in rules_native:
+ # Skip a rule with no action and no inputs.
+ if 'action' not in rule and not rule.get('rule_sources', []):
+ continue
+ msbuild_rule = MSBuildRule(rule, spec)
+ msbuild_rules.append(msbuild_rule)
+ rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
+ extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
+ if msbuild_rules:
+ base = spec['target_name'] + options.suffix
+ props_name = base + '.props'
+ targets_name = base + '.targets'
+ xml_name = base + '.xml'
+
+ props_files_of_rules.add(props_name)
+ targets_files_of_rules.add(targets_name)
+
+ props_path = os.path.join(output_dir, props_name)
+ targets_path = os.path.join(output_dir, targets_name)
+ xml_path = os.path.join(output_dir, xml_name)
+
+ _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
+ _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
+ _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
+
+ if rules_external:
+ _GenerateExternalRules(rules_external, output_dir, spec,
+ sources, options, actions_to_add)
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
+
+
+class MSBuildRule(object):
+ """Used to store information used to generate an MSBuild rule.
+
+ Attributes:
+ rule_name: The rule name, sanitized to use in XML.
+ target_name: The name of the target.
+ after_targets: The name of the AfterTargets element.
+ before_targets: The name of the BeforeTargets element.
+ depends_on: The name of the DependsOn element.
+ compute_output: The name of the ComputeOutput element.
+ dirs_to_make: The name of the DirsToMake element.
+ inputs: The name of the _inputs element.
+ tlog: The name of the _tlog element.
+ extension: The extension this rule applies to.
+ description: The message displayed when this rule is invoked.
+ additional_dependencies: A string listing additional dependencies.
+ outputs: The outputs of this rule.
+ command: The command used to run the rule.
+ """
+
+ def __init__(self, rule, spec):
+ self.display_name = rule['rule_name']
+    # Ensure that the rule name contains only letters, digits and underscores.
+ self.rule_name = re.sub(r'\W', '_', self.display_name)
+ # Create the various element names, following the example set by the
+ # Visual Studio 2008 to 2010 conversion. I don't know if VS2010
+ # is sensitive to the exact names.
+ self.target_name = '_' + self.rule_name
+ self.after_targets = self.rule_name + 'AfterTargets'
+ self.before_targets = self.rule_name + 'BeforeTargets'
+ self.depends_on = self.rule_name + 'DependsOn'
+ self.compute_output = 'Compute%sOutput' % self.rule_name
+ self.dirs_to_make = self.rule_name + 'DirsToMake'
+ self.inputs = self.rule_name + '_inputs'
+ self.tlog = self.rule_name + '_tlog'
+ self.extension = rule['extension']
+ if not self.extension.startswith('.'):
+ self.extension = '.' + self.extension
+
+ self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
+ rule.get('message', self.rule_name))
+ old_additional_dependencies = _FixPaths(rule.get('inputs', []))
+ self.additional_dependencies = (
+ ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
+ for i in old_additional_dependencies]))
+ old_outputs = _FixPaths(rule.get('outputs', []))
+ self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
+ for i in old_outputs])
+ old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
+ do_setup_env=True)
+ self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
+
+
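+# Illustrative example of the derived element names: a rule declared as
+#
+#   {'rule_name': 'idl compile', 'extension': 'idl', ...}
+#
+# gets rule_name 'idl_compile', target_name '_idl_compile', after_targets
+# 'idl_compileAfterTargets', compute_output 'Computeidl_compileOutput' and
+# extension '.idl'.
+
+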
+def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
+ """Generate the .props file."""
+ content = ['Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
+ for rule in msbuild_rules:
+ content.extend([
+ ['PropertyGroup',
+ {'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
+ "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
+ rule.after_targets)
+ },
+ [rule.before_targets, 'Midl'],
+ [rule.after_targets, 'CustomBuild'],
+ ],
+ ['PropertyGroup',
+ [rule.depends_on,
+ {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
+ '_SelectedFiles;$(%s)' % rule.depends_on
+ ],
+ ],
+ ['ItemDefinitionGroup',
+ [rule.rule_name,
+ ['CommandLineTemplate', rule.command],
+ ['Outputs', rule.outputs],
+ ['ExecutionDescription', rule.description],
+ ['AdditionalDependencies', rule.additional_dependencies],
+ ],
+ ]
+ ])
+ easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
+
+
+def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
+ """Generate the .targets file."""
+ content = ['Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
+ }
+ ]
+ item_group = [
+ 'ItemGroup',
+ ['PropertyPageSchema',
+ {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
+ ]
+ ]
+ for rule in msbuild_rules:
+ item_group.append(
+ ['AvailableItemName',
+ {'Include': rule.rule_name},
+ ['Targets', rule.target_name],
+ ])
+ content.append(item_group)
+
+ for rule in msbuild_rules:
+ content.append(
+ ['UsingTask',
+ {'TaskName': rule.rule_name,
+ 'TaskFactory': 'XamlTaskFactory',
+ 'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
+ },
+ ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
+ ])
+ for rule in msbuild_rules:
+ rule_name = rule.rule_name
+ target_outputs = '%%(%s.Outputs)' % rule_name
+ target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
+ '$(MSBuildProjectFile)') % (rule_name, rule_name)
+ rule_inputs = '%%(%s.Identity)' % rule_name
+ extension_condition = ("'%(Extension)'=='.obj' or "
+ "'%(Extension)'=='.res' or "
+ "'%(Extension)'=='.rsc' or "
+ "'%(Extension)'=='.lib'")
+ remove_section = [
+ 'ItemGroup',
+ {'Condition': "'@(SelectedFiles)' != ''"},
+ [rule_name,
+ {'Remove': '@(%s)' % rule_name,
+ 'Condition': "'%(Identity)' != '@(SelectedFiles)'"
+ }
+ ]
+ ]
+ inputs_section = [
+ 'ItemGroup',
+ [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
+ ]
+ logging_section = [
+ 'ItemGroup',
+ [rule.tlog,
+ {'Include': '%%(%s.Outputs)' % rule_name,
+ 'Condition': ("'%%(%s.Outputs)' != '' and "
+ "'%%(%s.ExcludedFromBuild)' != 'true'" %
+ (rule_name, rule_name))
+ },
+ ['Source', "@(%s, '|')" % rule_name],
+ ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
+ ],
+ ]
+ message_section = [
+ 'Message',
+ {'Importance': 'High',
+ 'Text': '%%(%s.ExecutionDescription)' % rule_name
+ }
+ ]
+ write_tlog_section = [
+ 'WriteLinesToFile',
+ {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule.tlog, rule.tlog),
+ 'File': '$(IntDir)$(ProjectName).write.1.tlog',
+ 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
+ rule.tlog)
+ }
+ ]
+ read_tlog_section = [
+ 'WriteLinesToFile',
+ {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule.tlog, rule.tlog),
+ 'File': '$(IntDir)$(ProjectName).read.1.tlog',
+ 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
+ }
+ ]
+ command_and_input_section = [
+ rule_name,
+ {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
+ "'true'" % (rule_name, rule_name),
+ 'EchoOff': 'true',
+ 'StandardOutputImportance': 'High',
+ 'StandardErrorImportance': 'High',
+ 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
+ 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
+ 'Inputs': rule_inputs
+ }
+ ]
+ content.extend([
+ ['Target',
+ {'Name': rule.target_name,
+ 'BeforeTargets': '$(%s)' % rule.before_targets,
+ 'AfterTargets': '$(%s)' % rule.after_targets,
+ 'Condition': "'@(%s)' != ''" % rule_name,
+ 'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
+ rule.compute_output),
+ 'Outputs': target_outputs,
+ 'Inputs': target_inputs
+ },
+ remove_section,
+ inputs_section,
+ logging_section,
+ message_section,
+ write_tlog_section,
+ read_tlog_section,
+ command_and_input_section,
+ ],
+ ['PropertyGroup',
+ ['ComputeLinkInputsTargets',
+ '$(ComputeLinkInputsTargets);',
+ '%s;' % rule.compute_output
+ ],
+ ['ComputeLibInputsTargets',
+ '$(ComputeLibInputsTargets);',
+ '%s;' % rule.compute_output
+ ],
+ ],
+ ['Target',
+ {'Name': rule.compute_output,
+ 'Condition': "'@(%s)' != ''" % rule_name
+ },
+ ['ItemGroup',
+ [rule.dirs_to_make,
+ {'Condition': "'@(%s)' != '' and "
+ "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
+ 'Include': '%%(%s.Outputs)' % rule_name
+ }
+ ],
+ ['Link',
+ {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
+ 'Condition': extension_condition
+ }
+ ],
+ ['Lib',
+ {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
+ 'Condition': extension_condition
+ }
+ ],
+ ['ImpLib',
+ {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
+ 'Condition': extension_condition
+ }
+ ],
+ ],
+ ['MakeDir',
+ {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
+ rule.dirs_to_make)
+ }
+ ]
+ ],
+ ])
+ easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
+
+
+def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
+ # Generate the .xml file
+ content = [
+ 'ProjectSchemaDefinitions',
+ {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
+ 'assembly=Microsoft.Build.Framework'),
+ 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
+ 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
+ 'xmlns:transformCallback':
+ 'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
+ }
+ ]
+ for rule in msbuild_rules:
+ content.extend([
+ ['Rule',
+ {'Name': rule.rule_name,
+ 'PageTemplate': 'tool',
+ 'DisplayName': rule.display_name,
+ 'Order': '200'
+ },
+ ['Rule.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'ItemType': rule.rule_name
+ }
+ ]
+ ],
+ ['Rule.Categories',
+ ['Category',
+ {'Name': 'General'},
+ ['Category.DisplayName',
+ ['sys:String', 'General'],
+ ],
+ ],
+ ['Category',
+ {'Name': 'Command Line',
+ 'Subtype': 'CommandLine'
+ },
+ ['Category.DisplayName',
+ ['sys:String', 'Command Line'],
+ ],
+ ],
+ ],
+ ['StringListProperty',
+ {'Name': 'Inputs',
+ 'Category': 'Command Line',
+ 'IsRequired': 'true',
+ 'Switch': ' '
+ },
+ ['StringListProperty.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'ItemType': rule.rule_name,
+ 'SourceType': 'Item'
+ }
+ ]
+ ],
+ ],
+ ['StringProperty',
+ {'Name': 'CommandLineTemplate',
+ 'DisplayName': 'Command Line',
+ 'Visible': 'False',
+ 'IncludeInCommandLine': 'False'
+ }
+ ],
+ ['DynamicEnumProperty',
+ {'Name': rule.before_targets,
+ 'Category': 'General',
+ 'EnumProvider': 'Targets',
+ 'IncludeInCommandLine': 'False'
+ },
+ ['DynamicEnumProperty.DisplayName',
+ ['sys:String', 'Execute Before'],
+ ],
+ ['DynamicEnumProperty.Description',
+ ['sys:String', 'Specifies the targets for the build customization'
+ ' to run before.'
+ ],
+ ],
+ ['DynamicEnumProperty.ProviderSettings',
+ ['NameValuePair',
+ {'Name': 'Exclude',
+ 'Value': '^%s|^Compute' % rule.before_targets
+ }
+ ]
+ ],
+ ['DynamicEnumProperty.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'HasConfigurationCondition': 'true'
+ }
+ ]
+ ],
+ ],
+ ['DynamicEnumProperty',
+ {'Name': rule.after_targets,
+ 'Category': 'General',
+ 'EnumProvider': 'Targets',
+ 'IncludeInCommandLine': 'False'
+ },
+ ['DynamicEnumProperty.DisplayName',
+ ['sys:String', 'Execute After'],
+ ],
+ ['DynamicEnumProperty.Description',
+ ['sys:String', ('Specifies the targets for the build customization'
+ ' to run after.')
+ ],
+ ],
+ ['DynamicEnumProperty.ProviderSettings',
+ ['NameValuePair',
+ {'Name': 'Exclude',
+ 'Value': '^%s|^Compute' % rule.after_targets
+ }
+ ]
+ ],
+ ['DynamicEnumProperty.DataSource',
+ ['DataSource',
+ {'Persistence': 'ProjectFile',
+ 'ItemType': '',
+ 'HasConfigurationCondition': 'true'
+ }
+ ]
+ ],
+ ],
+ ['StringListProperty',
+ {'Name': 'Outputs',
+ 'DisplayName': 'Outputs',
+ 'Visible': 'False',
+ 'IncludeInCommandLine': 'False'
+ }
+ ],
+ ['StringProperty',
+ {'Name': 'ExecutionDescription',
+ 'DisplayName': 'Execution Description',
+ 'Visible': 'False',
+ 'IncludeInCommandLine': 'False'
+ }
+ ],
+ ['StringListProperty',
+ {'Name': 'AdditionalDependencies',
+ 'DisplayName': 'Additional Dependencies',
+ 'IncludeInCommandLine': 'False',
+ 'Visible': 'false'
+ }
+ ],
+ ['StringProperty',
+ {'Subtype': 'AdditionalOptions',
+ 'Name': 'AdditionalOptions',
+ 'Category': 'Command Line'
+ },
+ ['StringProperty.DisplayName',
+ ['sys:String', 'Additional Options'],
+ ],
+ ['StringProperty.Description',
+ ['sys:String', 'Additional Options'],
+ ],
+ ],
+ ],
+ ['ItemType',
+ {'Name': rule.rule_name,
+ 'DisplayName': rule.display_name
+ }
+ ],
+ ['FileExtension',
+ {'Name': '*' + rule.extension,
+ 'ContentType': rule.rule_name
+ }
+ ],
+ ['ContentType',
+ {'Name': rule.rule_name,
+ 'DisplayName': '',
+ 'ItemType': rule.rule_name
+ }
+ ]
+ ])
+ easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
+
+
+def _GetConfigurationAndPlatform(name, settings):
+ configuration = name.rsplit('_', 1)[0]
+ platform = settings.get('msvs_configuration_platform', 'Win32')
+ return (configuration, platform)
+
+
+def _GetConfigurationCondition(name, settings):
+ return (r"'$(Configuration)|$(Platform)'=='%s|%s'" %
+ _GetConfigurationAndPlatform(name, settings))
+
+
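+# Doctest-style example (illustrative only): the platform comes from the
+# settings, the configuration from everything before the name's last '_'.
+#
+#   >>> _GetConfigurationCondition('Debug_x64',
+#   ...                            {'msvs_configuration_platform': 'x64'})
+#   "'$(Configuration)|$(Platform)'=='Debug|x64'"
+
+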
+def _GetMSBuildProjectConfigurations(configurations):
+ group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
+ for (name, settings) in sorted(configurations.iteritems()):
+ configuration, platform = _GetConfigurationAndPlatform(name, settings)
+ designation = '%s|%s' % (configuration, platform)
+ group.append(
+ ['ProjectConfiguration', {'Include': designation},
+ ['Configuration', configuration],
+ ['Platform', platform]])
+ return [group]
+
+
+def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
+ namespace = os.path.splitext(gyp_file_name)[0]
+ properties = [
+ ['PropertyGroup', {'Label': 'Globals'},
+ ['ProjectGuid', guid],
+ ['Keyword', 'Win32Proj'],
+ ['RootNamespace', namespace],
+ ['IgnoreWarnCompileDuplicatedFilename', 'true'],
+ ]
+ ]
+
+ if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
+ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
+ properties[0].append(['PreferredToolArchitecture', 'x64'])
+
+ if spec.get('msvs_enable_winrt'):
+ properties[0].append(['DefaultLanguage', 'en-US'])
+ properties[0].append(['AppContainerApplication', 'true'])
+ if spec.get('msvs_application_type_revision'):
+ app_type_revision = spec.get('msvs_application_type_revision')
+ properties[0].append(['ApplicationTypeRevision', app_type_revision])
+ else:
+ properties[0].append(['ApplicationTypeRevision', '8.1'])
+
+ if spec.get('msvs_target_platform_version'):
+ target_platform_version = spec.get('msvs_target_platform_version')
+ properties[0].append(['WindowsTargetPlatformVersion',
+ target_platform_version])
+ if spec.get('msvs_target_platform_minversion'):
+ target_platform_minversion = spec.get('msvs_target_platform_minversion')
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_minversion])
+ else:
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_version])
+ if spec.get('msvs_enable_winphone'):
+ properties[0].append(['ApplicationType', 'Windows Phone'])
+ else:
+ properties[0].append(['ApplicationType', 'Windows Store'])
+
+ platform_name = None
+ msvs_windows_target_platform_version = None
+ for configuration in spec['configurations'].itervalues():
+ platform_name = platform_name or _ConfigPlatform(configuration)
+ msvs_windows_target_platform_version = \
+ msvs_windows_target_platform_version or \
+ _ConfigWindowsTargetPlatformVersion(configuration)
+ if platform_name and msvs_windows_target_platform_version:
+ break
+
+ if platform_name == 'ARM':
+ properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
+ if msvs_windows_target_platform_version:
+      properties[0].append(['WindowsTargetPlatformVersion',
+                            str(msvs_windows_target_platform_version)])
+
+ return properties
+
+
+def _GetMSBuildConfigurationDetails(spec, build_file):
+ properties = {}
+ for name, settings in spec['configurations'].iteritems():
+ msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
+ condition = _GetConfigurationCondition(name, settings)
+ character_set = msbuild_attributes.get('CharacterSet')
+ _AddConditionalProperty(properties, condition, 'ConfigurationType',
+ msbuild_attributes['ConfigurationType'])
+ if character_set:
+      if 'msvs_enable_winrt' not in spec:
+ _AddConditionalProperty(properties, condition, 'CharacterSet',
+ character_set)
+ return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
+
+
+def _GetMSBuildLocalProperties(msbuild_toolset):
+ # Currently the only local property we support is PlatformToolset
+ properties = {}
+ if msbuild_toolset:
+ properties = [
+ ['PropertyGroup', {'Label': 'Locals'},
+ ['PlatformToolset', msbuild_toolset],
+ ]
+ ]
+ return properties
+
+
+def _GetMSBuildPropertySheets(configurations):
+ user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
+ additional_props = {}
+ props_specified = False
+ for name, settings in sorted(configurations.iteritems()):
+ configuration = _GetConfigurationCondition(name, settings)
+    if 'msbuild_props' in settings:
+ additional_props[configuration] = _FixPaths(settings['msbuild_props'])
+ props_specified = True
+ else:
+ additional_props[configuration] = ''
+
+ if not props_specified:
+ return [
+ ['ImportGroup',
+ {'Label': 'PropertySheets'},
+ ['Import',
+ {'Project': user_props,
+ 'Condition': "exists('%s')" % user_props,
+ 'Label': 'LocalAppDataPlatform'
+ }
+ ]
+ ]
+ ]
+ else:
+ sheets = []
+ for condition, props in additional_props.iteritems():
+ import_group = [
+ 'ImportGroup',
+ {'Label': 'PropertySheets',
+ 'Condition': condition
+ },
+ ['Import',
+ {'Project': user_props,
+ 'Condition': "exists('%s')" % user_props,
+ 'Label': 'LocalAppDataPlatform'
+ }
+ ]
+ ]
+ for props_file in props:
+        import_group.append(['Import', {'Project': props_file}])
+ sheets.append(import_group)
+ return sheets
+
+
+def _ConvertMSVSBuildAttributes(spec, config, build_file):
+ config_type = _GetMSVSConfigurationType(spec, build_file)
+ msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
+ msbuild_attributes = {}
+ for a in msvs_attributes:
+ if a in ['IntermediateDirectory', 'OutputDirectory']:
+ directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
+ if not directory.endswith('\\'):
+ directory += '\\'
+ msbuild_attributes[a] = directory
+ elif a == 'CharacterSet':
+ msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
+ elif a == 'ConfigurationType':
+ msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
+ else:
+ print 'Warning: Do not know how to convert MSVS attribute ' + a
+ return msbuild_attributes
+
+
+def _ConvertMSVSCharacterSet(char_set):
+ if char_set.isdigit():
+ char_set = {
+ '0': 'MultiByte',
+ '1': 'Unicode',
+ '2': 'MultiByte',
+ }[char_set]
+ return char_set
+
+
+def _ConvertMSVSConfigurationType(config_type):
+ if config_type.isdigit():
+ config_type = {
+ '1': 'Application',
+ '2': 'DynamicLibrary',
+ '4': 'StaticLibrary',
+ '10': 'Utility'
+ }[config_type]
+ return config_type
+
+
+def _GetMSBuildAttributes(spec, config, build_file):
+ if 'msbuild_configuration_attributes' not in config:
+ msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
+ else:
+ config_type = _GetMSVSConfigurationType(spec, build_file)
+ config_type = _ConvertMSVSConfigurationType(config_type)
+ msbuild_attributes = config.get('msbuild_configuration_attributes', {})
+ msbuild_attributes.setdefault('ConfigurationType', config_type)
+ output_dir = msbuild_attributes.get('OutputDirectory',
+ '$(SolutionDir)$(Configuration)')
+ msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
+ if 'IntermediateDirectory' not in msbuild_attributes:
+ intermediate = _FixPath('$(Configuration)') + '\\'
+ msbuild_attributes['IntermediateDirectory'] = intermediate
+ if 'CharacterSet' in msbuild_attributes:
+ msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
+ msbuild_attributes['CharacterSet'])
+ if 'TargetName' not in msbuild_attributes:
+ prefix = spec.get('product_prefix', '')
+ product_name = spec.get('product_name', '$(ProjectName)')
+ target_name = prefix + product_name
+ msbuild_attributes['TargetName'] = target_name
+ if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec:
+ ext = spec.get('product_extension')
+ msbuild_attributes['TargetExt'] = '.' + ext
+
+ if spec.get('msvs_external_builder'):
+ external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
+ msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
+
+ # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
+ # (depending on the tool used) to avoid MSB8012 warning.
+ msbuild_tool_map = {
+ 'executable': 'Link',
+ 'shared_library': 'Link',
+ 'loadable_module': 'Link',
+ 'static_library': 'Lib',
+ }
+ msbuild_tool = msbuild_tool_map.get(spec['type'])
+ if msbuild_tool:
+ msbuild_settings = config['finalized_msbuild_settings']
+ out_file = msbuild_settings[msbuild_tool].get('OutputFile')
+ if out_file:
+ msbuild_attributes['TargetPath'] = _FixPath(out_file)
+ target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
+ if target_ext:
+ msbuild_attributes['TargetExt'] = target_ext
+
+ return msbuild_attributes
+
+
+def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
+ # TODO(jeanluc) We could optimize out the following and do it only if
+ # there are actions.
+ # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
+ new_paths = []
+ cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
+ if cygwin_dirs:
+ cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
+ new_paths.append(cyg_path)
+ # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
+ # python_dir.
+ python_path = cyg_path.replace('cygwin\\bin', 'python_26')
+ new_paths.append(python_path)
+ if new_paths:
+ new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
+
+ properties = {}
+ for (name, configuration) in sorted(configurations.iteritems()):
+ condition = _GetConfigurationCondition(name, configuration)
+ attributes = _GetMSBuildAttributes(spec, configuration, build_file)
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ _AddConditionalProperty(properties, condition, 'IntDir',
+ attributes['IntermediateDirectory'])
+ _AddConditionalProperty(properties, condition, 'OutDir',
+ attributes['OutputDirectory'])
+ _AddConditionalProperty(properties, condition, 'TargetName',
+ attributes['TargetName'])
+ if 'TargetExt' in attributes:
+ _AddConditionalProperty(properties, condition, 'TargetExt',
+ attributes['TargetExt'])
+
+    if attributes.get('TargetPath'):
+      _AddConditionalProperty(properties, condition, 'TargetPath',
+                              attributes['TargetPath'])
+
+ if new_paths:
+ _AddConditionalProperty(properties, condition, 'ExecutablePath',
+ new_paths)
+ tool_settings = msbuild_settings.get('', {})
+ for name, value in sorted(tool_settings.iteritems()):
+ formatted_value = _GetValueFormattedForMSBuild('', name, value)
+ _AddConditionalProperty(properties, condition, name, formatted_value)
+ return _GetMSBuildPropertyGroup(spec, None, properties)
+
+
+def _AddConditionalProperty(properties, condition, name, value):
+ """Adds a property / conditional value pair to a dictionary.
+
+ Arguments:
+ properties: The dictionary to be modified. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+      the value a list of conditions for which this value is true.
+ condition: The condition under which the named property has the value.
+ name: The name of the property.
+ value: The value of the property.
+ """
+ if name not in properties:
+ properties[name] = {}
+ values = properties[name]
+ if value not in values:
+ values[value] = []
+ conditions = values[value]
+ conditions.append(condition)
+
+
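+# Illustrative shape of the dictionary built by _AddConditionalProperty:
+# after recording 'OutDir' = 'out\\Debug\\' under two different conditions,
+#
+#   properties == {'OutDir': {'out\\Debug\\': [cond_debug_win32,
+#                                              cond_debug_x64]}}
+#
+# _GetMSBuildPropertyGroup below then emits one entry per distinct value.
+
+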
+# Regex for msvs variable references (i.e. $(FOO)).
+MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
+
+
+def _GetMSBuildPropertyGroup(spec, label, properties):
+ """Returns a PropertyGroup definition for the specified properties.
+
+ Arguments:
+ spec: The target project dict.
+ label: An optional label for the PropertyGroup.
+ properties: The dictionary to be converted. The key is the name of the
+ property. The value is itself a dictionary; its key is the value and
+      the value a list of conditions for which this value is true.
+ """
+ group = ['PropertyGroup']
+ if label:
+ group.append({'Label': label})
+ num_configurations = len(spec['configurations'])
+ def GetEdges(node):
+    # Use a definition of edges such that user_of_variable -> used_variable.
+ # This happens to be easier in this case, since a variable's
+ # definition contains all variables it references in a single string.
+ edges = set()
+ for value in sorted(properties[node].keys()):
+ # Add to edges all $(...) references to variables.
+ #
+      # Variable references that refer to names not in properties are
+      # excluded. These can exist, for instance, to refer to built-in
+      # definitions like $(SolutionDir).
+      #
+      # Self references are ignored. Self reference is used in a few places
+      # to append to the default value, e.g. PATH=$(PATH);other_path.
+ edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
+ if v in properties and v != node]))
+ return edges
+ properties_ordered = gyp.common.TopologicallySorted(
+ properties.keys(), GetEdges)
+ # Walk properties in the reverse of a topological sort on
+ # user_of_variable -> used_variable as this ensures variables are
+ # defined before they are used.
+ # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+ for name in reversed(properties_ordered):
+ values = properties[name]
+ for value, conditions in sorted(values.iteritems()):
+ if len(conditions) == num_configurations:
+        # If the value is the same for all configurations,
+ # just add one unconditional entry.
+ group.append([name, value])
+ else:
+ for condition in conditions:
+ group.append([name, {'Condition': condition}, value])
+ return [group]
+
+
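+# Worked example of the ordering above (illustrative): given
+#
+#   properties = {'OutDir': {'$(SolutionDir)out\\': [cond]},
+#                 'TargetPath': {'$(OutDir)foo.exe': [cond]}}
+#
+# GetEdges('TargetPath') is set(['OutDir']) ($(SolutionDir) is not a key in
+# properties, so it contributes no edge), the sort on user -> used yields
+# ['TargetPath', 'OutDir'], and the reversed walk emits OutDir before
+# TargetPath, i.e. the variable is defined before it is used.
+
+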
+def _GetMSBuildToolSettingsSections(spec, configurations):
+ groups = []
+ for (name, configuration) in sorted(configurations.iteritems()):
+ msbuild_settings = configuration['finalized_msbuild_settings']
+ group = ['ItemDefinitionGroup',
+ {'Condition': _GetConfigurationCondition(name, configuration)}
+ ]
+ for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
+ # Skip the tool named '' which is a holder of global settings handled
+ # by _GetMSBuildConfigurationGlobalProperties.
+ if tool_name:
+ if tool_settings:
+ tool = [tool_name]
+ for name, value in sorted(tool_settings.iteritems()):
+ formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
+ value)
+ tool.append([name, formatted_value])
+ group.append(tool)
+ groups.append(group)
+ return groups
+
+
+def _FinalizeMSBuildSettings(spec, configuration):
+ if 'msbuild_settings' in configuration:
+ converted = False
+ msbuild_settings = configuration['msbuild_settings']
+ MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
+ else:
+ converted = True
+ msvs_settings = configuration.get('msvs_settings', {})
+ msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
+ include_dirs, midl_include_dirs, resource_include_dirs = \
+ _GetIncludeDirs(configuration)
+ libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(configuration)
+ out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
+ target_ext = _GetOutputTargetExt(spec)
+ defines = _GetDefines(configuration)
+ if converted:
+ # Visual Studio 2010 has TR1
+ defines = [d for d in defines if d != '_HAS_TR1=0']
+ # Warn of ignored settings
+ ignored_settings = ['msvs_tool_files']
+ for ignored_setting in ignored_settings:
+ value = configuration.get(ignored_setting)
+ if value:
+ print ('Warning: The automatic conversion to MSBuild does not handle '
+ '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
+
+ defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
+ disabled_warnings = _GetDisabledWarnings(configuration)
+ prebuild = configuration.get('msvs_prebuild')
+ postbuild = configuration.get('msvs_postbuild')
+ def_file = _GetModuleDefinition(spec)
+ precompiled_header = configuration.get('msvs_precompiled_header')
+
+ # Add the information to the appropriate tool
+ # TODO(jeanluc) We could optimize and generate these settings only if
+ # the corresponding files are found, e.g. don't generate ResourceCompile
+ # if you don't have any resources.
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'AdditionalIncludeDirectories', include_dirs)
+ _ToolAppend(msbuild_settings, 'Midl',
+ 'AdditionalIncludeDirectories', midl_include_dirs)
+ _ToolAppend(msbuild_settings, 'ResourceCompile',
+ 'AdditionalIncludeDirectories', resource_include_dirs)
+ # Add in libraries, note that even for empty libraries, we want this
+  # set, to prevent inheriting default libraries from the environment.
+ _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
+ libraries)
+ _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
+ library_dirs)
+ if out_file:
+ _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
+ only_if_unset=True)
+ if target_ext:
+ _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
+ only_if_unset=True)
+ # Add defines.
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'PreprocessorDefinitions', defines)
+ _ToolAppend(msbuild_settings, 'ResourceCompile',
+ 'PreprocessorDefinitions', defines)
+ # Add disabled warnings.
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'DisableSpecificWarnings', disabled_warnings)
+ # Turn on precompiled headers if appropriate.
+ if precompiled_header:
+ precompiled_header = os.path.split(precompiled_header)[1]
+ _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'PrecompiledHeaderFile', precompiled_header)
+ _ToolAppend(msbuild_settings, 'ClCompile',
+ 'ForcedIncludeFiles', [precompiled_header])
+ else:
+ _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
+ # Turn off WinRT compilation
+ _ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
+ # Turn on import libraries if appropriate
+ if spec.get('msvs_requires_importlibrary'):
+ _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
+ # Loadable modules don't generate import libraries;
+ # tell dependent projects to not expect one.
+ if spec['type'] == 'loadable_module':
+ _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
+ # Set the module definition file if any.
+ if def_file:
+ _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
+ configuration['finalized_msbuild_settings'] = msbuild_settings
+ if prebuild:
+ _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
+ if postbuild:
+ _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
+
+
+def _GetValueFormattedForMSBuild(tool_name, name, value):
+ if type(value) == list:
+    # For some settings, VS2010 does not automatically extend the settings
+    # with inherited values, so append an explicit %(Name) back-reference.
+ # TODO(jeanluc) Is this what we want?
+ if name in ['AdditionalIncludeDirectories',
+ 'AdditionalLibraryDirectories',
+ 'AdditionalOptions',
+ 'DelayLoadDLLs',
+ 'DisableSpecificWarnings',
+ 'PreprocessorDefinitions']:
+ value.append('%%(%s)' % name)
+ # For most tools, entries in a list should be separated with ';' but some
+ # settings use a space. Check for those first.
+ exceptions = {
+ 'ClCompile': ['AdditionalOptions'],
+ 'Link': ['AdditionalOptions'],
+ 'Lib': ['AdditionalOptions']}
+ if tool_name in exceptions and name in exceptions[tool_name]:
+ char = ' '
+ else:
+ char = ';'
+ formatted_value = char.join(
+ [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
+ else:
+ formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
+ return formatted_value
+
+
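+# Doctest-style examples (illustrative only): list values are joined with
+# ';' and, for inheriting settings, end with a %(Name) back-reference;
+# AdditionalOptions values are space-separated instead.
+#
+#   >>> _GetValueFormattedForMSBuild(
+#   ...     'ClCompile', 'PreprocessorDefinitions', ['WIN32', 'NDEBUG'])
+#   'WIN32;NDEBUG;%(PreprocessorDefinitions)'
+#   >>> _GetValueFormattedForMSBuild('Link', 'AdditionalOptions', ['/DEBUG'])
+#   '/DEBUG %(AdditionalOptions)'
+
+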
+def _VerifySourcesExist(sources, root_dir):
+ """Verifies that all source files exist on disk.
+
+ Checks that all regular source files, i.e. not created at run time,
+ exist on disk. Missing files cause needless recompilation but no otherwise
+ visible errors.
+
+ Arguments:
+ sources: A recursive list of Filter/file names.
+ root_dir: The root directory for the relative path names.
+ Returns:
+ A list of source files that cannot be found on disk.
+ """
+ missing_sources = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
+ else:
+ if '$' not in source:
+ full_path = os.path.join(root_dir, source)
+ if not os.path.exists(full_path):
+ missing_sources.append(full_path)
+ return missing_sources
+
+
+def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
+ extension_to_rule_name, actions_spec,
+ sources_handled_by_action, list_excluded):
+ groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
+ 'rule_dependency']
+ grouped_sources = {}
+ for g in groups:
+ grouped_sources[g] = []
+
+ _AddSources2(spec, sources, exclusions, grouped_sources,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action, list_excluded)
+ sources = []
+ for g in groups:
+ if grouped_sources[g]:
+ sources.append(['ItemGroup'] + grouped_sources[g])
+ if actions_spec:
+ sources.append(['ItemGroup'] + actions_spec)
+ return sources
+
+
+def _AddSources2(spec, sources, exclusions, grouped_sources,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded):
+ extensions_excluded_from_precompile = []
+ for source in sources:
+ if isinstance(source, MSVSProject.Filter):
+ _AddSources2(spec, source.contents, exclusions, grouped_sources,
+ rule_dependencies, extension_to_rule_name,
+ sources_handled_by_action,
+ list_excluded)
+ else:
+      if source not in sources_handled_by_action:
+ detail = []
+ excluded_configurations = exclusions.get(source, [])
+ if len(excluded_configurations) == len(spec['configurations']):
+ detail.append(['ExcludedFromBuild', 'true'])
+ else:
+ for config_name, configuration in sorted(excluded_configurations):
+ condition = _GetConfigurationCondition(config_name, configuration)
+ detail.append(['ExcludedFromBuild',
+ {'Condition': condition},
+ 'true'])
+ # Add precompile if needed
+ for config_name, configuration in spec['configurations'].iteritems():
+ precompiled_source = configuration.get('msvs_precompiled_source', '')
+ if precompiled_source != '':
+ precompiled_source = _FixPath(precompiled_source)
+ if not extensions_excluded_from_precompile:
+ # If the precompiled header is generated by a C source, we must
+ # not try to use it for C++ sources, and vice versa.
+ basename, extension = os.path.splitext(precompiled_source)
+ if extension == '.c':
+ extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
+ else:
+ extensions_excluded_from_precompile = ['.c']
+
+ if precompiled_source == source:
+ condition = _GetConfigurationCondition(config_name, configuration)
+ detail.append(['PrecompiledHeader',
+ {'Condition': condition},
+ 'Create'
+ ])
+ else:
+ # Turn off precompiled header usage for source files of a
+ # different type than the file that generated the
+ # precompiled header.
+ for extension in extensions_excluded_from_precompile:
+ if source.endswith(extension):
+ detail.append(['PrecompiledHeader', ''])
+ detail.append(['ForcedIncludeFiles', ''])
+
+ group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
+ extension_to_rule_name)
+ grouped_sources[group].append([element, {'Include': source}] + detail)
+
+
+def _GetMSBuildProjectReferences(project):
+ references = []
+ if project.dependencies:
+ group = ['ItemGroup']
+ for dependency in project.dependencies:
+ guid = dependency.guid
+ project_dir = os.path.split(project.path)[0]
+ relative_path = gyp.common.RelativePath(dependency.path, project_dir)
+ project_ref = ['ProjectReference',
+ {'Include': relative_path},
+ ['Project', guid],
+ ['ReferenceOutputAssembly', 'false']
+ ]
+ for config in dependency.spec.get('configurations', {}).itervalues():
+ if config.get('msvs_use_library_dependency_inputs', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'true'])
+ break
+ # If it's disabled in any config, turn it off in the reference.
+ if config.get('msvs_2010_disable_uldi_when_referenced', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'false'])
+ break
+ group.append(project_ref)
+ references.append(group)
+ return references
+
+
+def _GenerateMSBuildProject(project, options, version, generator_flags):
+ spec = project.spec
+ configurations = spec['configurations']
+ project_dir, project_file_name = os.path.split(project.path)
+ gyp.common.EnsureDirExists(project.path)
+ # Prepare list of sources and excluded sources.
+ gyp_path = _NormalizedSource(project.build_file)
+ relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
+
+ gyp_file = os.path.split(project.build_file)[1]
+ sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
+ gyp_file)
+ # Add rules.
+ actions_to_add = {}
+ props_files_of_rules = set()
+ targets_files_of_rules = set()
+ rule_dependencies = set()
+ extension_to_rule_name = {}
+ list_excluded = generator_flags.get('msvs_list_excluded_files', True)
+
+ # Don't generate rules if we are using an external builder like ninja.
+ if not spec.get('msvs_external_builder'):
+ _GenerateRulesForMSBuild(project_dir, options, spec,
+ sources, excluded_sources,
+ props_files_of_rules, targets_files_of_rules,
+ actions_to_add, rule_dependencies,
+ extension_to_rule_name)
+ else:
+ rules = spec.get('rules', [])
+ _AdjustSourcesForRules(rules, sources, excluded_sources, True)
+
+ sources, excluded_sources, excluded_idl = (
+ _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
+ project_dir, sources,
+ excluded_sources,
+ list_excluded, version))
+
+ # Don't add actions if we are using an external builder like ninja.
+ if not spec.get('msvs_external_builder'):
+ _AddActions(actions_to_add, spec, project.build_file)
+ _AddCopies(actions_to_add, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+  # Don't exclude sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(
+ excluded_sources, actions_to_add)
+
+ exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
+ actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
+ spec, actions_to_add)
+
+ _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
+ rule_dependencies,
+ extension_to_rule_name)
+ missing_sources = _VerifySourcesExist(sources, project_dir)
+
+ for configuration in configurations.itervalues():
+ _FinalizeMSBuildSettings(spec, configuration)
+
+ # Add attributes to root element
+
+ import_default_section = [
+ ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
+ import_cpp_props_section = [
+ ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
+ import_cpp_targets_section = [
+ ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
+ import_masm_props_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
+ import_masm_targets_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
+ macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
+
+ content = [
+ 'Project',
+ {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
+ 'ToolsVersion': version.ProjectVersion(),
+ 'DefaultTargets': 'Build'
+ }]
+
+ content += _GetMSBuildProjectConfigurations(configurations)
+ content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
+ content += import_default_section
+ content += _GetMSBuildConfigurationDetails(spec, project.build_file)
+ if spec.get('msvs_enable_winphone'):
+ content += _GetMSBuildLocalProperties('v120_wp81')
+ else:
+ content += _GetMSBuildLocalProperties(project.msbuild_toolset)
+ content += import_cpp_props_section
+ content += import_masm_props_section
+ content += _GetMSBuildExtensions(props_files_of_rules)
+ content += _GetMSBuildPropertySheets(configurations)
+ content += macro_section
+ content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
+ project.build_file)
+ content += _GetMSBuildToolSettingsSections(spec, configurations)
+ content += _GetMSBuildSources(
+ spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
+ actions_spec, sources_handled_by_action, list_excluded)
+ content += _GetMSBuildProjectReferences(project)
+ content += import_cpp_targets_section
+ content += import_masm_targets_section
+ content += _GetMSBuildExtensionTargets(targets_files_of_rules)
+
+ if spec.get('msvs_external_builder'):
+ content += _GetMSBuildExternalBuilderTargets(spec)
+
+ # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
+ # has_run_as = _WriteMSVSUserFile(project.path, version, spec)
+
+ easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
+
+ return missing_sources
+
+
+def _GetMSBuildExternalBuilderTargets(spec):
+ """Return a list of MSBuild targets for external builders.
+
+ The "Build" and "Clean" targets are always generated. If the spec contains
+ 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+ be generated, to support building selected C/C++ files.
+
+ Arguments:
+ spec: The gyp target spec.
+ Returns:
+ List of MSBuild 'Target' specs.
+ """
+ build_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_build_cmd'],
+ False, False, False, False)
+ build_target = ['Target', {'Name': 'Build'}]
+ build_target.append(['Exec', {'Command': build_cmd}])
+
+ clean_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clean_cmd'],
+ False, False, False, False)
+ clean_target = ['Target', {'Name': 'Clean'}]
+ clean_target.append(['Exec', {'Command': clean_cmd}])
+
+ targets = [build_target, clean_target]
+
+ if spec.get('msvs_external_builder_clcompile_cmd'):
+ clcompile_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clcompile_cmd'],
+ False, False, False, False)
+ clcompile_target = ['Target', {'Name': 'ClCompile'}]
+ clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
+ targets.append(clcompile_target)
+
+ return targets
+
+
+def _GetMSBuildExtensions(props_files_of_rules):
+ extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
+ for props_file in props_files_of_rules:
+ extensions.append(['Import', {'Project': props_file}])
+ return [extensions]
+
+
+def _GetMSBuildExtensionTargets(targets_files_of_rules):
+ targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
+ for targets_file in sorted(targets_files_of_rules):
+ targets_node.append(['Import', {'Project': targets_file}])
+ return [targets_node]
+
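+# Illustrative sketch (not part of the original gyp source): the nested-list
+# form built above is easy_xml's XML representation, where each element is
+# [tag, optional-attribute-dict, *children]. Assuming a hypothetical rule
+# file 'foo.targets', _GetMSBuildExtensionTargets would return a list whose
+# single entry serializes to roughly:
+#
+#   <ImportGroup Label="ExtensionTargets">
+#     <Import Project="foo.targets"/>
+#   </ImportGroup>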
+
+def _GenerateActionsForMSBuild(spec, actions_to_add):
+ """Add actions accumulated into an actions_to_add, merging as needed.
+
+ Arguments:
+ spec: the target project dict
+ actions_to_add: dictionary keyed on input name, which maps to a list of
+ dicts describing the actions attached to that input file.
+
+ Returns:
+ A pair of (action specification, the sources handled by this action).
+ """
+ sources_handled_by_action = OrderedSet()
+ actions_spec = []
+ for primary_input, actions in actions_to_add.iteritems():
+ inputs = OrderedSet()
+ outputs = OrderedSet()
+ descriptions = []
+ commands = []
+ for action in actions:
+ inputs.update(OrderedSet(action['inputs']))
+ outputs.update(OrderedSet(action['outputs']))
+ descriptions.append(action['description'])
+ cmd = action['command']
+ # For most actions, add 'call' so that actions that invoke batch files
+      # return and continue executing. msbuild_use_call provides a way to
+      # disable this, though no adverse effect from doing it for everything
+      # has been observed.
+ if action.get('msbuild_use_call', True):
+ cmd = 'call ' + cmd
+ commands.append(cmd)
+ # Add the custom build action for one input file.
+ description = ', and also '.join(descriptions)
+
+ # We can't join the commands simply with && because the command line will
+ # get too long. See also _AddActions: cygwin's setup_env mustn't be called
+ # for every invocation or the command that sets the PATH will grow too
+ # long.
+ command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
+ for c in commands])
+ _AddMSBuildAction(spec,
+ primary_input,
+ inputs,
+ outputs,
+ command,
+ description,
+ sources_handled_by_action,
+ actions_spec)
+ return actions_spec, sources_handled_by_action
+
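+# Illustrative sketch (not part of the original gyp source): for two merged
+# actions whose commands (after the 'call ' prefix is added) are
+# 'call gen_a.bat' and 'call gen_b.bat', _GenerateActionsForMSBuild above
+# emits a single <Command> body of:
+#
+#   call gen_a.bat
+#   if %errorlevel% neq 0 exit /b %errorlevel%
+#   call gen_b.bat
+#   if %errorlevel% neq 0 exit /b %errorlevel%
+#
+# joined with \r\n rather than '&&' so the command line stays short while
+# still aborting on the first failing command.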
+
+def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
+ sources_handled_by_action, actions_spec):
+ command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
+ primary_input = _FixPath(primary_input)
+ inputs_array = _FixPaths(inputs)
+ outputs_array = _FixPaths(outputs)
+ additional_inputs = ';'.join([i for i in inputs_array
+ if i != primary_input])
+ outputs = ';'.join(outputs_array)
+ sources_handled_by_action.add(primary_input)
+ action_spec = ['CustomBuild', {'Include': primary_input}]
+ action_spec.extend(
+ # TODO(jeanluc) 'Document' for all or just if as_sources?
+ [['FileType', 'Document'],
+ ['Command', command],
+ ['Message', description],
+ ['Outputs', outputs]
+ ])
+ if additional_inputs:
+ action_spec.append(['AdditionalInputs', additional_inputs])
+ actions_spec.append(action_spec)
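+# Illustrative sketch (not part of the original gyp source): for a
+# hypothetical primary input 'gen.py' with one additional input 'extra.txt'
+# and one output 'out.cc', the action_spec list built above serializes via
+# easy_xml to roughly:
+#
+#   <CustomBuild Include="gen.py">
+#     <FileType>Document</FileType>
+#     <Command>call python gen.py</Command>
+#     <Message>Generating files</Message>
+#     <Outputs>out.cc</Outputs>
+#     <AdditionalInputs>extra.txt</AdditionalInputs>
+#   </CustomBuild>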
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
new file mode 100755
index 0000000000..c0b021df50
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the msvs.py file. """
+
+import gyp.generator.msvs as msvs
+import unittest
+import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO.StringIO()
+
+ def test_GetLibraries(self):
+ self.assertEqual(
+ msvs._GetLibraries({}),
+ [])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': []}),
+ [])
+ self.assertEqual(
+ msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
+ ['a.lib'])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': ['-la']}),
+ ['a.lib'])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
+ '-lb.lib', 'd.lib', 'a.lib']}),
+ ['c.lib', 'b.lib', 'd.lib', 'a.lib'])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
new file mode 100644
index 0000000000..841067ed34
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -0,0 +1,2410 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import copy
+import hashlib
+import json
+import multiprocessing
+import os.path
+import re
+import signal
+import subprocess
+import sys
+import gyp
+import gyp.common
+from gyp.common import OrderedSet
+import gyp.msvs_emulation
+import gyp.MSVSUtil as MSVSUtil
+import gyp.xcode_emulation
+from cStringIO import StringIO
+
+from gyp.common import GetEnvironFallback
+import gyp.ninja_syntax as ninja_syntax
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_PREFIX': 'lib',
+
+ # Gyp expects the following variables to be expandable by the build
+ # system to the appropriate locations. Ninja prefers paths to be
+ # known at gyp time. To resolve this, introduce special
+  # variables starting with $! and $| (which begin with a $ so gyp knows they
+  # should be treated specially, but are otherwise invalid
+  # ninja/shell variables) that are passed to gyp here but expanded
+ # before writing out into the target .ninja files; see
+ # ExpandSpecial.
+ # $! is used for variables that represent a path and that can only appear at
+ # the start of a string, while $| is used for variables that can appear
+ # anywhere in a string.
+ 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
+ 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
+ 'PRODUCT_DIR': '$!PRODUCT_DIR',
+ 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
+
+ # Special variables that may be used by gyp 'rule' targets.
+ # We generate definitions for these variables on the fly when processing a
+ # rule.
+ 'RULE_INPUT_ROOT': '${root}',
+ 'RULE_INPUT_DIRNAME': '${dirname}',
+ 'RULE_INPUT_PATH': '${source}',
+ 'RULE_INPUT_EXT': '${ext}',
+ 'RULE_INPUT_NAME': '${name}',
+}
+
+# Placates pylint.
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+def StripPrefix(arg, prefix):
+ if arg.startswith(prefix):
+ return arg[len(prefix):]
+ return arg
+
+
+def QuoteShellArgument(arg, flavor):
+ """Quote a string such that it will be interpreted as a single argument
+ by the shell."""
+ # Rather than attempting to enumerate the bad shell characters, just
+ # whitelist common OK ones and quote anything else.
+ if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
+ return arg # No quoting necessary.
+ if flavor == 'win':
+ return gyp.msvs_emulation.QuoteForRspFile(arg)
+ return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
+
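+# Illustrative examples of the assumed behavior (not in the original source;
+# the 'linux' flavor stands in for any non-win flavor):
+#
+#   QuoteShellArgument('-Ifoo/bar', 'linux')  ->  -Ifoo/bar   (whitelisted)
+#   QuoteShellArgument('foo bar', 'linux')    ->  'foo bar'
+#   QuoteShellArgument("it's", 'linux')       ->  'it'"'"'s'  (POSIX-quoted)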
+
+def Define(d, flavor):
+ """Takes a preprocessor define and returns a -D parameter that's ninja- and
+ shell-escaped."""
+ if flavor == 'win':
+    # cl.exe replaces literal # characters with = in preprocessor definitions for
+ # some reason. Octal-encode to work around that.
+ d = d.replace('#', '\\%03o' % ord('#'))
+ return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
+
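+# Illustrative examples of the assumed behavior (not in the original source):
+#
+#   Define('FOO=1', 'linux')  ->  '-DFOO=1'
+#   Define('FOO#1', 'win')    ->  '-DFOO\0431'  ('#' octal-encoded for cl.exe)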
+
+def AddArch(output, arch):
+ """Adds an arch string to an output path."""
+ output, extension = os.path.splitext(output)
+ return '%s.%s%s' % (output, arch, extension)
+
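+# Illustrative example (not in the original source): the arch name is
+# inserted before the extension, e.g.
+#
+#   AddArch('obj/foo/libbar.a', 'arm64')  ->  'obj/foo/libbar.arm64.a'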
+
+class Target(object):
+ """Target represents the paths used within a single gyp target.
+
+ Conceptually, building a single target A is a series of steps:
+
+  1) actions/rules/copies generate sources/resources/etc.
+  2) compiling generates .o files
+  3) linking generates a binary (library/executable)
+  4) bundling merges the above into a mac bundle
+
+ (Any of these steps can be optional.)
+
+ From a build ordering perspective, a dependent target B could just
+ depend on the last output of this series of steps.
+
+ But some dependent commands sometimes need to reach inside the box.
+ For example, when linking B it needs to get the path to the static
+ library generated by A.
+
+ This object stores those paths. To keep things simple, member
+ variables only store concrete paths to single files, while methods
+ compute derived values like "the last output of the target".
+ """
+ def __init__(self, type):
+ # Gyp type ("static_library", etc.) of this target.
+ self.type = type
+ # File representing whether any input dependencies necessary for
+ # dependent actions have completed.
+ self.preaction_stamp = None
+ # File representing whether any input dependencies necessary for
+ # dependent compiles have completed.
+ self.precompile_stamp = None
+ # File representing the completion of actions/rules/copies, if any.
+ self.actions_stamp = None
+ # Path to the output of the link step, if any.
+ self.binary = None
+ # Path to the file representing the completion of building the bundle,
+ # if any.
+ self.bundle = None
+ # On Windows, incremental linking requires linking against all the .objs
+ # that compose a .lib (rather than the .lib itself). That list is stored
+ # here. In this case, we also need to save the compile_deps for the target,
+ # so that the the target that directly depends on the .objs can also depend
+ # on those.
+ self.component_objs = None
+ self.compile_deps = None
+ # Windows only. The import .lib is the output of a build step, but
+ # because dependents only link against the lib (not both the lib and the
+ # dll) we keep track of the import library here.
+ self.import_lib = None
+
+ def Linkable(self):
+ """Return true if this is a target that can be linked against."""
+ return self.type in ('static_library', 'shared_library')
+
+ def UsesToc(self, flavor):
+ """Return true if the target should produce a restat rule based on a TOC
+ file."""
+ # For bundles, the .TOC should be produced for the binary, not for
+ # FinalOutput(). But the naive approach would put the TOC file into the
+ # bundle, so don't do this for bundles for now.
+ if flavor == 'win' or self.bundle:
+ return False
+ return self.type in ('shared_library', 'loadable_module')
+
+ def PreActionInput(self, flavor):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent action step."""
+ if self.UsesToc(flavor):
+ return self.FinalOutput() + '.TOC'
+ return self.FinalOutput() or self.preaction_stamp
+
+ def PreCompileInput(self):
+ """Return the path, if any, that should be used as a dependency of
+ any dependent compile step."""
+ return self.actions_stamp or self.precompile_stamp
+
+ def FinalOutput(self):
+ """Return the last output of the target, which depends on all prior
+ steps."""
+ return self.bundle or self.binary or self.actions_stamp
+
+
+# A small discourse on paths as used within the Ninja build:
+# All files we produce (both at gyp and at build time) appear in the
+# build directory (e.g. out/Debug).
+#
+# Paths within a given .gyp file are always relative to the directory
+# containing the .gyp file. Call these "gyp paths". This includes
+# sources as well as the starting directory a given gyp rule/action
+# expects to be run from. We call the path from the source root to
+# the gyp file the "base directory" within the per-.gyp-file
+# NinjaWriter code.
+#
+# All paths as written into the .ninja files are relative to the build
+# directory. Call these paths "ninja paths".
+#
+# We translate between these two notions of paths with two helper
+# functions:
+#
+# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
+# into the equivalent ninja path.
+#
+# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
+# an output file; the result can be namespaced such that it is unique
+# to the input file name as well as the output target name.
+
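+# Illustrative sketch under an assumed layout (not in the original source):
+# with a build directory of out/Debug and a gyp file at foo/foo.gyp that
+# references bar/baz.cc, the two helpers translate roughly as:
+#
+#   GypPathToNinja('bar/baz.cc')        ->  '../../foo/bar/baz.cc'
+#   GypPathToUniqueOutput('bar/baz.o')  ->  'obj/foo/bar/<target>.baz.o'
+#
+# i.e. inputs become paths relative to out/Debug, while outputs are
+# namespaced under obj/ by the gyp file's base directory and target name.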
+class NinjaWriter(object):
+ def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
+ output_file, toplevel_build, output_file_name, flavor,
+ toplevel_dir=None):
+ """
+ base_dir: path from source root to directory containing this gyp file,
+ by gyp semantics, all input paths are relative to this
+ build_dir: path from source root to build output
+ toplevel_dir: path to the toplevel directory
+ """
+
+ self.hash_for_rules = hash_for_rules
+ self.target_outputs = target_outputs
+ self.base_dir = base_dir
+ self.build_dir = build_dir
+ self.ninja = ninja_syntax.Writer(output_file)
+ self.toplevel_build = toplevel_build
+ self.output_file_name = output_file_name
+
+ self.flavor = flavor
+ self.abs_build_dir = None
+ if toplevel_dir is not None:
+ self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
+ build_dir))
+ self.obj_ext = '.obj' if flavor == 'win' else '.o'
+ if flavor == 'win':
+ # See docstring of msvs_emulation.GenerateEnvironmentFiles().
+ self.win_env = {}
+ for arch in ('x86', 'x64'):
+ self.win_env[arch] = 'environment.' + arch
+
+ # Relative path from build output dir to base dir.
+ build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
+ self.build_to_base = os.path.join(build_to_top, base_dir)
+ # Relative path from base dir to build dir.
+ base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
+ self.base_to_build = os.path.join(base_to_top, build_dir)
+
+ def ExpandSpecial(self, path, product_dir=None):
+ """Expand specials like $!PRODUCT_DIR in |path|.
+
+ If |product_dir| is None, assumes the cwd is already the product
+ dir. Otherwise, |product_dir| is the relative path to the product
+ dir.
+ """
+
+ PRODUCT_DIR = '$!PRODUCT_DIR'
+ if PRODUCT_DIR in path:
+ if product_dir:
+ path = path.replace(PRODUCT_DIR, product_dir)
+ else:
+ path = path.replace(PRODUCT_DIR + '/', '')
+ path = path.replace(PRODUCT_DIR + '\\', '')
+ path = path.replace(PRODUCT_DIR, '.')
+
+ INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
+ if INTERMEDIATE_DIR in path:
+ int_dir = self.GypPathToUniqueOutput('gen')
+ # GypPathToUniqueOutput generates a path relative to the product dir,
+ # so insert product_dir in front if it is provided.
+ path = path.replace(INTERMEDIATE_DIR,
+ os.path.join(product_dir or '', int_dir))
+
+ CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
+ path = path.replace(CONFIGURATION_NAME, self.config_name)
+
+ return path
+
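+  # Illustrative examples of the assumed behavior (not in the original
+  # source), for a writer whose config_name is 'Debug':
+  #
+  #   ExpandSpecial('$!PRODUCT_DIR/gen/foo.h')           -> 'gen/foo.h'
+  #   ExpandSpecial('$!PRODUCT_DIR/gen/foo.h', '../..')  -> '../../gen/foo.h'
+  #   ExpandSpecial('-L$|CONFIGURATION_NAME/lib')        -> '-LDebug/lib'
+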
+ def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
+ if self.flavor == 'win':
+ path = self.msvs_settings.ConvertVSMacros(
+ path, config=self.config_name)
+ path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
+ path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
+ dirname)
+ path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
+ path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
+ path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
+ return path
+
+ def GypPathToNinja(self, path, env=None):
+ """Translate a gyp path to a ninja path, optionally expanding environment
+ variable references in |path| with |env|.
+
+ See the above discourse on path conversions."""
+ if env:
+ if self.flavor == 'mac':
+ path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+ elif self.flavor == 'win':
+ path = gyp.msvs_emulation.ExpandMacros(path, env)
+ if path.startswith('$!'):
+ expanded = self.ExpandSpecial(path)
+ if self.flavor == 'win':
+ expanded = os.path.normpath(expanded)
+ return expanded
+ if '$|' in path:
+ path = self.ExpandSpecial(path)
+ assert '$' not in path, path
+ return os.path.normpath(os.path.join(self.build_to_base, path))
+
+ def GypPathToUniqueOutput(self, path, qualified=True):
+ """Translate a gyp path to a ninja path for writing output.
+
+ If qualified is True, qualify the resulting filename with the name
+ of the target. This is necessary when e.g. compiling the same
+ path twice for two separate output targets.
+
+ See the above discourse on path conversions."""
+
+ path = self.ExpandSpecial(path)
+ assert not path.startswith('$'), path
+
+ # Translate the path following this scheme:
+ # Input: foo/bar.gyp, target targ, references baz/out.o
+ # Output: obj/foo/baz/targ.out.o (if qualified)
+ # obj/foo/baz/out.o (otherwise)
+ # (and obj.host instead of obj for cross-compiles)
+ #
+ # Why this scheme and not some other one?
+ # 1) for a given input, you can compute all derived outputs by matching
+ # its path, even if the input is brought via a gyp file with '..'.
+ # 2) simple files like libraries and stamps have a simple filename.
+
+ obj = 'obj'
+ if self.toolset != 'target':
+ obj += '.' + self.toolset
+
+ path_dir, path_basename = os.path.split(path)
+ assert not os.path.isabs(path_dir), (
+ "'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
+
+ if qualified:
+ path_basename = self.name + '.' + path_basename
+ return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
+ path_basename))
+
+ def WriteCollapsedDependencies(self, name, targets, order_only=None):
+ """Given a list of targets, return a path for a single file
+ representing the result of building all the targets or None.
+
+ Uses a stamp file if necessary."""
+
+ assert targets == filter(None, targets), targets
+ if len(targets) == 0:
+ assert not order_only
+ return None
+ if len(targets) > 1 or order_only:
+ stamp = self.GypPathToUniqueOutput(name + '.stamp')
+ targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
+ self.ninja.newline()
+ return targets[0]
+
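+  # Illustrative sketch of the assumed behavior (not in the original
+  # source): collapsing ['a.stamp', 'b.stamp'] under the name
+  # 'actions_depends' writes a single ninja edge like
+  #
+  #   build obj/<base_dir>/<target>.actions_depends.stamp: stamp a.stamp b.stamp
+  #
+  # and returns that one stamp path; a single target is returned unchanged,
+  # and an empty list yields None.
+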
+ def _SubninjaNameForArch(self, arch):
+ output_file_base = os.path.splitext(self.output_file_name)[0]
+ return '%s.%s.ninja' % (output_file_base, arch)
+
+ def WriteSpec(self, spec, config_name, generator_flags):
+ """The main entry point for NinjaWriter: write the build rules for a spec.
+
+ Returns a Target object, which represents the output paths for this spec.
+ Returns None if there are no outputs (e.g. a settings-only 'none' type
+ target)."""
+
+ self.config_name = config_name
+ self.name = spec['target_name']
+ self.toolset = spec['toolset']
+ config = spec['configurations'][config_name]
+ self.target = Target(spec['type'])
+ self.is_standalone_static_library = bool(
+ spec.get('standalone_static_library', 0))
+ # Track if this target contains any C++ files, to decide if gcc or g++
+ # should be used for linking.
+ self.uses_cpp = False
+
+ self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
+ self.xcode_settings = self.msvs_settings = None
+ if self.flavor == 'mac':
+ self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ if self.flavor == 'win':
+ self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
+ generator_flags)
+ arch = self.msvs_settings.GetArch(config_name)
+ self.ninja.variable('arch', self.win_env[arch])
+ self.ninja.variable('cc', '$cl_' + arch)
+ self.ninja.variable('cxx', '$cl_' + arch)
+ self.ninja.variable('cc_host', '$cl_' + arch)
+ self.ninja.variable('cxx_host', '$cl_' + arch)
+ self.ninja.variable('asm', '$ml_' + arch)
+
+ if self.flavor == 'mac':
+ self.archs = self.xcode_settings.GetActiveArchs(config_name)
+ if len(self.archs) > 1:
+ self.arch_subninjas = dict(
+ (arch, ninja_syntax.Writer(
+ OpenOutput(os.path.join(self.toplevel_build,
+ self._SubninjaNameForArch(arch)),
+ 'w')))
+ for arch in self.archs)
+
+ # Compute predepends for all rules.
+ # actions_depends is the dependencies this target depends on before running
+ # any of its action/rule/copy steps.
+ # compile_depends is the dependencies this target depends on before running
+ # any of its compile steps.
+ actions_depends = []
+ compile_depends = []
+ # TODO(evan): it is rather confusing which things are lists and which
+ # are strings. Fix these.
+ if 'dependencies' in spec:
+ for dep in spec['dependencies']:
+ if dep in self.target_outputs:
+ target = self.target_outputs[dep]
+ actions_depends.append(target.PreActionInput(self.flavor))
+ compile_depends.append(target.PreCompileInput())
+ actions_depends = filter(None, actions_depends)
+ compile_depends = filter(None, compile_depends)
+ actions_depends = self.WriteCollapsedDependencies('actions_depends',
+ actions_depends)
+ compile_depends = self.WriteCollapsedDependencies('compile_depends',
+ compile_depends)
+ self.target.preaction_stamp = actions_depends
+ self.target.precompile_stamp = compile_depends
+
+ # Write out actions, rules, and copies. These must happen before we
+ # compile any sources, so compute a list of predependencies for sources
+ # while we do it.
+ extra_sources = []
+ mac_bundle_depends = []
+ self.target.actions_stamp = self.WriteActionsRulesCopies(
+ spec, extra_sources, actions_depends, mac_bundle_depends)
+
+ # If we have actions/rules/copies, we depend directly on those, but
+    # otherwise we depend on dependent targets' actions/rules/copies etc.
+    # We never need to explicitly depend on previous targets' link steps,
+ # because no compile ever depends on them.
+ compile_depends_stamp = (self.target.actions_stamp or compile_depends)
+
+ # Write out the compilation steps, if any.
+ link_deps = []
+ sources = extra_sources + spec.get('sources', [])
+ if sources:
+ if self.flavor == 'mac' and len(self.archs) > 1:
+ # Write subninja file containing compile and link commands scoped to
+ # a single arch if a fat binary is being built.
+ for arch in self.archs:
+ self.ninja.subninja(self._SubninjaNameForArch(arch))
+
+ pch = None
+ if self.flavor == 'win':
+ gyp.msvs_emulation.VerifyMissingSources(
+ sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
+ pch = gyp.msvs_emulation.PrecompiledHeader(
+ self.msvs_settings, config_name, self.GypPathToNinja,
+ self.GypPathToUniqueOutput, self.obj_ext)
+ else:
+ pch = gyp.xcode_emulation.MacPrefixHeader(
+ self.xcode_settings, self.GypPathToNinja,
+ lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
+ link_deps = self.WriteSources(
+ self.ninja, config_name, config, sources, compile_depends_stamp, pch,
+ spec)
+ # Some actions/rules output 'sources' that are already object files.
+ obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
+ if obj_outputs:
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
+ else:
+ print "Warning: Actions/rules writing object files don't work with " \
+ "multiarch targets, dropping. (target %s)" % spec['target_name']
+ elif self.flavor == 'mac' and len(self.archs) > 1:
+ link_deps = collections.defaultdict(list)
+
+ compile_deps = self.target.actions_stamp or actions_depends
+ if self.flavor == 'win' and self.target.type == 'static_library':
+ self.target.component_objs = link_deps
+ self.target.compile_deps = compile_deps
+
+ # Write out a link step, if needed.
+ output = None
+ is_empty_bundle = not link_deps and not mac_bundle_depends
+ if link_deps or self.target.actions_stamp or actions_depends:
+ output = self.WriteTarget(spec, config_name, config, link_deps,
+ compile_deps)
+ if self.is_mac_bundle:
+ mac_bundle_depends.append(output)
+
+ # Bundle all of the above together, if needed.
+ if self.is_mac_bundle:
+ output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
+
+ if not output:
+ return None
+
+ assert self.target.FinalOutput(), output
+ return self.target
+
+ def _WinIdlRule(self, source, prebuild, outputs):
+ """Handle the implicit VS .idl rule for one source file. Fills |outputs|
+ with files that are generated."""
+ outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
+ source, self.config_name)
+ outdir = self.GypPathToNinja(outdir)
+ def fix_path(path, rel=None):
+ path = os.path.join(outdir, path)
+ dirname, basename = os.path.split(source)
+ root, ext = os.path.splitext(basename)
+ path = self.ExpandRuleVariables(
+ path, root, dirname, source, ext, basename)
+ if rel:
+ path = os.path.relpath(path, rel)
+ return path
+ vars = [(name, fix_path(value, outdir)) for name, value in vars]
+ output = [fix_path(p) for p in output]
+ vars.append(('outdir', outdir))
+ vars.append(('idlflags', flags))
+ input = self.GypPathToNinja(source)
+ self.ninja.build(output, 'idl', input,
+ variables=vars, order_only=prebuild)
+ outputs.extend(output)
+
+ def WriteWinIdlFiles(self, spec, prebuild):
+ """Writes rules to match MSVS's implicit idl handling."""
+ assert self.flavor == 'win'
+ if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
+ return []
+ outputs = []
+ for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
+ self._WinIdlRule(source, prebuild, outputs)
+ return outputs
+
+ def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
+ mac_bundle_depends):
+ """Write out the Actions, Rules, and Copies steps. Return a path
+ representing the outputs of these steps."""
+ outputs = []
+ if self.is_mac_bundle:
+ mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
+ else:
+ mac_bundle_resources = []
+ extra_mac_bundle_resources = []
+
+ if 'actions' in spec:
+ outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
+ extra_mac_bundle_resources)
+ if 'rules' in spec:
+ outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
+ mac_bundle_resources,
+ extra_mac_bundle_resources)
+ if 'copies' in spec:
+ outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
+
+ if 'sources' in spec and self.flavor == 'win':
+ outputs += self.WriteWinIdlFiles(spec, prebuild)
+
+ stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
+
+ if self.is_mac_bundle:
+ xcassets = self.WriteMacBundleResources(
+ extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
+ partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
+ self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
+
+ return stamp
+
+ def GenerateDescription(self, verb, message, fallback):
+ """Generate and return a description of a build step.
+
+ |verb| is the short summary, e.g. ACTION or RULE.
+ |message| is a hand-written description, or None if not available.
+ |fallback| is the gyp-level name of the step, usable as a fallback.
+ """
+ if self.toolset != 'target':
+ verb += '(%s)' % self.toolset
+ if message:
+ return '%s %s' % (verb, self.ExpandSpecial(message))
+ else:
+ return '%s %s: %s' % (verb, self.name, fallback)
+
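+  # Illustrative examples of the assumed behavior (not in the original
+  # source), for a hypothetical target named 'base' using the 'host'
+  # toolset:
+  #
+  #   GenerateDescription('ACTION', 'Generating foo', 'foo_abc123')
+  #     -> 'ACTION(host) Generating foo'
+  #   GenerateDescription('ACTION', None, 'foo_abc123')
+  #     -> 'ACTION(host) base: foo_abc123'
+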
+ def WriteActions(self, actions, extra_sources, prebuild,
+ extra_mac_bundle_resources):
+ # Actions cd into the base directory.
+ env = self.GetToolchainEnv()
+ all_outputs = []
+ for action in actions:
+ # First write out a rule for the action.
+ name = '%s_%s' % (action['action_name'], self.hash_for_rules)
+ description = self.GenerateDescription('ACTION',
+ action.get('message', None),
+ name)
+ is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
+ if self.flavor == 'win' else False)
+ args = action['action']
+ depfile = action.get('depfile', None)
+ if depfile:
+ depfile = self.ExpandSpecial(depfile, self.base_to_build)
+ pool = 'console' if int(action.get('ninja_use_console', 0)) else None
+ rule_name, _ = self.WriteNewNinjaRule(name, args, description,
+ is_cygwin, env, pool,
+ depfile=depfile)
+
+ inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += action['outputs']
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += action['outputs']
+ outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
+
+ # Then write out an edge using the rule.
+ self.ninja.build(outputs, rule_name, inputs,
+ order_only=prebuild)
+ all_outputs += outputs
+
+ self.ninja.newline()
+
+ return all_outputs
+
+ def WriteRules(self, rules, extra_sources, prebuild,
+ mac_bundle_resources, extra_mac_bundle_resources):
+ env = self.GetToolchainEnv()
+ all_outputs = []
+ for rule in rules:
+ # Skip a rule with no action and no inputs.
+ if 'action' not in rule and not rule.get('rule_sources', []):
+ continue
+
+ # First write out a rule for the rule action.
+ name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
+
+ args = rule['action']
+ description = self.GenerateDescription(
+ 'RULE',
+ rule.get('message', None),
+ ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
+ is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
+ if self.flavor == 'win' else False)
+ pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
+ rule_name, args = self.WriteNewNinjaRule(
+ name, args, description, is_cygwin, env, pool)
+
+ # TODO: if the command references the outputs directly, we should
+ # simplify it to just use $out.
+
+ # Rules can potentially make use of some special variables which
+ # must vary per source file.
+ # Compute the list of variables we'll need to provide.
+ special_locals = ('source', 'root', 'dirname', 'ext', 'name')
+ needed_variables = set(['source'])
+ for argument in args:
+ for var in special_locals:
+ if '${%s}' % var in argument:
+ needed_variables.add(var)
+
+ def cygwin_munge(path):
+ # pylint: disable=cell-var-from-loop
+ if is_cygwin:
+ return path.replace('\\', '/')
+ return path
+
+ inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
+
+ # If there are n source files matching the rule, and m additional rule
+ # inputs, then adding 'inputs' to each build edge written below will
+ # write m * n inputs. Collapsing reduces this to m + n.
+ sources = rule.get('rule_sources', [])
+ num_inputs = len(inputs)
+ if prebuild:
+ num_inputs += 1
+ if num_inputs > 2 and len(sources) > 2:
+ inputs = [self.WriteCollapsedDependencies(
+ rule['rule_name'], inputs, order_only=prebuild)]
+ prebuild = []
+
+ # For each source file, write an edge that generates all the outputs.
+ for source in sources:
+ source = os.path.normpath(source)
+ dirname, basename = os.path.split(source)
+ root, ext = os.path.splitext(basename)
+
+ # Gather the list of inputs and outputs, expanding $vars if possible.
+ outputs = [self.ExpandRuleVariables(o, root, dirname,
+ source, ext, basename)
+ for o in rule['outputs']]
+
+ if int(rule.get('process_outputs_as_sources', False)):
+ extra_sources += outputs
+
+ was_mac_bundle_resource = source in mac_bundle_resources
+ if was_mac_bundle_resource or \
+ int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ extra_mac_bundle_resources += outputs
+ # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
+ # items in a set and remove them all in a single pass if this becomes
+ # a performance issue.
+ if was_mac_bundle_resource:
+ mac_bundle_resources.remove(source)
+
+ extra_bindings = []
+ for var in needed_variables:
+ if var == 'root':
+ extra_bindings.append(('root', cygwin_munge(root)))
+ elif var == 'dirname':
+ # '$dirname' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
+ extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
+ elif var == 'source':
+ # '$source' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ source_expanded = self.ExpandSpecial(source, self.base_to_build)
+ extra_bindings.append(('source', cygwin_munge(source_expanded)))
+ elif var == 'ext':
+ extra_bindings.append(('ext', ext))
+ elif var == 'name':
+ extra_bindings.append(('name', cygwin_munge(basename)))
+ else:
+            assert var is None, repr(var)
+
+ outputs = [self.GypPathToNinja(o, env) for o in outputs]
+ if self.flavor == 'win':
+ # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
+ extra_bindings.append(('unique_name',
+ hashlib.md5(outputs[0]).hexdigest()))
+ self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
+ implicit=inputs,
+ order_only=prebuild,
+ variables=extra_bindings)
+
+ all_outputs.extend(outputs)
+
+ return all_outputs
+
+ def WriteCopies(self, copies, prebuild, mac_bundle_depends):
+ outputs = []
+ env = self.GetToolchainEnv()
+ for copy in copies:
+ for path in copy['files']:
+ # Normalize the path so trailing slashes don't confuse us.
+ path = os.path.normpath(path)
+ basename = os.path.split(path)[1]
+ src = self.GypPathToNinja(path, env)
+ dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
+ env)
+ outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
+ if self.is_mac_bundle:
+ # gyp has mac_bundle_resources to copy things into a bundle's
+ # Resources folder, but there's no built-in way to copy files to other
+ # places in the bundle. Hence, some targets use copies for this. Check
+ # if this file is copied into the current bundle, and if so add it to
+ # the bundle depends so that dependent targets get rebuilt if the copy
+ # input changes.
+ if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
+ mac_bundle_depends.append(dst)
+
+ return outputs
+
+ def WriteMacBundleResources(self, resources, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources'."""
+ xcassets = []
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables['PRODUCT_DIR'],
+ self.xcode_settings, map(self.GypPathToNinja, resources)):
+ output = self.ExpandSpecial(output)
+ if os.path.splitext(output)[-1] != '.xcassets':
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+ self.ninja.build(output, 'mac_tool', res,
+ variables=[('mactool_cmd', 'copy-bundle-resource'), \
+ ('binary', isBinary)])
+ bundle_depends.append(output)
+ else:
+ xcassets.append(res)
+ return xcassets
+
+ def WriteMacXCassets(self, xcassets, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
+
+    This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+    It assumes that the asset catalogs define at least one imageset and
+    thus that an Assets.car file will be generated in the application
+    resources directory. If this is not the case, the build will probably
+    be redone on each invocation of ninja."""
+ if not xcassets:
+ return
+
+ extra_arguments = {}
+ settings_to_arg = {
+ 'XCASSETS_APP_ICON': 'app-icon',
+ 'XCASSETS_LAUNCH_IMAGE': 'launch-image',
+ }
+ settings = self.xcode_settings.xcode_settings[self.config_name]
+ for settings_key, arg_name in settings_to_arg.iteritems():
+ value = settings.get(settings_key)
+ if value:
+ extra_arguments[arg_name] = value
+
+ partial_info_plist = None
+ if extra_arguments:
+ partial_info_plist = self.GypPathToUniqueOutput(
+ 'assetcatalog_generated_info.plist')
+ extra_arguments['output-partial-info-plist'] = partial_info_plist
+
+ outputs = []
+ outputs.append(
+ os.path.join(
+ self.xcode_settings.GetBundleResourceFolder(),
+ 'Assets.car'))
+ if partial_info_plist:
+ outputs.append(partial_info_plist)
+
+ keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ bundle_depends.extend(self.ninja.build(
+ outputs, 'compile_xcassets', xcassets,
+ variables=[('env', env), ('keys', keys)]))
+ return partial_info_plist
+
+ def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
+ """Write build rules for bundle Info.plist files."""
+ info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+ generator_default_variables['PRODUCT_DIR'],
+ self.xcode_settings, self.GypPathToNinja)
+ if not info_plist:
+ return
+ out = self.ExpandSpecial(out)
+ if defines:
+ # Create an intermediate file to store preprocessed results.
+ intermediate_plist = self.GypPathToUniqueOutput(
+ os.path.basename(info_plist))
+ defines = ' '.join([Define(d, self.flavor) for d in defines])
+ info_plist = self.ninja.build(
+ intermediate_plist, 'preprocess_infoplist', info_plist,
+ variables=[('defines',defines)])
+
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+
+ if partial_info_plist:
+ intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
+ info_plist = self.ninja.build(
+ intermediate_plist, 'merge_infoplist',
+ [partial_info_plist, info_plist])
+
+ keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
+ keys = QuoteShellArgument(json.dumps(keys), self.flavor)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+ self.ninja.build(out, 'copy_infoplist', info_plist,
+ variables=[('env', env), ('keys', keys),
+ ('binary', isBinary)])
+ bundle_depends.append(out)
+
+ def WriteSources(self, ninja_file, config_name, config, sources, predepends,
+ precompiled_header, spec):
+ """Write build rules to compile all of |sources|."""
+ if self.toolset == 'host':
+ self.ninja.variable('ar', '$ar_host')
+ self.ninja.variable('cc', '$cc_host')
+ self.ninja.variable('cxx', '$cxx_host')
+ self.ninja.variable('ld', '$ld_host')
+ self.ninja.variable('ldxx', '$ldxx_host')
+ self.ninja.variable('nm', '$nm_host')
+ self.ninja.variable('readelf', '$readelf_host')
+
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ return self.WriteSourcesForArch(
+ self.ninja, config_name, config, sources, predepends,
+ precompiled_header, spec)
+ else:
+ return dict((arch, self.WriteSourcesForArch(
+ self.arch_subninjas[arch], config_name, config, sources, predepends,
+ precompiled_header, spec, arch=arch))
+ for arch in self.archs)
+
+ def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
+ predepends, precompiled_header, spec, arch=None):
+ """Write build rules to compile all of |sources|."""
+
+ extra_defines = []
+ if self.flavor == 'mac':
+ cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
+ cflags_c = self.xcode_settings.GetCflagsC(config_name)
+ cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
+ cflags_objc = ['$cflags_c'] + \
+ self.xcode_settings.GetCflagsObjC(config_name)
+ cflags_objcc = ['$cflags_cc'] + \
+ self.xcode_settings.GetCflagsObjCC(config_name)
+ elif self.flavor == 'win':
+ asmflags = self.msvs_settings.GetAsmflags(config_name)
+ cflags = self.msvs_settings.GetCflags(config_name)
+ cflags_c = self.msvs_settings.GetCflagsC(config_name)
+ cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
+ extra_defines = self.msvs_settings.GetComputedDefines(config_name)
+ # See comment at cc_command for why there's two .pdb files.
+ pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
+ config_name, self.ExpandSpecial)
+ if not pdbpath_c:
+ obj = 'obj'
+ if self.toolset != 'target':
+ obj += '.' + self.toolset
+ pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
+ pdbpath_c = pdbpath + '.c.pdb'
+ pdbpath_cc = pdbpath + '.cc.pdb'
+ self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
+ self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
+ self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
+ else:
+ cflags = config.get('cflags', [])
+ cflags_c = config.get('cflags_c', [])
+ cflags_cc = config.get('cflags_cc', [])
+
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ if self.toolset == 'target':
+ cflags_c = (os.environ.get('CPPFLAGS', '').split() +
+ os.environ.get('CFLAGS', '').split() + cflags_c)
+ cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
+ os.environ.get('CXXFLAGS', '').split() + cflags_cc)
+ elif self.toolset == 'host':
+ cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
+ os.environ.get('CFLAGS_host', '').split() + cflags_c)
+ cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
+ os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
+
+ defines = config.get('defines', []) + extra_defines
+ self.WriteVariableList(ninja_file, 'defines',
+ [Define(d, self.flavor) for d in defines])
+ if self.flavor == 'win':
+ self.WriteVariableList(ninja_file, 'asmflags',
+ map(self.ExpandSpecial, asmflags))
+ self.WriteVariableList(ninja_file, 'rcflags',
+ [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
+ for f in self.msvs_settings.GetRcflags(config_name,
+ self.GypPathToNinja)])
+
+ include_dirs = config.get('include_dirs', [])
+
+ env = self.GetToolchainEnv()
+ if self.flavor == 'win':
+ include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
+ config_name)
+ self.WriteVariableList(ninja_file, 'includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in include_dirs])
+
+ if self.flavor == 'win':
+ midl_include_dirs = config.get('midl_include_dirs', [])
+ midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
+ midl_include_dirs, config_name)
+ self.WriteVariableList(ninja_file, 'midl_includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in midl_include_dirs])
+
+ pch_commands = precompiled_header.GetPchBuildCommands(arch)
+ if self.flavor == 'mac':
+ # Most targets use no precompiled headers, so only write these if needed.
+ for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
+ ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
+ include = precompiled_header.GetInclude(ext, arch)
+ if include: ninja_file.variable(var, include)
+
+ arflags = config.get('arflags', [])
+
+ self.WriteVariableList(ninja_file, 'cflags',
+ map(self.ExpandSpecial, cflags))
+ self.WriteVariableList(ninja_file, 'cflags_c',
+ map(self.ExpandSpecial, cflags_c))
+ self.WriteVariableList(ninja_file, 'cflags_cc',
+ map(self.ExpandSpecial, cflags_cc))
+ if self.flavor == 'mac':
+ self.WriteVariableList(ninja_file, 'cflags_objc',
+ map(self.ExpandSpecial, cflags_objc))
+ self.WriteVariableList(ninja_file, 'cflags_objcc',
+ map(self.ExpandSpecial, cflags_objcc))
+ self.WriteVariableList(ninja_file, 'arflags',
+ map(self.ExpandSpecial, arflags))
+ ninja_file.newline()
+ outputs = []
+ has_rc_source = False
+ for source in sources:
+ filename, ext = os.path.splitext(source)
+ ext = ext[1:]
+ obj_ext = self.obj_ext
+ if ext in ('cc', 'cpp', 'cxx'):
+ command = 'cxx'
+ self.uses_cpp = True
+ elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
+ command = 'cc'
+ elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
+ command = 'cc_s'
+ elif (self.flavor == 'win' and ext == 'asm' and
+ not self.msvs_settings.HasExplicitAsmRules(spec)):
+ command = 'asm'
+ # Add the _asm suffix as msvs is capable of handling .cc and
+ # .asm files of the same name without collision.
+ obj_ext = '_asm.obj'
+ elif self.flavor == 'mac' and ext == 'm':
+ command = 'objc'
+ elif self.flavor == 'mac' and ext == 'mm':
+ command = 'objcxx'
+ self.uses_cpp = True
+ elif self.flavor == 'win' and ext == 'rc':
+ command = 'rc'
+ obj_ext = '.res'
+ has_rc_source = True
+ else:
+ # Ignore unhandled extensions.
+ continue
+ input = self.GypPathToNinja(source)
+ output = self.GypPathToUniqueOutput(filename + obj_ext)
+ if arch is not None:
+ output = AddArch(output, arch)
+ implicit = precompiled_header.GetObjDependencies([input], [output], arch)
+ variables = []
+ if self.flavor == 'win':
+ variables, output, implicit = precompiled_header.GetFlagsModifications(
+ input, output, implicit, command, cflags_c, cflags_cc,
+ self.ExpandSpecial)
+ ninja_file.build(output, command, input,
+ implicit=[gch for _, _, gch in implicit],
+ order_only=predepends, variables=variables)
+ outputs.append(output)
+
+ if has_rc_source:
+ resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+ self.WriteVariableList(ninja_file, 'resource_includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in resource_include_dirs])
+
+ self.WritePchTargets(ninja_file, pch_commands)
+
+ ninja_file.newline()
+ return outputs
+
+ def WritePchTargets(self, ninja_file, pch_commands):
+ """Writes ninja rules to compile prefix headers."""
+ if not pch_commands:
+ return
+
+ for gch, lang_flag, lang, input in pch_commands:
+ var_name = {
+ 'c': 'cflags_pch_c',
+ 'cc': 'cflags_pch_cc',
+ 'm': 'cflags_pch_objc',
+ 'mm': 'cflags_pch_objcc',
+ }[lang]
+
+      lang_to_command = {'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx'}
+      cmd = lang_to_command.get(lang)
+ ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
+
+ def WriteLink(self, spec, config_name, config, link_deps):
+ """Write out a link step. Fills out target.binary. """
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ return self.WriteLinkForArch(
+ self.ninja, spec, config_name, config, link_deps)
+ else:
+ output = self.ComputeOutput(spec)
+ inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
+ config_name, config, link_deps[arch],
+ arch=arch)
+ for arch in self.archs]
+ extra_bindings = []
+ build_output = output
+ if not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+
+ # TODO(yyanagisawa): more work needed to fix:
+ # https://code.google.com/p/gyp/issues/detail?id=411
+ if (spec['type'] in ('shared_library', 'loadable_module') and
+ not self.is_mac_bundle):
+ extra_bindings.append(('lib', output))
+ self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
+ variables=extra_bindings)
+ else:
+ self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
+ return output
+
+ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
+ link_deps, arch=None):
+ """Write out a link step. Fills out target.binary. """
+ command = {
+ 'executable': 'link',
+ 'loadable_module': 'solink_module',
+ 'shared_library': 'solink',
+ }[spec['type']]
+ command_suffix = ''
+
+ implicit_deps = set()
+ solibs = set()
+ order_deps = set()
+
+ if 'dependencies' in spec:
+ # Two kinds of dependencies:
+ # - Linkable dependencies (like a .a or a .so): add them to the link line.
+ # - Non-linkable dependencies (like a rule that generates a file
+ # and writes a stamp file): add them to implicit_deps
+ extra_link_deps = set()
+ for dep in spec['dependencies']:
+ target = self.target_outputs.get(dep)
+ if not target:
+ continue
+ linkable = target.Linkable()
+ if linkable:
+ new_deps = []
+ if (self.flavor == 'win' and
+ target.component_objs and
+ self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
+ new_deps = target.component_objs
+ if target.compile_deps:
+ order_deps.add(target.compile_deps)
+ elif self.flavor == 'win' and target.import_lib:
+ new_deps = [target.import_lib]
+ elif target.UsesToc(self.flavor):
+ solibs.add(target.binary)
+ implicit_deps.add(target.binary + '.TOC')
+ else:
+ new_deps = [target.binary]
+ for new_dep in new_deps:
+ if new_dep not in extra_link_deps:
+ extra_link_deps.add(new_dep)
+ link_deps.append(new_dep)
+
+ final_output = target.FinalOutput()
+ if not linkable or final_output != target.binary:
+ implicit_deps.add(final_output)
+
+ extra_bindings = []
+ if self.uses_cpp and self.flavor != 'win':
+ extra_bindings.append(('ld', '$ldxx'))
+
+ output = self.ComputeOutput(spec, arch)
+ if arch is None and not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+
+ is_executable = spec['type'] == 'executable'
+ # The ldflags config key is not used on mac or win. On those platforms
+ # linker flags are set via xcode_settings and msvs_settings, respectively.
+ env_ldflags = os.environ.get('LDFLAGS', '').split()
+ if self.flavor == 'mac':
+ ldflags = self.xcode_settings.GetLdflags(config_name,
+ self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
+ self.GypPathToNinja, arch)
+ ldflags = env_ldflags + ldflags
+ elif self.flavor == 'win':
+ manifest_base_name = self.GypPathToUniqueOutput(
+ self.ComputeOutputFileName(spec))
+ ldflags, intermediate_manifest, manifest_files = \
+ self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
+ self.ExpandSpecial, manifest_base_name,
+ output, is_executable,
+ self.toplevel_build)
+ ldflags = env_ldflags + ldflags
+ self.WriteVariableList(ninja_file, 'manifests', manifest_files)
+ implicit_deps = implicit_deps.union(manifest_files)
+ if intermediate_manifest:
+ self.WriteVariableList(
+ ninja_file, 'intermediatemanifest', [intermediate_manifest])
+ command_suffix = _GetWinLinkRuleNameSuffix(
+ self.msvs_settings.IsEmbedManifest(config_name))
+ def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
+ if def_file:
+ implicit_deps.add(def_file)
+ else:
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ ldflags = env_ldflags + config.get('ldflags', [])
+ if is_executable and len(solibs):
+ rpath = 'lib/'
+ if self.toolset != 'target':
+ rpath += self.toolset
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ ldflags.append('-Wl,-rpath-link=%s' % rpath)
+ self.WriteVariableList(ninja_file, 'ldflags',
+ map(self.ExpandSpecial, ldflags))
+
+ library_dirs = config.get('library_dirs', [])
+ if self.flavor == 'win':
+ library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
+ for l in library_dirs]
+ library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
+ self.flavor)
+ for l in library_dirs]
+ else:
+ library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
+ self.flavor)
+ for l in library_dirs]
+
+ libraries = gyp.common.uniquer(map(self.ExpandSpecial,
+ spec.get('libraries', [])))
+ if self.flavor == 'mac':
+ libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
+ elif self.flavor == 'win':
+ libraries = self.msvs_settings.AdjustLibraries(libraries)
+
+ self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
+
+ linked_binary = output
+
+ if command in ('solink', 'solink_module'):
+ extra_bindings.append(('soname', os.path.split(output)[1]))
+ extra_bindings.append(('lib',
+ gyp.common.EncodePOSIXShellArgument(output)))
+ if self.flavor != 'win':
+ link_file_list = output
+ if self.is_mac_bundle:
+ # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
+ # 'Dependency Framework.framework.rsp'
+ link_file_list = self.xcode_settings.GetWrapperName()
+ if arch:
+ link_file_list += '.' + arch
+ link_file_list += '.rsp'
+        # If an rspfile name contains spaces, ninja surrounds the filename
+        # with quotes and then passes it to open(), creating a file with
+        # quotes in its name (while the name used when looking for the rsp
+        # file goes through bash, which strips the quotes) :-/
+ link_file_list = link_file_list.replace(' ', '_')
+ extra_bindings.append(
+ ('link_file_list',
+ gyp.common.EncodePOSIXShellArgument(link_file_list)))
+ if self.flavor == 'win':
+ extra_bindings.append(('binary', output))
+ if ('/NOENTRY' not in ldflags and
+ not self.msvs_settings.GetNoImportLibrary(config_name)):
+ self.target.import_lib = output + '.lib'
+ extra_bindings.append(('implibflag',
+ '/IMPLIB:%s' % self.target.import_lib))
+ pdbname = self.msvs_settings.GetPDBName(
+ config_name, self.ExpandSpecial, output + '.pdb')
+ output = [output, self.target.import_lib]
+ if pdbname:
+ output.append(pdbname)
+ elif not self.is_mac_bundle:
+ output = [output, output + '.TOC']
+ else:
+ command = command + '_notoc'
+ elif self.flavor == 'win':
+ extra_bindings.append(('binary', output))
+ pdbname = self.msvs_settings.GetPDBName(
+ config_name, self.ExpandSpecial, output + '.pdb')
+ if pdbname:
+ output = [output, pdbname]
+
+ if len(solibs):
+ extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
+
+ ninja_file.build(output, command + command_suffix, link_deps,
+ implicit=list(implicit_deps),
+ order_only=list(order_deps),
+ variables=extra_bindings)
+ return linked_binary
+
+ def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
+ extra_link_deps = any(self.target_outputs.get(dep).Linkable()
+ for dep in spec.get('dependencies', [])
+ if dep in self.target_outputs)
+ if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
+ # TODO(evan): don't call this function for 'none' target types, as
+ # it doesn't do anything, and we fake out a 'binary' with a stamp file.
+ self.target.binary = compile_deps
+ self.target.type = 'none'
+ elif spec['type'] == 'static_library':
+ self.target.binary = self.ComputeOutput(spec)
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
+ self.is_standalone_static_library):
+ self.ninja.build(self.target.binary, 'alink_thin', link_deps,
+ order_only=compile_deps)
+ else:
+ variables = []
+ if self.xcode_settings:
+ libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
+ if libtool_flags:
+ variables.append(('libtool_flags', libtool_flags))
+ if self.msvs_settings:
+ libflags = self.msvs_settings.GetLibFlags(config_name,
+ self.GypPathToNinja)
+ variables.append(('libflags', libflags))
+
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ self.AppendPostbuildVariable(variables, spec,
+ self.target.binary, self.target.binary)
+ self.ninja.build(self.target.binary, 'alink', link_deps,
+ order_only=compile_deps, variables=variables)
+ else:
+ inputs = []
+ for arch in self.archs:
+ output = self.ComputeOutput(spec, arch)
+ self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
+ order_only=compile_deps,
+ variables=variables)
+ inputs.append(output)
+ # TODO: It's not clear if libtool_flags should be passed to the alink
+ # call that combines single-arch .a files into a fat .a file.
+ self.AppendPostbuildVariable(variables, spec,
+ self.target.binary, self.target.binary)
+ self.ninja.build(self.target.binary, 'alink', inputs,
+ # FIXME: test proving order_only=compile_deps isn't
+ # needed.
+ variables=variables)
+ else:
+ self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
+ return self.target.binary
+
+ def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
+ assert self.is_mac_bundle
+ package_framework = spec['type'] in ('shared_library', 'loadable_module')
+ output = self.ComputeMacBundleOutput()
+ if is_empty:
+ output += '.stamp'
+ variables = []
+ self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
+ is_command_start=not package_framework)
+ if package_framework and not is_empty:
+ variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
+ self.ninja.build(output, 'package_framework', mac_bundle_depends,
+ variables=variables)
+ else:
+ self.ninja.build(output, 'stamp', mac_bundle_depends,
+ variables=variables)
+ self.target.bundle = output
+ return output
+
+ def GetToolchainEnv(self, additional_settings=None):
+ """Returns the variables toolchain would set for build steps."""
+ env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
+ if self.flavor == 'win':
+ env = self.GetMsvsToolchainEnv(
+ additional_settings=additional_settings)
+ return env
+
+ def GetMsvsToolchainEnv(self, additional_settings=None):
+ """Returns the variables Visual Studio would set for build steps."""
+ return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
+ config=self.config_name)
+
+ def GetSortedXcodeEnv(self, additional_settings=None):
+ """Returns the variables Xcode would set for build steps."""
+ assert self.abs_build_dir
+ abs_build_dir = self.abs_build_dir
+ return gyp.xcode_emulation.GetSortedXcodeEnv(
+ self.xcode_settings, abs_build_dir,
+ os.path.join(abs_build_dir, self.build_to_base), self.config_name,
+ additional_settings)
+
+ def GetSortedXcodePostbuildEnv(self):
+ """Returns the variables Xcode would set for postbuild steps."""
+ postbuild_settings = {}
+ # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
+ # TODO(thakis): It would be nice to have some general mechanism instead.
+ strip_save_file = self.xcode_settings.GetPerTargetSetting(
+ 'CHROMIUM_STRIP_SAVE_FILE')
+ if strip_save_file:
+ postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
+ return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
+
+ def AppendPostbuildVariable(self, variables, spec, output, binary,
+ is_command_start=False):
+ """Adds a 'postbuild' variable if there is a postbuild for |output|."""
+ postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
+ if postbuild:
+ variables.append(('postbuilds', postbuild))
+
+ def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
+ """Returns a shell command that runs all the postbuilds, and removes
+ |output| if any of them fails. If |is_command_start| is False, then the
+ returned string will start with ' && '."""
+ if not self.xcode_settings or spec['type'] == 'none' or not output:
+ return ''
+ output = QuoteShellArgument(output, self.flavor)
+ postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
+ if output_binary is not None:
+ postbuilds = self.xcode_settings.AddImplicitPostbuilds(
+ self.config_name,
+ os.path.normpath(os.path.join(self.base_to_build, output)),
+ QuoteShellArgument(
+ os.path.normpath(os.path.join(self.base_to_build, output_binary)),
+ self.flavor),
+ postbuilds, quiet=True)
+
+ if not postbuilds:
+ return ''
+ # Postbuilds expect to be run in the gyp file's directory, so insert an
+ # implicit postbuild to cd to there.
+ postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
+ ['cd', self.build_to_base]))
+ env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
+ # G will be non-null if any postbuild fails. Run all postbuilds in a
+ # subshell.
+ commands = env + ' (' + \
+ ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
+ command_string = (commands + '); G=$$?; '
+ # Remove the final output if any postbuild failed.
+ '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
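+    # Illustrative shape of the assembled string (the callers below prepend
+    # the '(' that balances the trailing ')'):
+    #   export FOO=foo; (cd dir && postbuild1 && postbuild2); G=$$?;
+    #   ((exit $$G) || rm -rf <output>) && exit $$G)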
+ if is_command_start:
+ return '(' + command_string + ' && '
+ else:
+ return '$ && (' + command_string
+
+ def ComputeExportEnvString(self, env):
+ """Given an environment, returns a string looking like
+    'export FOO=foo; export BAR="${FOO} bar";'
+ that exports |env| to the shell."""
+ export_str = []
+ for k, v in env:
+ export_str.append('export %s=%s;' %
+ (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
+ return ' '.join(export_str)
+
+ def ComputeMacBundleOutput(self):
+ """Return the 'output' (full output path) to a bundle output directory."""
+ assert self.is_mac_bundle
+ path = generator_default_variables['PRODUCT_DIR']
+ return self.ExpandSpecial(
+ os.path.join(path, self.xcode_settings.GetWrapperName()))
+
+ def ComputeOutputFileName(self, spec, type=None):
+ """Compute the filename of the final output for the current target."""
+ if not type:
+ type = spec['type']
+
+ default_variables = copy.copy(generator_default_variables)
+ CalculateVariables(default_variables, {'flavor': self.flavor})
+
+ # Compute filename prefix: the product prefix, or a default for
+ # the product type.
+ DEFAULT_PREFIX = {
+ 'loadable_module': default_variables['SHARED_LIB_PREFIX'],
+ 'shared_library': default_variables['SHARED_LIB_PREFIX'],
+ 'static_library': default_variables['STATIC_LIB_PREFIX'],
+ 'executable': default_variables['EXECUTABLE_PREFIX'],
+ }
+ prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
+
+ # Compute filename extension: the product extension, or a default
+ # for the product type.
+ DEFAULT_EXTENSION = {
+ 'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
+ 'shared_library': default_variables['SHARED_LIB_SUFFIX'],
+ 'static_library': default_variables['STATIC_LIB_SUFFIX'],
+ 'executable': default_variables['EXECUTABLE_SUFFIX'],
+ }
+ extension = spec.get('product_extension')
+ if extension:
+ extension = '.' + extension
+ else:
+ extension = DEFAULT_EXTENSION.get(type, '')
+
+ if 'product_name' in spec:
+ # If we were given an explicit name, use that.
+ target = spec['product_name']
+ else:
+ # Otherwise, derive a name from the target name.
+ target = spec['target_name']
+ if prefix == 'lib':
+ # Snip out an extra 'lib' from libs if appropriate.
+ target = StripPrefix(target, 'lib')
+
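+    # For example (illustrative): on Linux a shared_library target named
+    # 'libfoo' gets prefix 'lib', the redundant leading 'lib' is snipped
+    # from the target name, and the pieces below combine into 'libfoo.so'.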
+ if type in ('static_library', 'loadable_module', 'shared_library',
+ 'executable'):
+ return '%s%s%s' % (prefix, target, extension)
+ elif type == 'none':
+ return '%s.stamp' % target
+ else:
+ raise Exception('Unhandled output type %s' % type)
+
+ def ComputeOutput(self, spec, arch=None):
+ """Compute the path for the final output of the spec."""
+ type = spec['type']
+
+ if self.flavor == 'win':
+ override = self.msvs_settings.GetOutputName(self.config_name,
+ self.ExpandSpecial)
+ if override:
+ return override
+
+ if arch is None and self.flavor == 'mac' and type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module'):
+ filename = self.xcode_settings.GetExecutablePath()
+ else:
+ filename = self.ComputeOutputFileName(spec, type)
+
+ if arch is None and 'product_dir' in spec:
+ path = os.path.join(spec['product_dir'], filename)
+ return self.ExpandSpecial(path)
+
+ # Some products go into the output root, libraries go into shared library
+ # dir, and everything else goes into the normal place.
+ type_in_output_root = ['executable', 'loadable_module']
+ if self.flavor == 'mac' and self.toolset == 'target':
+ type_in_output_root += ['shared_library', 'static_library']
+ elif self.flavor == 'win' and self.toolset == 'target':
+ type_in_output_root += ['shared_library']
+
+ if arch is not None:
+ # Make sure partial executables don't end up in a bundle or the regular
+ # output directory.
+ archdir = 'arch'
+ if self.toolset != 'target':
+ archdir = os.path.join('arch', '%s' % self.toolset)
+ return os.path.join(archdir, AddArch(filename, arch))
+ elif type in type_in_output_root or self.is_standalone_static_library:
+ return filename
+ elif type == 'shared_library':
+ libdir = 'lib'
+ if self.toolset != 'target':
+ libdir = os.path.join('lib', '%s' % self.toolset)
+ return os.path.join(libdir, filename)
+ else:
+ return self.GypPathToUniqueOutput(filename, qualified=False)
+
+ def WriteVariableList(self, ninja_file, var, values):
+ assert not isinstance(values, str)
+ if values is None:
+ values = []
+ ninja_file.variable(var, ' '.join(values))
+
+ def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
+ depfile=None):
+ """Write out a new ninja "rule" statement for a given command.
+
+ Returns the name of the new rule, and a copy of |args| with variables
+ expanded."""
+
+ if self.flavor == 'win':
+ args = [self.msvs_settings.ConvertVSMacros(
+ arg, self.base_to_build, config=self.config_name)
+ for arg in args]
+ description = self.msvs_settings.ConvertVSMacros(
+ description, config=self.config_name)
+ elif self.flavor == 'mac':
+ # |env| is an empty list on non-mac.
+ args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
+ description = gyp.xcode_emulation.ExpandEnvVars(description, env)
+
+ # TODO: we shouldn't need to qualify names; we do it because
+ # currently the ninja rule namespace is global, but it really
+ # should be scoped to the subninja.
+ rule_name = self.name
+ if self.toolset == 'target':
+ rule_name += '.' + self.toolset
+ rule_name += '.' + name
+ rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
+
+ # Remove variable references, but not if they refer to the magic rule
+ # variables. This is not quite right, as it also protects these for
+ # actions, not just for rules where they are valid. Good enough.
+ protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
+ protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
+ description = re.sub(protect + r'\$', '_', description)
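+    # For example (illustrative): a description 'RULE ${source} $out' keeps
+    # ${source} intact but becomes 'RULE ${source} _out', since $out is not
+    # one of the protected rule variables.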
+
+ # gyp dictates that commands are run from the base directory.
+ # cd into the directory before running, and adjust paths in
+ # the arguments to point to the proper locations.
+ rspfile = None
+ rspfile_content = None
+ args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
+ if self.flavor == 'win':
+ rspfile = rule_name + '.$unique_name.rsp'
+ # The cygwin case handles this inside the bash sub-shell.
+ run_in = '' if is_cygwin else ' ' + self.build_to_base
+ if is_cygwin:
+ rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
+ args, self.build_to_base)
+ else:
+ rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
+ command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
+ rspfile + run_in)
+ else:
+ env = self.ComputeExportEnvString(env)
+ command = gyp.common.EncodePOSIXShellList(args)
+ command = 'cd %s; ' % self.build_to_base + env + command
+
+ # GYP rules/actions express being no-ops by not touching their outputs.
+ # Avoid executing downstream dependencies in this case by specifying
+ # restat=1 to ninja.
+ self.ninja.rule(rule_name, command, description, depfile=depfile,
+ restat=True, pool=pool,
+ rspfile=rspfile, rspfile_content=rspfile_content)
+ self.ninja.newline()
+
+ return rule_name, args
+
+
+def CalculateVariables(default_variables, params):
+ """Calculate additional variables for use in the build (called by gyp)."""
+ global generator_additional_non_configuration_keys
+ global generator_additional_path_sections
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'mac':
+ default_variables.setdefault('OS', 'mac')
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
+ default_variables.setdefault('SHARED_LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+ default_variables.setdefault('LIB_DIR',
+ generator_default_variables['PRODUCT_DIR'])
+
+ # Copy additional generator configuration data from Xcode, which is shared
+ # by the Mac Ninja generator.
+ import gyp.generator.xcode as xcode_generator
+ generator_additional_non_configuration_keys = getattr(xcode_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(xcode_generator,
+ 'generator_additional_path_sections', [])
+ global generator_extra_sources_for_rules
+ generator_extra_sources_for_rules = getattr(xcode_generator,
+ 'generator_extra_sources_for_rules', [])
+ elif flavor == 'win':
+ exts = gyp.MSVSUtil.TARGET_TYPE_EXT
+ default_variables.setdefault('OS', 'win')
+ default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
+ default_variables['STATIC_LIB_PREFIX'] = ''
+ default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
+ default_variables['SHARED_LIB_PREFIX'] = ''
+ default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
+
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Windows Ninja generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
+ else:
+ operating_system = flavor
+ if flavor == 'android':
+ operating_system = 'linux' # Keep this legacy behavior for now.
+ default_variables.setdefault('OS', operating_system)
+ default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
+ default_variables.setdefault('SHARED_LIB_DIR',
+ os.path.join('$!PRODUCT_DIR', 'lib'))
+ default_variables.setdefault('LIB_DIR',
+ os.path.join('$!PRODUCT_DIR', 'obj'))
+
+def ComputeOutputDir(params):
+ """Returns the path from the toplevel_dir to the build output directory."""
+ # generator_dir: relative path from pwd to where make puts build files.
+  # It exists to ease migrating from make to ninja; ninja itself doesn't put
+  # anything here.
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+
+ # Relative path from source root to our output files. e.g. "out"
+ return os.path.normpath(os.path.join(generator_dir, output_dir))
+
+
+def CalculateGeneratorInputInfo(params):
+ """Called by __init__ to initialize generator values based on params."""
+ # E.g. "out/gypfiles"
+ toplevel = params['options'].toplevel_dir
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, ComputeOutputDir(params), 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+def OpenOutput(path, mode='w'):
+ """Open |path| for writing, creating directories if necessary."""
+ gyp.common.EnsureDirExists(path)
+ return open(path, mode)
+
+
+def CommandWithWrapper(cmd, wrappers, prog):
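+  # E.g. (illustrative): CommandWithWrapper('CC', {'CC': 'ccache'}, 'gcc')
+  # returns 'ccache gcc'; if no wrapper is registered, prog is returned
+  # unchanged.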
+ wrapper = wrappers.get(cmd, '')
+ if wrapper:
+ return wrapper + ' ' + prog
+ return prog
+
+
+def GetDefaultConcurrentLinks():
+ """Returns a best-guess for a number of concurrent links."""
+ pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
+ if pool_size:
+ return pool_size
+
+ if sys.platform in ('win32', 'cygwin'):
+ import ctypes
+
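+    # Query physical memory via kernel32's GlobalMemoryStatusEx and budget
+    # roughly 5 GiB of RAM per concurrent link (see mem_limit below).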
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ("dwLength", ctypes.c_ulong),
+ ("dwMemoryLoad", ctypes.c_ulong),
+ ("ullTotalPhys", ctypes.c_ulonglong),
+ ("ullAvailPhys", ctypes.c_ulonglong),
+ ("ullTotalPageFile", ctypes.c_ulonglong),
+ ("ullAvailPageFile", ctypes.c_ulonglong),
+ ("ullTotalVirtual", ctypes.c_ulonglong),
+ ("ullAvailVirtual", ctypes.c_ulonglong),
+ ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX()
+ stat.dwLength = ctypes.sizeof(stat)
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+ # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
+ # on a 64 GB machine.
+ mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB
+ hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+ return min(mem_limit, hard_cap)
+ elif sys.platform.startswith('linux'):
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+          # Allow 8 GiB per link on Linux because gold is quite memory hungry.
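+          # (Illustrative: a MemTotal of 33554432 kB, i.e. 32 GiB, yields
+          # 33554432 / (8 * 2**20) = 4 concurrent links.)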
+ return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+ return 1
+ elif sys.platform == 'darwin':
+ try:
+ avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+ # 4GB per ld process allows for some more bloat.
+ return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
+ except:
+ return 1
+ else:
+ # TODO(scottmg): Implement this for other platforms.
+ return 1
+
+
+def _GetWinLinkRuleNameSuffix(embed_manifest):
+ """Returns the suffix used to select an appropriate linking rule depending on
+ whether the manifest embedding is enabled."""
+ return '_embed' if embed_manifest else ''
+
+
+def _AddWinLinkRules(master_ninja, embed_manifest):
+ """Adds link rules for Windows platform to |master_ninja|."""
+ def FullLinkCommand(ldcmd, out, binary_type):
+ resource_name = {
+ 'exe': '1',
+ 'dll': '2',
+ }[binary_type]
+ return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
+ '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
+ '$manifests' % {
+ 'python': sys.executable,
+ 'out': out,
+ 'ldcmd': ldcmd,
+ 'resname': resource_name,
+ 'embed': embed_manifest }
+ rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
+ use_separate_mspdbsrv = (
+ int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
+ dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
+ dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
+ '$ld /nologo $implibflag /DLL /OUT:$binary '
+ '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
+ dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
+ master_ninja.rule('solink' + rule_name_suffix,
+ description=dlldesc, command=dllcmd,
+ rspfile='$binary.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True,
+ pool='link_pool')
+ master_ninja.rule('solink_module' + rule_name_suffix,
+ description=dlldesc, command=dllcmd,
+ rspfile='$binary.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True,
+ pool='link_pool')
+ # Note that ldflags goes at the end so that it has the option of
+ # overriding default settings earlier in the command line.
+ exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
+ '$ld /nologo /OUT:$binary @$binary.rsp' %
+ (sys.executable, use_separate_mspdbsrv))
+ exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
+ master_ninja.rule('link' + rule_name_suffix,
+ description='LINK%s $binary' % rule_name_suffix.upper(),
+ command=exe_cmd,
+ rspfile='$binary.rsp',
+ rspfile_content='$in_newline $libs $ldflags',
+ pool='link_pool')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name):
+ options = params['options']
+ flavor = gyp.common.GetFlavor(params)
+ generator_flags = params.get('generator_flags', {})
+
+ # build_dir: relative path from source root to our output files.
+ # e.g. "out/Debug"
+ build_dir = os.path.normpath(
+ os.path.join(ComputeOutputDir(params), config_name))
+
+ toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+ master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
+ master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
+
+ # Put build-time support tools in out/{config_name}.
+ gyp.common.CopyTool(flavor, toplevel_build)
+
+ # Grab make settings for CC/CXX.
+  # The rules are:
+  # - Precedence, from lowest to highest: the gcc/g++ defaults, the
+  #   'make_global_settings' in the gyp file, then the environment variables.
+  # - If there is neither a 'make_global_settings' entry for CC.host/CXX.host
+  #   nor a 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host are
+  #   set to cc/cxx.
+ if flavor == 'win':
+ ar = 'lib.exe'
+ # cc and cxx must be set to the correct architecture by overriding with one
+ # of cl_x86 or cl_x64 below.
+ cc = 'UNSET'
+ cxx = 'UNSET'
+ ld = 'link.exe'
+ ld_host = '$ld'
+ else:
+ ar = 'ar'
+ cc = 'cc'
+ cxx = 'c++'
+ ld = '$cc'
+ ldxx = '$cxx'
+ ld_host = '$cc_host'
+ ldxx_host = '$cxx_host'
+
+ ar_host = 'ar'
+ cc_host = None
+ cxx_host = None
+ cc_host_global_setting = None
+ cxx_host_global_setting = None
+ clang_cl = None
+ nm = 'nm'
+ nm_host = 'nm'
+ readelf = 'readelf'
+ readelf_host = 'readelf'
+
+ build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+ make_global_settings = data[build_file].get('make_global_settings', [])
+ build_to_root = gyp.common.InvertRelativePath(build_dir,
+ options.toplevel_dir)
+ wrappers = {}
+ for key, value in make_global_settings:
+ if key == 'AR':
+ ar = os.path.join(build_to_root, value)
+ if key == 'AR.host':
+ ar_host = os.path.join(build_to_root, value)
+ if key == 'CC':
+ cc = os.path.join(build_to_root, value)
+ if cc.endswith('clang-cl'):
+ clang_cl = cc
+ if key == 'CXX':
+ cxx = os.path.join(build_to_root, value)
+ if key == 'CC.host':
+ cc_host = os.path.join(build_to_root, value)
+ cc_host_global_setting = value
+ if key == 'CXX.host':
+ cxx_host = os.path.join(build_to_root, value)
+ cxx_host_global_setting = value
+ if key == 'LD':
+ ld = os.path.join(build_to_root, value)
+ if key == 'LD.host':
+ ld_host = os.path.join(build_to_root, value)
+ if key == 'NM':
+ nm = os.path.join(build_to_root, value)
+ if key == 'NM.host':
+ nm_host = os.path.join(build_to_root, value)
+ if key == 'READELF':
+ readelf = os.path.join(build_to_root, value)
+ if key == 'READELF.host':
+ readelf_host = os.path.join(build_to_root, value)
+ if key.endswith('_wrapper'):
+ wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
+
+ # Support wrappers from environment variables too.
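+  # E.g. (illustrative) CC_wrapper=ccache registers wrappers['CC'], and a
+  # CC.HOST_wrapper value is normalized to the 'CC.host' key below.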
+ for key, value in os.environ.iteritems():
+ if key.lower().endswith('_wrapper'):
+ key_prefix = key[:-len('_wrapper')]
+ key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
+ wrappers[key_prefix] = os.path.join(build_to_root, value)
+
+ if flavor == 'win':
+ configs = [target_dicts[qualified_target]['configurations'][config_name]
+ for qualified_target in target_list]
+ shared_system_includes = None
+ if not generator_flags.get('ninja_use_custom_environment_files', 0):
+ shared_system_includes = \
+ gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
+ configs, generator_flags)
+ cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, shared_system_includes, OpenOutput)
+ for arch, path in cl_paths.iteritems():
+ if clang_cl:
+ # If we have selected clang-cl, use that instead.
+ path = clang_cl
+ command = CommandWithWrapper('CC', wrappers,
+ QuoteShellArgument(path, 'win'))
+ if clang_cl:
+ # Use clang-cl to cross-compile for x86 or x86_64.
+ command += (' -m32' if arch == 'x86' else ' -m64')
+ master_ninja.variable('cl_' + arch, command)
+
+ cc = GetEnvironFallback(['CC_target', 'CC'], cc)
+ master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
+ cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
+ master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
+
+ if flavor == 'win':
+ master_ninja.variable('ld', ld)
+ master_ninja.variable('idl', 'midl.exe')
+ master_ninja.variable('ar', ar)
+ master_ninja.variable('rc', 'rc.exe')
+ master_ninja.variable('ml_x86', 'ml.exe')
+ master_ninja.variable('ml_x64', 'ml64.exe')
+ master_ninja.variable('mt', 'mt.exe')
+ else:
+ master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
+ master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
+ master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
+ if flavor != 'mac':
+      # Mac does not use readelf/nm for .TOC generation, so avoid polluting
+      # the master ninja file with extra unused variables.
+ master_ninja.variable(
+ 'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
+ master_ninja.variable(
+ 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
+
+ if generator_supports_multiple_toolsets:
+ if not cc_host:
+ cc_host = cc
+ if not cxx_host:
+ cxx_host = cxx
+
+ master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
+ master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
+ master_ninja.variable('readelf_host',
+ GetEnvironFallback(['READELF_host'], readelf_host))
+ cc_host = GetEnvironFallback(['CC_host'], cc_host)
+ cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
+
+    # Environment variable references may appear in 'make_global_settings',
+    # e.g. ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; expand them here.
+ if '$(CC)' in cc_host and cc_host_global_setting:
+ cc_host = cc_host_global_setting.replace('$(CC)', cc)
+ if '$(CXX)' in cxx_host and cxx_host_global_setting:
+ cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
+ master_ninja.variable('cc_host',
+ CommandWithWrapper('CC.host', wrappers, cc_host))
+ master_ninja.variable('cxx_host',
+ CommandWithWrapper('CXX.host', wrappers, cxx_host))
+ if flavor == 'win':
+ master_ninja.variable('ld_host', ld_host)
+ else:
+ master_ninja.variable('ld_host', CommandWithWrapper(
+ 'LINK', wrappers, ld_host))
+ master_ninja.variable('ldxx_host', CommandWithWrapper(
+ 'LINK', wrappers, ldxx_host))
+
+ master_ninja.newline()
+
+ master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
+ master_ninja.newline()
+
+ deps = 'msvc' if flavor == 'win' else 'gcc'
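+  # With deps='gcc' ninja reads the depfiles written via -MMD/-MF below;
+  # with deps='msvc' it scrapes /showIncludes output from the compiler.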
+
+ if flavor != 'win':
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ master_ninja.rule(
+ 'cc_s',
+ description='CC $out',
+ command=('$cc $defines $includes $cflags $cflags_c '
+ '$cflags_pch_c -c $in -o $out'))
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
+ '$cflags_pch_cc -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ else:
+ # TODO(scottmg) Separate pdb names is a test to see if it works around
+ # http://crbug.com/142362. It seems there's a race between the creation of
+ # the .pdb by the precompiled header step for .cc and the compilation of
+ # .c files. This should be handled by mspdbsrv, but rarely errors out with
+ # c1xx : fatal error C1033: cannot open program database
+ # By making the rules target separate pdb files this might be avoided.
+ cc_command = ('ninja -t msvc -e $arch ' +
+ '-- '
+ '$cc /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
+ cxx_command = ('ninja -t msvc -e $arch ' +
+ '-- '
+ '$cxx /nologo /showIncludes /FC '
+ '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
+ master_ninja.rule(
+ 'cc',
+ description='CC $out',
+ command=cc_command,
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags $cflags_c',
+ deps=deps)
+ master_ninja.rule(
+ 'cxx',
+ description='CXX $out',
+ command=cxx_command,
+ rspfile='$out.rsp',
+ rspfile_content='$defines $includes $cflags $cflags_cc',
+ deps=deps)
+ master_ninja.rule(
+ 'idl',
+ description='IDL $in',
+ command=('%s gyp-win-tool midl-wrapper $arch $outdir '
+ '$tlb $h $dlldata $iid $proxy $in '
+ '$midl_includes $idlflags' % sys.executable))
+ master_ninja.rule(
+ 'rc',
+ description='RC $in',
+ # Note: $in must be last otherwise rc.exe complains.
+ command=('%s gyp-win-tool rc-wrapper '
+ '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
+ sys.executable))
+ master_ninja.rule(
+ 'asm',
+ description='ASM $out',
+ command=('%s gyp-win-tool asm-wrapper '
+ '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
+ sys.executable))
+
+ if flavor != 'mac' and flavor != 'win':
+ master_ninja.rule(
+ 'alink',
+ description='AR $out',
+ command='rm -f $out && $ar rcs $arflags $out $in')
+ master_ninja.rule(
+ 'alink_thin',
+ description='AR $out',
+ command='rm -f $out && $ar rcsT $arflags $out $in')
+
+ # This allows targets that only need to depend on $lib's API to declare an
+ # order-only dependency on $lib.TOC and avoid relinking such downstream
+ # dependencies when $lib changes only in non-public ways.
+    # The resulting string leaves an uninterpolated %(suffix)s placeholder
+    # that is filled in by the final substitutions below.
+ mtime_preserving_solink_base = (
+ 'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; else '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
+ 'fi; fi'
+ % { 'solink':
+ '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
+ 'extract_toc':
+ ('{ $readelf -d $lib | grep SONAME ; '
+ '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
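+    # Illustrative effect: because the rules below are restat=True and
+    # $lib.TOC is only overwritten when its contents change, anything that
+    # depends on $lib.TOC rather than on $lib itself is relinked only when
+    # the exported interface (SONAME plus dynamic symbols) actually changes.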
+
+ master_ninja.rule(
+ 'solink',
+ description='SOLINK $lib',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content=
+ '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_module',
+ description='SOLINK(module) $lib',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+ rspfile='$link_file_list',
+ rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'link',
+ description='LINK $out',
+ command=('$ld $ldflags -o $out '
+ '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
+ pool='link_pool')
+ elif flavor == 'win':
+ master_ninja.rule(
+ 'alink',
+ description='LIB $out',
+ command=('%s gyp-win-tool link-wrapper $arch False '
+ '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
+ sys.executable),
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libflags')
+ _AddWinLinkRules(master_ninja, embed_manifest=True)
+ _AddWinLinkRules(master_ninja, embed_manifest=False)
+ else:
+ master_ninja.rule(
+ 'objc',
+ description='OBJC $out',
+ command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
+ '$cflags_pch_objc -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ master_ninja.rule(
+ 'objcxx',
+ description='OBJCXX $out',
+ command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
+ '$cflags_pch_objcc -c $in -o $out'),
+ depfile='$out.d',
+ deps=deps)
+ master_ninja.rule(
+ 'alink',
+ description='LIBTOOL-STATIC $out, POSTBUILDS',
+ command='rm -f $out && '
+ './gyp-mac-tool filter-libtool libtool $libtool_flags '
+ '-static -o $out $in'
+ '$postbuilds')
+ master_ninja.rule(
+ 'lipo',
+ description='LIPO $out, POSTBUILDS',
+ command='rm -f $out && lipo -create $in -output $out$postbuilds')
+ master_ninja.rule(
+ 'solipo',
+ description='SOLIPO $out, POSTBUILDS',
+ command=(
+ 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
+ '%(extract_toc)s > $lib.TOC'
+ % { 'extract_toc':
+ '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
+ 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
+
+
+ # Record the public interface of $lib in $lib.TOC. See the corresponding
+ # comment in the posix section above for details.
+ solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
+ mtime_preserving_solink_base = (
+ 'if [ ! -e $lib -o ! -e $lib.TOC ] || '
+ # Always force dependent targets to relink if this library
+ # reexports something. Handling this correctly would require
+ # recursive TOC dumping but this is rare in practice, so punt.
+ 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
+ '%(solink)s && %(extract_toc)s > $lib.TOC; '
+ 'else '
+ '%(solink)s && %(extract_toc)s > $lib.tmp && '
+ 'if ! cmp -s $lib.tmp $lib.TOC; then '
+ 'mv $lib.tmp $lib.TOC ; '
+ 'fi; '
+ 'fi'
+ % { 'solink': solink_base,
+ 'extract_toc':
+ '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
+ 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
+
+
+ solink_suffix = '@$link_file_list$postbuilds'
+ master_ninja.rule(
+ 'solink',
+ description='SOLINK $lib, POSTBUILDS',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+ 'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_notoc',
+ description='SOLINK $lib, POSTBUILDS',
+ restat=True,
+ command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+
+ master_ninja.rule(
+ 'solink_module',
+ description='SOLINK(module) $lib, POSTBUILDS',
+ restat=True,
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+ 'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_module_notoc',
+ description='SOLINK(module) $lib, POSTBUILDS',
+ restat=True,
+ command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
+ rspfile='$link_file_list',
+ rspfile_content='$in $solibs $libs',
+ pool='link_pool')
+
+ master_ninja.rule(
+ 'link',
+ description='LINK $out, POSTBUILDS',
+ command=('$ld $ldflags -o $out '
+ '$in $solibs $libs$postbuilds'),
+ pool='link_pool')
+ master_ninja.rule(
+ 'preprocess_infoplist',
+ description='PREPROCESS INFOPLIST $out',
+ command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
+ 'plutil -convert xml1 $out $out'))
+ master_ninja.rule(
+ 'copy_infoplist',
+ description='COPY INFOPLIST $in',
+ command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
+ master_ninja.rule(
+ 'merge_infoplist',
+ description='MERGE INFOPLISTS $in',
+ command='$env ./gyp-mac-tool merge-info-plist $out $in')
+ master_ninja.rule(
+ 'compile_xcassets',
+ description='COMPILE XCASSETS $in',
+ command='$env ./gyp-mac-tool compile-xcassets $keys $in')
+ master_ninja.rule(
+ 'mac_tool',
+ description='MACTOOL $mactool_cmd $in',
+ command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
+ master_ninja.rule(
+ 'package_framework',
+ description='PACKAGE FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-framework $out $version$postbuilds '
+ '&& touch $out')
+ if flavor == 'win':
+ master_ninja.rule(
+ 'stamp',
+ description='STAMP $out',
+ command='%s gyp-win-tool stamp $out' % sys.executable)
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
+ else:
+ master_ninja.rule(
+ 'stamp',
+ description='STAMP $out',
+ command='${postbuilds}touch $out')
+ master_ninja.rule(
+ 'copy',
+ description='COPY $in $out',
+ command='rm -rf $out && cp -af $in $out')
+ master_ninja.newline()
+
+ all_targets = set()
+ for build_file in params['build_files']:
+ for target in gyp.common.AllTargets(target_list,
+ target_dicts,
+ os.path.normpath(build_file)):
+ all_targets.add(target)
+ all_outputs = set()
+
+ # target_outputs is a map from qualified target name to a Target object.
+ target_outputs = {}
+ # target_short_names is a map from target short name to a list of Target
+ # objects.
+ target_short_names = {}
+
+  # Short names of targets that were skipped because they didn't contain
+  # anything interesting.
+  # NOTE: there may be overlap between this and non_empty_target_names.
+ empty_target_names = set()
+
+ # Set of non-empty short target names.
+  # NOTE: there may be overlap between this and empty_target_names.
+ non_empty_target_names = set()
+
+ for qualified_target in target_list:
+ # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
+ build_file, name, toolset = \
+ gyp.common.ParseQualifiedTarget(qualified_target)
+
+ this_make_global_settings = data[build_file].get('make_global_settings', [])
+ assert make_global_settings == this_make_global_settings, (
+ "make_global_settings needs to be the same for all targets. %s vs. %s" %
+ (this_make_global_settings, make_global_settings))
+
+ spec = target_dicts[qualified_target]
+ if flavor == 'mac':
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
+
+ # If build_file is a symlink, we must not follow it because there's a chance
+ # it could point to a path above toplevel_dir, and we cannot correctly deal
+ # with that case at the moment.
+ build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
+ False)
+
+ qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
+ toolset)
+ hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
+
+ base_path = os.path.dirname(build_file)
+ obj = 'obj'
+ if toolset != 'target':
+ obj += '.' + toolset
+ output_file = os.path.join(obj, base_path, name + '.ninja')
+
+ ninja_output = StringIO()
+ writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
+ ninja_output,
+ toplevel_build, output_file,
+ flavor, toplevel_dir=options.toplevel_dir)
+
+ target = writer.WriteSpec(spec, config_name, generator_flags)
+
+ if ninja_output.tell() > 0:
+ # Only create files for ninja files that actually have contents.
+ with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
+ ninja_file.write(ninja_output.getvalue())
+ ninja_output.close()
+ master_ninja.subninja(output_file)
+
+ if target:
+ if name != target.FinalOutput() and spec['toolset'] == 'target':
+ target_short_names.setdefault(name, []).append(target)
+ target_outputs[qualified_target] = target
+ if qualified_target in all_targets:
+ all_outputs.add(target.FinalOutput())
+ non_empty_target_names.add(name)
+ else:
+ empty_target_names.add(name)
+
+ if target_short_names:
+ # Write a short name to build this target. This benefits both the
+ # "build chrome" case as well as the gyp tests, which expect to be
+ # able to run actions and build libraries by their short name.
+ master_ninja.newline()
+ master_ninja.comment('Short names for targets.')
+ for short_name in target_short_names:
+ master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
+ target_short_names[short_name]])
+
+ # Write phony targets for any empty targets that weren't written yet. As
+ # short names are not necessarily unique only do this for short names that
+ # haven't already been output for another target.
+ empty_target_names = empty_target_names - non_empty_target_names
+ if empty_target_names:
+ master_ninja.newline()
+ master_ninja.comment('Empty targets (output for completeness).')
+ for name in sorted(empty_target_names):
+ master_ninja.build(name, 'phony')
+
+ if all_outputs:
+ master_ninja.newline()
+ master_ninja.build('all', 'phony', list(all_outputs))
+ master_ninja.default(generator_flags.get('default_target', 'all'))
+
+ master_ninja_file.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+ for config in configurations:
+ builddir = os.path.join(options.toplevel_dir, 'out', config)
+ arguments = ['ninja', '-C', builddir]
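+    # E.g. for config 'Debug' this runs: ninja -C <toplevel>/out/Debug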
+ print 'Building [%s]: %s' % (config, arguments)
+ subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ (target_list, target_dicts, data, params, config_name) = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Update target_dicts for iOS device builds.
+ target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
+ target_dicts)
+
+ user_config = params.get('generator_flags', {}).get('config', None)
+ if gyp.common.GetFlavor(params) == 'win':
+ target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
+ target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
+ target_list, target_dicts, generator_default_variables)
+
+ if user_config:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ user_config)
+ else:
+ config_names = target_dicts[target_list[0]]['configurations'].keys()
+ if params['parallel']:
+ try:
+ pool = multiprocessing.Pool(len(config_names))
+ arglists = []
+ for config_name in config_names:
+ arglists.append(
+ (target_list, target_dicts, data, params, config_name))
+ pool.map(CallGenerateOutputForConfig, arglists)
+ except KeyboardInterrupt, e:
+ pool.terminate()
+ raise e
+ else:
+ for config_name in config_names:
+ GenerateOutputForConfig(target_list, target_dicts, data, params,
+ config_name)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
new file mode 100644
index 0000000000..1767b2f45a
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the ninja.py file. """
+
+import gyp.generator.ninja as ninja
+import unittest
+import StringIO
+import sys
+import TestCommon
+
+
+class TestPrefixesAndSuffixes(unittest.TestCase):
+ def test_BinaryNamesWindows(self):
+ # These cannot run on non-Windows as they require a VS installation to
+ # correctly handle variable expansion.
+ if sys.platform.startswith('win'):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'win')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
+ endswith('.exe'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.dll'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.lib'))
+
+ def test_BinaryNamesLinux(self):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'linux')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
+ 'executable'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ startswith('lib'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ startswith('lib'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.so'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.a'))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
new file mode 100644
index 0000000000..0e3fb9301e
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -0,0 +1,1300 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import filecmp
+import gyp.common
+import gyp.xcodeproj_file
+import gyp.xcode_ninja
+import errno
+import os
+import sys
+import posixpath
+import re
+import shutil
+import subprocess
+import tempfile
+
+
+# Project files generated by this module will use _intermediate_var as a
+# custom Xcode setting whose value is a DerivedSources-like directory that's
+# project-specific and configuration-specific. The normal choice,
+# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
+# as it is likely that multiple targets within a single project file will want
+# to access the same set of generated files. The other option,
+# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
+# it is not configuration-specific. INTERMEDIATE_DIR is defined as
+# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
+_intermediate_var = 'INTERMEDIATE_DIR'
+
+# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
+# targets that share the same BUILT_PRODUCTS_DIR.
+_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
+
+_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
+
+generator_default_variables = {
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_SUFFIX': '.dylib',
+ # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
+ # It is specific to each build environment. It is only guaranteed to exist
+ # and be constant within the context of a project, corresponding to a single
+ # input file. Some build environments may allow their intermediate directory
+ # to be shared on a wider scale, but this is not guaranteed.
+ 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
+ 'OS': 'mac',
+ 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
+ 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
+ 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
+ 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
+ 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
+ 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
+ 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
+ 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
+ 'CONFIGURATION_NAME': '$(CONFIGURATION)',
+}
+
+# The Xcode-specific sections that hold paths.
+generator_additional_path_sections = [
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+ # 'mac_framework_dirs', input already handles _dirs endings.
+]
+
+# The Xcode-specific keys that exist on targets and aren't moved down to
+# configurations.
+generator_additional_non_configuration_keys = [
+ 'ios_app_extension',
+ 'ios_watch_app',
+ 'ios_watchkit_extension',
+ 'mac_bundle',
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+ 'mac_xctest_bundle',
+ 'xcode_create_dependents_test_runner',
+]
+
+# We want to let any rules apply to files that are resources also.
+generator_extra_sources_for_rules = [
+ 'mac_bundle_resources',
+ 'mac_framework_headers',
+ 'mac_framework_private_headers',
+]
+
+generator_filelist_paths = None
+
+# Xcode's standard set of library directories, which don't need to be duplicated
+# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
+xcode_standard_library_dirs = frozenset([
+ '$(SDKROOT)/usr/lib',
+ '$(SDKROOT)/usr/local/lib',
+])
+
+def CreateXCConfigurationList(configuration_names):
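+  # E.g. (illustrative): CreateXCConfigurationList(['Debug', 'Release'])
+  # returns a configuration list whose defaultConfigurationName is 'Debug'.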
+ xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
+ if len(configuration_names) == 0:
+ configuration_names = ['Default']
+ for configuration_name in configuration_names:
+ xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
+ 'name': configuration_name})
+ xccl.AppendProperty('buildConfigurations', xcbc)
+ xccl.SetProperty('defaultConfigurationName', configuration_names[0])
+ return xccl
+
+
+class XcodeProject(object):
+ def __init__(self, gyp_path, path, build_file_dict):
+ self.gyp_path = gyp_path
+ self.path = path
+ self.project = gyp.xcodeproj_file.PBXProject(path=path)
+ projectDirPath = gyp.common.RelativePath(
+ os.path.dirname(os.path.abspath(self.gyp_path)),
+ os.path.dirname(path) or '.')
+ self.project.SetProperty('projectDirPath', projectDirPath)
+ self.project_file = \
+ gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
+ self.build_file_dict = build_file_dict
+
+ # TODO(mark): add destructor that cleans up self.path if created_dir is
+ # True and things didn't complete successfully. Or do something even
+ # better with "try"?
+ self.created_dir = False
+ try:
+ os.makedirs(self.path)
+ self.created_dir = True
+ except OSError, e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def Finalize1(self, xcode_targets, serialize_all_tests):
+ # Collect a list of all of the build configuration names used by the
+    # various targets in the file. It is strongly advised that every target
+    # in a project (even across multiple project files) use the same set of
+    # configuration names.
+ configurations = []
+ for xct in self.project.GetProperty('targets'):
+ xccl = xct.GetProperty('buildConfigurationList')
+ xcbcs = xccl.GetProperty('buildConfigurations')
+ for xcbc in xcbcs:
+ name = xcbc.GetProperty('name')
+ if name not in configurations:
+ configurations.append(name)
+
+ # Replace the XCConfigurationList attached to the PBXProject object with
+ # a new one specifying all of the configuration names used by the various
+ # targets.
+ try:
+ xccl = CreateXCConfigurationList(configurations)
+ self.project.SetProperty('buildConfigurationList', xccl)
+ except:
+ sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
+ raise
+
+ # The need for this setting is explained above where _intermediate_var is
+ # defined. The comments below about wanting to avoid project-wide build
+ # settings apply here too, but this needs to be set on a project-wide basis
+ # so that files relative to the _intermediate_var setting can be displayed
+ # properly in the Xcode UI.
+ #
+ # Note that for configuration-relative files such as anything relative to
+ # _intermediate_var, for the purposes of UI tree view display, Xcode will
+ # only resolve the configuration name once, when the project file is
+ # opened. If the active build configuration is changed, the project file
+ # must be closed and reopened if it is desired for the tree view to update.
+ # This is filed as Apple radar 6588391.
+ xccl.SetBuildSetting(_intermediate_var,
+ '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
+ xccl.SetBuildSetting(_shared_intermediate_var,
+ '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
+
+ # Set user-specified project-wide build settings and config files. This
+ # is intended to be used very sparingly. Really, almost everything should
+ # go into target-specific build settings sections. The project-wide
+ # settings are only intended to be used in cases where Xcode attempts to
+ # resolve variable references in a project context as opposed to a target
+ # context, such as when resolving sourceTree references while building up
+    # the tree view for UI display.
+ # Any values set globally are applied to all configurations, then any
+ # per-configuration values are applied.
+ for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
+ xccl.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in self.build_file_dict:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ self.build_file_dict['xcode_config_file'])
+ xccl.SetBaseConfiguration(config_ref)
+ build_file_configurations = self.build_file_dict.get('configurations', {})
+ if build_file_configurations:
+ for config_name in configurations:
+ build_file_configuration_named = \
+ build_file_configurations.get(config_name, {})
+ if build_file_configuration_named:
+ xcc = xccl.ConfigurationNamed(config_name)
+ for xck, xcv in build_file_configuration_named.get('xcode_settings',
+ {}).iteritems():
+ xcc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in build_file_configuration_named:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ build_file_configurations[config_name]['xcode_config_file'])
+ xcc.SetBaseConfiguration(config_ref)
+
+ # Sort the targets based on how they appeared in the input.
+ # TODO(mark): Like a lot of other things here, this assumes internal
+ # knowledge of PBXProject - in this case, of its "targets" property.
+
+ # ordinary_targets are ordinary targets that are already in the project
+ # file. run_test_targets are the targets that run unittests and should be
+ # used for the Run All Tests target. support_targets are the action/rule
+ # targets used by GYP file targets, just kept for the assert check.
+ ordinary_targets = []
+ run_test_targets = []
+ support_targets = []
+
+    # targets is the full list of targets in the project.
+    targets = []
+
+    # Does the project define its own "all" target?
+ has_custom_all = False
+
+ # targets_for_all is the list of ordinary_targets that should be listed
+ # in this project's "All" target. It includes each non_runtest_target
+ # that does not have suppress_wildcard set.
+ targets_for_all = []
+
+ for target in self.build_file_dict['targets']:
+ target_name = target['target_name']
+ toolset = target['toolset']
+ qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
+ toolset)
+ xcode_target = xcode_targets[qualified_target]
+ # Make sure that the target being added to the sorted list is already in
+ # the unsorted list.
+ assert xcode_target in self.project._properties['targets']
+ targets.append(xcode_target)
+ ordinary_targets.append(xcode_target)
+ if xcode_target.support_target:
+ support_targets.append(xcode_target.support_target)
+ targets.append(xcode_target.support_target)
+
+ if not int(target.get('suppress_wildcard', False)):
+ targets_for_all.append(xcode_target)
+
+ if target_name.lower() == 'all':
+        has_custom_all = True
+
+ # If this target has a 'run_as' attribute, add its target to the
+ # targets, and add it to the test targets.
+ if target.get('run_as'):
+ # Make a target to run something. It should have one
+ # dependency, the parent xcode target.
+ xccl = CreateXCConfigurationList(configurations)
+ run_target = gyp.xcodeproj_file.PBXAggregateTarget({
+ 'name': 'Run ' + target_name,
+ 'productName': xcode_target.GetProperty('productName'),
+ 'buildConfigurationList': xccl,
+ },
+ parent=self.project)
+ run_target.AddDependency(xcode_target)
+
+ command = target['run_as']
+ script = ''
+ if command.get('working_directory'):
+ script = script + 'cd "%s"\n' % \
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ command.get('working_directory'))
+
+ if command.get('environment'):
+ script = script + "\n".join(
+ ['export %s="%s"' %
+ (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
+ for (key, val) in command.get('environment').iteritems()]) + "\n"
+
+        # Some tests end up using sockets, files on disk, etc. and can get
+        # confused if more than one test runs at a time. The generator flag
+        # 'xcode_serialize_all_test_runs' controls whether all test runs are
+        # forced to be serial; it defaults to True. To get serial runs, this
+        # little bit of Python does the same as the Linux flock utility to
+        # make sure only one runs at a time.
+ command_prefix = ''
+ if serialize_all_tests:
+ command_prefix = \
+"""python -c "import fcntl, subprocess, sys
+file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
+fcntl.flock(file.fileno(), fcntl.LOCK_EX)
+sys.exit(subprocess.call(sys.argv[1:]))" """
+
+          # If we were unable to exec for some reason, we want to exit
+          # with an error, and fix up variable references to be shell
+          # syntax instead of Xcode syntax.
+ script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ gyp.common.EncodePOSIXShellList(command.get('action')))
+
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+ run_target.AppendProperty('buildPhases', ssbp)
+
+ # Add the run target to the project file.
+ targets.append(run_target)
+ run_test_targets.append(run_target)
+ xcode_target.test_runner = run_target
+
+
+ # Make sure that the list of targets being replaced is the same length as
+ # the one replacing it, but allow for the added test runner targets.
+ assert len(self.project._properties['targets']) == \
+ len(ordinary_targets) + len(support_targets)
+
+ self.project._properties['targets'] = targets
+
+ # Get rid of unnecessary levels of depth in groups like the Source group.
+ self.project.RootGroupsTakeOverOnlyChildren(True)
+
+ # Sort the groups nicely. Do this after sorting the targets, because the
+ # Products group is sorted based on the order of the targets.
+ self.project.SortGroups()
+
+ # Create an "All" target if there's more than one target in this project
+ # file and the project didn't define its own "All" target. Put a generated
+ # "All" target first so that people opening up the project for the first
+ # time will build everything by default.
+ if len(targets_for_all) > 1 and not has_custom_all:
+ xccl = CreateXCConfigurationList(configurations)
+ all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ 'buildConfigurationList': xccl,
+ 'name': 'All',
+ },
+ parent=self.project)
+
+ for target in targets_for_all:
+ all_target.AddDependency(target)
+
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._properties. It's important to get the "All" target first,
+ # though.
+ self.project._properties['targets'].insert(0, all_target)
+
+ # The same, but for run_test_targets.
+ if len(run_test_targets) > 1:
+ xccl = CreateXCConfigurationList(configurations)
+ run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ 'buildConfigurationList': xccl,
+ 'name': 'Run All Tests',
+ },
+ parent=self.project)
+ for run_test_target in run_test_targets:
+ run_all_tests_target.AddDependency(run_test_target)
+
+ # Insert after the "All" target, which must exist if there is more than
+ # one run_test_target.
+ self.project._properties['targets'].insert(1, run_all_tests_target)
+
+ def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
+ # Finalize2 needs to happen in a separate step because the process of
+ # updating references to other projects depends on the ordering of targets
+ # within remote project files. Finalize1 is responsible for sorting duty,
+ # and once all project files are sorted, Finalize2 can come in and update
+ # these references.
+
+ # To support making a "test runner" target that will run all the tests
+ # that are direct dependents of any given target, we look for
+ # xcode_create_dependents_test_runner being set on an Aggregate target,
+ # and generate a second target that will run the tests runners found under
+ # the marked target.
+ for bf_tgt in self.build_file_dict['targets']:
+ if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
+ tgt_name = bf_tgt['target_name']
+ toolset = bf_tgt['toolset']
+ qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
+ tgt_name, toolset)
+ xcode_target = xcode_targets[qualified_target]
+ if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
+ # Collect all the run test targets.
+ all_run_tests = []
+ pbxtds = xcode_target.GetProperty('dependencies')
+ for pbxtd in pbxtds:
+ pbxcip = pbxtd.GetProperty('targetProxy')
+ dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
+ if hasattr(dependency_xct, 'test_runner'):
+ all_run_tests.append(dependency_xct.test_runner)
+
+ # Directly depend on all the runners as they depend on the target
+ # that builds them.
+ if len(all_run_tests) > 0:
+ run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
+ 'name': 'Run %s Tests' % tgt_name,
+ 'productName': tgt_name,
+ },
+ parent=self.project)
+ for run_test_target in all_run_tests:
+ run_all_target.AddDependency(run_test_target)
+
+ # Insert the test runner after the related target.
+ idx = self.project._properties['targets'].index(xcode_target)
+ self.project._properties['targets'].insert(idx + 1, run_all_target)
+
+ # Update all references to other projects, to make sure that the lists of
+ # remote products are complete. Otherwise, Xcode will fill them in when
+ # it opens the project file, which will result in unnecessary diffs.
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._other_pbxprojects.
+ for other_pbxproject in self.project._other_pbxprojects.keys():
+ self.project.AddOrGetProjectReference(other_pbxproject)
+
+ self.project.SortRemoteProductReferences()
+
+ # Give everything an ID.
+ self.project_file.ComputeIDs()
+
+ # Make sure that no two objects in the project file have the same ID. If
+ # multiple objects wind up with the same ID, upon loading the file, Xcode
+ # will only recognize one object (the last one in the file?) and the
+ # results are unpredictable.
+ self.project_file.EnsureNoIDCollisions()
+
+ def Write(self):
+ # Write the project file to a temporary location first. Xcode watches for
+ # changes to the project file and presents a UI sheet offering to reload
+ # the project when it does change. However, in some cases, especially when
+ # multiple projects are open or when Xcode is busy, things don't work so
+ # seamlessly. Sometimes, Xcode is able to detect that a project file has
+ # changed but can't unload it because something else is referencing it.
+ # To mitigate this problem, and to avoid even having Xcode present the UI
+ # sheet when an open project is rewritten for inconsequential changes, the
+ # project file is written to a temporary file in the xcodeproj directory
+ # first. The new temporary file is then compared to the existing project
+ # file, if any. If they differ, the new file replaces the old; otherwise,
+ # the new project file is simply deleted. Xcode properly detects a file
+ # being renamed over an open project file as a change and so it remains
+ # able to present the "project file changed" sheet under this system.
+ # Writing to a temporary file first also avoids the possible problem of
+ # Xcode rereading an incomplete project file.
+ (output_fd, new_pbxproj_path) = \
+ tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
+ dir=self.path)
+
+ try:
+ output_file = os.fdopen(output_fd, 'wb')
+
+ self.project_file.Print(output_file)
+ output_file.close()
+
+ pbxproj_path = os.path.join(self.path, 'project.pbxproj')
+
+ same = False
+ try:
+ same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+
+ if same:
+ # The new file is identical to the old one, just get rid of the new
+ # one.
+ os.unlink(new_pbxproj_path)
+ else:
+ # The new file is different from the old one, or there is no old one.
+ # Rename the new file to the permanent name.
+ #
+ # tempfile.mkstemp uses an overly restrictive mode, resulting in a
+ # file that can only be read by the owner, regardless of the umask.
+      # There's no reason not to respect the umask here, which means that
+ # an extra hoop is required to fetch it and reset the new file's mode.
+ #
+ # No way to get the umask without setting a new one? Set a safe one
+ # and then set it back to the old value.
+ umask = os.umask(077)
+ os.umask(umask)
+
+ os.chmod(new_pbxproj_path, 0666 & ~umask)
+ os.rename(new_pbxproj_path, pbxproj_path)
+
+ except Exception:
+ # Don't leave turds behind. In fact, if this code was responsible for
+ # creating the xcodeproj directory, get rid of that too.
+ os.unlink(new_pbxproj_path)
+ if self.created_dir:
+ shutil.rmtree(self.path, True)
+ raise
+
+
+def AddSourceToTarget(source, type, pbxp, xct):
+ # TODO(mark): Perhaps source_extensions and library_extensions can be made a
+ # little bit fancier.
+ source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
+
+ # .o is conceptually more of a "source" than a "library," but Xcode thinks
+ # of "sources" as things to compile and "libraries" (or "frameworks") as
+ # things to link with. Adding an object file to an Xcode target's frameworks
+ # phase works properly.
+ library_extensions = ['a', 'dylib', 'framework', 'o']
+
+ basename = posixpath.basename(source)
+ (root, ext) = posixpath.splitext(basename)
+ if ext:
+ ext = ext[1:].lower()
+
+ if ext in source_extensions and type != 'none':
+ xct.SourcesPhase().AddFile(source)
+ elif ext in library_extensions and type != 'none':
+ xct.FrameworksPhase().AddFile(source)
+ else:
+ # Files that aren't added to a sources or frameworks build phase can still
+ # go into the project file, just not as part of a build phase.
+ pbxp.AddOrGetFileInRootGroup(source)
+
+
+def AddResourceToTarget(resource, pbxp, xct):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ xct.ResourcesPhase().AddFile(resource)
+
+
+def AddHeaderToTarget(header, pbxp, xct, is_public):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
+ xct.HeadersPhase().AddFile(header, settings)
+
+
+_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
+def ExpandXcodeVariables(string, expansions):
+ """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
+
+ In some rare cases, it is appropriate to expand Xcode variables when a
+ project file is generated. For any substring $(VAR) in string, if VAR is a
+ key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+ Any $(VAR) substring in string for which VAR is not a key in the expansions
+ dict will remain in the returned string.
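+
+  For example (illustrative), ExpandXcodeVariables('$(INPUT_FILE_BASE).cc',
+  {'INPUT_FILE_BASE': 'foo'}) returns 'foo.cc', while a reference such as
+  $(OTHER) whose name is not a key in the dict is left intact.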
+ """
+
+ matches = _xcode_variable_re.findall(string)
+  if not matches:
+ return string
+
+ matches.reverse()
+ for match in matches:
+ (to_replace, variable) = match
+    if variable not in expansions:
+ continue
+
+ replacement = expansions[variable]
+ string = re.sub(re.escape(to_replace), replacement, string)
+
+ return string
+
+
+_xcode_define_re = re.compile(r'([\\\"\' ])')
+def EscapeXcodeDefine(s):
+ """We must escape the defines that we give to XCode so that it knows not to
+ split on spaces and to respect backslash and quote literals. However, we
+ must not quote the define, or Xcode will incorrectly intepret variables
+ especially $(inherited)."""
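+  # For example (mirroring TestEscapeXcodeDefine in xcode_test.py below), the
+  # define a b"c\ escapes to a\ b\"c\\, while $(inherited) contains none of
+  # the escaped characters and passes through unchanged.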
+ return re.sub(_xcode_define_re, r'\\\1', s)
+
+
+def PerformBuild(data, configurations, params):
+ options = params['options']
+
+ for build_file, build_file_dict in data.iteritems():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+
+ for config in configurations:
+ arguments = ['xcodebuild', '-project', xcodeproj_path]
+ arguments += ['-configuration', config]
+ print "Building [%s]: %s" % (config, arguments)
+ subprocess.check_call(arguments)
+
+
+def CalculateGeneratorInputInfo(params):
+ toplevel = params['options'].toplevel_dir
+ if params.get('flavor') == 'ninja':
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+ output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles-xcode-ninja'))
+ else:
+ output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Optionally configure each spec to use ninja as the external builder.
+ ninja_wrapper = params.get('flavor') == 'ninja'
+ if ninja_wrapper:
+ (target_list, target_dicts, data) = \
+ gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
+
+ options = params['options']
+ generator_flags = params.get('generator_flags', {})
+ parallel_builds = generator_flags.get('xcode_parallel_builds', True)
+ serialize_all_tests = \
+ generator_flags.get('xcode_serialize_all_test_runs', True)
+ upgrade_check_project_version = \
+ generator_flags.get('xcode_upgrade_check_project_version', None)
+
+ # Format upgrade_check_project_version with leading zeros as needed.
+ if upgrade_check_project_version:
+    upgrade_check_project_version = \
+        str(upgrade_check_project_version).zfill(4)
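+    # e.g. a generator flag value of 630 yields '0630', the four-digit form
+    # Xcode itself writes for attributes such as LastUpgradeCheck.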
+
+ skip_excluded_files = \
+ not generator_flags.get('xcode_list_excluded_files', True)
+ xcode_projects = {}
+ for build_file, build_file_dict in data.iteritems():
+ (build_file_root, build_file_ext) = os.path.splitext(build_file)
+ if build_file_ext != '.gyp':
+ continue
+ xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
+ if options.generator_output:
+ xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
+ xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
+ xcode_projects[build_file] = xcp
+ pbxp = xcp.project
+
+ # Set project-level attributes from multiple options
+    project_attributes = {}
+ if parallel_builds:
+ project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
+ if upgrade_check_project_version:
+ project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
+ project_attributes['LastTestingUpgradeCheck'] = \
+ upgrade_check_project_version
+ project_attributes['LastSwiftUpdateCheck'] = \
+ upgrade_check_project_version
+ pbxp.SetProperty('attributes', project_attributes)
+
+ # Add gyp/gypi files to project
+ if not generator_flags.get('standalone'):
+ main_group = pbxp.GetProperty('mainGroup')
+ build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
+ main_group.AppendChild(build_group)
+ for included_file in build_file_dict['included_files']:
+ build_group.AddOrGetFileByPath(included_file, False)
+
+ xcode_targets = {}
+ xcode_target_to_target_dict = {}
+ for qualified_target in target_list:
+ [build_file, target_name, toolset] = \
+ gyp.common.ParseQualifiedTarget(qualified_target)
+
+ spec = target_dicts[qualified_target]
+ if spec['toolset'] != 'target':
+ raise Exception(
+ 'Multiple toolsets not supported in xcode build (target %s)' %
+ qualified_target)
+ configuration_names = [spec['default_configuration']]
+ for configuration_name in sorted(spec['configurations'].keys()):
+ if configuration_name not in configuration_names:
+ configuration_names.append(configuration_name)
+ xcp = xcode_projects[build_file]
+ pbxp = xcp.project
+
+ # Set up the configurations for the target according to the list of names
+ # supplied.
+ xccl = CreateXCConfigurationList(configuration_names)
+
+ # Create an XCTarget subclass object for the target. The type with
+ # "+bundle" appended will be used if the target has "mac_bundle" set.
+ # loadable_modules not in a mac_bundle are mapped to
+ # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
+ # to create a single-file mh_bundle.
+ _types = {
+ 'executable': 'com.apple.product-type.tool',
+ 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
+ 'shared_library': 'com.apple.product-type.library.dynamic',
+ 'static_library': 'com.apple.product-type.library.static',
+ 'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
+ 'executable+bundle': 'com.apple.product-type.application',
+ 'loadable_module+bundle': 'com.apple.product-type.bundle',
+ 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
+ 'shared_library+bundle': 'com.apple.product-type.framework',
+ 'executable+extension+bundle': 'com.apple.product-type.app-extension',
+ 'executable+watch+extension+bundle':
+ 'com.apple.product-type.watchkit-extension',
+ 'executable+watch+bundle':
+ 'com.apple.product-type.application.watchapp',
+ 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
+ }
+
+ target_properties = {
+ 'buildConfigurationList': xccl,
+ 'name': target_name,
+ }
+
+ type = spec['type']
+ is_xctest = int(spec.get('mac_xctest_bundle', 0))
+ is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
+ is_app_extension = int(spec.get('ios_app_extension', 0))
+ is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
+ is_watch_app = int(spec.get('ios_watch_app', 0))
+ if type != 'none':
+ type_bundle_key = type
+ if is_xctest:
+ type_bundle_key += '+xctest'
+ assert type == 'loadable_module', (
+ 'mac_xctest_bundle targets must have type loadable_module '
+ '(target %s)' % target_name)
+ elif is_app_extension:
+ assert is_bundle, ('ios_app_extension flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+extension+bundle'
+ elif is_watchkit_extension:
+ assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+watch+extension+bundle'
+ elif is_watch_app:
+ assert is_bundle, ('ios_watch_app flag requires mac_bundle '
+ '(target %s)' % target_name)
+ type_bundle_key += '+watch+bundle'
+ elif is_bundle:
+ type_bundle_key += '+bundle'
+
+ xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
+ try:
+ target_properties['productType'] = _types[type_bundle_key]
+ except KeyError, e:
+ gyp.common.ExceptionAppend(e, "-- unknown product type while "
+ "writing target %s" % target_name)
+ raise
+ else:
+ xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
+ assert not is_bundle, (
+ 'mac_bundle targets cannot have type none (target "%s")' %
+ target_name)
+ assert not is_xctest, (
+ 'mac_xctest_bundle targets cannot have type none (target "%s")' %
+ target_name)
+
+ target_product_name = spec.get('product_name')
+ if target_product_name is not None:
+ target_properties['productName'] = target_product_name
+
+ xct = xctarget_type(target_properties, parent=pbxp,
+ force_outdir=spec.get('product_dir'),
+ force_prefix=spec.get('product_prefix'),
+ force_extension=spec.get('product_extension'))
+ pbxp.AppendProperty('targets', xct)
+ xcode_targets[qualified_target] = xct
+ xcode_target_to_target_dict[xct] = spec
+
+ spec_actions = spec.get('actions', [])
+ spec_rules = spec.get('rules', [])
+
+ # Xcode has some "issues" with checking dependencies for the "Compile
+ # sources" step with any source files/headers generated by actions/rules.
+ # To work around this, if a target is building anything directly (not
+ # type "none"), then a second target is used to run the GYP actions/rules
+ # and is made a dependency of this target. This way the work is done
+ # before the dependency checks for what should be recompiled.
+ support_xct = None
+ # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
+ # logic all happens in ninja. Don't bother creating the extra targets in
+ # that case.
+ if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
+      support_xccl = CreateXCConfigurationList(configuration_names)
+ support_target_suffix = generator_flags.get(
+ 'support_target_suffix', ' Support')
+ support_target_properties = {
+ 'buildConfigurationList': support_xccl,
+ 'name': target_name + support_target_suffix,
+ }
+ if target_product_name:
+ support_target_properties['productName'] = \
+ target_product_name + ' Support'
+ support_xct = \
+ gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
+ parent=pbxp)
+ pbxp.AppendProperty('targets', support_xct)
+ xct.AddDependency(support_xct)
+ # Hang the support target off the main target so it can be tested/found
+ # by the generator during Finalize.
+ xct.support_target = support_xct
+
+ prebuild_index = 0
+
+ # Add custom shell script phases for "actions" sections.
+ for action in spec_actions:
+ # There's no need to write anything into the script to ensure that the
+ # output directories already exist, because Xcode will look at the
+ # declared outputs and automatically ensure that they exist for us.
+
+ # Do we have a message to print when this action runs?
+ message = action.get('message')
+ if message:
+ message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
+ else:
+ message = ''
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(action['action'])
+
+ # Convert Xcode-type variable references to sh-compatible environment
+ # variable references.
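+      # (For example, an Xcode-style "$(SRCROOT)" becomes "${SRCROOT}", which
+      # the shell can expand from the environment Xcode passes to the script
+      # phase.)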
+ message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
+ action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ action_string)
+
+ script = ''
+ # Include the optional message
+ if message_sh:
+ script += message_sh + '\n'
+ # Be sure the script runs in exec, and that if exec fails, the script
+ # exits signalling an error.
+ script += 'exec ' + action_string_sh + '\nexit 1\n'
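+      # The assembled script looks roughly like this (illustrative, for a
+      # hypothetical action running gen.py):
+      #   echo note: "Generating foo"
+      #   exec python gen.py in.txt out.txt
+      #   exit 1
+      # exec never returns on success, so reaching "exit 1" means the action
+      # failed to launch.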
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': action['inputs'],
+ 'name': 'Action "' + action['action_name'] + '"',
+ 'outputPaths': action['outputs'],
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+
+ if support_xct:
+ support_xct.AppendProperty('buildPhases', ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties['buildPhases'].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(action.get('process_outputs_as_sources', False)):
+ for output in action['outputs']:
+ AddSourceToTarget(output, type, pbxp, xct)
+
+ if int(action.get('process_outputs_as_mac_bundle_resources', False)):
+ for output in action['outputs']:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # tgt_mac_bundle_resources holds the list of bundle resources so
+ # the rule processing can check against it.
+ if is_bundle:
+ tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
+ else:
+ tgt_mac_bundle_resources = []
+
+ # Add custom shell script phases driving "make" for "rules" sections.
+ #
+ # Xcode's built-in rule support is almost powerful enough to use directly,
+    # but there are a few significant deficiencies that render it unusable.
+    # There are workarounds for some of its inadequacies, but in aggregate,
+    # the workarounds add complexity to the generator, and some workarounds
+ # actually require input files to be crafted more carefully than I'd like.
+ # Consequently, until Xcode rules are made more capable, "rules" input
+ # sections will be handled in Xcode output by shell script build phases
+ # performed prior to the compilation phase.
+ #
+ # The following problems with Xcode rules were found. The numbers are
+    # Apple radar IDs. I hope that these shortcomings are addressed; I really
+ # liked having the rules handled directly in Xcode during the period that
+ # I was prototyping this.
+ #
+ # 6588600 Xcode compiles custom script rule outputs too soon, compilation
+ # fails. This occurs when rule outputs from distinct inputs are
+ # interdependent. The only workaround is to put rules and their
+ # inputs in a separate target from the one that compiles the rule
+ # outputs. This requires input file cooperation and it means that
+ # process_outputs_as_sources is unusable.
+ # 6584932 Need to declare that custom rule outputs should be excluded from
+ # compilation. A possible workaround is to lie to Xcode about a
+ # rule's output, giving it a dummy file it doesn't know how to
+ # compile. The rule action script would need to touch the dummy.
+ # 6584839 I need a way to declare additional inputs to a custom rule.
+ # A possible workaround is a shell script phase prior to
+ # compilation that touches a rule's primary input files if any
+ # would-be additional inputs are newer than the output. Modifying
+ # the source tree - even just modification times - feels dirty.
+ # 6564240 Xcode "custom script" build rules always dump all environment
+    #          variables. This is a low-priority problem and is not a
+ # show-stopper.
+ rules_by_ext = {}
+ for rule in spec_rules:
+ rules_by_ext[rule['extension']] = rule
+
+ # First, some definitions:
+ #
+ # A "rule source" is a file that was listed in a target's "sources"
+ # list and will have a rule applied to it on the basis of matching the
+ # rule's "extensions" attribute. Rule sources are direct inputs to
+ # rules.
+ #
+ # Rule definitions may specify additional inputs in their "inputs"
+ # attribute. These additional inputs are used for dependency tracking
+ # purposes.
+ #
+ # A "concrete output" is a rule output with input-dependent variables
+ # resolved. For example, given a rule with:
+ # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
+ # if the target's "sources" list contained "one.ext" and "two.ext",
+ # the "concrete output" for rule input "two.ext" would be "two.cc". If
+ # a rule specifies multiple outputs, each input file that the rule is
+ # applied to will have the same number of concrete outputs.
+ #
+ # If any concrete outputs are outdated or missing relative to their
+ # corresponding rule_source or to any specified additional input, the
+ # rule action must be performed to generate the concrete outputs.
+
+ # concrete_outputs_by_rule_source will have an item at the same index
+ # as the rule['rule_sources'] that it corresponds to. Each item is a
+ # list of all of the concrete outputs for the rule_source.
+ concrete_outputs_by_rule_source = []
+
+ # concrete_outputs_all is a flat list of all concrete outputs that this
+ # rule is able to produce, given the known set of input files
+ # (rule_sources) that apply to it.
+ concrete_outputs_all = []
+
+ # messages & actions are keyed by the same indices as rule['rule_sources']
+ # and concrete_outputs_by_rule_source. They contain the message and
+ # action to perform after resolving input-dependent variables. The
+ # message is optional, in which case None is stored for each rule source.
+ messages = []
+ actions = []
+
+ for rule_source in rule.get('rule_sources', []):
+ rule_source_dirname, rule_source_basename = \
+ posixpath.split(rule_source)
+ (rule_source_root, rule_source_ext) = \
+ posixpath.splitext(rule_source_basename)
+
+ # These are the same variable names that Xcode uses for its own native
+ # rule support. Because Xcode's rule engine is not being used, they
+ # need to be expanded as they are written to the makefile.
+ rule_input_dict = {
+ 'INPUT_FILE_BASE': rule_source_root,
+ 'INPUT_FILE_SUFFIX': rule_source_ext,
+ 'INPUT_FILE_NAME': rule_source_basename,
+ 'INPUT_FILE_PATH': rule_source,
+ 'INPUT_FILE_DIRNAME': rule_source_dirname,
+ }
+
+ concrete_outputs_for_this_rule_source = []
+ for output in rule.get('outputs', []):
+ # Fortunately, Xcode and make both use $(VAR) format for their
+ # variables, so the expansion is the only transformation necessary.
+          # Any remaining $(VAR)-type variables in the string can be given
+ # directly to make, which will pick up the correct settings from
+ # what Xcode puts into the environment.
+ concrete_output = ExpandXcodeVariables(output, rule_input_dict)
+ concrete_outputs_for_this_rule_source.append(concrete_output)
+
+ # Add all concrete outputs to the project.
+ pbxp.AddOrGetFileInRootGroup(concrete_output)
+
+ concrete_outputs_by_rule_source.append( \
+ concrete_outputs_for_this_rule_source)
+ concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
+
+ # TODO(mark): Should verify that at most one of these is specified.
+ if int(rule.get('process_outputs_as_sources', False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddSourceToTarget(output, type, pbxp, xct)
+
+ # If the file came from the mac_bundle_resources list or if the rule
+ # is marked to process outputs as bundle resource, do so.
+ was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
+ if was_mac_bundle_resource or \
+ int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+ for output in concrete_outputs_for_this_rule_source:
+ AddResourceToTarget(output, pbxp, xct)
+
+ # Do we have a message to print when this rule runs?
+ message = rule.get('message')
+ if message:
+ message = gyp.common.EncodePOSIXShellArgument(message)
+ message = ExpandXcodeVariables(message, rule_input_dict)
+ messages.append(message)
+
+ # Turn the list into a string that can be passed to a shell.
+ action_string = gyp.common.EncodePOSIXShellList(rule['action'])
+
+ action = ExpandXcodeVariables(action_string, rule_input_dict)
+ actions.append(action)
+
+ if len(concrete_outputs_all) > 0:
+        # TODO(mark): There's a possibility for collision here. Consider
+ # target "t" rule "A_r" and target "t_A" rule "r".
+ makefile_name = '%s.make' % re.sub(
+            '[^a-zA-Z0-9_]', '_', '%s_%s' % (target_name, rule['rule_name']))
+ makefile_path = os.path.join(xcode_projects[build_file].path,
+ makefile_name)
+ # TODO(mark): try/close? Write to a temporary file and swap it only
+ # if it's got changes?
+ makefile = open(makefile_path, 'wb')
+
+ # make will build the first target in the makefile by default. By
+ # convention, it's called "all". List all (or at least one)
+ # concrete output for each rule source as a prerequisite of the "all"
+ # target.
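+        # For two rule sources one.ext and two.ext with a rule output of
+        # $(INPUT_FILE_BASE).cc, the prologue would read (illustrative):
+        #   all: \
+        #       one.cc \
+        #       two.cc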
+ makefile.write('all: \\\n')
+ for concrete_output_index in \
+ xrange(0, len(concrete_outputs_by_rule_source)):
+ # Only list the first (index [0]) concrete output of each input
+ # in the "all" target. Otherwise, a parallel make (-j > 1) would
+ # attempt to process each input multiple times simultaneously.
+ # Otherwise, "all" could just contain the entire list of
+ # concrete_outputs_all.
+ concrete_output = \
+ concrete_outputs_by_rule_source[concrete_output_index][0]
+ if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
+ eol = ''
+ else:
+ eol = ' \\'
+ makefile.write(' %s%s\n' % (concrete_output, eol))
+
+ for (rule_source, concrete_outputs, message, action) in \
+ zip(rule['rule_sources'], concrete_outputs_by_rule_source,
+ messages, actions):
+ makefile.write('\n')
+
+ # Add a rule that declares it can build each concrete output of a
+ # rule source. Collect the names of the directories that are
+ # required.
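+          # Taken together, the writes below emit a rule of roughly this
+          # shape (illustrative, for rule source one.ext producing one.cc
+          # with an extra declared input in.py):
+          #   one.cc \
+          #    : \
+          #     one.ext \
+          #     in.py
+          # followed by tab-indented mkdir, echo, and action command lines.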
+ concrete_output_dirs = []
+ for concrete_output_index in xrange(0, len(concrete_outputs)):
+ concrete_output = concrete_outputs[concrete_output_index]
+ if concrete_output_index == 0:
+ bol = ''
+ else:
+ bol = ' '
+ makefile.write('%s%s \\\n' % (bol, concrete_output))
+
+ concrete_output_dir = posixpath.dirname(concrete_output)
+ if (concrete_output_dir and
+ concrete_output_dir not in concrete_output_dirs):
+ concrete_output_dirs.append(concrete_output_dir)
+
+ makefile.write(' : \\\n')
+
+ # The prerequisites for this rule are the rule source itself and
+ # the set of additional rule inputs, if any.
+ prerequisites = [rule_source]
+ prerequisites.extend(rule.get('inputs', []))
+ for prerequisite_index in xrange(0, len(prerequisites)):
+ prerequisite = prerequisites[prerequisite_index]
+ if prerequisite_index == len(prerequisites) - 1:
+ eol = ''
+ else:
+ eol = ' \\'
+ makefile.write(' %s%s\n' % (prerequisite, eol))
+
+ # Make sure that output directories exist before executing the rule
+ # action.
+ if len(concrete_output_dirs) > 0:
+ makefile.write('\t@mkdir -p "%s"\n' %
+ '" "'.join(concrete_output_dirs))
+
+ # The rule message and action have already had the necessary variable
+ # substitutions performed.
+ if message:
+ # Mark it with note: so Xcode picks it up in build output.
+ makefile.write('\t@echo note: %s\n' % message)
+ makefile.write('\t%s\n' % action)
+
+ makefile.close()
+
+ # It might be nice to ensure that needed output directories exist
+ # here rather than in each target in the Makefile, but that wouldn't
+ # work if there ever was a concrete output that had an input-dependent
+ # variable anywhere other than in the leaf position.
+
+ # Don't declare any inputPaths or outputPaths. If they're present,
+ # Xcode will provide a slight optimization by only running the script
+ # phase if any output is missing or outdated relative to any input.
+ # Unfortunately, it will also assume that all outputs are touched by
+ # the script, and if the outputs serve as files in a compilation
+ # phase, they will be unconditionally rebuilt. Since make might not
+ # rebuild everything that could be declared here as an output, this
+ # extra compilation activity is unnecessary. With inputPaths and
+ # outputPaths not supplied, make will always be called, but it knows
+ # enough to not do anything when everything is up-to-date.
+
+ # To help speed things up, pass -j COUNT to make so it does some work
+ # in parallel. Don't use ncpus because Xcode will build ncpus targets
+ # in parallel and if each target happens to have a rules step, there
+ # would be ncpus^2 things going. With a machine that has 2 quad-core
+ # Xeons, a build can quickly run out of processes based on
+ # scheduling/other tasks, and randomly failing builds are no good.
+ script = \
+"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
+if [ "${JOB_COUNT}" -gt 4 ]; then
+ JOB_COUNT=4
+fi
+exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
+exit 1
+""" % makefile_name
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'name': 'Rule "' + rule['rule_name'] + '"',
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+
+ if support_xct:
+ support_xct.AppendProperty('buildPhases', ssbp)
+ else:
+ # TODO(mark): this assumes too much knowledge of the internals of
+ # xcodeproj_file; some of these smarts should move into xcodeproj_file
+ # itself.
+ xct._properties['buildPhases'].insert(prebuild_index, ssbp)
+ prebuild_index = prebuild_index + 1
+
+ # Extra rule inputs also go into the project file. Concrete outputs were
+ # already added when they were computed.
+ groups = ['inputs', 'inputs_excluded']
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith('_excluded')]
+ for group in groups:
+ for item in rule.get(group, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # Add "sources".
+ for source in spec.get('sources', []):
+ (source_root, source_extension) = posixpath.splitext(source)
+ if source_extension[1:] not in rules_by_ext:
+ # AddSourceToTarget will add the file to a root group if it's not
+ # already there.
+ AddSourceToTarget(source, type, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(source)
+
+ # Add "mac_bundle_resources" and "mac_framework_private_headers" if
+ # it's a bundle of any type.
+ if is_bundle:
+ for resource in tgt_mac_bundle_resources:
+ (resource_root, resource_extension) = posixpath.splitext(resource)
+ if resource_extension[1:] not in rules_by_ext:
+ AddResourceToTarget(resource, pbxp, xct)
+ else:
+ pbxp.AddOrGetFileInRootGroup(resource)
+
+ for header in spec.get('mac_framework_private_headers', []):
+ AddHeaderToTarget(header, pbxp, xct, False)
+
+ # Add "mac_framework_headers". These can be valid for both frameworks
+ # and static libraries.
+ if is_bundle or type == 'static_library':
+ for header in spec.get('mac_framework_headers', []):
+ AddHeaderToTarget(header, pbxp, xct, True)
+
+ # Add "copies".
+ pbxcp_dict = {}
+ for copy_group in spec.get('copies', []):
+ dest = copy_group['destination']
+ if dest[0] not in ('/', '$'):
+ # Relative paths are relative to $(SRCROOT).
+ dest = '$(SRCROOT)/' + dest
+
+ code_sign = int(copy_group.get('xcode_code_sign', 0))
+      settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
+
+ # Coalesce multiple "copies" sections in the same target with the same
+ # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
+ # they'll wind up with ID collisions.
+ pbxcp = pbxcp_dict.get(dest, None)
+ if pbxcp is None:
+ pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
+ 'name': 'Copy to ' + copy_group['destination']
+ },
+ parent=xct)
+ pbxcp.SetDestination(dest)
+
+ # TODO(mark): The usual comment about this knowing too much about
+ # gyp.xcodeproj_file internals applies.
+ xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
+
+ pbxcp_dict[dest] = pbxcp
+
+      for copied_file in copy_group['files']:
+        pbxcp.AddFile(copied_file, settings)
+
+ # Excluded files can also go into the project file.
+ if not skip_excluded_files:
+ for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
+ 'mac_framework_private_headers']:
+ excluded_key = key + '_excluded'
+ for item in spec.get(excluded_key, []):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ # So can "inputs" and "outputs" sections of "actions" groups.
+ groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
+ if skip_excluded_files:
+ groups = [x for x in groups if not x.endswith('_excluded')]
+ for action in spec.get('actions', []):
+ for group in groups:
+ for item in action.get(group, []):
+ # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
+ # sources.
+ if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
+ pbxp.AddOrGetFileInRootGroup(item)
+
+ for postbuild in spec.get('postbuilds', []):
+ action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
+ script = 'exec ' + action_string_sh + '\nexit 1\n'
+
+ # Make the postbuild step depend on the output of ld or ar from this
+ # target. Apparently putting the script step after the link step isn't
+ # sufficient to ensure proper ordering in all cases. With an input
+ # declared but no outputs, the script step should run every time, as
+ # desired.
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
+ 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
+ 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
+ 'shellScript': script,
+ 'showEnvVarsInLog': 0,
+ })
+ xct.AppendProperty('buildPhases', ssbp)
+
+ # Add dependencies before libraries, because adding a dependency may imply
+ # adding a library. It's preferable to keep dependencies listed first
+ # during a link phase so that they can override symbols that would
+ # otherwise be provided by libraries, which will usually include system
+ # libraries. On some systems, ld is finicky and even requires the
+ # libraries to be ordered in such a way that unresolved symbols in
+ # earlier-listed libraries may only be resolved by later-listed libraries.
+ # The Mac linker doesn't work that way, but other platforms do, and so
+ # their linker invocations need to be constructed in this way. There's
+ # no compelling reason for Xcode's linker invocations to differ.
+
+ if 'dependencies' in spec:
+ for dependency in spec['dependencies']:
+ xct.AddDependency(xcode_targets[dependency])
+ # The support project also gets the dependencies (in case they are
+ # needed for the actions/rules to work).
+ if support_xct:
+ support_xct.AddDependency(xcode_targets[dependency])
+
+ if 'libraries' in spec:
+ for library in spec['libraries']:
+ xct.FrameworksPhase().AddFile(library)
+ # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
+ # I wish Xcode handled this automatically.
+ library_dir = posixpath.dirname(library)
+ if library_dir not in xcode_standard_library_dirs and (
+ not xct.HasBuildSetting(_library_search_paths_var) or
+ library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
+ xct.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ for configuration_name in configuration_names:
+ configuration = spec['configurations'][configuration_name]
+ xcbc = xct.ConfigurationNamed(configuration_name)
+ for include_dir in configuration.get('mac_framework_dirs', []):
+ xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
+ for include_dir in configuration.get('include_dirs', []):
+ xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
+ for library_dir in configuration.get('library_dirs', []):
+ if library_dir not in xcode_standard_library_dirs and (
+ not xcbc.HasBuildSetting(_library_search_paths_var) or
+ library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
+ xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
+
+ if 'defines' in configuration:
+ for define in configuration['defines']:
+ set_define = EscapeXcodeDefine(define)
+ xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
+ if 'xcode_settings' in configuration:
+ for xck, xcv in configuration['xcode_settings'].iteritems():
+ xcbc.SetBuildSetting(xck, xcv)
+ if 'xcode_config_file' in configuration:
+ config_ref = pbxp.AddOrGetFileInRootGroup(
+ configuration['xcode_config_file'])
+ xcbc.SetBaseConfiguration(config_ref)
+
+ build_files = []
+ for build_file, build_file_dict in data.iteritems():
+ if build_file.endswith('.gyp'):
+ build_files.append(build_file)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Finalize2(xcode_targets,
+ xcode_target_to_target_dict)
+
+ for build_file in build_files:
+ xcode_projects[build_file].Write()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
new file mode 100644
index 0000000000..260324a43f
--- /dev/null
+++ b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the xcode.py file. """
+
+import gyp.generator.xcode as xcode
+import unittest
+import sys
+
+
+class TestEscapeXcodeDefine(unittest.TestCase):
+ if sys.platform == 'darwin':
+ def test_InheritedRemainsUnescaped(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')
+
+ def test_Escaping(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
+
+if __name__ == '__main__':
+ unittest.main()