summaryrefslogtreecommitdiff
path: root/tools/gyp
diff options
context:
space:
mode:
authorRefael Ackermann <refack@gmail.com>2017-03-14 18:12:22 -0400
committerRefael Ackermann <refack@gmail.com>2017-04-18 13:22:28 -0400
commit3a334b12807eae369d487e0dce594681661fdcc5 (patch)
tree5008bb0acd6dde1bb544acd5fd4c608641d9daa4 /tools/gyp
parent1e7f6b157133d89b841de9bbc3348c474717b83d (diff)
downloadandroid-node-v8-3a334b12807eae369d487e0dce594681661fdcc5.tar.gz
android-node-v8-3a334b12807eae369d487e0dce594681661fdcc5.tar.bz2
android-node-v8-3a334b12807eae369d487e0dce594681661fdcc5.zip
tools: update gyp to eb296f6
* [win] Add support for MS VS2017 (via Registry) REF: https://chromium.googlesource.com/external/gyp/+/eb296f67da078ec01f5e3a9ea9cdc6d26d680161 PR-URL: https://github.com/nodejs/node/pull/12450 Reviewed-By: João Reis <reis@janeasystems.com>
Diffstat (limited to 'tools/gyp')
-rw-r--r--tools/gyp/AUTHORS3
-rw-r--r--tools/gyp/DEPS9
-rw-r--r--tools/gyp/PRESUBMIT.py13
-rw-r--r--tools/gyp/README.md4
-rw-r--r--tools/gyp/codereview.settings12
-rwxr-xr-xtools/gyp/gyptest.py1
-rw-r--r--tools/gyp/pylib/gyp/MSVSSettings.py1
-rw-r--r--tools/gyp/pylib/gyp/MSVSUtil.py3
-rw-r--r--tools/gyp/pylib/gyp/MSVSVersion.py54
-rw-r--r--tools/gyp/pylib/gyp/common.py13
-rw-r--r--tools/gyp/pylib/gyp/generator/analyzer.py293
-rw-r--r--tools/gyp/pylib/gyp/generator/cmake.py65
-rw-r--r--tools/gyp/pylib/gyp/generator/compile_commands_json.py115
-rw-r--r--tools/gyp/pylib/gyp/generator/make.py19
-rw-r--r--tools/gyp/pylib/gyp/generator/msvs.py112
-rw-r--r--tools/gyp/pylib/gyp/generator/ninja.py141
-rw-r--r--tools/gyp/pylib/gyp/generator/xcode.py61
-rw-r--r--tools/gyp/pylib/gyp/input.py35
-rwxr-xr-xtools/gyp/pylib/gyp/mac_tool.py188
-rw-r--r--tools/gyp/pylib/gyp/msvs_emulation.py13
-rwxr-xr-xtools/gyp/pylib/gyp/win_tool.py18
-rw-r--r--tools/gyp/pylib/gyp/xcode_emulation.py238
-rw-r--r--tools/gyp/pylib/gyp/xcode_ninja.py25
-rw-r--r--tools/gyp/pylib/gyp/xcodeproj_file.py120
-rwxr-xr-xtools/gyp/tools/pretty_gyp.py27
25 files changed, 1131 insertions, 452 deletions
diff --git a/tools/gyp/AUTHORS b/tools/gyp/AUTHORS
index dcd231ceb2..130c816058 100644
--- a/tools/gyp/AUTHORS
+++ b/tools/gyp/AUTHORS
@@ -10,3 +10,6 @@ Steven Knight <knight@baldmt.com>
Ryan Norton <rnorton10@gmail.com>
David J. Sankel <david@sankelsoftware.com>
Eric N. Vander Weele <ericvw@gmail.com>
+Tom Freudenberg <th.freudenberg@gmail.com>
+Julien Brianceau <jbriance@cisco.com>
+Refael Ackermann <refack@gmail.com>
diff --git a/tools/gyp/DEPS b/tools/gyp/DEPS
index 2e1120f274..167fb779b0 100644
--- a/tools/gyp/DEPS
+++ b/tools/gyp/DEPS
@@ -3,8 +3,7 @@
# (You don't need to use gclient for normal GYP development work.)
vars = {
- "chrome_trunk": "http://src.chromium.org/svn/trunk",
- "googlecode_url": "http://%s.googlecode.com/svn",
+ "chromium_git": "https://chromium.googlesource.com/",
}
deps = {
@@ -13,12 +12,12 @@ deps = {
deps_os = {
"win": {
"third_party/cygwin":
- Var("chrome_trunk") + "/deps/third_party/cygwin@66844",
+ Var("chromium_git") + "chromium/deps/cygwin@4fbd5b9",
"third_party/python_26":
- Var("chrome_trunk") + "/tools/third_party/python_26@89111",
+ Var("chromium_git") + "chromium/deps/python_26@5bb4080",
"src/third_party/pefile":
- (Var("googlecode_url") % "pefile") + "/trunk@63",
+ Var("chromium_git") + "external/pefile@72c6ae4",
},
}
diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py
index f6c8a357af..4bc1b8ca26 100644
--- a/tools/gyp/PRESUBMIT.py
+++ b/tools/gyp/PRESUBMIT.py
@@ -124,16 +124,3 @@ def CheckChangeOnCommit(input_api, output_api):
finally:
sys.path = old_sys_path
return report
-
-
-TRYBOTS = [
- 'linux_try',
- 'mac_try',
- 'win_try',
-]
-
-
-def GetPreferredTryMasters(_, change):
- return {
- 'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS },
- }
diff --git a/tools/gyp/README.md b/tools/gyp/README.md
new file mode 100644
index 0000000000..c0d73ac958
--- /dev/null
+++ b/tools/gyp/README.md
@@ -0,0 +1,4 @@
+GYP can Generate Your Projects.
+===================================
+
+Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out ```md-pages``` branch to read those documents offline.
diff --git a/tools/gyp/codereview.settings b/tools/gyp/codereview.settings
index faf37f1145..27fb9f99e2 100644
--- a/tools/gyp/codereview.settings
+++ b/tools/gyp/codereview.settings
@@ -1,10 +1,6 @@
-# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: codereview.chromium.org
+# This file is used by git cl to get repository specific information.
CC_LIST: gyp-developer@googlegroups.com
-VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
-TRY_ON_UPLOAD: False
-TRYSERVER_PROJECT: gyp
-TRYSERVER_PATCHLEVEL: 1
-TRYSERVER_ROOT: gyp
-TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
+CODE_REVIEW_SERVER: codereview.chromium.org
+GERRIT_HOST: True
PROJECT: gyp
+VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py
index 8e4fc47d5c..98957e1633 100755
--- a/tools/gyp/gyptest.py
+++ b/tools/gyp/gyptest.py
@@ -10,6 +10,7 @@ gyptest.py -- test runner for GYP tests.
import os
import optparse
+import shlex
import subprocess
import sys
diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py
index 4985756bdd..8ae19180ea 100644
--- a/tools/gyp/pylib/gyp/MSVSSettings.py
+++ b/tools/gyp/pylib/gyp/MSVSSettings.py
@@ -592,6 +592,7 @@ _Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
_Same(_compile, 'UseFullPaths', _boolean) # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)
+_Same(_compile, 'CompileAsWinRT', _boolean) # /ZW
_Same(_compile, 'AssemblerOutput',
_Enumeration(['NoListing',
diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py
index 0b32e91180..96dea6c2c9 100644
--- a/tools/gyp/pylib/gyp/MSVSUtil.py
+++ b/tools/gyp/pylib/gyp/MSVSUtil.py
@@ -14,6 +14,7 @@ TARGET_TYPE_EXT = {
'loadable_module': 'dll',
'shared_library': 'dll',
'static_library': 'lib',
+ 'windows_driver': 'sys',
}
@@ -110,7 +111,7 @@ def ShardTargets(target_list, target_dicts):
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
- for t in new_target_dicts:
+ for t in sorted(new_target_dicts):
for deptype in ('dependencies', 'dependencies_original'):
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
new_dependencies = []
diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py
index d9bfa684fa..c981e64b87 100644
--- a/tools/gyp/pylib/gyp/MSVSVersion.py
+++ b/tools/gyp/pylib/gyp/MSVSVersion.py
@@ -18,7 +18,7 @@ class VisualStudioVersion(object):
def __init__(self, short_name, description,
solution_version, project_version, flat_sln, uses_vcxproj,
- path, sdk_based, default_toolset=None):
+ path, sdk_based, default_toolset=None, compatible_sdks=None):
self.short_name = short_name
self.description = description
self.solution_version = solution_version
@@ -28,6 +28,9 @@ class VisualStudioVersion(object):
self.path = path
self.sdk_based = sdk_based
self.default_toolset = default_toolset
+ compatible_sdks = compatible_sdks or []
+ compatible_sdks.sort(key=lambda v: float(v.replace('v', '')), reverse=True)
+ self.compatible_sdks = compatible_sdks
def ShortName(self):
return self.short_name
@@ -68,17 +71,19 @@ class VisualStudioVersion(object):
of a user override."""
return self.default_toolset
- def SetupScript(self, target_arch):
+ def _SetupScriptInternal(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
- # Check if we are running in the SDK command line environment and use
- # the setup script from the SDK if so. |target_arch| should be either
- # 'x86' or 'x64'.
+ # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
+ # depot_tools build tools and should run SetEnv.Cmd to set up the
+ # environment. The check for WindowsSDKDir alone is not sufficient because
+ # this is set by running vcvarsall.bat.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
- if self.sdk_based and sdk_dir:
- return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
- '/' + target_arch]
+ if sdk_dir:
+ setup_path = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd'))
+ if self.sdk_based and sdk_dir and os.path.exists(setup_path):
+ return [setup_path, '/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
@@ -106,6 +111,14 @@ class VisualStudioVersion(object):
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
+ def SetupScript(self, target_arch):
+ script_data = self._SetupScriptInternal(target_arch)
+ script_path = script_data[0]
+ if not os.path.exists(script_path):
+ raise Exception('%s is missing - make sure VC++ tools are installed.' %
+ script_path)
+ return script_data
+
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
@@ -226,6 +239,16 @@ def _CreateVersion(name, path, sdk_based=False):
if path:
path = os.path.normpath(path)
versions = {
+ '2017': VisualStudioVersion('2017',
+ 'Visual Studio 2017',
+ solution_version='12.00',
+ project_version='15.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v141',
+ compatible_sdks=['v8.1', 'v10.0']),
'2015': VisualStudioVersion('2015',
'Visual Studio 2015',
solution_version='12.00',
@@ -338,7 +361,6 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
A list of visual studio versions installed in descending order of
usage preference.
Base this on the registry and a quick check if devenv.exe exists.
- Only versions 8-10 are considered.
Possibilities are:
2005(e) - Visual Studio 2005 (8)
2008(e) - Visual Studio 2008 (9)
@@ -346,6 +368,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
2012(e) - Visual Studio 2012 (11)
2013(e) - Visual Studio 2013 (12)
2015 - Visual Studio 2015 (14)
+ 2017 - Visual Studio 2017 (15)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
@@ -355,6 +378,7 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
'11.0': '2012',
'12.0': '2013',
'14.0': '2015',
+ '15.0': '2017'
}
versions = []
for version in versions_to_check:
@@ -385,13 +409,18 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
# The old method above does not work when only SDK is installed.
keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
- r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7',
+ r'HKLM\Software\Microsoft\VisualStudio\SxS\VS7',
+ r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7']
for index in range(len(keys)):
path = _RegistryGetValue(keys[index], version)
if not path:
continue
path = _ConvertToCygpath(path)
- if version != '14.0': # There is no Express edition for 2015.
+ if version == '15.0':
+ if os.path.exists(path):
+ versions.append(_CreateVersion('2017', path))
+ elif version != '14.0': # There is no Express edition for 2015.
versions.append(_CreateVersion(version_to_year[version] + 'e',
os.path.join(path, '..'), sdk_based=True))
@@ -410,7 +439,7 @@ def SelectVisualStudioVersion(version='auto', allow_fallback=True):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
- 'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
+ 'auto': ('15.0', '14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
@@ -422,6 +451,7 @@ def SelectVisualStudioVersion(version='auto', allow_fallback=True):
'2013': ('12.0',),
'2013e': ('12.0',),
'2015': ('14.0',),
+ '2017': ('15.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py
index d482a20df3..a1e1db5f12 100644
--- a/tools/gyp/pylib/gyp/common.py
+++ b/tools/gyp/pylib/gyp/common.py
@@ -425,13 +425,15 @@ def GetFlavor(params):
return 'freebsd'
if sys.platform.startswith('openbsd'):
return 'openbsd'
+ if sys.platform.startswith('netbsd'):
+ return 'netbsd'
if sys.platform.startswith('aix'):
return 'aix'
return 'linux'
-def CopyTool(flavor, out_path):
+def CopyTool(flavor, out_path, generator_flags={}):
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
# aix and solaris just need flock emulation. mac and win use more complicated
@@ -451,11 +453,18 @@ def CopyTool(flavor, out_path):
with open(source_path) as source_file:
source = source_file.readlines()
+ # Set custom header flags.
+ header = '# Generated by gyp. Do not edit.\n'
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if flavor == 'mac' and mac_toolchain_dir:
+ header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \
+ % mac_toolchain_dir
+
# Add header and write it out.
tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
with open(tool_path, 'w') as tool_file:
tool_file.write(
- ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
+ ''.join([source[0], header] + source[1:]))
# Make file executable.
os.chmod(tool_path, 0755)
diff --git a/tools/gyp/pylib/gyp/generator/analyzer.py b/tools/gyp/pylib/gyp/generator/analyzer.py
index f403d4e266..921c1a6b71 100644
--- a/tools/gyp/pylib/gyp/generator/analyzer.py
+++ b/tools/gyp/pylib/gyp/generator/analyzer.py
@@ -7,23 +7,59 @@ This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
the generator flag config_path) the path of a json file that dictates the files
and targets to search for. The following keys are supported:
files: list of paths (relative) of the files to search for.
-targets: list of targets to search for. The target names are unqualified.
+test_targets: unqualified target names to search for. Any target in this list
+that depends upon a file in |files| is output regardless of the type of target
+or chain of dependencies.
+additional_compile_targets: Unqualified targets to search for in addition to
+test_targets. Targets in the combined list that depend upon a file in |files|
+are not necessarily output. For example, if the target is of type none then the
+target is not output (but one of the descendants of the target will be).
The following is output:
error: only supplied if there is an error.
-targets: the set of targets passed in via targets that either directly or
- indirectly depend upon the set of paths supplied in files.
-build_targets: minimal set of targets that directly depend on the changed
- files and need to be built. The expectation is this set of targets is passed
- into a build step.
+compile_targets: minimal set of targets that directly or indirectly (for
+ targets of type none) depend on the files in |files| and is one of the
+ supplied targets or a target that one of the supplied targets depends on.
+ The expectation is this set of targets is passed into a build step. This list
+ always contains the output of test_targets as well.
+test_targets: set of targets from the supplied |test_targets| that either
+ directly or indirectly depend upon a file in |files|. This list if useful
+ if additional processing needs to be done for certain targets after the
+ build, such as running tests.
status: outputs one of three values: none of the supplied files were found,
one of the include files changed so that it should be assumed everything
- changed (in this case targets and build_targets are not output) or at
+ changed (in this case test_targets and compile_targets are not output) or at
least one file was found.
-invalid_targets: list of supplied targets thare were not found.
+invalid_targets: list of supplied targets that were not found.
+
+Example:
+Consider a graph like the following:
+ A D
+ / \
+B C
+A depends upon both B and C, A is of type none and B and C are executables.
+D is an executable, has no dependencies and nothing depends on it.
+If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
+files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
+the following is output:
+|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
+and the supplied target A depends upon it. A is not output as a build_target
+as it is of type none with no rules and actions.
+|test_targets| = ["B"] B directly depends upon the change file b.cc.
+
+Even though the file d.cc, which D depends upon, has changed D is not output
+as it was not supplied by way of |additional_compile_targets| or |test_targets|.
If the generator flag analyzer_output_path is specified, output is written
there. Otherwise output is written to stdout.
+
+In Gyp the "all" target is shorthand for the root targets in the files passed
+to gyp. For example, if file "a.gyp" contains targets "a1" and
+"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
+on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
+Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
+directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
+then the "all" target includes "b1" and "b2".
"""
import gyp.common
@@ -210,6 +246,8 @@ class Config(object):
def __init__(self):
self.files = []
self.targets = set()
+ self.additional_compile_target_names = set()
+ self.test_target_names = set()
def Init(self, params):
"""Initializes Config. This is a separate method as it raises an exception
@@ -229,7 +267,9 @@ class Config(object):
if not isinstance(config, dict):
raise Exception('config_path must be a JSON file containing a dictionary')
self.files = config.get('files', [])
- self.targets = set(config.get('targets', []))
+ self.additional_compile_target_names = set(
+ config.get('additional_compile_targets', []))
+ self.test_target_names = set(config.get('test_targets', []))
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
@@ -280,12 +320,13 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
"""Returns a tuple of the following:
. A dictionary mapping from fully qualified name to Target.
. A list of the targets that have a source file in |files|.
- . Set of root Targets reachable from the the files |build_files|.
+ . Targets that constitute the 'all' target. See description at top of file
+ for details on the 'all' target.
This sets the |match_status| of the targets that contain any of the source
files in |files| to MATCH_STATUS_MATCHES.
|toplevel_dir| is the root of the source tree."""
# Maps from target name to Target.
- targets = {}
+ name_to_target = {}
# Targets that matched.
matching_targets = []
@@ -305,7 +346,8 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
while len(targets_to_visit) > 0:
target_name = targets_to_visit.pop()
- created_target, target = _GetOrCreateTargetByName(targets, target_name)
+ created_target, target = _GetOrCreateTargetByName(name_to_target,
+ target_name)
if created_target:
roots.add(target)
elif target.visited:
@@ -348,22 +390,25 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
for dep in target_dicts[target_name].get('dependencies', []):
targets_to_visit.append(dep)
- created_dep_target, dep_target = _GetOrCreateTargetByName(targets, dep)
+ created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
+ dep)
if not created_dep_target:
roots.discard(dep_target)
target.deps.add(dep_target)
dep_target.back_deps.add(target)
- return targets, matching_targets, roots & build_file_targets
+ return name_to_target, matching_targets, roots & build_file_targets
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
- """Returns a mapping (dictionary) from unqualified name to Target for all the
- Targets in |to_find|."""
+ """Returns a tuple of the following:
+ . mapping (dictionary) from unqualified name to Target for all the
+ Targets in |to_find|.
+ . any target names not found. If this is empty all targets were found."""
result = {}
if not to_find:
- return result
+ return {}, []
to_find = set(to_find)
for target_name in all_targets.keys():
extracted = gyp.common.ParseQualifiedTarget(target_name)
@@ -371,13 +416,14 @@ def _GetUnqualifiedToTargetMapping(all_targets, to_find):
to_find.remove(extracted[1])
result[extracted[1]] = all_targets[target_name]
if not to_find:
- return result
- return result
+ return result, []
+ return result, [x for x in to_find]
-def _DoesTargetDependOn(target):
- """Returns true if |target| or any of its dependencies matches the supplied
- set of paths. This updates |matches| of the Targets as it recurses.
+def _DoesTargetDependOnMatchingTargets(target):
+ """Returns true if |target| or any of its dependencies is one of the
+ targets containing the files supplied as input to analyzer. This updates
+ |matches| of the Targets as it recurses.
target: the Target to look for."""
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
return False
@@ -385,7 +431,7 @@ def _DoesTargetDependOn(target):
target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
return True
for dep in target.deps:
- if _DoesTargetDependOn(dep):
+ if _DoesTargetDependOnMatchingTargets(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
print '\t', target.name, 'matches by dep', dep.name
return True
@@ -393,19 +439,20 @@ def _DoesTargetDependOn(target):
return False
-def _GetTargetsDependingOn(possible_targets):
+def _GetTargetsDependingOnMatchingTargets(possible_targets):
"""Returns the list of Targets in |possible_targets| that depend (either
- directly on indirectly) on the matched targets.
+ directly on indirectly) on at least one of the targets containing the files
+ supplied as input to analyzer.
possible_targets: targets to search from."""
found = []
print 'Targets that matched by dependency:'
for target in possible_targets:
- if _DoesTargetDependOn(target):
+ if _DoesTargetDependOnMatchingTargets(target):
found.append(target)
return found
-def _AddBuildTargets(target, roots, add_if_no_ancestor, result):
+def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
"""Recurses through all targets that depend on |target|, adding all targets
that need to be built (and are in |roots|) to |result|.
roots: set of root targets.
@@ -416,10 +463,10 @@ def _AddBuildTargets(target, roots, add_if_no_ancestor, result):
return
target.visited = True
- target.in_roots = not target.back_deps and target in roots
+ target.in_roots = target in roots
for back_dep_target in target.back_deps:
- _AddBuildTargets(back_dep_target, roots, False, result)
+ _AddCompileTargets(back_dep_target, roots, False, result)
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
target.in_roots |= back_dep_target.in_roots
target.is_or_has_linked_ancestor |= (
@@ -437,7 +484,7 @@ def _AddBuildTargets(target, roots, add_if_no_ancestor, result):
(add_if_no_ancestor or target.requires_build)) or
(target.is_static_library and add_if_no_ancestor and
not target.is_or_has_linked_ancestor)):
- print '\t\tadding to build targets', target.name, 'executable', \
+ print '\t\tadding to compile targets', target.name, 'executable', \
target.is_executable, 'added_to_compile_targets', \
target.added_to_compile_targets, 'add_if_no_ancestor', \
add_if_no_ancestor, 'requires_build', target.requires_build, \
@@ -447,14 +494,14 @@ def _AddBuildTargets(target, roots, add_if_no_ancestor, result):
target.added_to_compile_targets = True
-def _GetBuildTargets(matching_targets, roots):
+def _GetCompileTargets(matching_targets, supplied_targets):
"""Returns the set of Targets that require a build.
matching_targets: targets that changed and need to be built.
- roots: set of root targets in the build files to search from."""
+ supplied_targets: set of targets supplied to analyzer to search from."""
result = set()
for target in matching_targets:
- print '\tfinding build targets for match', target.name
- _AddBuildTargets(target, roots, True, result)
+ print 'finding compile targets for match', target.name
+ _AddCompileTargets(target, supplied_targets, True, result)
return result
@@ -479,6 +526,16 @@ def _WriteOutput(params, **values):
print 'Targets that require a build:'
for target in values['build_targets']:
print '\t', target
+ if 'compile_targets' in values:
+ values['compile_targets'].sort()
+ print 'Targets that need to be built:'
+ for target in values['compile_targets']:
+ print '\t', target
+ if 'test_targets' in values:
+ values['test_targets'].sort()
+ print 'Test targets:'
+ for target in values['test_targets']:
+ print '\t', target
output_path = params.get('generator_flags', {}).get(
'analyzer_output_path', None)
@@ -538,11 +595,104 @@ def CalculateVariables(default_variables, params):
default_variables.setdefault('OS', operating_system)
+class TargetCalculator(object):
+ """Calculates the matching test_targets and matching compile_targets."""
+ def __init__(self, files, additional_compile_target_names, test_target_names,
+ data, target_list, target_dicts, toplevel_dir, build_files):
+ self._additional_compile_target_names = set(additional_compile_target_names)
+ self._test_target_names = set(test_target_names)
+ self._name_to_target, self._changed_targets, self._root_targets = (
+ _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
+ frozenset(files), build_files))
+ self._unqualified_mapping, self.invalid_targets = (
+ _GetUnqualifiedToTargetMapping(self._name_to_target,
+ self._supplied_target_names_no_all()))
+
+ def _supplied_target_names(self):
+ return self._additional_compile_target_names | self._test_target_names
+
+ def _supplied_target_names_no_all(self):
+ """Returns the supplied test targets without 'all'."""
+ result = self._supplied_target_names();
+ result.discard('all')
+ return result
+
+ def is_build_impacted(self):
+ """Returns true if the supplied files impact the build at all."""
+ return self._changed_targets
+
+ def find_matching_test_target_names(self):
+ """Returns the set of output test targets."""
+ assert self.is_build_impacted()
+ # Find the test targets first. 'all' is special cased to mean all the
+ # root targets. To deal with all the supplied |test_targets| are expanded
+ # to include the root targets during lookup. If any of the root targets
+ # match, we remove it and replace it with 'all'.
+ test_target_names_no_all = set(self._test_target_names)
+ test_target_names_no_all.discard('all')
+ test_targets_no_all = _LookupTargets(test_target_names_no_all,
+ self._unqualified_mapping)
+ test_target_names_contains_all = 'all' in self._test_target_names
+ if test_target_names_contains_all:
+ test_targets = [x for x in (set(test_targets_no_all) |
+ set(self._root_targets))]
+ else:
+ test_targets = [x for x in test_targets_no_all]
+ print 'supplied test_targets'
+ for target_name in self._test_target_names:
+ print '\t', target_name
+ print 'found test_targets'
+ for target in test_targets:
+ print '\t', target.name
+ print 'searching for matching test targets'
+ matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+ matching_test_targets_contains_all = (test_target_names_contains_all and
+ set(matching_test_targets) &
+ set(self._root_targets))
+ if matching_test_targets_contains_all:
+ # Remove any of the targets for all that were not explicitly supplied,
+ # 'all' is subsequentely added to the matching names below.
+ matching_test_targets = [x for x in (set(matching_test_targets) &
+ set(test_targets_no_all))]
+ print 'matched test_targets'
+ for target in matching_test_targets:
+ print '\t', target.name
+ matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in matching_test_targets]
+ if matching_test_targets_contains_all:
+ matching_target_names.append('all')
+ print '\tall'
+ return matching_target_names
+
+ def find_matching_compile_target_names(self):
+ """Returns the set of output compile targets."""
+ assert self.is_build_impacted();
+ # Compile targets are found by searching up from changed targets.
+ # Reset the visited status for _GetBuildTargets.
+ for target in self._name_to_target.itervalues():
+ target.visited = False
+
+ supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
+ self._unqualified_mapping)
+ if 'all' in self._supplied_target_names():
+ supplied_targets = [x for x in (set(supplied_targets) |
+ set(self._root_targets))]
+ print 'Supplied test_targets & compile_targets'
+ for target in supplied_targets:
+ print '\t', target.name
+ print 'Finding compile targets'
+ compile_targets = _GetCompileTargets(self._changed_targets,
+ supplied_targets)
+ return [gyp.common.ParseQualifiedTarget(target.name)[1]
+ for target in compile_targets]
+
+
def GenerateOutput(target_list, target_dicts, data, params):
"""Called by gyp as the final stage. Outputs results."""
config = Config()
try:
config.Init(params)
+
if not config.files:
raise Exception('Must specify files to analyze via config_path generator '
'flag')
@@ -553,55 +703,38 @@ def GenerateOutput(target_list, target_dicts, data, params):
if _WasGypIncludeFileModified(params, config.files):
result_dict = { 'status': all_changed_string,
- 'targets': list(config.targets) }
+ 'test_targets': list(config.test_target_names),
+ 'compile_targets': list(
+ config.additional_compile_target_names |
+ config.test_target_names) }
_WriteOutput(params, **result_dict)
return
- all_targets, matching_targets, roots = _GenerateTargets(
- data, target_list, target_dicts, toplevel_dir, frozenset(config.files),
- params['build_files'])
-
- print 'roots:'
- for root in roots:
- print '\t', root.name
-
- unqualified_mapping = _GetUnqualifiedToTargetMapping(all_targets,
- config.targets)
- invalid_targets = None
- if len(unqualified_mapping) != len(config.targets):
- invalid_targets = _NamesNotIn(config.targets, unqualified_mapping)
-
- if matching_targets:
- search_targets = _LookupTargets(config.targets, unqualified_mapping)
- print 'supplied targets'
- for target in config.targets:
- print '\t', target
- print 'expanded supplied targets'
- for target in search_targets:
- print '\t', target.name
- matched_search_targets = _GetTargetsDependingOn(search_targets)
- print 'raw matched search targets:'
- for target in matched_search_targets:
- print '\t', target.name
- # Reset the visited status for _GetBuildTargets.
- for target in all_targets.itervalues():
- target.visited = False
- print 'Finding build targets'
- build_targets = _GetBuildTargets(matching_targets, roots)
- matched_search_targets = [gyp.common.ParseQualifiedTarget(target.name)[1]
- for target in matched_search_targets]
- build_targets = [gyp.common.ParseQualifiedTarget(target.name)[1]
- for target in build_targets]
- else:
- matched_search_targets = []
- build_targets = []
-
- result_dict = { 'targets': matched_search_targets,
- 'status': found_dependency_string if matching_targets else
- no_dependency_string,
- 'build_targets': build_targets}
- if invalid_targets:
- result_dict['invalid_targets'] = invalid_targets
+ calculator = TargetCalculator(config.files,
+ config.additional_compile_target_names,
+ config.test_target_names, data,
+ target_list, target_dicts, toplevel_dir,
+ params['build_files'])
+ if not calculator.is_build_impacted():
+ result_dict = { 'status': no_dependency_string,
+ 'test_targets': [],
+ 'compile_targets': [] }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
+ _WriteOutput(params, **result_dict)
+ return
+
+ test_target_names = calculator.find_matching_test_target_names()
+ compile_target_names = calculator.find_matching_compile_target_names()
+ found_at_least_one_target = compile_target_names or test_target_names
+ result_dict = { 'test_targets': test_target_names,
+ 'status': found_dependency_string if
+ found_at_least_one_target else no_dependency_string,
+ 'compile_targets': list(
+ set(compile_target_names) |
+ set(test_target_names)) }
+ if calculator.invalid_targets:
+ result_dict['invalid_targets'] = calculator.invalid_targets
_WriteOutput(params, **result_dict)
except Exception as e:
diff --git a/tools/gyp/pylib/gyp/generator/cmake.py b/tools/gyp/pylib/gyp/generator/cmake.py
index eece6ea98d..a2b96291aa 100644
--- a/tools/gyp/pylib/gyp/generator/cmake.py
+++ b/tools/gyp/pylib/gyp/generator/cmake.py
@@ -34,6 +34,7 @@ import signal
import string
import subprocess
import gyp.common
+import gyp.xcode_emulation
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
@@ -55,7 +56,7 @@ generator_default_variables = {
'CONFIGURATION_NAME': '${configuration}',
}
-FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')
+FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = True
@@ -103,7 +104,7 @@ def NormjoinPathForceCMakeSource(base_path, rel_path):
if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
return rel_path
# TODO: do we need to check base_path for absolute variables as well?
- return os.path.join('${CMAKE_SOURCE_DIR}',
+ return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
os.path.normpath(os.path.join(base_path, rel_path)))
@@ -293,7 +294,7 @@ def WriteActions(target_name, actions, extra_sources, extra_deps,
WriteVariable(output, inputs_name)
output.write('\n')
- output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
@@ -398,9 +399,9 @@ def WriteRules(target_name, rules, extra_sources, extra_deps,
output.write(NormjoinPath(path_to_gyp, rule_source))
output.write('\n')
- # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives.
+ # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
# The cwd is the current build directory.
- output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+ output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
@@ -522,7 +523,7 @@ def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
WriteVariable(output, copy.inputs_name, ' ')
output.write('\n')
- output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+ output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
@@ -608,8 +609,8 @@ class CMakeNamer(object):
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output):
-
+ options, generator_flags, all_qualified_targets, flavor,
+ output):
# The make generator does this always.
# TODO: It would be nice to be able to tell CMake all dependencies.
circular_libs = generator_flags.get('circular', True)
@@ -633,6 +634,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
spec = target_dicts.get(qualified_target, {})
config = spec.get('configurations', {}).get(config_to_use, {})
+ xcode_settings = None
+ if flavor == 'mac':
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+
target_name = spec.get('target_name', '<missing target name>')
target_type = spec.get('type', '<missing target type>')
target_toolset = spec.get('toolset')
@@ -904,10 +909,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
defines = config.get('defines')
if defines is not None:
SetTargetProperty(output,
- cmake_target_name,
- 'COMPILE_DEFINITIONS',
- defines,
- ';')
+ cmake_target_name,
+ 'COMPILE_DEFINITIONS',
+ defines,
+ ';')
# Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
# CMake currently does not have target C and CXX flags.
@@ -927,6 +932,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cxx = config.get('cflags_cc', [])
+ if xcode_settings:
+ cflags = xcode_settings.GetCflags(config_to_use)
+ cflags_c = xcode_settings.GetCflagsC(config_to_use)
+ cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
+ #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
+ #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
+
if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
@@ -965,6 +977,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
if ldflags is not None:
SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+ # XCode settings
+ xcode_settings = config.get('xcode_settings', {})
+ for xcode_setting, xcode_value in xcode_settings.viewitems():
+ SetTargetProperty(output, cmake_target_name,
+ "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value,
+ '' if isinstance(xcode_value, str) else ' ')
+
# Note on Dependencies and Libraries:
# CMake wants to handle link order, resolving the link line up front.
# Gyp does not retain or enforce specifying enough information to do so.
@@ -1029,7 +1048,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write(cmake_target_name)
output.write('\n')
if static_deps:
- write_group = circular_libs and len(static_deps) > 1
+ write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
if write_group:
output.write('-Wl,--start-group\n')
for dep in gyp.common.uniquer(static_deps):
@@ -1045,9 +1064,9 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write('\n')
if external_libs:
for lib in gyp.common.uniquer(external_libs):
- output.write(' ')
- output.write(lib)
- output.write('\n')
+ output.write(' "')
+ output.write(RemovePrefix(lib, "$(SDKROOT)"))
+ output.write('"\n')
output.write(')\n')
@@ -1059,6 +1078,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
params, config_to_use):
options = params['options']
generator_flags = params['generator_flags']
+ flavor = gyp.common.GetFlavor(params)
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
@@ -1126,7 +1146,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
if cc:
SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
- SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
+ SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
SetVariable(output, 'obj', '${builddir}/obj')
output.write('\n')
@@ -1141,7 +1161,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
# Force ninja to use rsp files. Otherwise link and ar lines can get too long,
# resulting in 'Argument list too long' errors.
- output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
+ # However, rsp files don't work correctly on Mac.
+ if flavor != 'mac':
+ output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
output.write('\n')
namer = CMakeNamer(target_list)
@@ -1156,8 +1178,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
all_qualified_targets.add(qualified_target)
for qualified_target in target_list:
+ if flavor == 'mac':
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ spec = target_dicts[qualified_target]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)
+
WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output)
+ options, generator_flags, all_qualified_targets, flavor, output)
output.close()
diff --git a/tools/gyp/pylib/gyp/generator/compile_commands_json.py b/tools/gyp/pylib/gyp/generator/compile_commands_json.py
deleted file mode 100644
index 575db63c4e..0000000000
--- a/tools/gyp/pylib/gyp/generator/compile_commands_json.py
+++ /dev/null
@@ -1,115 +0,0 @@
-# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import gyp.common
-import gyp.xcode_emulation
-import json
-import os
-
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-generator_supports_multiple_toolsets = True
-generator_wants_sorted_dependencies = False
-
-# Lifted from make.py. The actual values don't matter much.
-generator_default_variables = {
- 'CONFIGURATION_NAME': '$(BUILDTYPE)',
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
- 'PRODUCT_DIR': '$(builddir)',
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',
- 'RULE_INPUT_EXT': '$(suffix $<)',
- 'RULE_INPUT_NAME': '$(notdir $<)',
- 'RULE_INPUT_PATH': '$(abspath $<)',
- 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',
- 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
-}
-
-
-def IsMac(params):
- return 'mac' == gyp.common.GetFlavor(params)
-
-
-def CalculateVariables(default_variables, params):
- default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
-
-def AddCommandsForTarget(cwd, target, params, per_config_commands):
- output_dir = params['generator_flags']['output_dir']
- for configuration_name, configuration in target['configurations'].iteritems():
- builddir_name = os.path.join(output_dir, configuration_name)
-
- if IsMac(params):
- xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
- cflags = xcode_settings.GetCflags(configuration_name)
- cflags_c = xcode_settings.GetCflagsC(configuration_name)
- cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
- else:
- cflags = configuration.get('cflags', [])
- cflags_c = configuration.get('cflags_c', [])
- cflags_cc = configuration.get('cflags_cc', [])
-
- cflags_c = cflags + cflags_c
- cflags_cc = cflags + cflags_cc
-
- defines = configuration.get('defines', [])
- defines = ['-D' + s for s in defines]
-
- # TODO(bnoordhuis) Handle generated source files.
- sources = target.get('sources', [])
- sources = [s for s in sources if s.endswith('.c') or s.endswith('.cc')]
-
- def resolve(filename):
- return os.path.abspath(os.path.join(cwd, filename))
-
- # TODO(bnoordhuis) Handle generated header files.
- include_dirs = configuration.get('include_dirs', [])
- include_dirs = [s for s in include_dirs if not s.startswith('$(obj)')]
- includes = ['-I' + resolve(s) for s in include_dirs]
-
- defines = gyp.common.EncodePOSIXShellList(defines)
- includes = gyp.common.EncodePOSIXShellList(includes)
- cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
- cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
-
- commands = per_config_commands.setdefault(configuration_name, [])
- for source in sources:
- file = resolve(source)
- isc = source.endswith('.c')
- cc = 'cc' if isc else 'c++'
- cflags = cflags_c if isc else cflags_cc
- command = ' '.join((cc, defines, includes, cflags,
- '-c', gyp.common.EncodePOSIXShellArgument(file)))
- commands.append(dict(command=command, directory=output_dir, file=file))
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- per_config_commands = {}
- for qualified_target, target in target_dicts.iteritems():
- build_file, target_name, toolset = (
- gyp.common.ParseQualifiedTarget(qualified_target))
- if IsMac(params):
- settings = data[build_file]
- gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
- cwd = os.path.dirname(build_file)
- AddCommandsForTarget(cwd, target, params, per_config_commands)
-
- output_dir = params['generator_flags']['output_dir']
- for configuration_name, commands in per_config_commands.iteritems():
- filename = os.path.join(output_dir,
- configuration_name,
- 'compile_commands.json')
- gyp.common.EnsureDirExists(filename)
- fp = open(filename, 'w')
- json.dump(commands, fp=fp, indent=0, check_circular=False)
-
-
-def PerformBuild(data, configurations, params):
- pass
diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py
index d9adddaa9b..e80ebaed78 100644
--- a/tools/gyp/pylib/gyp/generator/make.py
+++ b/tools/gyp/pylib/gyp/generator/make.py
@@ -147,7 +147,7 @@ cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) $(LIBS) -Wl,--end-group
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
# We support two kinds of shared objects (.so):
# 1) shared_library, which is just bundling together many dependent libraries
@@ -1587,7 +1587,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep)
- if (self.flavor not in ('mac', 'openbsd', 'win') and not
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
self.is_standalone_static_library):
self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
part_of_all, postbuilds=postbuilds)
@@ -1754,7 +1754,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Hash the target name to avoid generating overlong filenames.
cmddigest = hashlib.sha1(command if command else self.target).hexdigest()
- intermediate = "%s.intermediate" % cmddigest
+ intermediate = "%s.intermediate" % (cmddigest)
self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
self.WriteLn('\t%s' % '@:');
self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
@@ -2055,6 +2055,11 @@ def GenerateOutput(target_list, target_dicts, data, params):
header_params.update({
'flock': 'lockf',
})
+ elif flavor == 'openbsd':
+ copy_archive_arguments = '-pPRf'
+ header_params.update({
+ 'copy_archive_args': copy_archive_arguments,
+ })
elif flavor == 'aix':
copy_archive_arguments = '-pPRf'
header_params.update({
@@ -2069,10 +2074,10 @@ def GenerateOutput(target_list, target_dicts, data, params):
'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
- 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'),
- 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'),
- 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'),
- 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'),
+ 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
+ 'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
+ 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
+ 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
})
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py
index 44cc1304a2..ab92979e5c 100644
--- a/tools/gyp/pylib/gyp/generator/msvs.py
+++ b/tools/gyp/pylib/gyp/generator/msvs.py
@@ -46,6 +46,8 @@ VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
generator_default_variables = {
+ 'DRIVER_PREFIX': '',
+ 'DRIVER_SUFFIX': '.sys',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '.exe',
'STATIC_LIB_PREFIX': '',
@@ -91,6 +93,7 @@ generator_additional_non_configuration_keys = [
'msvs_target_platform_minversion',
]
+generator_filelist_paths = None
# List of precompiled header related keys.
precomp_keys = [
@@ -256,6 +259,8 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
if not tools.get(tool_name):
tools[tool_name] = dict()
tool = tools[tool_name]
+ if 'CompileAsWinRT' == setting:
+ return
if tool.get(setting):
if only_if_unset: return
if type(tool[setting]) == list and type(value) == list:
@@ -269,6 +274,10 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
tool[setting] = value
+def _ConfigTargetVersion(config_data):
+ return config_data.get('msvs_target_version', 'Windows7')
+
+
def _ConfigPlatform(config_data):
return config_data.get('msvs_configuration_platform', 'Win32')
@@ -285,6 +294,23 @@ def _ConfigFullName(config_name, config_data):
return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
+def _ConfigWindowsTargetPlatformVersion(config_data, version):
+ config_ver = config_data.get('msvs_windows_sdk_version')
+ vers = [config_ver] if config_ver else version.compatible_sdks
+ for ver in vers:
+ for key in [
+ r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
+ r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
+ sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
+ if not sdk_dir:
+ continue
+ version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
+ # Find a matching entry in sdk_dir\include.
+ names = sorted([x for x in os.listdir(r'%s\include' % sdk_dir)
+ if x.startswith(version)], reverse=True)
+ return names[0]
+
+
def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
quote_cmd, do_setup_env):
@@ -901,6 +927,8 @@ def _GetMsbuildToolsetOfProject(proj_path, spec, version):
toolset = default_config.get('msbuild_toolset')
if not toolset and version.DefaultToolset():
toolset = version.DefaultToolset()
+ if spec['type'] == 'windows_driver':
+ toolset = 'WindowsKernelModeDriver10.0'
return toolset
@@ -1084,6 +1112,7 @@ def _GetMSVSConfigurationType(spec, build_file):
'shared_library': '2', # .dll
'loadable_module': '2', # .dll
'static_library': '4', # .lib
+ 'windows_driver': '5', # .sys
'none': '10', # Utility type
}[spec['type']]
except KeyError:
@@ -1268,6 +1297,7 @@ def _GetOutputFilePathAndTool(spec, msbuild):
'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
+ 'windows_driver': ('VCLinkerTool', 'Link', '$(OutDir)', '.sys'),
'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
}
output_file_props = output_file_map.get(spec['type'])
@@ -1330,7 +1360,8 @@ def _GetDisabledWarnings(config):
def _GetModuleDefinition(spec):
def_file = ''
- if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
+ if spec['type'] in ['shared_library', 'loadable_module', 'executable',
+ 'windows_driver']:
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
def_file = _FixPath(def_files[0])
@@ -1937,6 +1968,19 @@ def PerformBuild(data, configurations, params):
rtn = subprocess.check_call(arguments)
+def CalculateGeneratorInputInfo(params):
+ if params.get('flavor') == 'ninja':
+ toplevel = params['options'].toplevel_dir
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, ninja_generator.ComputeOutputDir(params),
+ 'gypfiles-msvs-ninja'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate .sln and .vcproj files.
@@ -2622,7 +2666,7 @@ def _GetMSBuildProjectConfigurations(configurations):
return [group]
-def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
+def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name):
namespace = os.path.splitext(gyp_file_name)[0]
properties = [
['PropertyGroup', {'Label': 'Globals'},
@@ -2637,6 +2681,18 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
properties[0].append(['PreferredToolArchitecture', 'x64'])
+ if spec.get('msvs_target_platform_version'):
+ target_platform_version = spec.get('msvs_target_platform_version')
+ properties[0].append(['WindowsTargetPlatformVersion',
+ target_platform_version])
+ if spec.get('msvs_target_platform_minversion'):
+ target_platform_minversion = spec.get('msvs_target_platform_minversion')
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_minversion])
+ else:
+ properties[0].append(['WindowsTargetPlatformMinVersion',
+ target_platform_version])
+
if spec.get('msvs_enable_winrt'):
properties[0].append(['DefaultLanguage', 'en-US'])
properties[0].append(['AppContainerApplication', 'true'])
@@ -2645,33 +2701,45 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
properties[0].append(['ApplicationTypeRevision', app_type_revision])
else:
properties[0].append(['ApplicationTypeRevision', '8.1'])
-
- if spec.get('msvs_target_platform_version'):
- target_platform_version = spec.get('msvs_target_platform_version')
- properties[0].append(['WindowsTargetPlatformVersion',
- target_platform_version])
- if spec.get('msvs_target_platform_minversion'):
- target_platform_minversion = spec.get('msvs_target_platform_minversion')
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_minversion])
- else:
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_version])
if spec.get('msvs_enable_winphone'):
properties[0].append(['ApplicationType', 'Windows Phone'])
else:
properties[0].append(['ApplicationType', 'Windows Store'])
+ platform_name = None
+ msvs_windows_sdk_version = None
+ for configuration in spec['configurations'].itervalues():
+ platform_name = platform_name or _ConfigPlatform(configuration)
+ msvs_windows_sdk_version = (msvs_windows_sdk_version or
+ _ConfigWindowsTargetPlatformVersion(configuration, version))
+ if platform_name and msvs_windows_sdk_version:
+ break
+ if msvs_windows_sdk_version:
+ properties[0].append(['WindowsTargetPlatformVersion',
+ str(msvs_windows_sdk_version)])
+ elif version.compatible_sdks:
+ raise GypError('%s requires any SDK of %o version, but non were found' %
+ (version.description, version.compatible_sdks))
+
+ if platform_name == 'ARM':
+ properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
+
return properties
+
def _GetMSBuildConfigurationDetails(spec, build_file):
properties = {}
for name, settings in spec['configurations'].iteritems():
msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
condition = _GetConfigurationCondition(name, settings)
character_set = msbuild_attributes.get('CharacterSet')
+ config_type = msbuild_attributes.get('ConfigurationType')
_AddConditionalProperty(properties, condition, 'ConfigurationType',
- msbuild_attributes['ConfigurationType'])
+ config_type)
+ if config_type == 'Driver':
+ _AddConditionalProperty(properties, condition, 'DriverType', 'WDM')
+ _AddConditionalProperty(properties, condition, 'TargetVersion',
+ _ConfigTargetVersion(settings))
if character_set:
if 'msvs_enable_winrt' not in spec :
_AddConditionalProperty(properties, condition, 'CharacterSet',
@@ -2770,6 +2838,7 @@ def _ConvertMSVSConfigurationType(config_type):
'1': 'Application',
'2': 'DynamicLibrary',
'4': 'StaticLibrary',
+ '5': 'Driver',
'10': 'Utility'
}[config_type]
return config_type
@@ -2798,6 +2867,9 @@ def _GetMSBuildAttributes(spec, config, build_file):
product_name = spec.get('product_name', '$(ProjectName)')
target_name = prefix + product_name
msbuild_attributes['TargetName'] = target_name
+ if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec:
+ ext = spec.get('product_extension')
+ msbuild_attributes['TargetExt'] = '.' + ext
if spec.get('msvs_external_builder'):
external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
@@ -2809,6 +2881,7 @@ def _GetMSBuildAttributes(spec, config, build_file):
'executable': 'Link',
'shared_library': 'Link',
'loadable_module': 'Link',
+ 'windows_driver': 'Link',
'static_library': 'Lib',
}
msbuild_tool = msbuild_tool_map.get(spec['type'])
@@ -2851,6 +2924,9 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
attributes['OutputDirectory'])
_AddConditionalProperty(properties, condition, 'TargetName',
attributes['TargetName'])
+ if 'TargetExt' in attributes:
+ _AddConditionalProperty(properties, condition, 'TargetExt',
+ attributes['TargetExt'])
if attributes.get('TargetPath'):
_AddConditionalProperty(properties, condition, 'TargetPath',
@@ -3203,6 +3279,9 @@ def _GetMSBuildProjectReferences(project):
['ReferenceOutputAssembly', 'false']
]
for config in dependency.spec.get('configurations', {}).itervalues():
+ if config.get('msvs_use_library_dependency_inputs', 0):
+ project_ref.append(['UseLibraryDependencyInputs', 'true'])
+ break
# If it's disabled in any config, turn it off in the reference.
if config.get('msvs_2010_disable_uldi_when_referenced', 0):
project_ref.append(['UseLibraryDependencyInputs', 'false'])
@@ -3295,7 +3374,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
}]
content += _GetMSBuildProjectConfigurations(configurations)
- content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
+ content += _GetMSBuildGlobalProperties(spec, version, project.guid,
+ project_file_name)
content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
if spec.get('msvs_enable_winphone'):
diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py
index 0e8ae97908..0555a4a90d 100644
--- a/tools/gyp/pylib/gyp/generator/ninja.py
+++ b/tools/gyp/pylib/gyp/generator/ninja.py
@@ -139,12 +139,18 @@ class Target(object):
self.bundle = None
# On Windows, incremental linking requires linking against all the .objs
# that compose a .lib (rather than the .lib itself). That list is stored
- # here.
+ # here. In this case, we also need to save the compile_deps for the target,
+ # so that the the target that directly depends on the .objs can also depend
+ # on those.
self.component_objs = None
+ self.compile_deps = None
# Windows only. The import .lib is the output of a build step, but
# because dependents only link against the lib (not both the lib and the
# dll) we keep track of the import library here.
self.import_lib = None
+ # Track if this target contains any C++ files, to decide if gcc or g++
+ # should be used for linking.
+ self.uses_cpp = False
def Linkable(self):
"""Return true if this is a target that can be linked against."""
@@ -372,14 +378,17 @@ class NinjaWriter(object):
self.target = Target(spec['type'])
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
- # Track if this target contains any C++ files, to decide if gcc or g++
- # should be used for linking.
- self.uses_cpp = False
+
+ self.target_rpath = generator_flags.get('target_rpath', r'\$$ORIGIN/lib/')
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None
if self.flavor == 'mac':
self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir
+
if self.flavor == 'win':
self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
generator_flags)
@@ -416,6 +425,8 @@ class NinjaWriter(object):
target = self.target_outputs[dep]
actions_depends.append(target.PreActionInput(self.flavor))
compile_depends.append(target.PreCompileInput())
+ if target.uses_cpp:
+ self.target.uses_cpp = True
actions_depends = filter(None, actions_depends)
compile_depends = filter(None, compile_depends)
actions_depends = self.WriteCollapsedDependencies('actions_depends',
@@ -441,7 +452,12 @@ class NinjaWriter(object):
# Write out the compilation steps, if any.
link_deps = []
- sources = extra_sources + spec.get('sources', [])
+ try:
+ sources = extra_sources + spec.get('sources', [])
+ except TypeError:
+ print 'extra_sources: ', str(extra_sources)
+ print 'spec.get("sources"): ', str(spec.get('sources'))
+ raise
if sources:
if self.flavor == 'mac' and len(self.archs) > 1:
# Write subninja file containing compile and link commands scoped to
@@ -474,16 +490,17 @@ class NinjaWriter(object):
elif self.flavor == 'mac' and len(self.archs) > 1:
link_deps = collections.defaultdict(list)
-
+ compile_deps = self.target.actions_stamp or actions_depends
if self.flavor == 'win' and self.target.type == 'static_library':
self.target.component_objs = link_deps
+ self.target.compile_deps = compile_deps
# Write out a link step, if needed.
output = None
is_empty_bundle = not link_deps and not mac_bundle_depends
if link_deps or self.target.actions_stamp or actions_depends:
output = self.WriteTarget(spec, config_name, config, link_deps,
- self.target.actions_stamp or actions_depends)
+ compile_deps)
if self.is_mac_bundle:
mac_bundle_depends.append(output)
@@ -555,6 +572,9 @@ class NinjaWriter(object):
if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild)
+ if self.xcode_settings and self.xcode_settings.IsIosFramework():
+ self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)
+
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
@@ -652,6 +672,7 @@ class NinjaWriter(object):
for var in special_locals:
if '${%s}' % var in argument:
needed_variables.add(var)
+ needed_variables = sorted(needed_variables)
def cygwin_munge(path):
# pylint: disable=cell-var-from-loop
@@ -725,6 +746,7 @@ class NinjaWriter(object):
# WriteNewNinjaRule uses unique_name for creating an rsp file on win.
extra_bindings.append(('unique_name',
hashlib.md5(outputs[0]).hexdigest()))
+
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
@@ -736,7 +758,11 @@ class NinjaWriter(object):
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
- env = self.GetToolchainEnv()
+ if self.xcode_settings:
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetToolchainEnv(additional_settings=extra_env)
+ else:
+ env = self.GetToolchainEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
@@ -758,18 +784,38 @@ class NinjaWriter(object):
return outputs
+ def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
+ """Prebuild steps to generate hmap files and copy headers to destination."""
+ framework = self.ComputeMacBundleOutput()
+ all_sources = spec['sources']
+ copy_headers = spec['mac_framework_headers']
+ output = self.GypPathToUniqueOutput('headers.hmap')
+ self.xcode_settings.header_map_path = output
+ all_headers = map(self.GypPathToNinja,
+ filter(lambda x:x.endswith(('.h')), all_sources))
+ variables = [('framework', framework),
+ ('copy_headers', map(self.GypPathToNinja, copy_headers))]
+ outputs.extend(self.ninja.build(
+ output, 'compile_ios_framework_headers', all_headers,
+ variables=variables, order_only=prebuild))
+
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
xcassets = []
+
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
if os.path.splitext(output)[-1] != '.xcassets':
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource'), \
- ('binary', isBinary)])
+ ('env', env), ('binary', isBinary)])
bundle_depends.append(output)
else:
xcassets.append(res)
@@ -988,7 +1034,7 @@ class NinjaWriter(object):
obj_ext = self.obj_ext
if ext in ('cc', 'cpp', 'cxx'):
command = 'cxx'
- self.uses_cpp = True
+ self.target.uses_cpp = True
elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
command = 'cc'
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
@@ -1003,7 +1049,7 @@ class NinjaWriter(object):
command = 'objc'
elif self.flavor == 'mac' and ext == 'mm':
command = 'objcxx'
- self.uses_cpp = True
+ self.target.uses_cpp = True
elif self.flavor == 'win' and ext == 'rc':
command = 'rc'
obj_ext = '.res'
@@ -1054,16 +1100,16 @@ class NinjaWriter(object):
cmd = map.get(lang)
ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
- def WriteLink(self, spec, config_name, config, link_deps):
+ def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
"""Write out a link step. Fills out target.binary. """
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteLinkForArch(
- self.ninja, spec, config_name, config, link_deps)
+ self.ninja, spec, config_name, config, link_deps, compile_deps)
else:
output = self.ComputeOutput(spec)
inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
config_name, config, link_deps[arch],
- arch=arch)
+ compile_deps, arch=arch)
for arch in self.archs]
extra_bindings = []
build_output = output
@@ -1082,7 +1128,7 @@ class NinjaWriter(object):
return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
- link_deps, arch=None):
+ link_deps, compile_deps, arch=None):
"""Write out a link step. Fills out target.binary. """
command = {
'executable': 'link',
@@ -1093,6 +1139,15 @@ class NinjaWriter(object):
implicit_deps = set()
solibs = set()
+ order_deps = set()
+
+ if compile_deps:
+ # Normally, the compiles of the target already depend on compile_deps,
+ # but a shared_library target might have no sources and only link together
+ # a few static_library deps, so the link step also needs to depend
+ # on compile_deps to make sure actions in the shared_library target
+ # get run before the link.
+ order_deps.add(compile_deps)
if 'dependencies' in spec:
# Two kinds of dependencies:
@@ -1111,6 +1166,8 @@ class NinjaWriter(object):
target.component_objs and
self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
new_deps = target.component_objs
+ if target.compile_deps:
+ order_deps.add(target.compile_deps)
elif self.flavor == 'win' and target.import_lib:
new_deps = [target.import_lib]
elif target.UsesToc(self.flavor):
@@ -1128,7 +1185,7 @@ class NinjaWriter(object):
implicit_deps.add(final_output)
extra_bindings = []
- if self.uses_cpp and self.flavor != 'win':
+ if self.target.uses_cpp and self.flavor != 'win':
extra_bindings.append(('ld', '$ldxx'))
output = self.ComputeOutput(spec, arch)
@@ -1171,7 +1228,9 @@ class NinjaWriter(object):
rpath = 'lib/'
if self.toolset != 'target':
rpath += self.toolset
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ else:
+ ldflags.append('-Wl,-rpath=%s' % self.target_rpath)
ldflags.append('-Wl,-rpath-link=%s' % rpath)
self.WriteVariableList(ninja_file, 'ldflags',
map(self.ExpandSpecial, ldflags))
@@ -1245,10 +1304,12 @@ class NinjaWriter(object):
if len(solibs):
- extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
+ extra_bindings.append(('solibs',
+ gyp.common.EncodePOSIXShellList(sorted(solibs))))
ninja_file.build(output, command + command_suffix, link_deps,
- implicit=list(implicit_deps),
+ implicit=sorted(implicit_deps),
+ order_only=list(order_deps),
variables=extra_bindings)
return linked_binary
@@ -1263,7 +1324,7 @@ class NinjaWriter(object):
self.target.type = 'none'
elif spec['type'] == 'static_library':
self.target.binary = self.ComputeOutput(spec)
- if (self.flavor not in ('mac', 'openbsd', 'win') and not
+ if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
self.is_standalone_static_library):
self.ninja.build(self.target.binary, 'alink_thin', link_deps,
order_only=compile_deps)
@@ -1300,7 +1361,8 @@ class NinjaWriter(object):
# needed.
variables=variables)
else:
- self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
+ self.target.binary = self.WriteLink(spec, config_name, config, link_deps,
+ compile_deps)
return self.target.binary
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
@@ -1313,9 +1375,13 @@ class NinjaWriter(object):
self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
is_command_start=not package_framework)
if package_framework and not is_empty:
- variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
- self.ninja.build(output, 'package_framework', mac_bundle_depends,
- variables=variables)
+ if spec['type'] == 'shared_library' and self.xcode_settings.isIOS:
+ self.ninja.build(output, 'package_ios_framework', mac_bundle_depends,
+ variables=variables)
+ else:
+ variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
+ self.ninja.build(output, 'package_framework', mac_bundle_depends,
+ variables=variables)
else:
self.ninja.build(output, 'stamp', mac_bundle_depends,
variables=variables)
@@ -1802,7 +1868,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
# Put build-time support tools in out/{config_name}.
- gyp.common.CopyTool(flavor, toplevel_build)
+ gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
# Grab make settings for CC/CXX.
# The rules are
@@ -1828,7 +1894,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
ld_host = '$cc_host'
ldxx_host = '$cxx_host'
- ar_host = 'ar'
+ ar_host = ar
cc_host = None
cxx_host = None
cc_host_global_setting = None
@@ -1883,6 +1949,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
wrappers[key_prefix] = os.path.join(build_to_root, value)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
+
if flavor == 'win':
configs = [target_dicts[qualified_target]['configurations'][config_name]
for qualified_target in target_list]
@@ -1893,7 +1963,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
configs, generator_flags)
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
toplevel_build, generator_flags, shared_system_includes, OpenOutput)
- for arch, path in cl_paths.iteritems():
+ for arch, path in sorted(cl_paths.iteritems()):
if clang_cl:
# If we have selected clang-cl, use that instead.
path = clang_cl
@@ -2217,6 +2287,12 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='COMPILE XCASSETS $in',
command='$env ./gyp-mac-tool compile-xcassets $keys $in')
master_ninja.rule(
+ 'compile_ios_framework_headers',
+ description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in',
+ command='$env ./gyp-mac-tool compile-ios-framework-header-map $out '
+ '$framework $in && $env ./gyp-mac-tool '
+ 'copy-ios-framework-headers $framework $copy_headers')
+ master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
@@ -2225,6 +2301,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
command='./gyp-mac-tool package-framework $out $version$postbuilds '
'&& touch $out')
+ master_ninja.rule(
+ 'package_ios_framework',
+ description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-ios-framework $out $postbuilds '
+ '&& touch $out')
if flavor == 'win':
master_ninja.rule(
'stamp',
@@ -2329,7 +2410,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# able to run actions and build libraries by their short name.
master_ninja.newline()
master_ninja.comment('Short names for targets.')
- for short_name in target_short_names:
+ for short_name in sorted(target_short_names):
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
target_short_names[short_name]])
@@ -2345,7 +2426,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if all_outputs:
master_ninja.newline()
- master_ninja.build('all', 'phony', list(all_outputs))
+ master_ninja.build('all', 'phony', sorted(all_outputs))
master_ninja.default(generator_flags.get('default_target', 'all'))
master_ninja_file.close()
diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py
index 482b53ac8a..db99d6ab81 100644
--- a/tools/gyp/pylib/gyp/generator/xcode.py
+++ b/tools/gyp/pylib/gyp/generator/xcode.py
@@ -77,6 +77,7 @@ generator_additional_non_configuration_keys = [
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
+ 'mac_xcuitest_bundle',
'xcode_create_dependents_test_runner',
]
@@ -87,6 +88,8 @@ generator_extra_sources_for_rules = [
'mac_framework_private_headers',
]
+generator_filelist_paths = None
+
# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
@@ -578,6 +581,26 @@ def PerformBuild(data, configurations, params):
subprocess.check_call(arguments)
+def CalculateGeneratorInputInfo(params):
+ toplevel = params['options'].toplevel_dir
+ if params.get('flavor') == 'ninja':
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+ output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles-xcode-ninja'))
+ else:
+ output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, output_dir, 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
+
def GenerateOutput(target_list, target_dicts, data, params):
# Optionally configure each spec to use ninja as the external builder.
ninja_wrapper = params.get('flavor') == 'ninja'
@@ -590,6 +613,15 @@ def GenerateOutput(target_list, target_dicts, data, params):
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
+ upgrade_check_project_version = \
+ generator_flags.get('xcode_upgrade_check_project_version', None)
+
+ # Format upgrade_check_project_version with leading zeros as needed.
+ if upgrade_check_project_version:
+ upgrade_check_project_version = str(upgrade_check_project_version)
+ while len(upgrade_check_project_version) < 4:
+ upgrade_check_project_version = '0' + upgrade_check_project_version
+
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
@@ -604,9 +636,17 @@ def GenerateOutput(target_list, target_dicts, data, params):
xcode_projects[build_file] = xcp
pbxp = xcp.project
+ # Set project-level attributes from multiple options
+ project_attributes = {};
if parallel_builds:
- pbxp.SetProperty('attributes',
- {'BuildIndependentTargetsInParallel': 'YES'})
+ project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
+ if upgrade_check_project_version:
+ project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
+ project_attributes['LastTestingUpgradeCheck'] = \
+ upgrade_check_project_version
+ project_attributes['LastSwiftUpdateCheck'] = \
+ upgrade_check_project_version
+ pbxp.SetProperty('attributes', project_attributes)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
@@ -648,14 +688,18 @@ def GenerateOutput(target_list, target_dicts, data, params):
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
+ 'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
+ 'loadable_module+xcuitest': 'com.apple.product-type.bundle.ui-testing',
'shared_library+bundle': 'com.apple.product-type.framework',
'executable+extension+bundle': 'com.apple.product-type.app-extension',
'executable+watch+extension+bundle':
'com.apple.product-type.watchkit-extension',
- 'executable+watch+bundle': 'com.apple.product-type.application.watchapp',
+ 'executable+watch+bundle':
+ 'com.apple.product-type.application.watchapp',
+ 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
}
target_properties = {
@@ -665,13 +709,19 @@ def GenerateOutput(target_list, target_dicts, data, params):
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
+ is_xcuitest = int(spec.get('mac_xcuitest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
is_app_extension = int(spec.get('ios_app_extension', 0))
is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
is_watch_app = int(spec.get('ios_watch_app', 0))
if type != 'none':
type_bundle_key = type
- if is_xctest:
+ if is_xcuitest:
+ type_bundle_key += '+xcuitest'
+ assert type == 'loadable_module', (
+ 'mac_xcuitest_bundle targets must have type loadable_module '
+ '(target %s)' % target_name)
+ elif is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
@@ -703,6 +753,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
+ assert not is_xcuitest, (
+ 'mac_xcuitest_bundle targets cannot have type none (target "%s")' %
+ target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py
index bc68c3765d..a046a15cc1 100644
--- a/tools/gyp/pylib/gyp/input.py
+++ b/tools/gyp/pylib/gyp/input.py
@@ -28,7 +28,13 @@ from gyp.common import OrderedSet
# A list of types that are treated as linkable.
-linkable_types = ['executable', 'shared_library', 'loadable_module']
+linkable_types = [
+ 'executable',
+ 'shared_library',
+ 'loadable_module',
+ 'mac_kernel_extension',
+ 'windows_driver',
+]
# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']
@@ -1534,11 +1540,15 @@ class DependencyGraphNode(object):
# dependents.
flat_list = OrderedSet()
+ def ExtractNodeRef(node):
+ """Extracts the object that the node represents from the given node."""
+ return node.ref
+
# in_degree_zeros is the list of DependencyGraphNodes that have no
# dependencies not in flat_list. Initially, it is a copy of the children
# of this node, because when the graph was built, nodes with no
# dependencies were made implicit dependents of the root node.
- in_degree_zeros = set(self.dependents[:])
+ in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
while in_degree_zeros:
# Nodes in in_degree_zeros have no dependencies not in flat_list, so they
@@ -1550,12 +1560,13 @@ class DependencyGraphNode(object):
# Look at dependents of the node just added to flat_list. Some of them
# may now belong in in_degree_zeros.
- for node_dependent in node.dependents:
+ for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
is_in_degree_zero = True
# TODO: We want to check through the
# node_dependent.dependencies list but if it's long and we
# always start at the beginning, then we get O(n^2) behaviour.
- for node_dependent_dependency in node_dependent.dependencies:
+ for node_dependent_dependency in (sorted(node_dependent.dependencies,
+ key=ExtractNodeRef)):
if not node_dependent_dependency.ref in flat_list:
# The dependent one or more dependencies not in flat_list. There
# will be more chances to add it to flat_list when examining
@@ -1568,7 +1579,7 @@ class DependencyGraphNode(object):
# All of the dependent's dependencies are already in flat_list. Add
# it to in_degree_zeros where it will be processed in a future
# iteration of the outer loop.
- in_degree_zeros.add(node_dependent)
+ in_degree_zeros += [node_dependent]
return list(flat_list)
@@ -1724,11 +1735,13 @@ class DependencyGraphNode(object):
dependencies.add(self.ref)
return dependencies
- # Executables and loadable modules are already fully and finally linked.
- # Nothing else can be a link dependency of them, there can only be
- # dependencies in the sense that a dependent target might run an
- # executable or load the loadable_module.
- if not initial and target_type in ('executable', 'loadable_module'):
+ # Executables, mac kernel extensions, windows drivers and loadable modules
+ # are already fully and finally linked. Nothing else can be a link
+ # dependency of them, there can only be dependencies in the sense that a
+ # dependent target might run an executable or load the loadable_module.
+ if not initial and target_type in ('executable', 'loadable_module',
+ 'mac_kernel_extension',
+ 'windows_driver'):
return dependencies
# Shared libraries are already fully linked. They should only be included
@@ -2479,7 +2492,7 @@ def ValidateTargetType(target, target_dict):
"""
VALID_TARGET_TYPES = ('executable', 'loadable_module',
'static_library', 'shared_library',
- 'none')
+ 'mac_kernel_extension', 'none', 'windows_driver')
target_type = target_dict.get('type', None)
if target_type not in VALID_TARGET_TYPES:
raise GypError("Target %s has an invalid target type '%s'. "
diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py
index eeeaceb0c7..b0363cc393 100755
--- a/tools/gyp/pylib/gyp/mac_tool.py
+++ b/tools/gyp/pylib/gyp/mac_tool.py
@@ -17,6 +17,7 @@ import plistlib
import re
import shutil
import string
+import struct
import subprocess
import sys
import tempfile
@@ -48,6 +49,7 @@ class MacTool(object):
def ExecCopyBundleResource(self, source, dest, convert_to_binary):
"""Copies a resource file to the bundle/Resources directory, performing any
necessary compilation on each resource."""
+ convert_to_binary = convert_to_binary == 'True'
extension = os.path.splitext(source)[1].lower()
if os.path.isdir(source):
# Copy tree.
@@ -61,11 +63,16 @@ class MacTool(object):
return self._CopyXIBFile(source, dest)
elif extension == '.storyboard':
return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
+ elif extension == '.strings' and not convert_to_binary:
+ self._CopyStringsFile(source, dest)
else:
+ if os.path.exists(dest):
+ os.unlink(dest)
shutil.copy(source, dest)
+ if convert_to_binary and extension in ('.plist', '.strings'):
+ self._ConvertToBinary(dest)
+
def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
@@ -76,8 +83,26 @@ class MacTool(object):
if os.path.relpath(dest):
dest = os.path.join(base, dest)
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
+ args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']
+
+ if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
+ args.extend(['--auto-activate-custom-fonts'])
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
+ args.extend([
+ '--target-device', 'iphone', '--target-device', 'ipad',
+ '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
+ ])
+ else:
+ args.extend([
+ '--target-device', 'mac',
+ '--minimum-deployment-target',
+ os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ ])
+
+ args.extend(['--output-format', 'human-readable-text', '--compile', dest,
+ source])
+
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
@@ -96,7 +121,7 @@ class MacTool(object):
subprocess.check_call([
'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
- def _CopyStringsFile(self, source, dest, convert_to_binary):
+ def _CopyStringsFile(self, source, dest):
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
input_code = self._DetectInputEncoding(source) or "UTF-8"
@@ -116,16 +141,13 @@ class MacTool(object):
fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
fp = open(file_name, 'rb')
try:
header = fp.read(3)
- except e:
+ except:
fp.close()
return None
fp.close()
@@ -153,7 +175,7 @@ class MacTool(object):
# Go through all the environment variables and replace them as variables in
# the file.
- IDENT_RE = re.compile(r'[/\s]')
+ IDENT_RE = re.compile(r'[_/\s]')
for key in os.environ:
if key.startswith('_'):
continue
@@ -228,7 +250,8 @@ class MacTool(object):
def ExecFilterLibtool(self, *cmd_list):
"""Calls libtool and filters out '/path/to/libtool: file: foo.o has no
symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
+ libtool_re = re.compile(r'^.*libtool: (?:for architecture: \S* )?'
+ r'file: .* has no symbols$')
libtool_re5 = re.compile(
r'^.*libtool: warning for library: ' +
r'.* the table of contents is empty ' +
@@ -253,6 +276,23 @@ class MacTool(object):
break
return libtoolout.returncode
+ def ExecPackageIosFramework(self, framework):
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+ module_path = os.path.join(framework, 'Modules');
+ if not os.path.exists(module_path):
+ os.mkdir(module_path)
+ module_template = 'framework module %s {\n' \
+ ' umbrella header "%s.h"\n' \
+ '\n' \
+ ' export *\n' \
+ ' module * { export * }\n' \
+ '}\n' % (binary, binary)
+
+ module_file = open(os.path.join(module_path, 'module.modulemap'), "w")
+ module_file.write(module_template)
+ module_file.close()
+
def ExecPackageFramework(self, framework, version):
"""Takes a path to Something.framework and the Current version of that and
sets up all the symlinks."""
@@ -289,6 +329,23 @@ class MacTool(object):
os.remove(link)
os.symlink(dest, link)
+ def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
+ framework_name = os.path.basename(framework).split('.')[0]
+ all_headers = map(os.path.abspath, all_headers)
+ filelist = {}
+ for header in all_headers:
+ filename = os.path.basename(header)
+ filelist[filename] = header
+ filelist[os.path.join(framework_name, filename)] = header
+ WriteHmap(out, filelist)
+
+ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
+ header_path = os.path.join(framework, 'Headers');
+ if not os.path.exists(header_path):
+ os.makedirs(header_path)
+ for header in copy_headers:
+ shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
+
def ExecCompileXcassets(self, keys, *inputs):
"""Compiles multiple .xcassets files into a single .car file.
@@ -349,49 +406,28 @@ class MacTool(object):
self._MergePlist(merged_plist, plist)
plistlib.writePlist(merged_plist, output)
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
+ def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
"""Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
+ 1. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
+ 2. copy Entitlements.plist from user or SDK next to the bundle,
+ 3. code sign the bundle.
"""
- resource_rules_path = self._InstallResourceRules(resource_rules)
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier())
entitlements_path = self._InstallEntitlements(
entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
+ args = ['codesign', '--force', '--sign', key]
+ if preserve == 'True':
+ args.extend(['--deep', '--preserve-metadata=identifier,entitlements'])
+ else:
+ args.extend(['--entitlements', entitlements_path])
+ args.extend(['--timestamp=none', path])
+ subprocess.check_call(args)
def _InstallProvisioningProfile(self, profile, bundle_identifier):
"""Installs embedded.mobileprovision into the bundle.
@@ -606,5 +642,71 @@ class MacTool(object):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
+def NextGreaterPowerOf2(x):
+ return 2**(x).bit_length()
+
+def WriteHmap(output_name, filelist):
+ """Generates a header map based on |filelist|.
+
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
+
+ The implementation below and the comment above comes from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
+ magic = 1751998832
+ version = 1
+ _reserved = 0
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+ max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
+
+ out = open(output_name, "wb")
+ out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+ count, capacity, max_value_length))
+
+ # Create empty hashmap buckets.
+ buckets = [None] * capacity
+ for file, path in filelist.items():
+ key = 0
+ for c in file:
+ key += ord(c.lower()) * 13
+
+ # Fill next empty bucket.
+ while buckets[key & capacity - 1] is not None:
+ key = key + 1
+ buckets[key & capacity - 1] = (file, path)
+
+ next_offset = 1
+ for bucket in buckets:
+ if bucket is None:
+ out.write(struct.pack('<LLL', 0, 0, 0))
+ else:
+ (file, path) = bucket
+ key_offset = next_offset
+ prefix_offset = key_offset + len(file) + 1
+ suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+ next_offset = suffix_offset + len(os.path.basename(path)) + 1
+ out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
+
+ # Pad byte since next offset starts at 1.
+ out.write(struct.pack('<x'))
+
+ for bucket in buckets:
+ if bucket is not None:
+ (file, path) = bucket
+ out.write(struct.pack('<%ds' % len(file), file))
+ out.write(struct.pack('<s', '\0'))
+ base = os.path.dirname(path) + os.sep
+ out.write(struct.pack('<%ds' % len(base), base))
+ out.write(struct.pack('<s', '\0'))
+ path = os.path.basename(path)
+ out.write(struct.pack('<%ds' % len(path), path))
+ out.write(struct.pack('<s', '\0'))
+
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py
index ca67b122f0..e4a85a96e6 100644
--- a/tools/gyp/pylib/gyp/msvs_emulation.py
+++ b/tools/gyp/pylib/gyp/msvs_emulation.py
@@ -485,8 +485,9 @@ class MsvsSettings(object):
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
- pch = os.path.split(self.msvs_precompiled_header[config])[1]
- return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
+ pch = self.msvs_precompiled_header[config]
+ pchbase = os.path.split(pch)[1]
+ return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pchbase + '.pch']
return []
def GetCflagsC(self, config):
@@ -888,7 +889,7 @@ class PrecompiledHeader(object):
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
- return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
+ return self.settings.msvs_precompiled_header[self.config]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
@@ -961,6 +962,10 @@ def _ExtractImportantEnvironment(output_of_set):
'tmp',
)
env = {}
+ # This occasionally happens and leads to misleading SYSTEMROOT error messages
+ # if not caught here.
+ if output_of_set.count('=') == 0:
+ raise Exception('Invalid output_of_set. Value is:\n%s' % output_of_set)
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
@@ -1029,6 +1034,8 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
+ if popen.returncode != 0:
+ raise Exception('"%s" failed with error %d' % (args, popen.returncode))
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py
index bb6f1ea436..1c843a0b6c 100755
--- a/tools/gyp/pylib/gyp/win_tool.py
+++ b/tools/gyp/pylib/gyp/win_tool.py
@@ -116,11 +116,19 @@ class WinTool(object):
env = self._GetEnv(arch)
if use_separate_mspdbsrv == 'True':
self._UseSeparateMspdbsrv(env, args)
- link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
- shell=True,
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ if sys.platform == 'win32':
+ args = list(args) # *args is a tuple by default, which is read-only.
+ args[0] = args[0].replace('/', '\\')
+ # https://docs.python.org/2/library/subprocess.html:
+ # "On Unix with shell=True [...] if args is a sequence, the first item
+ # specifies the command string, and any additional items will be treated as
+ # additional arguments to the shell itself. That is to say, Popen does the
+ # equivalent of:
+ # Popen(['/bin/sh', '-c', args[0], args[1], ...])"
+ # For that reason, since going through the shell doesn't seem necessary on
+ # non-Windows don't do that there.
+ link = subprocess.Popen(args, shell=sys.platform == 'win32', env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = link.communicate()
for line in out.splitlines():
if (not line.startswith(' Creating library ') and
diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py
index 407ead074b..dba8e7699e 100644
--- a/tools/gyp/pylib/gyp/xcode_emulation.py
+++ b/tools/gyp/pylib/gyp/xcode_emulation.py
@@ -147,6 +147,7 @@ class XcodeSettings(object):
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
+ _platform_path_cache = {}
_sdk_root_cache = {}
# Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
@@ -161,6 +162,8 @@ class XcodeSettings(object):
self.spec = spec
self.isIOS = False
+ self.mac_toolchain_dir = None
+ self.header_map_path = None
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
@@ -221,8 +224,19 @@ class XcodeSettings(object):
default)
return format == "binary"
+ def IsIosFramework(self):
+ return self.spec['type'] == 'shared_library' and self._IsBundle() and \
+ self.isIOS
+
def _IsBundle(self):
- return int(self.spec.get('mac_bundle', 0)) != 0
+ return int(self.spec.get('mac_bundle', 0)) != 0 or self._IsXCTest() or \
+ self._IsXCUiTest()
+
+ def _IsXCTest(self):
+ return int(self.spec.get('mac_xctest_bundle', 0)) != 0
+
+ def _IsXCUiTest(self):
+ return int(self.spec.get('mac_xcuitest_bundle', 0)) != 0
def _IsIosAppExtension(self):
return int(self.spec.get('ios_app_extension', 0)) != 0
@@ -298,11 +312,62 @@ class XcodeSettings(object):
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
+ def GetBundleExecutableFolderPath(self):
+ """Returns the qualified path to the bundle's executables folder. E.g.
+ Chromium.app/Contents/MacOS. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] in ('shared_library') or self.isIOS:
+ return self.GetBundleContentsFolderPath()
+ elif self.spec['type'] in ('executable', 'loadable_module'):
+ return os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
+
+ def GetBundleJavaFolderPath(self):
+ """Returns the qualified path to the bundle's Java resource folder.
+ E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleResourceFolder(), 'Java')
+
+ def GetBundleFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/Frameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'Frameworks')
+
+ def GetBundleSharedFrameworksFolderPath(self):
+ """Returns the qualified path to the bundle's frameworks folder. E.g,
+ Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'SharedFrameworks')
+
+ def GetBundleSharedSupportFolderPath(self):
+ """Returns the qualified path to the bundle's shared support folder. E.g,
+ Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+ assert self._IsBundle()
+ if self.spec['type'] == 'shared_library':
+ return self.GetBundleResourceFolder()
+ else:
+ return os.path.join(self.GetBundleContentsFolderPath(),
+ 'SharedSupport')
+
+ def GetBundlePlugInsFolderPath(self):
+ """Returns the qualified path to the bundle's plugins folder. E.g,
+ Chromium.app/Contents/PlugIns. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'PlugIns')
+
+ def GetBundleXPCServicesFolderPath(self):
+ """Returns the qualified path to the bundle's XPC services folder. E.g,
+ Chromium.app/Contents/XPCServices. Only valid for bundles."""
+ assert self._IsBundle()
+ return os.path.join(self.GetBundleContentsFolderPath(), 'XPCServices')
+
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
- if self.spec['type'] in ('executable', 'loadable_module'):
+ if self.spec['type'] in ('executable', 'loadable_module') or \
+ self.IsIosFramework():
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
@@ -322,6 +387,10 @@ class XcodeSettings(object):
assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
'(target %s)' % self.spec['target_name'])
return 'com.apple.product-type.application.watchapp'
+ if self._IsXCUiTest():
+ assert self._IsBundle(), ('mac_xcuitest_bundle flag requires mac_bundle '
+ '(target %s)' % self.spec['target_name'])
+ return 'com.apple.product-type.bundle.ui-testing'
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
@@ -352,11 +421,8 @@ class XcodeSettings(object):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
- if self.spec['type'] in ('shared_library') or self.isIOS:
- path = self.GetBundleContentsFolderPath()
- elif self.spec['type'] in ('executable', 'loadable_module'):
- path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
- return os.path.join(path, self.GetExecutableName())
+ return os.path.join(self.GetBundleExecutableFolderPath(), \
+ self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
@@ -407,8 +473,8 @@ class XcodeSettings(object):
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
- """Returns the directory name of the bundle represented by this target. E.g.
- Chromium.app/Contents/MacOS/Chromium."""
+ """Returns the qualified path to the primary executable of the bundle
+ represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
@@ -429,7 +495,7 @@ class XcodeSettings(object):
# Since the CLT has no SDK paths anyway, returning None is the
# most sensible route and should still do the right thing.
try:
- return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
+ return GetStdout(['xcrun', '--sdk', sdk, infoitem])
except:
pass
@@ -438,6 +504,14 @@ class XcodeSettings(object):
configname = self.configname
return self.GetPerConfigSetting('SDKROOT', configname, default='')
+ def _XcodePlatformPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root not in XcodeSettings._platform_path_cache:
+ platform_path = self._GetSdkVersionInfoItem(sdk_root,
+ '--show-sdk-platform-path')
+ XcodeSettings._platform_path_cache[sdk_root] = platform_path
+ return XcodeSettings._platform_path_cache[sdk_root]
+
def _SdkPath(self, configname=None):
sdk_root = self._SdkRoot(configname)
if sdk_root.startswith('/'):
@@ -446,7 +520,7 @@ class XcodeSettings(object):
def _XcodeSdkPath(self, sdk_root):
if sdk_root not in XcodeSettings._sdk_path_cache:
- sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
+ sdk_path = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-path')
XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
if sdk_root:
XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
@@ -477,6 +551,9 @@ class XcodeSettings(object):
if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root)
+ if self.header_map_path:
+ cflags.append('-I%s' % self.header_map_path)
+
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
@@ -568,6 +645,10 @@ class XcodeSettings(object):
cflags += self._Settings().get('WARNING_CFLAGS', [])
+ platform_root = self._XcodePlatformPath(configname)
+ if platform_root and self._IsXCTest():
+ cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+
if sdk_root:
framework_root = sdk_root
else:
@@ -814,7 +895,8 @@ class XcodeSettings(object):
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
- ldflags.append('-L' + product_dir)
+ # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838
+ ldflags.append('-L' + (product_dir if product_dir != '.' else './'))
install_name = self.GetInstallName()
if install_name and self.spec['type'] != 'loadable_module':
@@ -831,19 +913,24 @@ class XcodeSettings(object):
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+ platform_root = self._XcodePlatformPath(configname)
+ if sdk_root and platform_root and self._IsXCTest():
+ ldflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+ ldflags.append('-framework XCTest')
+
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
if sdk_root and is_extension:
# Adds the link flags for extensions. These flags are common for all
# extensions and provide loader and main function.
# These flags reflect the compilation options used by xcode to compile
# extensions.
- ldflags.append('-lpkstart')
if XcodeVersion() < '0900':
+ ldflags.append('-lpkstart')
ldflags.append(sdk_root +
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
+ else:
+ ldflags.append('-e _NSExtensionMain')
ldflags.append('-fapplication-extension')
- ldflags.append('-Xlinker -rpath '
- '-Xlinker @executable_path/../../Frameworks')
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
@@ -917,7 +1004,8 @@ class XcodeSettings(object):
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
- if self.spec['type'] == 'loadable_module' and self._IsBundle():
+ if ((self.spec['type'] == 'loadable_module' or self._IsIosAppExtension())
+ and self._IsBundle()):
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
@@ -972,13 +1060,25 @@ class XcodeSettings(object):
"""Return a shell command to codesign the iOS output binary so it can
be deployed to a device. This should be run as the very last step of the
build."""
- if not (self.isIOS and self.spec['type'] == 'executable'):
+ if not (self.isIOS and
+ (self.spec['type'] == 'executable' or self._IsXCTest()) or
+ self.IsIosFramework()):
return []
+ postbuilds = []
+ product_name = self.GetFullProductName()
settings = self.xcode_settings[configname]
+
+ # Xcode expects XCTests to be copied into the TEST_HOST dir.
+ if self._IsXCTest():
+ source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name)
+ test_host = os.path.dirname(settings.get('TEST_HOST'));
+ xctest_destination = os.path.join(test_host, 'PlugIns', product_name)
+ postbuilds.extend(['ditto %s %s' % (source, xctest_destination)])
+
key = self._GetIOSCodeSignIdentityKey(settings)
if not key:
- return []
+ return postbuilds
# Warn for any unimplemented signing xcode keys.
unimpl = ['OTHER_CODE_SIGN_FLAGS']
@@ -987,12 +1087,41 @@ class XcodeSettings(object):
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
- return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
+ if self._IsXCTest():
+ # For device xctests, Xcode copies two extra frameworks into $TEST_HOST.
+ test_host = os.path.dirname(settings.get('TEST_HOST'));
+ frameworks_dir = os.path.join(test_host, 'Frameworks')
+ platform_root = self._XcodePlatformPath(configname)
+ frameworks = \
+ ['Developer/Library/PrivateFrameworks/IDEBundleInjection.framework',
+ 'Developer/Library/Frameworks/XCTest.framework']
+ for framework in frameworks:
+ source = os.path.join(platform_root, framework)
+ destination = os.path.join(frameworks_dir, os.path.basename(framework))
+ postbuilds.extend(['ditto %s %s' % (source, destination)])
+
+ # Then re-sign everything with 'preserve=True'
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''), destination, True)
+ ])
+ plugin_dir = os.path.join(test_host, 'PlugIns')
+ targets = [os.path.join(plugin_dir, product_name), test_host]
+ for target in targets:
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
+ os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+ settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+ settings.get('PROVISIONING_PROFILE', ''), target, True)
+ ])
+
+ postbuilds.extend(['%s code-sign-bundle "%s" "%s" "%s" "%s" %s' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
- settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
- settings.get('PROVISIONING_PROFILE', ''))
- ]
+ settings.get('PROVISIONING_PROFILE', ''),
+ os.path.join("${BUILT_PRODUCTS_DIR}", product_name), False)
+ ])
+ return postbuilds
def _GetIOSCodeSignIdentityKey(self, settings):
identity = settings.get('CODE_SIGN_IDENTITY')
@@ -1074,25 +1203,37 @@ class XcodeSettings(object):
xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
+ compiler = self.xcode_settings[configname].get('GCC_VERSION')
+ if compiler is not None:
+ cache['DTCompiler'] = compiler
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
- cache['DTSDKName'] = sdk_root
- if xcode >= '0430':
+ sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version')
+ cache['DTSDKName'] = sdk_root + (sdk_version or '')
+ if xcode >= '0720':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductBuildVersion')
+ sdk_root, '--show-sdk-build-version')
+ elif xcode >= '0430':
+ cache['DTSDKBuild'] = sdk_version
else:
cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
if self.isIOS:
- cache['DTPlatformName'] = cache['DTSDKName']
+ cache['MinimumOSVersion'] = self.xcode_settings[configname].get(
+ 'IPHONEOS_DEPLOYMENT_TARGET')
+ cache['DTPlatformName'] = sdk_root
+ cache['DTPlatformVersion'] = sdk_version
+
if configname.endswith("iphoneos"):
- cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductVersion')
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
+ cache['DTPlatformBuild'] = cache['DTSDKBuild']
else:
cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
+ # This is weird, but Xcode sets DTPlatformBuild to an empty field
+ # for simulator builds.
+ cache['DTPlatformBuild'] = ""
XcodeSettings._plist_cache[configname] = cache
# Include extra plist items that are per-target, not per global
@@ -1334,7 +1475,10 @@ def IsMacBundle(flavor, spec):
Bundles are directories with a certain subdirectory structure, instead of
just a single file. Bundle rules do not produce a binary but also package
resources into that directory."""
- is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
+ is_mac_bundle = int(spec.get('mac_xctest_bundle', 0)) != 0 or \
+ int(spec.get('mac_xcuitest_bundle', 0)) != 0 or \
+ (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
+
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
@@ -1444,14 +1588,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings: An optional dict with more values to add to the
result.
"""
+
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
- # These are filled in on a as-needed basis.
+ # These are filled in on an as-needed basis.
env = {
+ 'BUILT_FRAMEWORKS_DIR' : built_products_dir,
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
@@ -1462,12 +1608,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
+ 'XCODE_VERSION_ACTUAL' : XcodeVersion()[0],
}
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
+ if xcode_settings.mac_toolchain_dir:
+ env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir
+
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
@@ -1478,10 +1628,27 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
+ # xcodeproj_file.py sets the same Xcode subfolder value for this as for
+ # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value.
+ env['BUILT_FRAMEWORKS_DIR'] = \
+ os.path.join(built_products_dir + os.sep \
+ + xcode_settings.GetBundleFrameworksFolderPath())
env['CONTENTS_FOLDER_PATH'] = \
- xcode_settings.GetBundleContentsFolderPath()
+ xcode_settings.GetBundleContentsFolderPath()
+ env['EXECUTABLE_FOLDER_PATH'] = \
+ xcode_settings.GetBundleExecutableFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
+ env['JAVA_FOLDER_PATH'] = xcode_settings.GetBundleJavaFolderPath()
+ env['FRAMEWORKS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleFrameworksFolderPath()
+ env['SHARED_FRAMEWORKS_FOLDER_PATH'] = \
+ xcode_settings.GetBundleSharedFrameworksFolderPath()
+ env['SHARED_SUPPORT_FOLDER_PATH'] = \
+ xcode_settings.GetBundleSharedSupportFolderPath()
+ env['PLUGINS_FOLDER_PATH'] = xcode_settings.GetBundlePlugInsFolderPath()
+ env['XPCSERVICES_FOLDER_PATH'] = \
+ xcode_settings.GetBundleXPCServicesFolderPath()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
@@ -1495,8 +1662,6 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
sdk_root = xcode_settings._SdkRoot(configuration)
if not sdk_root:
sdk_root = xcode_settings._XcodeSdkPath('')
- if sdk_root is None:
- sdk_root = ''
env['SDKROOT'] = sdk_root
if not additional_settings:
@@ -1612,11 +1777,12 @@ def _AddIOSDeviceConfigurations(targets):
for target_dict in targets.itervalues():
toolset = target_dict['toolset']
configs = target_dict['configurations']
- for config_name, config_dict in dict(configs).iteritems():
- iphoneos_config_dict = copy.deepcopy(config_dict)
+ for config_name, simulator_config_dict in dict(configs).iteritems():
+ iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
configs[config_name + '-iphoneos'] = iphoneos_config_dict
- configs[config_name + '-iphonesimulator'] = config_dict
+ configs[config_name + '-iphonesimulator'] = simulator_config_dict
if toolset == 'target':
+ simulator_config_dict['xcode_settings']['SDKROOT'] = 'iphonesimulator'
iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
return targets
diff --git a/tools/gyp/pylib/gyp/xcode_ninja.py b/tools/gyp/pylib/gyp/xcode_ninja.py
index 3820d6bf04..bc76ffff4e 100644
--- a/tools/gyp/pylib/gyp/xcode_ninja.py
+++ b/tools/gyp/pylib/gyp/xcode_ninja.py
@@ -92,11 +92,16 @@ def _TargetFromSpec(old_spec, params):
new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
+ for key in ['BUNDLE_LOADER', 'TEST_HOST']:
+ if key in old_xcode_settings:
+ new_xcode_settings[key] = old_xcode_settings[key]
+
ninja_target['configurations'][config] = {}
ninja_target['configurations'][config]['xcode_settings'] = \
new_xcode_settings
ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
+ ninja_target['mac_xctest_bundle'] = old_spec.get('mac_xctest_bundle', 0)
ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
ninja_target['ios_watchkit_extension'] = \
old_spec.get('ios_watchkit_extension', 0)
@@ -138,9 +143,10 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
if target_extras is not None and re.search(target_extras, target_name):
return True
- # Otherwise just show executable targets.
- if spec.get('type', '') == 'executable' and \
- spec.get('product_extension', '') != 'bundle':
+ # Otherwise just show executable targets and xc_tests.
+ if (int(spec.get('mac_xctest_bundle', 0)) != 0 or
+ (spec.get('type', '') == 'executable' and
+ spec.get('product_extension', '') != 'bundle')):
# If there is a filter and the target does not match, exclude the target.
if executable_target_pattern is not None:
@@ -227,13 +233,26 @@ def CreateWrapper(target_list, target_dicts, data, params):
# Tell Xcode to look everywhere for headers.
sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
+ # Put excluded files into the sources target so they can be opened in Xcode.
+ skip_excluded_files = \
+ not generator_flags.get('xcode_ninja_list_excluded_files', True)
+
sources = []
for target, target_dict in target_dicts.iteritems():
base = os.path.dirname(target)
files = target_dict.get('sources', []) + \
target_dict.get('mac_bundle_resources', [])
+
+ if not skip_excluded_files:
+ files.extend(target_dict.get('sources_excluded', []) +
+ target_dict.get('mac_bundle_resources_excluded', []))
+
for action in target_dict.get('actions', []):
files.extend(action.get('inputs', []))
+
+ if not skip_excluded_files:
+ files.extend(action.get('inputs_excluded', []))
+
# Remove files starting with $. These are mostly intermediate files for the
# build system.
files = [ file for file in files if not file.startswith('$')]
diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py
index 034a0d2d4f..e69235f724 100644
--- a/tools/gyp/pylib/gyp/xcodeproj_file.py
+++ b/tools/gyp/pylib/gyp/xcodeproj_file.py
@@ -1492,6 +1492,7 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
'icns': 'image.icns',
'java': 'sourcecode.java',
'js': 'sourcecode.javascript',
+ 'kext': 'wrapper.kext',
'm': 'sourcecode.c.objc',
'mm': 'sourcecode.cpp.objcpp',
'nib': 'wrapper.nib',
@@ -1944,24 +1945,40 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
'name': [0, str, 0, 0],
})
- # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is
- # "DIR", match group 3 is "path" or None.
- path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
-
- # path_tree_to_subfolder maps names of Xcode variables to the associated
- # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
- path_tree_to_subfolder = {
- 'BUILT_PRODUCTS_DIR': 16, # Products Directory
- # Other types that can be chosen via the Xcode UI.
- # TODO(mark): Map Xcode variable names to these.
- # : 1, # Wrapper
- # : 6, # Executables: 6
- # : 7, # Resources
- # : 15, # Java Resources
- # : 10, # Frameworks
- # : 11, # Shared Frameworks
- # : 12, # Shared Support
- # : 13, # PlugIns
+ # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
+  # Match group 1 is "DIR", group 3 is "path" or "$(DIR2)" or "$(DIR2)/path"
+ # or None. If group 3 is "path", group 4 will be None otherwise group 4 is
+ # "DIR2" and group 6 is "path".
+ path_tree_re = re.compile(r'^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$')
+
+ # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
+ # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
+ # object.
+ path_tree_first_to_subfolder = {
+ # Types that can be chosen via the Xcode UI.
+ 'BUILT_PRODUCTS_DIR': 16, # Products Directory
+ 'BUILT_FRAMEWORKS_DIR': 10, # Not an official Xcode macro.
+ # Existed before support for the
+ # names below was added. Maps to
+ # "Frameworks".
+ }
+
+ path_tree_second_to_subfolder = {
+ 'WRAPPER_NAME': 1, # Wrapper
+ # Although Xcode's friendly name is "Executables", the destination
+ # is demonstrably the value of the build setting
+ # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
+ 'EXECUTABLE_FOLDER_PATH': 6, # Executables.
+ 'UNLOCALIZED_RESOURCES_FOLDER_PATH': 7, # Resources
+ 'JAVA_FOLDER_PATH': 15, # Java Resources
+ 'FRAMEWORKS_FOLDER_PATH': 10, # Frameworks
+ 'SHARED_FRAMEWORKS_FOLDER_PATH': 11, # Shared Frameworks
+ 'SHARED_SUPPORT_FOLDER_PATH': 12, # Shared Support
+ 'PLUGINS_FOLDER_PATH': 13, # PlugIns
+ # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
+ # Note that it re-uses the BUILT_PRODUCTS_DIR value for
+ # dstSubfolderSpec. dstPath is set below.
+ 'XPCSERVICES_FOLDER_PATH': 16, # XPC Services.
}
def Name(self):
@@ -1982,14 +1999,61 @@ class PBXCopyFilesBuildPhase(XCBuildPhase):
path_tree_match = self.path_tree_re.search(path)
if path_tree_match:
- # Everything else needs to be relative to an Xcode variable.
- path_tree = path_tree_match.group(1)
- relative_path = path_tree_match.group(3)
-
- if path_tree in self.path_tree_to_subfolder:
- subfolder = self.path_tree_to_subfolder[path_tree]
+ path_tree = path_tree_match.group(1);
+ if path_tree in self.path_tree_first_to_subfolder:
+ subfolder = self.path_tree_first_to_subfolder[path_tree]
+ relative_path = path_tree_match.group(3)
if relative_path is None:
relative_path = ''
+
+ if subfolder == 16 and path_tree_match.group(4) is not None:
+ # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
+ # second element is possibly one of the variable names in
+ # path_tree_second_to_subfolder. Xcode sets the values of all these
+ # variables to relative paths so .gyp files must prefix them with
+ # BUILT_PRODUCTS_DIR, e.g.
+ # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH). Then
+ # xcode_emulation.py can export these variables with the same values
+ # as Xcode yet make & ninja files can determine the absolute path
+ # to the target. Xcode uses the dstSubfolderSpec value set here
+ # to determine the full path.
+ #
+ # An alternative of xcode_emulation.py setting the values to absolute
+ # paths when exporting these variables has been ruled out because
+ # then the values would be different depending on the build tool.
+ #
+ # Another alternative is to invent new names for the variables used
+ # to match to the subfolder indices in the second table. .gyp files
+ # then will not need to prepend $(BUILT_PRODUCTS_DIR) because
+ # xcode_emulation.py can set the values of those variables to
+ # the absolute paths when exporting. This is possibly the thinking
+ # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner.
+ #
+ # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because
+ # this same way could be used to specify destinations in .gyp files
+ # that pre-date this addition to GYP. However they would only work
+ # with the Xcode generator. The previous version of xcode_emulation.py
+ # does not export these variables. Such files will get the benefit
+ # of the Xcode UI showing the proper destination name simply by
+ # regenerating the projects with this version of GYP.
+ path_tree = path_tree_match.group(4)
+ relative_path = path_tree_match.group(6)
+ separator = '/'
+
+ if path_tree in self.path_tree_second_to_subfolder:
+ subfolder = self.path_tree_second_to_subfolder[path_tree]
+ if relative_path is None:
+ relative_path = ''
+ separator = ''
+ if path_tree == 'XPCSERVICES_FOLDER_PATH':
+ relative_path = '$(CONTENTS_FOLDER_PATH)/XPCServices' \
+ + separator + relative_path
+ else:
+ # subfolder = 16 from above
+ # The second element of the path is an unrecognized variable.
+ # Include it and any remaining elements in relative_path.
+ relative_path = path_tree_match.group(3);
+
else:
# The path starts with an unrecognized Xcode variable
# name like $(SRCROOT). Xcode will still handle this
@@ -2260,8 +2324,12 @@ class PBXNativeTarget(XCTarget):
'', ''],
'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
'', '.xctest'],
+ 'com.apple.product-type.bundle.ui-testing': ['wrapper.cfbundle',
+ '', '.xctest'],
'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
'', '.so'],
+ 'com.apple.product-type.kernel-extension': ['wrapper.kext',
+ '', '.kext'],
}
def __init__(self, properties=None, id=None, parent=None,
@@ -2314,7 +2382,9 @@ class PBXNativeTarget(XCTarget):
force_extension = suffix[1:]
if self._properties['productType'] == \
- 'com.apple.product-type-bundle.unit.test':
+ 'com.apple.product-type-bundle.unit.test' or \
+ self._properties['productType'] == \
+ 'com.apple.product-type-bundle.ui-testing':
if force_extension is None:
force_extension = suffix[1:]
diff --git a/tools/gyp/tools/pretty_gyp.py b/tools/gyp/tools/pretty_gyp.py
index c51d35872c..d5736bbd4a 100755
--- a/tools/gyp/tools/pretty_gyp.py
+++ b/tools/gyp/tools/pretty_gyp.py
@@ -118,24 +118,23 @@ def prettyprint_input(lines):
basic_offset = 2
last_line = ""
for line in lines:
- if COMMENT_RE.match(line):
- print line
- else:
- line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
- if len(line) > 0:
+ line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ brace_diff = 0
+ if not COMMENT_RE.match(line):
(brace_diff, after) = count_braces(line)
- if brace_diff != 0:
- if after:
- print " " * (basic_offset * indent) + line
- indent += brace_diff
- else:
- indent += brace_diff
- print " " * (basic_offset * indent) + line
+ if brace_diff != 0:
+ if after:
+ print " " * (basic_offset * indent) + line
+ indent += brace_diff
else:
+ indent += brace_diff
print " " * (basic_offset * indent) + line
else:
- print ""
- last_line = line
+ print " " * (basic_offset * indent) + line
+ else:
+ print ""
+ last_line = line
def main():