summaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
authorTimothy J Fontaine <tjfontaine@gmail.com>2013-11-10 15:15:34 -0800
committerTimothy J Fontaine <tjfontaine@gmail.com>2013-11-10 15:15:34 -0800
commit201098535443ef6588b0fd176a425fdf3213ee70 (patch)
tree3259766e34c4a70f087c0ea52c16e0bc74eb11a7 /tools
parent3dcc9b93e1e4d80aa15a40a9d82a6b9f0bef0ec0 (diff)
downloadandroid-node-v8-201098535443ef6588b0fd176a425fdf3213ee70.tar.gz
android-node-v8-201098535443ef6588b0fd176a425fdf3213ee70.tar.bz2
android-node-v8-201098535443ef6588b0fd176a425fdf3213ee70.zip
gyp: update to bebdcea
Diffstat (limited to 'tools')
-rw-r--r--tools/gyp/AUTHORS2
-rw-r--r--tools/gyp/DEPS2
-rw-r--r--tools/gyp/MANIFEST21
-rw-r--r--tools/gyp/PRESUBMIT.py1
-rwxr-xr-xtools/gyp/buildbot/buildbot_run.py148
-rwxr-xr-xtools/gyp/gyp19
-rwxr-xr-xtools/gyp/gyp.bat2
-rwxr-xr-xtools/gyp/gyp_main.py18
-rwxr-xr-xtools/gyp/gyptest.py23
-rw-r--r--tools/gyp/pylib/gyp/MSVSNew.py19
-rw-r--r--tools/gyp/pylib/gyp/MSVSSettings.py7
-rw-r--r--tools/gyp/pylib/gyp/MSVSUtil.py81
-rw-r--r--tools/gyp/pylib/gyp/MSVSVersion.py43
-rw-r--r--tools/gyp/pylib/gyp/SCons.py199
-rwxr-xr-xtools/gyp/pylib/gyp/__init__.py125
-rw-r--r--tools/gyp/pylib/gyp/common.py26
-rwxr-xr-xtools/gyp/pylib/gyp/flock_tool.py (renamed from tools/gyp/pylib/gyp/sun_tool.py)12
-rw-r--r--tools/gyp/pylib/gyp/generator/android.py128
-rw-r--r--tools/gyp/pylib/gyp/generator/dump_dependency_json.py14
-rw-r--r--tools/gyp/pylib/gyp/generator/eclipse.py63
-rw-r--r--tools/gyp/pylib/gyp/generator/make.py79
-rw-r--r--tools/gyp/pylib/gyp/generator/msvs.py205
-rw-r--r--tools/gyp/pylib/gyp/generator/ninja.py867
-rw-r--r--tools/gyp/pylib/gyp/generator/ninja_test.py48
-rw-r--r--tools/gyp/pylib/gyp/generator/scons.py1072
-rw-r--r--tools/gyp/pylib/gyp/generator/xcode.py73
-rw-r--r--tools/gyp/pylib/gyp/generator/xcode_test.py23
-rw-r--r--tools/gyp/pylib/gyp/input.py280
-rwxr-xr-xtools/gyp/pylib/gyp/input_test.py90
-rwxr-xr-xtools/gyp/pylib/gyp/mac_tool.py73
-rw-r--r--tools/gyp/pylib/gyp/msvs_emulation.py143
-rw-r--r--tools/gyp/pylib/gyp/ninja_syntax.py18
-rwxr-xr-xtools/gyp/pylib/gyp/win_tool.py53
-rw-r--r--tools/gyp/pylib/gyp/xcode_emulation.py313
-rw-r--r--tools/gyp/pylib/gyp/xcodeproj_file.py56
-rwxr-xr-xtools/gyp/setup.py11
-rw-r--r--tools/gyp/tools/emacs/gyp.el1
-rwxr-xr-xtools/gyp_node.py (renamed from tools/gyp_node)0
38 files changed, 2222 insertions, 2136 deletions
diff --git a/tools/gyp/AUTHORS b/tools/gyp/AUTHORS
index 8977761960..234e1483fe 100644
--- a/tools/gyp/AUTHORS
+++ b/tools/gyp/AUTHORS
@@ -3,6 +3,8 @@
Google Inc.
Bloomberg Finance L.P.
+Yandex LLC
Steven Knight <knight@baldmt.com>
Ryan Norton <rnorton10@gmail.com>
+Eric N. Vander Weele <ericvw@gmail.com>
diff --git a/tools/gyp/DEPS b/tools/gyp/DEPS
index c17571ae0c..2e1120f274 100644
--- a/tools/gyp/DEPS
+++ b/tools/gyp/DEPS
@@ -8,8 +8,6 @@ vars = {
}
deps = {
- "scons":
- Var("chrome_trunk") + "/src/third_party/scons@44099",
}
deps_os = {
diff --git a/tools/gyp/MANIFEST b/tools/gyp/MANIFEST
deleted file mode 100644
index 925ecc1842..0000000000
--- a/tools/gyp/MANIFEST
+++ /dev/null
@@ -1,21 +0,0 @@
-setup.py
-gyp
-LICENSE
-AUTHORS
-pylib/gyp/MSVSNew.py
-pylib/gyp/MSVSProject.py
-pylib/gyp/MSVSToolFile.py
-pylib/gyp/MSVSUserFile.py
-pylib/gyp/MSVSVersion.py
-pylib/gyp/SCons.py
-pylib/gyp/__init__.py
-pylib/gyp/common.py
-pylib/gyp/input.py
-pylib/gyp/xcodeproj_file.py
-pylib/gyp/generator/__init__.py
-pylib/gyp/generator/gypd.py
-pylib/gyp/generator/gypsh.py
-pylib/gyp/generator/make.py
-pylib/gyp/generator/msvs.py
-pylib/gyp/generator/scons.py
-pylib/gyp/generator/xcode.py
diff --git a/tools/gyp/PRESUBMIT.py b/tools/gyp/PRESUBMIT.py
index 65235661a4..5567b88bd1 100644
--- a/tools/gyp/PRESUBMIT.py
+++ b/tools/gyp/PRESUBMIT.py
@@ -17,7 +17,6 @@ PYLINT_BLACKLIST = [
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
# Needs style fix.
- 'pylib/gyp/generator/scons.py',
'pylib/gyp/generator/xcode.py',
]
diff --git a/tools/gyp/buildbot/buildbot_run.py b/tools/gyp/buildbot/buildbot_run.py
new file mode 100755
index 0000000000..398eb87a85
--- /dev/null
+++ b/tools/gyp/buildbot/buildbot_run.py
@@ -0,0 +1,148 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Argument-less script to select what to run on the buildbots."""
+
+
+import os
+import shutil
+import subprocess
+import sys
+
+
+if sys.platform in ['win32', 'cygwin']:
+ EXE_SUFFIX = '.exe'
+else:
+ EXE_SUFFIX = ''
+
+
+BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
+TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
+ROOT_DIR = os.path.dirname(TRUNK_DIR)
+ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
+OUT_DIR = os.path.join(TRUNK_DIR, 'out')
+
+
+def CallSubProcess(*args, **kwargs):
+ """Wrapper around subprocess.call which treats errors as build exceptions."""
+ retcode = subprocess.call(*args, **kwargs)
+ if retcode != 0:
+ print '@@@STEP_EXCEPTION@@@'
+ sys.exit(1)
+
+
+def PrepareAndroidTree():
+ """Prepare an Android tree to run 'android' format tests."""
+ if os.environ['BUILDBOT_CLOBBER'] == '1':
+ print '@@@BUILD_STEP Clobber Android checkout@@@'
+ shutil.rmtree(ANDROID_DIR)
+
+ # The release of Android we use is static, so there's no need to do anything
+ # if the directory already exists.
+ if os.path.isdir(ANDROID_DIR):
+ return
+
+ print '@@@BUILD_STEP Initialize Android checkout@@@'
+ os.mkdir(ANDROID_DIR)
+ CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
+ CallSubProcess(['git', 'config', '--global',
+ 'user.email', 'chrome-bot@google.com'])
+ CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
+ CallSubProcess(
+ ['repo', 'init',
+ '-u', 'https://android.googlesource.com/platform/manifest',
+ '-b', 'android-4.2.1_r1',
+ '-g', 'all,-notdefault,-device,-darwin,-mips,-x86'],
+ cwd=ANDROID_DIR)
+
+ print '@@@BUILD_STEP Sync Android@@@'
+ CallSubProcess(['repo', 'sync', '-j4'], cwd=ANDROID_DIR)
+
+ print '@@@BUILD_STEP Build Android@@@'
+ CallSubProcess(
+ ['/bin/bash',
+ '-c', 'source build/envsetup.sh && lunch full-eng && make -j4'],
+ cwd=ANDROID_DIR)
+
+
+def GypTestFormat(title, format=None, msvs_version=None):
+ """Run the gyp tests for a given format, emitting annotator tags.
+
+ See annotator docs at:
+ https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
+ Args:
+ format: gyp format to test.
+ Returns:
+ 0 for sucesss, 1 for failure.
+ """
+ if not format:
+ format = title
+
+ print '@@@BUILD_STEP ' + title + '@@@'
+ sys.stdout.flush()
+ env = os.environ.copy()
+ if msvs_version:
+ env['GYP_MSVS_VERSION'] = msvs_version
+ command = ' '.join(
+ [sys.executable, 'trunk/gyptest.py',
+ '--all',
+ '--passed',
+ '--format', format,
+ '--chdir', 'trunk'])
+ if format == 'android':
+ # gyptest needs the environment setup from envsetup/lunch in order to build
+ # using the 'android' backend, so this is done in a single shell.
+ retcode = subprocess.call(
+ ['/bin/bash',
+ '-c', 'source build/envsetup.sh && lunch full-eng && cd %s && %s'
+ % (ROOT_DIR, command)],
+ cwd=ANDROID_DIR, env=env)
+ else:
+ retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
+ if retcode:
+ # Emit failure tag, and keep going.
+ print '@@@STEP_FAILURE@@@'
+ return 1
+ return 0
+
+
+def GypBuild():
+ # Dump out/ directory.
+ print '@@@BUILD_STEP cleanup@@@'
+ print 'Removing %s...' % OUT_DIR
+ shutil.rmtree(OUT_DIR, ignore_errors=True)
+ print 'Done.'
+
+ retcode = 0
+ # The Android gyp bot runs on linux so this must be tested first.
+ if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android':
+ PrepareAndroidTree()
+ retcode += GypTestFormat('android')
+ elif sys.platform.startswith('linux'):
+ retcode += GypTestFormat('ninja')
+ retcode += GypTestFormat('make')
+ elif sys.platform == 'darwin':
+ retcode += GypTestFormat('ninja')
+ retcode += GypTestFormat('xcode')
+ retcode += GypTestFormat('make')
+ elif sys.platform == 'win32':
+ retcode += GypTestFormat('ninja')
+ if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
+ retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
+ retcode += GypTestFormat('msvs-2012', format='msvs', msvs_version='2012')
+ else:
+ raise Exception('Unknown platform')
+ if retcode:
+ # TODO(bradnelson): once the annotator supports a postscript (section for
+ # after the build proper that could be used for cumulative failures),
+ # use that instead of this. This isolates the final return value so
+ # that it isn't misattributed to the last stage.
+ print '@@@BUILD_STEP failures@@@'
+ sys.exit(retcode)
+
+
+if __name__ == '__main__':
+ GypBuild()
diff --git a/tools/gyp/gyp b/tools/gyp/gyp
index d52e7116f5..a157f34950 100755
--- a/tools/gyp/gyp
+++ b/tools/gyp/gyp
@@ -1,18 +1,7 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-import sys
-
-# TODO(mark): sys.path manipulation is some temporary testing stuff.
-try:
- import gyp
-except ImportError, e:
- import os.path
- sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
- import gyp
-
-if __name__ == '__main__':
- sys.exit(gyp.main(sys.argv[1:]))
+BASE=`dirname $0`
+python $BASE/gyp_main.py "$@"
diff --git a/tools/gyp/gyp.bat b/tools/gyp/gyp.bat
index 90fbc6d30e..c0b4ca24e5 100755
--- a/tools/gyp/gyp.bat
+++ b/tools/gyp/gyp.bat
@@ -2,4 +2,4 @@
@rem Use of this source code is governed by a BSD-style license that can be
@rem found in the LICENSE file.
-@python "%~dp0/gyp" %*
+@python "%~dp0gyp_main.py" %*
diff --git a/tools/gyp/gyp_main.py b/tools/gyp/gyp_main.py
new file mode 100755
index 0000000000..4ec872f0f9
--- /dev/null
+++ b/tools/gyp/gyp_main.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+# TODO(mark): sys.path manipulation is some temporary testing stuff.
+try:
+ import gyp
+except ImportError, e:
+ import os.path
+ sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
+ import gyp
+
+if __name__ == '__main__':
+ sys.exit(gyp.script_main())
diff --git a/tools/gyp/gyptest.py b/tools/gyp/gyptest.py
index efa75a7aa8..a80dfbf2df 100755
--- a/tools/gyp/gyptest.py
+++ b/tools/gyp/gyptest.py
@@ -130,15 +130,18 @@ sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
+def is_test_name(f):
+ return f.startswith('gyptest') and f.endswith('.py')
+
+
def find_all_gyptest_files(directory):
- result = []
- for root, dirs, files in os.walk(directory):
- if '.svn' in dirs:
- dirs.remove('.svn')
- result.extend([ os.path.join(root, f) for f in files
- if f.startswith('gyptest') and f.endswith('.py') ])
- result.sort()
- return result
+ result = []
+ for root, dirs, files in os.walk(directory):
+ if '.svn' in dirs:
+ dirs.remove('.svn')
+ result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
+ result.sort()
+ return result
def main(argv=None):
@@ -186,6 +189,9 @@ def main(argv=None):
if os.path.isdir(arg):
tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
else:
+ if not is_test_name(os.path.basename(arg)):
+ print >>sys.stderr, arg, 'is not a valid gyp test name.'
+ sys.exit(1)
tests.append(arg)
if opts.list:
@@ -210,6 +216,7 @@ def main(argv=None):
else:
# TODO: not duplicate this mapping from pylib/gyp/__init__.py
format_list = {
+ 'aix5': ['make'],
'freebsd7': ['make'],
'freebsd8': ['make'],
'openbsd5': ['make'],
diff --git a/tools/gyp/pylib/gyp/MSVSNew.py b/tools/gyp/pylib/gyp/MSVSNew.py
index 253fe61986..845dcb0639 100644
--- a/tools/gyp/pylib/gyp/MSVSNew.py
+++ b/tools/gyp/pylib/gyp/MSVSNew.py
@@ -2,7 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""New implementation of Visual Studio project generation for SCons."""
+"""New implementation of Visual Studio project generation."""
import os
import random
@@ -325,14 +325,15 @@ class MSVSSolution:
f.write('\tEndGlobalSection\r\n')
# Folder mappings
- # TODO(rspangler): Should omit this section if there are no folders
- f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
- for e in all_entries:
- if not isinstance(e, MSVSFolder):
- continue # Does not apply to projects, only folders
- for subentry in e.entries:
- f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
- f.write('\tEndGlobalSection\r\n')
+ # Omit this section if there are no folders
+ if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
+ f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
+ for e in all_entries:
+ if not isinstance(e, MSVSFolder):
+ continue # Does not apply to projects, only folders
+ for subentry in e.entries:
+ f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
+ f.write('\tEndGlobalSection\r\n')
f.write('EndGlobal\r\n')
diff --git a/tools/gyp/pylib/gyp/MSVSSettings.py b/tools/gyp/pylib/gyp/MSVSSettings.py
index d0d4990319..e8be386288 100644
--- a/tools/gyp/pylib/gyp/MSVSSettings.py
+++ b/tools/gyp/pylib/gyp/MSVSSettings.py
@@ -834,8 +834,13 @@ _Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
+# These settings generate correctly in the MSVS output files when using
+# e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from
+# configuration entries, but result in spurious artifacts which can be
+# safely ignored here. See crbug.com/246570
_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
+_MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list)
+_MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list)
# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py
index 5afcd1f2ab..62e8d260d4 100644
--- a/tools/gyp/pylib/gyp/MSVSUtil.py
+++ b/tools/gyp/pylib/gyp/MSVSUtil.py
@@ -10,7 +10,8 @@ import os
_TARGET_TYPE_EXT = {
'executable': '.exe',
- 'shared_library': '.dll'
+ 'loadable_module': '.dll',
+ 'shared_library': '.dll',
}
@@ -121,6 +122,46 @@ def ShardTargets(target_list, target_dicts):
return (new_target_list, new_target_dicts)
+def _GetPdbPath(target_dict, config_name, vars):
+ """Returns the path to the PDB file that will be generated by a given
+ configuration.
+
+ The lookup proceeds as follows:
+ - Look for an explicit path in the VCLinkerTool configuration block.
+ - Look for an 'msvs_large_pdb_path' variable.
+ - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
+ specified.
+ - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
+
+ Arguments:
+ target_dict: The target dictionary to be searched.
+ config_name: The name of the configuration of interest.
+ vars: A dictionary of common GYP variables with generator-specific values.
+ Returns:
+ The path of the corresponding PDB file.
+ """
+ config = target_dict['configurations'][config_name]
+ msvs = config.setdefault('msvs_settings', {})
+
+ linker = msvs.get('VCLinkerTool', {})
+
+ pdb_path = linker.get('ProgramDatabaseFile')
+ if pdb_path:
+ return pdb_path
+
+ variables = target_dict.get('variables', {})
+ pdb_path = variables.get('msvs_large_pdb_path', None)
+ if pdb_path:
+ return pdb_path
+
+
+ pdb_base = target_dict.get('product_name', target_dict['target_name'])
+ pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
+ pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
+
+ return pdb_path
+
+
def InsertLargePdbShims(target_list, target_dicts, vars):
"""Insert a shim target that forces the linker to use 4KB pagesize PDBs.
@@ -138,6 +179,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
targets_to_shim = []
for t in target_dicts:
target_dict = target_dicts[t]
+
# We only want to shim targets that have msvs_large_pdb enabled.
if not int(target_dict.get('msvs_large_pdb', 0)):
continue
@@ -162,7 +204,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# GYP and the project may be on different drives), and Ninja hates absolute
# paths (it ends up generating the .obj and .obj.d alongside the source
# file, polluting GYPs tree).
- copy_suffix = '_large_pdb_copy'
+ copy_suffix = 'large_pdb_copy'
copy_target_name = target_name + '_' + copy_suffix
full_copy_target_name = _SuffixName(t, copy_suffix)
shim_cc_basename = os.path.basename(large_pdb_shim_cc)
@@ -179,7 +221,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# This is the dict for the PDB generating shim target. It depends on the
# copy target.
- shim_suffix = '_large_pdb_shim'
+ shim_suffix = 'large_pdb_shim'
shim_target_name = target_name + '_' + shim_suffix
full_shim_target_name = _SuffixName(t, shim_suffix)
shim_dict = copy.deepcopy(base_dict)
@@ -190,19 +232,32 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# Set up the shim to output its PDB to the same location as the final linker
# target.
- for config in shim_dict.get('configurations').itervalues():
- msvs = config.setdefault('msvs_settings')
+ for config_name, config in shim_dict.get('configurations').iteritems():
+ pdb_path = _GetPdbPath(target_dict, config_name, vars)
- linker = msvs.pop('VCLinkerTool') # We want to clear this dict.
- pdb_path = linker.get('ProgramDatabaseFile')
+ # A few keys that we don't want to propagate.
+ for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
+ config.pop(key, None)
- compiler = msvs.setdefault('VCCLCompilerTool', {})
- compiler.setdefault('DebugInformationFormat', '3')
- compiler.setdefault('ProgramDataBaseFileName', pdb_path)
+ msvs = config.setdefault('msvs_settings', {})
- # Add the new targets.
- target_list.append(full_copy_target_name)
- target_list.append(full_shim_target_name)
+ # Update the compiler directives in the shim target.
+ compiler = msvs.setdefault('VCCLCompilerTool', {})
+ compiler['DebugInformationFormat'] = '3'
+ compiler['ProgramDataBaseFileName'] = pdb_path
+
+ # Set the explicit PDB path in the appropriate configuration of the
+ # original target.
+ config = target_dict['configurations'][config_name]
+ msvs = config.setdefault('msvs_settings', {})
+ linker = msvs.setdefault('VCLinkerTool', {})
+ linker['GenerateDebugInformation'] = 'true'
+ linker['ProgramDatabaseFile'] = pdb_path
+
+ # Add the new targets. They must go to the beginning of the list so that
+ # the dependency generation works as expected in ninja.
+ target_list.insert(0, full_copy_target_name)
+ target_list.insert(0, full_shim_target_name)
target_dicts[full_copy_target_name] = copy_dict
target_dicts[full_shim_target_name] = shim_dict
diff --git a/tools/gyp/pylib/gyp/MSVSVersion.py b/tools/gyp/pylib/gyp/MSVSVersion.py
index 2d95cd0c9e..956fa01161 100644
--- a/tools/gyp/pylib/gyp/MSVSVersion.py
+++ b/tools/gyp/pylib/gyp/MSVSVersion.py
@@ -83,6 +83,13 @@ class VisualStudioVersion(object):
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
# isn't always.
if target_arch == 'x86':
+ if self.short_name == '2013' and (
+ os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
+ os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
+ # VS2013 non-Express has a x64-x86 cross that we want to prefer.
+ return [os.path.normpath(
+ os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
+ # Otherwise, the standard x86 compiler.
return [os.path.normpath(
os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
else:
@@ -197,6 +204,24 @@ def _CreateVersion(name, path, sdk_based=False):
if path:
path = os.path.normpath(path)
versions = {
+ '2013': VisualStudioVersion('2013',
+ 'Visual Studio 2013',
+ solution_version='13.00',
+ project_version='12.0',
+ flat_sln=False,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v120'),
+ '2013e': VisualStudioVersion('2013e',
+ 'Visual Studio 2013',
+ solution_version='13.00',
+ project_version='12.0',
+ flat_sln=True,
+ uses_vcxproj=True,
+ path=path,
+ sdk_based=sdk_based,
+ default_toolset='v120'),
'2012': VisualStudioVersion('2012',
'Visual Studio 2012',
solution_version='12.00',
@@ -224,7 +249,7 @@ def _CreateVersion(name, path, sdk_based=False):
path=path,
sdk_based=sdk_based),
'2010e': VisualStudioVersion('2010e',
- 'Visual Studio 2010',
+ 'Visual C++ Express 2010',
solution_version='11.00',
project_version='4.0',
flat_sln=True,
@@ -288,10 +313,16 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
2008(e) - Visual Studio 2008 (9)
2010(e) - Visual Studio 2010 (10)
2012(e) - Visual Studio 2012 (11)
+ 2013(e) - Visual Studio 2013 (11)
Where (e) is e for express editions of MSVS and blank otherwise.
"""
version_to_year = {
- '8.0': '2005', '9.0': '2008', '10.0': '2010', '11.0': '2012'}
+ '8.0': '2005',
+ '9.0': '2008',
+ '10.0': '2010',
+ '11.0': '2012',
+ '12.0': '2013',
+ }
versions = []
for version in versions_to_check:
# Old method of searching for which VS version is installed
@@ -345,7 +376,7 @@ def SelectVisualStudioVersion(version='auto'):
if version == 'auto':
version = os.environ.get('GYP_MSVS_VERSION', 'auto')
version_map = {
- 'auto': ('10.0', '9.0', '8.0', '11.0'),
+ 'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
'2005': ('8.0',),
'2005e': ('8.0',),
'2008': ('9.0',),
@@ -354,13 +385,15 @@ def SelectVisualStudioVersion(version='auto'):
'2010e': ('10.0',),
'2012': ('11.0',),
'2012e': ('11.0',),
+ '2013': ('12.0',),
+ '2013e': ('12.0',),
}
override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
if override_path:
msvs_version = os.environ.get('GYP_MSVS_VERSION')
- if not msvs_version or 'e' not in msvs_version:
+ if not msvs_version:
raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
- 'set to an "e" version (e.g. 2010e)')
+ 'set to a particular version (e.g. 2010e).')
return _CreateVersion(msvs_version, override_path, sdk_based=True)
version = str(version)
versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
diff --git a/tools/gyp/pylib/gyp/SCons.py b/tools/gyp/pylib/gyp/SCons.py
deleted file mode 100644
index 42d845ebed..0000000000
--- a/tools/gyp/pylib/gyp/SCons.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-SCons generator.
-
-This contains class definitions and supporting functions for generating
-pieces of SCons files for the different types of GYP targets.
-"""
-
-import os
-
-
-def WriteList(fp, list, prefix='',
- separator=',\n ',
- preamble=None,
- postamble=None):
- fp.write(preamble or '')
- fp.write((separator or ' ').join([prefix + l for l in list]))
- fp.write(postamble or '')
-
-
-class TargetBase(object):
- """
- Base class for a SCons representation of a GYP target.
- """
- is_ignored = False
- target_prefix = ''
- target_suffix = ''
- def __init__(self, spec):
- self.spec = spec
- def full_product_name(self):
- """
- Returns the full name of the product being built:
-
- * Uses 'product_name' if it's set, else prefix + 'target_name'.
- * Prepends 'product_dir' if set.
- * Appends SCons suffix variables for the target type (or
- product_extension).
- """
- suffix = self.target_suffix
- product_extension = self.spec.get('product_extension')
- if product_extension:
- suffix = '.' + product_extension
- prefix = self.spec.get('product_prefix', self.target_prefix)
- name = self.spec['target_name']
- name = prefix + self.spec.get('product_name', name) + suffix
- product_dir = self.spec.get('product_dir')
- if product_dir:
- name = os.path.join(product_dir, name)
- else:
- name = os.path.join(self.out_dir, name)
- return name
-
- def write_input_files(self, fp):
- """
- Writes the definition of the input files (sources).
- """
- sources = self.spec.get('sources')
- if not sources:
- fp.write('\ninput_files = []\n')
- return
- preamble = '\ninput_files = [\n '
- postamble = ',\n]\n'
- WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble)
-
- def builder_call(self):
- """
- Returns the actual SCons builder call to build this target.
- """
- name = self.full_product_name()
- return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name)
- def write_target(self, fp, src_dir='', pre=''):
- """
- Writes the lines necessary to build this target.
- """
- fp.write('\n' + pre)
- fp.write('_outputs = %s\n' % self.builder_call())
- fp.write('target_files.extend(_outputs)\n')
-
-
-class NoneTarget(TargetBase):
- """
- A GYP target type of 'none', implicitly or explicitly.
- """
- def write_target(self, fp, src_dir='', pre=''):
- fp.write('\ntarget_files.extend(input_files)\n')
-
-
-class SettingsTarget(TargetBase):
- """
- A GYP target type of 'settings'.
- """
- is_ignored = True
-
-
-compilable_sources_template = """
-_result = []
-for infile in input_files:
- if env.compilable(infile):
- if (type(infile) == type('')
- and (infile.startswith(%(src_dir)r)
- or not os.path.isabs(env.subst(infile)))):
- # Force files below the build directory by replacing all '..'
- # elements in the path with '__':
- base, ext = os.path.splitext(os.path.normpath(infile))
- base = [d == '..' and '__' or d for d in base.split('/')]
- base = os.path.join(*base)
- object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base
- if not infile.startswith(%(src_dir)r):
- infile = %(src_dir)r + infile
- infile = env.%(name)s(object, infile)[0]
- else:
- infile = env.%(name)s(infile)[0]
- _result.append(infile)
-input_files = _result
-"""
-
-class CompilableSourcesTargetBase(TargetBase):
- """
- An abstract base class for targets that compile their source files.
-
- We explicitly transform compilable files into object files,
- even though SCons could infer that for us, because we want
- to control where the object file ends up. (The implicit rules
- in SCons always put the object file next to the source file.)
- """
- intermediate_builder_name = None
- def write_target(self, fp, src_dir='', pre=''):
- if self.intermediate_builder_name is None:
- raise NotImplementedError
- if src_dir and not src_dir.endswith('/'):
- src_dir += '/'
- variables = {
- 'src_dir': src_dir,
- 'name': self.intermediate_builder_name,
- }
- fp.write(compilable_sources_template % variables)
- super(CompilableSourcesTargetBase, self).write_target(fp)
-
-
-class ProgramTarget(CompilableSourcesTargetBase):
- """
- A GYP target type of 'executable'.
- """
- builder_name = 'GypProgram'
- intermediate_builder_name = 'StaticObject'
- target_prefix = '${PROGPREFIX}'
- target_suffix = '${PROGSUFFIX}'
- out_dir = '${TOP_BUILDDIR}'
-
-
-class StaticLibraryTarget(CompilableSourcesTargetBase):
- """
- A GYP target type of 'static_library'.
- """
- builder_name = 'GypStaticLibrary'
- intermediate_builder_name = 'StaticObject'
- target_prefix = '${LIBPREFIX}'
- target_suffix = '${LIBSUFFIX}'
- out_dir = '${LIB_DIR}'
-
-
-class SharedLibraryTarget(CompilableSourcesTargetBase):
- """
- A GYP target type of 'shared_library'.
- """
- builder_name = 'GypSharedLibrary'
- intermediate_builder_name = 'SharedObject'
- target_prefix = '${SHLIBPREFIX}'
- target_suffix = '${SHLIBSUFFIX}'
- out_dir = '${LIB_DIR}'
-
-
-class LoadableModuleTarget(CompilableSourcesTargetBase):
- """
- A GYP target type of 'loadable_module'.
- """
- builder_name = 'GypLoadableModule'
- intermediate_builder_name = 'SharedObject'
- target_prefix = '${SHLIBPREFIX}'
- target_suffix = '${SHLIBSUFFIX}'
- out_dir = '${TOP_BUILDDIR}'
-
-
-TargetMap = {
- None : NoneTarget,
- 'none' : NoneTarget,
- 'settings' : SettingsTarget,
- 'executable' : ProgramTarget,
- 'static_library' : StaticLibraryTarget,
- 'shared_library' : SharedLibraryTarget,
- 'loadable_module' : LoadableModuleTarget,
-}
-
-
-def Target(spec):
- return TargetMap[spec.get('type')](spec)
diff --git a/tools/gyp/pylib/gyp/__init__.py b/tools/gyp/pylib/gyp/__init__.py
index 3769c52652..30edea5675 100755
--- a/tools/gyp/pylib/gyp/__init__.py
+++ b/tools/gyp/pylib/gyp/__init__.py
@@ -106,10 +106,6 @@ def Load(build_files, format, default_variables={},
# so we can default things and the generators only have to provide what
# they need.
generator_input_info = {
- 'generator_wants_absolute_build_file_paths':
- getattr(generator, 'generator_wants_absolute_build_file_paths', False),
- 'generator_handles_variants':
- getattr(generator, 'generator_handles_variants', False),
'non_configuration_keys':
getattr(generator, 'generator_additional_non_configuration_keys', []),
'path_sections':
@@ -123,12 +119,14 @@ def Load(build_files, format, default_variables={},
'generator_wants_static_library_dependencies_adjusted', True),
'generator_wants_sorted_dependencies':
getattr(generator, 'generator_wants_sorted_dependencies', False),
+ 'generator_filelist_paths':
+ getattr(generator, 'generator_filelist_paths', None),
}
# Process the input specific to this generator.
result = gyp.input.Load(build_files, default_variables, includes[:],
depth, generator_input_info, check, circular_check,
- params['parallel'])
+ params['parallel'], params['root_targets'])
return [generator] + result
def NameValueListToDict(name_value_list):
@@ -283,26 +281,26 @@ def gyp_main(args):
parser = RegeneratableOptionParser()
usage = 'usage: %s [options ...] [build_file ...]'
parser.set_usage(usage.replace('%s', '%prog'))
+ parser.add_option('--build', dest='configs', action='append',
+ help='configuration for build after project generation')
+ parser.add_option('--check', dest='check', action='store_true',
+ help='check format of gyp files')
+ parser.add_option('--config-dir', dest='config_dir', action='store',
+ env_name='GYP_CONFIG_DIR', default=None,
+ help='The location for configuration files like '
+ 'include.gypi.')
+ parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
+ action='append', default=[], help='turn on a debugging '
+ 'mode for debugging GYP. Supported modes are "variables", '
+ '"includes" and "general" or "all" for all of them.')
parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
+ parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
+ help='set DEPTH gyp variable to a relative path to PATH')
parser.add_option('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
- parser.add_option('--msvs-version', dest='msvs_version',
- regenerate=False,
- help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
- parser.add_option('-I', '--include', dest='includes', action='append',
- metavar='INCLUDE', type='path',
- help='files to include in all loaded .gyp files')
- parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
- help='set DEPTH gyp variable to a relative path to PATH')
- parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
- action='append', default=[], help='turn on a debugging '
- 'mode for debugging GYP. Supported modes are "variables", '
- '"includes" and "general" or "all" for all of them.')
- parser.add_option('-S', '--suffix', dest='suffix', default='',
- help='suffix to add to generated files')
parser.add_option('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
@@ -313,16 +311,9 @@ def gyp_main(args):
parser.add_option('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
- parser.add_option('--check', dest='check', action='store_true',
- help='check format of gyp files')
- parser.add_option('--parallel', action='store_true',
- env_name='GYP_PARALLEL',
- help='Use multiprocessing for speed (experimental)')
- parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
- default=None, metavar='DIR', type='path',
- help='directory to use as the root of the source tree')
- parser.add_option('--build', dest='configs', action='append',
- help='configuration for build after project generation')
+ parser.add_option('-I', '--include', dest='includes', action='append',
+ metavar='INCLUDE', type='path',
+ help='files to include in all loaded .gyp files')
# --no-circular-check disables the check for circular relationships between
# .gyp files. These relationships should not exist, but they've only been
# observed to be harmful with the Xcode generator. Chromium's .gyp files
@@ -333,27 +324,47 @@ def gyp_main(args):
parser.add_option('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
-
- # We read a few things from ~/.gyp, so set up a var for that.
- home_vars = ['HOME']
- if sys.platform in ('cygwin', 'win32'):
- home_vars.append('USERPROFILE')
- home = None
- home_dot_gyp = None
- for home_var in home_vars:
- home = os.getenv(home_var)
- if home != None:
- home_dot_gyp = os.path.join(home, '.gyp')
- if not os.path.exists(home_dot_gyp):
- home_dot_gyp = None
- else:
- break
-
- # TODO(thomasvl): add support for ~/.gyp/defaults
+ parser.add_option('--no-parallel', action='store_true', default=False,
+ help='Disable multiprocessing')
+ parser.add_option('-S', '--suffix', dest='suffix', default='',
+ help='suffix to add to generated files')
+ parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
+ default=None, metavar='DIR', type='path',
+ help='directory to use as the root of the source tree')
+ parser.add_option('-R', '--root-target', dest='root_targets',
+ action='append', metavar='TARGET',
+ help='include only TARGET and its deep dependencies')
options, build_files_arg = parser.parse_args(args)
build_files = build_files_arg
+ # Set up the configuration directory (defaults to ~/.gyp)
+ if not options.config_dir:
+ home = None
+ home_dot_gyp = None
+ if options.use_environment:
+ home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
+ if home_dot_gyp:
+ home_dot_gyp = os.path.expanduser(home_dot_gyp)
+
+ if not home_dot_gyp:
+ home_vars = ['HOME']
+ if sys.platform in ('cygwin', 'win32'):
+ home_vars.append('USERPROFILE')
+ for home_var in home_vars:
+ home = os.getenv(home_var)
+ if home != None:
+ home_dot_gyp = os.path.join(home, '.gyp')
+ if not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+ else:
+ break
+ else:
+ home_dot_gyp = os.path.expanduser(options.config_dir)
+
+ if home_dot_gyp and not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+
if not options.formats:
# If no format was given on the command line, then check the env variable.
generate_formats = []
@@ -377,9 +388,7 @@ def gyp_main(args):
if g_o:
options.generator_output = g_o
- if not options.parallel and options.use_environment:
- p = os.environ.get('GYP_PARALLEL')
- options.parallel = bool(p and p != '0')
+ options.parallel = not options.no_parallel
for mode in options.debug:
gyp.debug[mode] = 1
@@ -473,15 +482,6 @@ def gyp_main(args):
if DEBUG_GENERAL in gyp.debug.keys():
DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
- # TODO: Remove this and the option after we've gotten folks to move to the
- # generator flag.
- if options.msvs_version:
- print >>sys.stderr, \
- 'DEPRECATED: Use generator flag (-G msvs_version=' + \
- options.msvs_version + ') instead of --msvs-version=' + \
- options.msvs_version
- generator_flags['msvs_version'] = options.msvs_version
-
# Generate all requested formats (use a set in case we got one format request
# twice)
for format in set(options.formats):
@@ -492,7 +492,8 @@ def gyp_main(args):
'build_files_arg': build_files_arg,
'gyp_binary': sys.argv[0],
'home_dot_gyp': home_dot_gyp,
- 'parallel': options.parallel}
+ 'parallel': options.parallel,
+ 'root_targets': options.root_targets}
# Start with the default variables from the command line.
[generator, flat_list, targets, data] = Load(build_files, format,
@@ -528,5 +529,9 @@ def main(args):
sys.stderr.write("gyp: %s\n" % e)
return 1
+# NOTE: setuptools generated console_scripts calls function with no arguments
+def script_main():
+ return main(sys.argv[1:])
+
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ sys.exit(script_main())
diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py
index e50f51c307..b9d2abef02 100644
--- a/tools/gyp/pylib/gyp/common.py
+++ b/tools/gyp/pylib/gyp/common.py
@@ -44,6 +44,14 @@ def ExceptionAppend(e, msg):
e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
+def FindQualifiedTargets(target, qualified_list):
+ """
+ Given a list of qualified targets, return the qualified targets for the
+ specified |target|.
+ """
+ return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
+
+
def ParseQualifiedTarget(target):
# Splits a qualified target into a build file, target name and toolset.
@@ -131,6 +139,13 @@ def RelativePath(path, relative_to):
path = os.path.realpath(path)
relative_to = os.path.realpath(relative_to)
+ # On Windows, we can't create a relative path to a different drive, so just
+ # use the absolute path.
+ if sys.platform == 'win32':
+ if (os.path.splitdrive(path)[0].lower() !=
+ os.path.splitdrive(relative_to)[0].lower()):
+ return path
+
# Split the paths into components.
path_split = path.split(os.path.sep)
relative_to_split = relative_to.split(os.path.sep)
@@ -401,9 +416,16 @@ def GetFlavor(params):
def CopyTool(flavor, out_path):
- """Finds (mac|sun|win)_tool.gyp in the gyp directory and copies it
+ """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
- prefix = { 'solaris': 'sun', 'mac': 'mac', 'win': 'win' }.get(flavor, None)
+ # aix and solaris just need flock emulation. mac and win use more complicated
+ # support scripts.
+ prefix = {
+ 'aix': 'flock',
+ 'solaris': 'flock',
+ 'mac': 'mac',
+ 'win': 'win'
+ }.get(flavor, None)
if not prefix:
return
diff --git a/tools/gyp/pylib/gyp/sun_tool.py b/tools/gyp/pylib/gyp/flock_tool.py
index 90d59c8240..3e7efff26e 100755
--- a/tools/gyp/pylib/gyp/sun_tool.py
+++ b/tools/gyp/pylib/gyp/flock_tool.py
@@ -3,8 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""These functions are executed via gyp-sun-tool when using the Makefile
-generator."""
+"""These functions are executed via gyp-flock-tool when using the Makefile
+generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
@@ -14,14 +14,12 @@ import sys
def main(args):
- executor = SunTool()
+ executor = FlockTool()
executor.Dispatch(args)
-class SunTool(object):
- """This class performs all the SunOS tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
+class FlockTool(object):
+ """This class emulates the 'flock' command."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py
index a01ead020d..9476a1df69 100644
--- a/tools/gyp/pylib/gyp/generator/android.py
+++ b/tools/gyp/pylib/gyp/generator/android.py
@@ -39,7 +39,7 @@ generator_default_variables = {
'RULE_INPUT_PATH': '$(RULE_SOURCES)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
- 'CONFIGURATION_NAME': '$(GYP_DEFAULT_CONFIGURATION)',
+ 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
}
# Make supports multiple toolsets
@@ -292,11 +292,9 @@ class AndroidMkWriter(object):
main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
- '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' %
- main_output)
+ '$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
- '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' %
- main_output)
+ '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# Android's envsetup.sh adds a number of directories to the path including
# the built host binary directory. This causes actions/rules invoked by
@@ -394,11 +392,9 @@ class AndroidMkWriter(object):
main_output = outputs[0]
self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
self.WriteLn('%s: gyp_intermediate_dir := '
- '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)'
- % main_output)
+ '$(abspath $(gyp_intermediate_dir))' % main_output)
self.WriteLn('%s: gyp_shared_intermediate_dir := '
- '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)'
- % main_output)
+ '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
# See explanation in WriteActions.
self.WriteLn('%s: export PATH := '
@@ -413,7 +409,9 @@ class AndroidMkWriter(object):
(main_output, main_output_deps))
self.WriteLn('\t%s\n' % command)
for output in outputs[1:]:
- self.WriteLn('%s: %s' % (output, main_output))
+ # Make each output depend on the main output, with an empty command
+ # to force make to notice that the mtime has changed.
+ self.WriteLn('%s: %s ;' % (output, main_output))
self.WriteLn('.PHONY: %s' % (rule_trigger))
self.WriteLn('%s: %s' % (rule_trigger, main_output))
self.WriteLn('')
@@ -470,42 +468,39 @@ class AndroidMkWriter(object):
Args:
spec, configs: input from gyp.
"""
- config = configs[spec['default_configuration']]
- extracted_includes = []
-
- self.WriteLn('\n# Flags passed to both C and C++ files.')
- cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
- config.get('cflags'))
- extracted_includes.extend(includes_from_cflags)
- self.WriteList(cflags, 'MY_CFLAGS')
-
- cflags_c, includes_from_cflags_c = self.ExtractIncludesFromCFlags(
- config.get('cflags_c'))
- extracted_includes.extend(includes_from_cflags_c)
- self.WriteList(cflags_c, 'MY_CFLAGS_C')
-
- self.WriteList(config.get('defines'), 'MY_DEFS', prefix='-D',
- quoter=make.EscapeCppDefine)
- self.WriteLn('LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)')
-
+ for configname, config in sorted(configs.iteritems()):
+ extracted_includes = []
+
+ self.WriteLn('\n# Flags passed to both C and C++ files.')
+ cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
+ config.get('cflags', []) + config.get('cflags_c', []))
+ extracted_includes.extend(includes_from_cflags)
+ self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
+
+ self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
+ prefix='-D', quoter=make.EscapeCppDefine)
+
+ self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
+ includes = list(config.get('include_dirs', []))
+ includes.extend(extracted_includes)
+ includes = map(Sourceify, map(self.LocalPathify, includes))
+ includes = self.NormalizeIncludePaths(includes)
+ self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
+
+ self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
+ self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
+
+ self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
+ '$(MY_DEFS_$(GYP_CONFIGURATION))')
# Undefine ANDROID for host modules
- # TODO: the source code should not use macro ANDROID to tell if it's host or
- # target module.
+ # TODO: the source code should not use macro ANDROID to tell if it's host
+ # or target module.
if self.toolset == 'host':
self.WriteLn('# Undefine ANDROID for host modules')
self.WriteLn('LOCAL_CFLAGS += -UANDROID')
-
- self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
- includes = list(config.get('include_dirs', []))
- includes.extend(extracted_includes)
- includes = map(Sourceify, map(self.LocalPathify, includes))
- includes = self.NormalizeIncludePaths(includes)
- self.WriteList(includes, 'LOCAL_C_INCLUDES')
self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
- '$(LOCAL_C_INCLUDES)')
-
- self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
- self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS')
+ '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
+ self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
def WriteSources(self, spec, configs, extra_sources):
@@ -698,24 +693,6 @@ class AndroidMkWriter(object):
assert spec.get('product_dir') is None # TODO: not supported?
return os.path.join(path, self.ComputeOutputBasename(spec))
-
- def NormalizeLdFlags(self, ld_flags):
- """ Clean up ldflags from gyp file.
- Remove any ldflags that contain android_top_dir.
-
- Args:
- ld_flags: ldflags from gyp files.
-
- Returns:
- clean ldflags
- """
- clean_ldflags = []
- for flag in ld_flags:
- if self.android_top_dir in flag:
- continue
- clean_ldflags.append(flag)
- return clean_ldflags
-
def NormalizeIncludePaths(self, include_paths):
""" Normalize include_paths.
Convert absolute paths to relative to the Android top directory;
@@ -747,12 +724,11 @@ class AndroidMkWriter(object):
"""
clean_cflags = []
include_paths = []
- if cflags:
- for flag in cflags:
- if flag.startswith('-I'):
- include_paths.append(flag[2:])
- else:
- clean_cflags.append(flag)
+ for flag in cflags:
+ if flag.startswith('-I'):
+ include_paths.append(flag[2:])
+ else:
+ clean_cflags.append(flag)
return (clean_cflags, include_paths)
@@ -816,14 +792,11 @@ class AndroidMkWriter(object):
spec, configs: input from gyp.
link_deps: link dependency list; see ComputeDeps()
"""
- config = configs[spec['default_configuration']]
-
- # LDFLAGS
- ldflags = list(config.get('ldflags', []))
- static_flags, dynamic_flags = self.ComputeAndroidLibraryModuleNames(
- ldflags)
- self.WriteLn('')
- self.WriteList(self.NormalizeLdFlags(ldflags), 'LOCAL_LDFLAGS')
+ for configname, config in sorted(configs.iteritems()):
+ ldflags = list(config.get('ldflags', []))
+ self.WriteLn('')
+ self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
+ self.WriteLn('\nLOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION))')
# Libraries (i.e. -lfoo)
libraries = gyp.common.uniquer(spec.get('libraries', []))
@@ -834,12 +807,12 @@ class AndroidMkWriter(object):
static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
self.WriteLn('')
- self.WriteList(static_flags + static_libs + static_link_deps,
+ self.WriteList(static_libs + static_link_deps,
'LOCAL_STATIC_LIBRARIES')
self.WriteLn('# Enable grouping to fix circular references')
self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
self.WriteLn('')
- self.WriteList(dynamic_flags + dynamic_libs + shared_link_deps,
+ self.WriteList(dynamic_libs + shared_link_deps,
'LOCAL_SHARED_LIBRARIES')
@@ -1083,10 +1056,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
os.path.dirname(makefile_path))
include_list.add(mkfile_rel_path)
- # Some tools need to know the absolute path of the top directory.
- root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n')
- root_makefile.write('GYP_DEFAULT_CONFIGURATION := %s\n' %
- default_configuration)
+ root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
# Write out the sorted list of includes.
root_makefile.write('\n')
diff --git a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
index f8480dd284..927ba6ebad 100644
--- a/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/tools/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -45,19 +45,7 @@ def CalculateVariables(default_variables, params):
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
- # Set a variable so conditions can be based on msvs_version.
- msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
- default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
- # To determine processor word size on Windows, in addition to checking
- # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
- # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
- # contains the actual word size of the system when running thru WOW64).
- if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
- '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
- default_variables['MSVS_OS_BITS'] = 64
- else:
- default_variables['MSVS_OS_BITS'] = 32
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
diff --git a/tools/gyp/pylib/gyp/generator/eclipse.py b/tools/gyp/pylib/gyp/generator/eclipse.py
index 08425da8e8..a80edc8929 100644
--- a/tools/gyp/pylib/gyp/generator/eclipse.py
+++ b/tools/gyp/pylib/gyp/generator/eclipse.py
@@ -22,6 +22,7 @@ import os.path
import subprocess
import gyp
import gyp.common
+import gyp.msvs_emulation
import shlex
generator_wants_static_library_dependencies_adjusted = False
@@ -52,7 +53,18 @@ def CalculateVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
- default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+ flavor = gyp.common.GetFlavor(params)
+ default_variables.setdefault('OS', flavor)
+ if flavor == 'win':
+ # Copy additional generator configuration data from VS, which is shared
+ # by the Eclipse generator.
+ import gyp.generator.msvs as msvs_generator
+ generator_additional_non_configuration_keys = getattr(msvs_generator,
+ 'generator_additional_non_configuration_keys', [])
+ generator_additional_path_sections = getattr(msvs_generator,
+ 'generator_additional_path_sections', [])
+
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
@@ -65,7 +77,7 @@ def CalculateGeneratorInputInfo(params):
def GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs, config_name):
+ shared_intermediate_dirs, config_name, params):
"""Calculate the set of include directories to be used.
Returns:
@@ -76,6 +88,9 @@ def GetAllIncludeDirectories(target_list, target_dicts,
gyp_includes_set = set()
compiler_includes_list = []
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'win':
+ generator_flags = params.get('generator_flags', {})
for target_name in target_list:
target = target_dicts[target_name]
if config_name in target['configurations']:
@@ -85,7 +100,11 @@ def GetAllIncludeDirectories(target_list, target_dicts,
# may be done in gyp files to force certain includes to come at the end.
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
# remove this.
- cflags = config['cflags']
+ if flavor == 'win':
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ cflags = msvs_settings.GetCflags(config_name)
+ else:
+ cflags = config['cflags']
for cflag in cflags:
include_dir = ''
if cflag.startswith('-I'):
@@ -146,7 +165,7 @@ def GetCompilerPath(target_list, target_dicts, data):
return 'gcc'
-def GetAllDefines(target_list, target_dicts, data, config_name):
+def GetAllDefines(target_list, target_dicts, data, config_name, params):
"""Calculate the defines for a project.
Returns:
@@ -156,22 +175,33 @@ def GetAllDefines(target_list, target_dicts, data, config_name):
# Get defines declared in the gyp files.
all_defines = {}
+ flavor = gyp.common.GetFlavor(params)
+ if flavor == 'win':
+ generator_flags = params.get('generator_flags', {})
for target_name in target_list:
target = target_dicts[target_name]
+ if flavor == 'win':
+ msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+ extra_defines = msvs_settings.GetComputedDefines(config_name)
+ else:
+ extra_defines = []
if config_name in target['configurations']:
config = target['configurations'][config_name]
- for define in config['defines']:
- split_define = define.split('=', 1)
- if len(split_define) == 1:
- split_define.append('1')
- if split_define[0].strip() in all_defines:
- # Already defined
- continue
-
- all_defines[split_define[0].strip()] = split_define[1].strip()
-
+ target_defines = config['defines']
+ else:
+ target_defines = []
+ for define in target_defines + extra_defines:
+ split_define = define.split('=', 1)
+ if len(split_define) == 1:
+ split_define.append('1')
+ if split_define[0].strip() in all_defines:
+ # Already defined
+ continue
+ all_defines[split_define[0].strip()] = split_define[1].strip()
# Get default compiler defines (if possible).
+ if flavor == 'win':
+ return all_defines # Default defines already processed in the loop above.
cc_target = GetCompilerPath(target_list, target_dicts, data)
if cc_target:
command = shlex.split(cc_target)
@@ -250,9 +280,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
'GNU C++', 'GNU C', 'Assembly']
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs, config_name)
+ shared_intermediate_dirs, config_name,
+ params)
WriteIncludePaths(out, eclipse_langs, include_dirs)
- defines = GetAllDefines(target_list, target_dicts, data, config_name)
+ defines = GetAllDefines(target_list, target_dicts, data, config_name, params)
WriteMacros(out, eclipse_langs, defines)
out.write('</cdtprojectproperties>\n')
diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py
index 9806c64a8f..7bef217f67 100644
--- a/tools/gyp/pylib/gyp/generator/make.py
+++ b/tools/gyp/pylib/gyp/generator/make.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
+# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -166,15 +166,11 @@ cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-# TODO(thakis): Find out and document the difference between shared_library and
-# loadable_module on mac.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-# TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
-# -bundle -single_module here (for osmesa.so).
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
LINK_COMMANDS_ANDROID = """\
@@ -205,6 +201,24 @@ cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(T
"""
+LINK_COMMANDS_AIX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+
# Header of toplevel Makefile.
# This should go into the build tree, but it's easier to keep it here for now.
SHARED_HEADER = ("""\
@@ -250,6 +264,14 @@ all_deps :=
%(make_global_settings)s
+CC.target ?= %(CC.target)s
+CFLAGS.target ?= $(CFLAGS)
+CXX.target ?= %(CXX.target)s
+CXXFLAGS.target ?= $(CXXFLAGS)
+LINK.target ?= %(LINK.target)s
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+
# C++ apps need to be linked with g++.
#
# Note: flock is used to seralize linking. Linking is a memory-intensive
@@ -261,14 +283,6 @@ all_deps :=
# This will allow make to invoke N linker processes as specified in -jN.
LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
-CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CFLAGS)
-CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CXXFLAGS)
-LINK.target ?= %(LINK.target)s
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s
@@ -483,14 +497,6 @@ quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
"""
-SHARED_HEADER_SUN_COMMANDS = """
-# gyp-sun-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_sun_tool = SUNTOOL $(4) $<
-cmd_sun_tool = ./gyp-sun-tool $(4) $< "$@"
-"""
-
def WriteRootHeaderSuffixRules(writer):
extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
@@ -978,7 +984,13 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions)
+ # Spaces in rule filenames are not supported, but rule variables have
+ # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
+ # The spaces within the variables are valid, so remove the variables
+ # before checking.
+ variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
for output in outputs:
+ output = re.sub(variables_with_spaces, '', output)
assert ' ' not in output, (
"Spaces in rule filenames not yet supported (%s)" % output)
self.WriteLn('all_deps += %s' % ' '.join(outputs))
@@ -1402,7 +1414,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
gyp_to_build = gyp.common.InvertRelativePath(self.path)
- target_postbuild = self.xcode_settings.GetTargetPostbuilds(
+ target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
configname,
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output))),
@@ -1413,12 +1425,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
else:
ldflags = config.get('ldflags', [])
# Compute an rpath for this output if needed.
- if any(dep.endswith('.so') for dep in deps):
+ if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
# We want to get the literal string "$ORIGIN" into the link command,
# so we need lots of escaping.
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
self.toolset)
+ library_dirs = config.get('library_dirs', [])
+ ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
if self.flavor == 'mac':
self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
@@ -1881,13 +1895,15 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
options = params['options']
build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
for filename in params['build_files_arg']]
+
gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
options.toplevel_dir)
if not gyp_binary.startswith(os.sep):
gyp_binary = os.path.join('.', gyp_binary)
+
root_makefile.write(
"quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
- "cmd_regen_makefile = %(cmd)s\n"
+ "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
"%(makefile_name)s: %(deps)s\n"
"\t$(call do_cmd,regen_makefile)\n\n" % {
'makefile_name': makefile_name,
@@ -1980,25 +1996,30 @@ def GenerateOutput(target_list, target_dicts, data, params):
})
elif flavor == 'solaris':
header_params.update({
- 'flock': './gyp-sun-tool flock',
+ 'flock': './gyp-flock-tool flock',
'flock_index': 2,
- 'extra_commands': SHARED_HEADER_SUN_COMMANDS,
})
elif flavor == 'freebsd':
# Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
header_params.update({
'flock': 'lockf',
})
+ elif flavor == 'aix':
+ header_params.update({
+ 'link_commands': LINK_COMMANDS_AIX,
+ 'flock': './gyp-flock-tool flock',
+ 'flock_index': 2,
+ })
header_params.update({
'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
- 'LINK.target': GetEnvironFallback(('LD_target', 'LD'), '$(LINK)'),
+ 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
- 'LINK.host': GetEnvironFallback(('LD_host',), 'g++'),
+ 'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
})
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py
index 51acf2eb3e..2f2c3cffd6 100644
--- a/tools/gyp/pylib/gyp/generator/msvs.py
+++ b/tools/gyp/pylib/gyp/generator/msvs.py
@@ -66,6 +66,10 @@ generator_additional_non_configuration_keys = [
'msvs_cygwin_shell',
'msvs_large_pdb',
'msvs_shard',
+ 'msvs_external_builder',
+ 'msvs_external_builder_out_dir',
+ 'msvs_external_builder_build_cmd',
+ 'msvs_external_builder_clean_cmd',
]
@@ -221,7 +225,7 @@ def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
tool = tools[tool_name]
if tool.get(setting):
if only_if_unset: return
- if type(tool[setting]) == list:
+ if type(tool[setting]) == list and type(value) == list:
tool[setting] += value
else:
raise TypeError(
@@ -459,8 +463,7 @@ def _FindRuleTriggerFiles(rule, sources):
Returns:
The list of sources that trigger a particular rule.
"""
- rule_ext = rule['extension']
- return [s for s in sources if s.endswith('.' + rule_ext)]
+ return rule.get('rule_sources', [])
def _RuleInputsAndOutputs(rule, trigger_file):
@@ -784,10 +787,10 @@ def _GenerateRulesForMSVS(p, output_dir, options, spec,
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
- _AdjustSourcesForRules(rules, sources, excluded_sources)
+ _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
-def _AdjustSourcesForRules(rules, sources, excluded_sources):
+def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
# Add outputs generated by each rule (if applicable).
for rule in rules:
# Done if not processing outputs as sources.
@@ -800,7 +803,8 @@ def _AdjustSourcesForRules(rules, sources, excluded_sources):
outputs = set(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
- excluded_sources.update(inputs)
+ if not spec.get('msvs_external_builder'):
+ excluded_sources.update(inputs)
sources.update(outputs)
@@ -1027,12 +1031,13 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
spec: The target dictionary containing the properties of the target.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
- config: The dictionnary that defines the special processing to be done
+ config: The dictionary that defines the special processing to be done
for this configuration.
"""
# Get the information for this configuration
include_dirs, resource_include_dirs = _GetIncludeDirs(config)
libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(config)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
defines = _GetDefines(config)
defines = [_EscapeCppDefineForMSVS(d) for d in defines]
@@ -1062,6 +1067,8 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
'AdditionalIncludeDirectories', resource_include_dirs)
# Add in libraries.
_ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
+ _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
+ library_dirs)
if out_file:
_ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
# Add defines.
@@ -1101,7 +1108,7 @@ def _GetIncludeDirs(config):
"""Returns the list of directories to be used for #include directives.
Arguments:
- config: The dictionnary that defines the special processing to be done
+ config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
@@ -1117,6 +1124,21 @@ def _GetIncludeDirs(config):
return include_dirs, resource_include_dirs
+def _GetLibraryDirs(config):
+ """Returns the list of directories to be used for library search paths.
+
+ Arguments:
+ config: The dictionary that defines the special processing to be done
+ for this configuration.
+ Returns:
+ The list of directory paths.
+ """
+
+ library_dirs = config.get('library_dirs', [])
+ library_dirs = _FixPaths(library_dirs)
+ return library_dirs
+
+
def _GetLibraries(spec):
"""Returns the list of libraries for this configuration.
@@ -1181,11 +1203,29 @@ def _GetOutputFilePathAndTool(spec, msbuild):
return out_file, vc_tool, msbuild_tool
+def _GetOutputTargetExt(spec):
+ """Returns the extension for this target, including the dot
+
+ If product_extension is specified, set target_extension to this to avoid
+ MSB8012, returns None otherwise. Ignores any target_extension settings in
+ the input files.
+
+ Arguments:
+ spec: The target dictionary containing the properties of the target.
+ Returns:
+ A string with the extension, or None
+ """
+ target_extension = spec.get('product_extension')
+ if target_extension:
+ return '.' + target_extension
+ return None
+
+
def _GetDefines(config):
"""Returns the list of preprocessor definitions for this configuation.
Arguments:
- config: The dictionnary that defines the special processing to be done
+ config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of preprocessor definitions.
@@ -1222,7 +1262,7 @@ def _ConvertToolsToExpectedForm(tools):
"""Convert tools to a form expected by Visual Studio.
Arguments:
- tools: A dictionnary of settings; the tool name is the key.
+ tools: A dictionary of settings; the tool name is the key.
Returns:
A list of Tool objects.
"""
@@ -1251,8 +1291,8 @@ def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
Arguments:
p: The target project being generated.
spec: the target project dict.
- tools: A dictionnary of settings; the tool name is the key.
- config: The dictionnary that defines the special processing to be done
+ tools: A dictionary of settings; the tool name is the key.
+ config: The dictionary that defines the special processing to be done
for this configuration.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
@@ -1324,7 +1364,8 @@ def _PrepareListOfSources(spec, generator_flags, gyp_file):
# Add all inputs to sources and excluded sources.
inputs = set(inputs)
sources.update(inputs)
- excluded_sources.update(inputs)
+ if not spec.get('msvs_external_builder'):
+ excluded_sources.update(inputs)
if int(a.get('process_outputs_as_sources', False)):
_AddNormalizedSources(sources, a.get('outputs', []))
# Add in 'copies' inputs and outputs.
@@ -1695,14 +1736,58 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
obj.set_msbuild_toolset(
_GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
projects[qualified_target] = obj
- # Set all the dependencies
+ # Set all the dependencies, but not if we are using an external builder like
+ # ninja
for project in projects.values():
- deps = project.spec.get('dependencies', [])
- deps = [projects[d] for d in deps]
- project.set_dependencies(deps)
+ if not project.spec.get('msvs_external_builder'):
+ deps = project.spec.get('dependencies', [])
+ deps = [projects[d] for d in deps]
+ project.set_dependencies(deps)
return projects
+def _InitNinjaFlavor(options, target_list, target_dicts):
+ """Initialize targets for the ninja flavor.
+
+ This sets up the necessary variables in the targets to generate msvs projects
+ that use ninja as an external builder. The variables in the spec are only set
+ if they have not been set. This allows individual specs to override the
+ default values initialized here.
+ Arguments:
+ options: Options provided to the generator.
+ target_list: List of target pairs: 'base/base.gyp:base'.
+ target_dicts: Dict of target properties keyed on target pair.
+ """
+ for qualified_target in target_list:
+ spec = target_dicts[qualified_target]
+ if spec.get('msvs_external_builder'):
+ # The spec explicitly defined an external builder, so don't change it.
+ continue
+
+ path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
+
+ spec['msvs_external_builder'] = 'ninja'
+ if not spec.get('msvs_external_builder_out_dir'):
+ spec['msvs_external_builder_out_dir'] = \
+ options.depth + '/out/$(Configuration)'
+ if not spec.get('msvs_external_builder_build_cmd'):
+ spec['msvs_external_builder_build_cmd'] = [
+ path_to_ninja,
+ '-C',
+ '$(OutDir)',
+ '$(ProjectName)',
+ ]
+ if not spec.get('msvs_external_builder_clean_cmd'):
+ spec['msvs_external_builder_clean_cmd'] = [
+ path_to_ninja,
+ '-C',
+ '$(OutDir)',
+ '-t',
+ 'clean',
+ '$(ProjectName)',
+ ]
+
+
def CalculateVariables(default_variables, params):
"""Generated variables that require params to be known."""
@@ -1727,6 +1812,9 @@ def CalculateVariables(default_variables, params):
else:
default_variables['MSVS_OS_BITS'] = 32
+ if gyp.common.GetFlavor(params) == 'ninja':
+ default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
+
def PerformBuild(data, configurations, params):
options = params['options']
@@ -1774,6 +1862,10 @@ def GenerateOutput(target_list, target_dicts, data, params):
(target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables)
+ # Optionally configure each spec to use ninja as the external builder.
+ if params.get('flavor') == 'ninja':
+ _InitNinjaFlavor(options, target_list, target_dicts)
+
# Prepare the set of configurations.
configs = set()
for qualified_target in target_list:
@@ -1964,7 +2056,7 @@ def _GenerateRulesForMSBuild(output_dir, options, spec,
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
- _AdjustSourcesForRules(rules, sources, excluded_sources)
+ _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
class MSBuildRule(object):
@@ -2560,6 +2652,10 @@ def _GetMSBuildAttributes(spec, config, build_file):
target_name = prefix + product_name
msbuild_attributes['TargetName'] = target_name
+ if spec.get('msvs_external_builder'):
+ external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
+ msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
+
# Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
# (depending on the tool used) to avoid MSB8012 warning.
msbuild_tool_map = {
@@ -2574,6 +2670,9 @@ def _GetMSBuildAttributes(spec, config, build_file):
out_file = msbuild_settings[msbuild_tool].get('OutputFile')
if out_file:
msbuild_attributes['TargetPath'] = _FixPath(out_file)
+ target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
+ if target_ext:
+ msbuild_attributes['TargetExt'] = target_ext
return msbuild_attributes
@@ -2609,6 +2708,9 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
if attributes.get('TargetPath'):
_AddConditionalProperty(properties, condition, 'TargetPath',
attributes['TargetPath'])
+ if attributes.get('TargetExt'):
+ _AddConditionalProperty(properties, condition, 'TargetExt',
+ attributes['TargetExt'])
if new_paths:
_AddConditionalProperty(properties, condition, 'ExecutablePath',
@@ -2727,7 +2829,9 @@ def _FinalizeMSBuildSettings(spec, configuration):
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
libraries = _GetLibraries(spec)
+ library_dirs = _GetLibraryDirs(configuration)
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
+ target_ext = _GetOutputTargetExt(spec)
defines = _GetDefines(configuration)
if converted:
# Visual Studio 2010 has TR1
@@ -2760,9 +2864,14 @@ def _FinalizeMSBuildSettings(spec, configuration):
# set, to prevent inheriting default libraries from the enviroment.
_ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
libraries)
+ _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
+ library_dirs)
if out_file:
_ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
only_if_unset=True)
+ if target_ext:
+ _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
+ only_if_unset=True)
# Add defines.
_ToolAppend(msbuild_settings, 'ClCompile',
'PreprocessorDefinitions', defines)
@@ -2778,7 +2887,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
_ToolAppend(msbuild_settings, 'ClCompile',
'PrecompiledHeaderFile', precompiled_header)
_ToolAppend(msbuild_settings, 'ClCompile',
- 'ForcedIncludeFiles', precompiled_header)
+ 'ForcedIncludeFiles', [precompiled_header])
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec['type'] == 'loadable_module':
@@ -2958,22 +3067,32 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
targets_files_of_rules = set()
extension_to_rule_name = {}
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
- _GenerateRulesForMSBuild(project_dir, options, spec,
- sources, excluded_sources,
- props_files_of_rules, targets_files_of_rules,
- actions_to_add, extension_to_rule_name)
+
+ # Don't generate rules if we are using an external builder like ninja.
+ if not spec.get('msvs_external_builder'):
+ _GenerateRulesForMSBuild(project_dir, options, spec,
+ sources, excluded_sources,
+ props_files_of_rules, targets_files_of_rules,
+ actions_to_add, extension_to_rule_name)
+ else:
+ rules = spec.get('rules', [])
+ _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+
sources, excluded_sources, excluded_idl = (
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
project_dir, sources,
excluded_sources,
list_excluded))
- _AddActions(actions_to_add, spec, project.build_file)
- _AddCopies(actions_to_add, spec)
- # NOTE: this stanza must appear after all actions have been decided.
- # Don't excluded sources with actions attached, or they won't run.
- excluded_sources = _FilterActionsFromExcluded(
- excluded_sources, actions_to_add)
+ # Don't add actions if we are using an external builder like ninja.
+ if not spec.get('msvs_external_builder'):
+ _AddActions(actions_to_add, spec, project.build_file)
+ _AddCopies(actions_to_add, spec)
+
+ # NOTE: this stanza must appear after all actions have been decided.
+ # Don't excluded sources with actions attached, or they won't run.
+ excluded_sources = _FilterActionsFromExcluded(
+ excluded_sources, actions_to_add)
exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
@@ -3022,6 +3141,9 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
content += import_cpp_targets_section
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
+ if spec.get('msvs_external_builder'):
+ content += _GetMSBuildExternalBuilderTargets(spec)
+
# TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
# has_run_as = _WriteMSVSUserFile(project.path, version, spec)
@@ -3030,6 +3152,31 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
return missing_sources
+def _GetMSBuildExternalBuilderTargets(spec):
+ """Return a list of MSBuild targets for external builders.
+
+ Right now, only "Build" and "Clean" targets are generated.
+
+ Arguments:
+ spec: The gyp target spec.
+ Returns:
+ List of MSBuild 'Target' specs.
+ """
+ build_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_build_cmd'],
+ False, False, False, False)
+ build_target = ['Target', {'Name': 'Build'}]
+ build_target.append(['Exec', {'Command': build_cmd}])
+
+ clean_cmd = _BuildCommandLineForRuleRaw(
+ spec, spec['msvs_external_builder_clean_cmd'],
+ False, False, False, False)
+ clean_target = ['Target', {'Name': 'Clean'}]
+ clean_target.append(['Exec', {'Command': clean_cmd}])
+
+ return [build_target, clean_target]
+
+
def _GetMSBuildExtensions(props_files_of_rules):
extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
for props_file in props_files_of_rules:
diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py
index c6bceaf382..23fb9b8afd 100644
--- a/tools/gyp/pylib/gyp/generator/ninja.py
+++ b/tools/gyp/pylib/gyp/generator/ninja.py
@@ -4,6 +4,7 @@
import copy
import hashlib
+import json
import multiprocessing
import os.path
import re
@@ -15,6 +16,7 @@ import gyp.common
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
+from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
@@ -56,6 +58,7 @@ generator_default_variables = {
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
+generator_filelist_paths = None
# TODO: figure out how to not build extra host objects in the non-cross-compile
# case when this is enabled, and enable unconditionally.
@@ -97,6 +100,12 @@ def Define(d, flavor):
return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
+def AddArch(output, arch):
+ """Adds an arch string to an output path."""
+ output, extension = os.path.splitext(output)
+ return '%s.%s%s' % (output, arch, extension)
+
+
class Target:
"""Target represents the paths used within a single gyp target.
@@ -203,7 +212,8 @@ class Target:
class NinjaWriter:
def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
- output_file, flavor, toplevel_dir=None):
+ output_file, toplevel_build, output_file_name, flavor,
+ toplevel_dir=None):
"""
base_dir: path from source root to directory containing this gyp file,
by gyp semantics, all input paths are relative to this
@@ -216,6 +226,9 @@ class NinjaWriter:
self.base_dir = base_dir
self.build_dir = build_dir
self.ninja = ninja_syntax.Writer(output_file)
+ self.toplevel_build = toplevel_build
+ self.output_file_name = output_file_name
+
self.flavor = flavor
self.abs_build_dir = None
if toplevel_dir is not None:
@@ -293,7 +306,7 @@ class NinjaWriter:
expanded = os.path.normpath(expanded)
return expanded
if '$|' in path:
- path = self.ExpandSpecial(path)
+ path = self.ExpandSpecial(path)
assert '$' not in path, path
return os.path.normpath(os.path.join(self.build_to_base, path))
@@ -345,8 +358,11 @@ class NinjaWriter:
self.ninja.newline()
return targets[0]
- def WriteSpec(self, spec, config_name, generator_flags,
- case_sensitive_filesystem):
+ def _SubninjaNameForArch(self, arch):
+ output_file_base = os.path.splitext(self.output_file_name)[0]
+ return '%s.%s.ninja' % (output_file_base, arch)
+
+ def WriteSpec(self, spec, config_name, generator_flags):
"""The main entry point for NinjaWriter: write the build rules for a spec.
Returns a Target object, which represents the output paths for this spec.
@@ -360,6 +376,9 @@ class NinjaWriter:
self.target = Target(spec['type'])
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
+ # Track if this target contains any C++ files, to decide if gcc or g++
+ # should be used for linking.
+ self.uses_cpp = False
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None
@@ -370,6 +389,18 @@ class NinjaWriter:
generator_flags)
arch = self.msvs_settings.GetArch(config_name)
self.ninja.variable('arch', self.win_env[arch])
+ self.ninja.variable('cc', '$cl_' + arch)
+ self.ninja.variable('cxx', '$cl_' + arch)
+
+ if self.flavor == 'mac':
+ self.archs = self.xcode_settings.GetActiveArchs(config_name)
+ if len(self.archs) > 1:
+ self.arch_subninjas = dict(
+ (arch, ninja_syntax.Writer(
+ OpenOutput(os.path.join(self.toplevel_build,
+ self._SubninjaNameForArch(arch)),
+ 'w')))
+ for arch in self.archs)
# Compute predepends for all rules.
# actions_depends is the dependencies this target depends on before running
@@ -411,8 +442,14 @@ class NinjaWriter:
# Write out the compilation steps, if any.
link_deps = []
- sources = spec.get('sources', []) + extra_sources
+ sources = extra_sources + spec.get('sources', [])
if sources:
+ if self.flavor == 'mac' and len(self.archs) > 1:
+ # Write subninja file containing compile and link commands scoped to
+ # a single arch if a fat binary is being built.
+ for arch in self.archs:
+ self.ninja.subninja(self._SubninjaNameForArch(arch))
+
pch = None
if self.flavor == 'win':
gyp.msvs_emulation.VerifyMissingSources(
@@ -425,17 +462,24 @@ class NinjaWriter:
self.xcode_settings, self.GypPathToNinja,
lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
link_deps = self.WriteSources(
- config_name, config, sources, compile_depends_stamp, pch,
- case_sensitive_filesystem, spec)
+ self.ninja, config_name, config, sources, compile_depends_stamp, pch,
+ spec)
# Some actions/rules output 'sources' that are already object files.
- link_deps += [self.GypPathToNinja(f)
- for f in sources if f.endswith(self.obj_ext)]
+ obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
+ if obj_outputs:
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
+ else:
+ print "Warning: Actions/rules writing object files don't work with " \
+ "multiarch targets, dropping. (target %s)" % spec['target_name']
+
if self.flavor == 'win' and self.target.type == 'static_library':
self.target.component_objs = link_deps
# Write out a link step, if needed.
output = None
+ is_empty_bundle = not link_deps and not mac_bundle_depends
if link_deps or self.target.actions_stamp or actions_depends:
output = self.WriteTarget(spec, config_name, config, link_deps,
self.target.actions_stamp or actions_depends)
@@ -444,7 +488,7 @@ class NinjaWriter:
# Bundle all of the above together, if needed.
if self.is_mac_bundle:
- output = self.WriteMacBundle(spec, mac_bundle_depends)
+ output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
if not output:
return None
@@ -491,6 +535,10 @@ class NinjaWriter:
"""Write out the Actions, Rules, and Copies steps. Return a path
representing the outputs of these steps."""
outputs = []
+ if self.is_mac_bundle:
+ mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
+ else:
+ mac_bundle_resources = []
extra_mac_bundle_resources = []
if 'actions' in spec:
@@ -498,6 +546,7 @@ class NinjaWriter:
extra_mac_bundle_resources)
if 'rules' in spec:
outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
+ mac_bundle_resources,
extra_mac_bundle_resources)
if 'copies' in spec:
outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
@@ -508,9 +557,8 @@ class NinjaWriter:
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
- mac_bundle_resources = spec.get('mac_bundle_resources', []) + \
- extra_mac_bundle_resources
- self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends)
+ self.WriteMacBundleResources(
+ extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
self.WriteMacInfoPlist(mac_bundle_depends)
return stamp
@@ -567,7 +615,7 @@ class NinjaWriter:
return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
- extra_mac_bundle_resources):
+ mac_bundle_resources, extra_mac_bundle_resources):
env = self.GetSortedXcodeEnv()
all_outputs = []
for rule in rules:
@@ -607,6 +655,7 @@ class NinjaWriter:
# For each source file, write an edge that generates all the outputs.
for source in rule.get('rule_sources', []):
+ source = os.path.normpath(source)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
@@ -620,15 +669,27 @@ class NinjaWriter:
if int(rule.get('process_outputs_as_sources', False)):
extra_sources += outputs
- if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+
+ was_mac_bundle_resource = source in mac_bundle_resources
+ if was_mac_bundle_resource or \
+ int(rule.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += outputs
+ # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
+ # items in a set and remove them all in a single pass if this becomes
+ # a performance issue.
+ if was_mac_bundle_resource:
+ mac_bundle_resources.remove(source)
extra_bindings = []
for var in needed_variables:
if var == 'root':
extra_bindings.append(('root', cygwin_munge(root)))
elif var == 'dirname':
- extra_bindings.append(('dirname', cygwin_munge(dirname)))
+ # '$dirname' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
+ extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
elif var == 'source':
# '$source' is a parameter to the rule action, which means
# it shouldn't be converted to a Ninja path. But we don't
@@ -682,8 +743,9 @@ class NinjaWriter:
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
for output, res in gyp.xcode_emulation.GetMacBundleResources(
- self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
+ generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
+ output = self.ExpandSpecial(output)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource')])
bundle_depends.append(output)
@@ -691,38 +753,56 @@ class NinjaWriter:
def WriteMacInfoPlist(self, bundle_depends):
"""Write build rules for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
- self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
+ generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, self.GypPathToNinja)
if not info_plist:
return
+ out = self.ExpandSpecial(out)
if defines:
# Create an intermediate file to store preprocessed results.
intermediate_plist = self.GypPathToUniqueOutput(
os.path.basename(info_plist))
defines = ' '.join([Define(d, self.flavor) for d in defines])
- info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist,
- variables=[('defines',defines)])
+ info_plist = self.ninja.build(
+ intermediate_plist, 'preprocess_infoplist', info_plist,
+ variables=[('defines',defines)])
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
env = self.ComputeExportEnvString(env)
- self.ninja.build(out, 'mac_tool', info_plist,
- variables=[('mactool_cmd', 'copy-info-plist'),
- ('env', env)])
+ keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
+ keys = QuoteShellArgument(json.dumps(keys), self.flavor)
+ self.ninja.build(out, 'copy_infoplist', info_plist,
+ variables=[('env', env), ('keys', keys)])
bundle_depends.append(out)
- def WriteSources(self, config_name, config, sources, predepends,
- precompiled_header, case_sensitive_filesystem, spec):
+ def WriteSources(self, ninja_file, config_name, config, sources, predepends,
+ precompiled_header, spec):
"""Write build rules to compile all of |sources|."""
if self.toolset == 'host':
self.ninja.variable('ar', '$ar_host')
self.ninja.variable('cc', '$cc_host')
self.ninja.variable('cxx', '$cxx_host')
self.ninja.variable('ld', '$ld_host')
+ self.ninja.variable('ldxx', '$ldxx_host')
+
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ return self.WriteSourcesForArch(
+ self.ninja, config_name, config, sources, predepends,
+ precompiled_header, spec)
+ else:
+ return dict((arch, self.WriteSourcesForArch(
+ self.arch_subninjas[arch], config_name, config, sources, predepends,
+ precompiled_header, spec, arch=arch))
+ for arch in self.archs)
+
+ def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
+ predepends, precompiled_header, spec, arch=None):
+ """Write build rules to compile all of |sources|."""
extra_defines = []
if self.flavor == 'mac':
- cflags = self.xcode_settings.GetCflags(config_name)
+ cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
cflags_c = self.xcode_settings.GetCflagsC(config_name)
cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
cflags_objc = ['$cflags_c'] + \
@@ -742,56 +822,70 @@ class NinjaWriter:
obj += '.' + self.toolset
pdbpath = os.path.normpath(os.path.join(obj, self.base_dir,
self.name + '.pdb'))
- self.WriteVariableList('pdbname', [pdbpath])
- self.WriteVariableList('pchprefix', [self.name])
+ self.WriteVariableList(ninja_file, 'pdbname', [pdbpath])
+ self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
else:
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cc = config.get('cflags_cc', [])
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ if self.toolset == 'target':
+ cflags_c = (os.environ.get('CPPFLAGS', '').split() +
+ os.environ.get('CFLAGS', '').split() + cflags_c)
+ cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
+ os.environ.get('CXXFLAGS', '').split() + cflags_cc)
+
defines = config.get('defines', []) + extra_defines
- self.WriteVariableList('defines', [Define(d, self.flavor) for d in defines])
+ self.WriteVariableList(ninja_file, 'defines',
+ [Define(d, self.flavor) for d in defines])
if self.flavor == 'win':
- self.WriteVariableList('rcflags',
+ self.WriteVariableList(ninja_file, 'rcflags',
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
for f in self.msvs_settings.GetRcflags(config_name,
self.GypPathToNinja)])
include_dirs = config.get('include_dirs', [])
+ env = self.GetSortedXcodeEnv()
if self.flavor == 'win':
+ env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
+ config=config_name)
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
config_name)
- self.WriteVariableList('includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i), self.flavor)
+ self.WriteVariableList(ninja_file, 'includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
for i in include_dirs])
- pch_commands = precompiled_header.GetPchBuildCommands()
+ pch_commands = precompiled_header.GetPchBuildCommands(arch)
if self.flavor == 'mac':
- self.WriteVariableList('cflags_pch_c',
- [precompiled_header.GetInclude('c')])
- self.WriteVariableList('cflags_pch_cc',
- [precompiled_header.GetInclude('cc')])
- self.WriteVariableList('cflags_pch_objc',
- [precompiled_header.GetInclude('m')])
- self.WriteVariableList('cflags_pch_objcc',
- [precompiled_header.GetInclude('mm')])
-
- self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags))
- self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c))
- self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc))
+ # Most targets use no precompiled headers, so only write these if needed.
+ for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
+ ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
+ include = precompiled_header.GetInclude(ext, arch)
+ if include: ninja_file.variable(var, include)
+
+ self.WriteVariableList(ninja_file, 'cflags',
+ map(self.ExpandSpecial, cflags))
+ self.WriteVariableList(ninja_file, 'cflags_c',
+ map(self.ExpandSpecial, cflags_c))
+ self.WriteVariableList(ninja_file, 'cflags_cc',
+ map(self.ExpandSpecial, cflags_cc))
if self.flavor == 'mac':
- self.WriteVariableList('cflags_objc', map(self.ExpandSpecial,
- cflags_objc))
- self.WriteVariableList('cflags_objcc', map(self.ExpandSpecial,
- cflags_objcc))
- self.ninja.newline()
+ self.WriteVariableList(ninja_file, 'cflags_objc',
+ map(self.ExpandSpecial, cflags_objc))
+ self.WriteVariableList(ninja_file, 'cflags_objcc',
+ map(self.ExpandSpecial, cflags_objcc))
+ ninja_file.newline()
outputs = []
+ has_rc_source = False
for source in sources:
filename, ext = os.path.splitext(source)
ext = ext[1:]
obj_ext = self.obj_ext
if ext in ('cc', 'cpp', 'cxx'):
command = 'cxx'
+ self.uses_cpp = True
elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
command = 'cc'
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
@@ -808,37 +902,41 @@ class NinjaWriter:
command = 'objc'
elif self.flavor == 'mac' and ext == 'mm':
command = 'objcxx'
+ self.uses_cpp = True
elif self.flavor == 'win' and ext == 'rc':
command = 'rc'
obj_ext = '.res'
+ has_rc_source = True
else:
# Ignore unhandled extensions.
continue
input = self.GypPathToNinja(source)
output = self.GypPathToUniqueOutput(filename + obj_ext)
- # Ninja's depfile handling gets confused when the case of a filename
- # changes on a case-insensitive file system. To work around that, always
- # convert .o filenames to lowercase on such file systems. See
- # https://github.com/martine/ninja/issues/402 for details.
- if not case_sensitive_filesystem:
- output = output.lower()
- implicit = precompiled_header.GetObjDependencies([input], [output])
+ if arch is not None:
+ output = AddArch(output, arch)
+ implicit = precompiled_header.GetObjDependencies([input], [output], arch)
variables = []
if self.flavor == 'win':
variables, output, implicit = precompiled_header.GetFlagsModifications(
input, output, implicit, command, cflags_c, cflags_cc,
self.ExpandSpecial)
- self.ninja.build(output, command, input,
+ ninja_file.build(output, command, input,
implicit=[gch for _, _, gch in implicit],
order_only=predepends, variables=variables)
outputs.append(output)
- self.WritePchTargets(pch_commands)
+ if has_rc_source:
+ resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+ self.WriteVariableList(ninja_file, 'resource_includes',
+ [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+ for i in resource_include_dirs])
- self.ninja.newline()
+ self.WritePchTargets(ninja_file, pch_commands)
+
+ ninja_file.newline()
return outputs
- def WritePchTargets(self, pch_commands):
+ def WritePchTargets(self, ninja_file, pch_commands):
"""Writes ninja rules to compile prefix headers."""
if not pch_commands:
return
@@ -853,16 +951,34 @@ class NinjaWriter:
map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
cmd = map.get(lang)
- self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)])
+ ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
"""Write out a link step. Fills out target.binary. """
-
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ return self.WriteLinkForArch(
+ self.ninja, spec, config_name, config, link_deps)
+ else:
+ output = self.ComputeOutput(spec)
+ inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
+ config_name, config, link_deps[arch],
+ arch=arch)
+ for arch in self.archs]
+ extra_bindings = []
+ if not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
+ self.ninja.build(output, 'lipo', inputs, variables=extra_bindings)
+ return output
+
+ def WriteLinkForArch(self, ninja_file, spec, config_name, config,
+ link_deps, arch=None):
+ """Write out a link step. Fills out target.binary. """
command = {
'executable': 'link',
'loadable_module': 'solink_module',
'shared_library': 'solink',
}[spec['type']]
+ command_suffix = ''
implicit_deps = set()
solibs = set()
@@ -879,61 +995,92 @@ class NinjaWriter:
continue
linkable = target.Linkable()
if linkable:
+ new_deps = []
if (self.flavor == 'win' and
target.component_objs and
self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
- extra_link_deps |= set(target.component_objs)
+ new_deps = target.component_objs
elif self.flavor == 'win' and target.import_lib:
- extra_link_deps.add(target.import_lib)
+ new_deps = [target.import_lib]
elif target.UsesToc(self.flavor):
solibs.add(target.binary)
implicit_deps.add(target.binary + '.TOC')
else:
- extra_link_deps.add(target.binary)
+ new_deps = [target.binary]
+ for new_dep in new_deps:
+ if new_dep not in extra_link_deps:
+ extra_link_deps.add(new_dep)
+ link_deps.append(new_dep)
final_output = target.FinalOutput()
if not linkable or final_output != target.binary:
implicit_deps.add(final_output)
- link_deps.extend(list(extra_link_deps))
-
extra_bindings = []
- if self.is_mac_bundle:
- output = self.ComputeMacBundleBinaryOutput()
- else:
- output = self.ComputeOutput(spec)
- extra_bindings.append(('postbuilds',
- self.GetPostbuildCommand(spec, output, output)))
+ if self.uses_cpp and self.flavor != 'win':
+ extra_bindings.append(('ld', '$ldxx'))
+
+ output = self.ComputeOutput(spec, arch)
+ if arch is None and not self.is_mac_bundle:
+ self.AppendPostbuildVariable(extra_bindings, spec, output, output)
is_executable = spec['type'] == 'executable'
+ # The ldflags config key is not used on mac or win. On those platforms
+ # linker flags are set via xcode_settings and msvs_settings, respectively.
+ env_ldflags = os.environ.get('LDFLAGS', '').split()
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(config_name,
self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
- self.GypPathToNinja)
+ self.GypPathToNinja, arch)
+ ldflags = env_ldflags + ldflags
elif self.flavor == 'win':
manifest_name = self.GypPathToUniqueOutput(
self.ComputeOutputFileName(spec))
ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name,
self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable)
- self.WriteVariableList('manifests', manifest_files)
+ ldflags = env_ldflags + ldflags
+ self.WriteVariableList(ninja_file, 'manifests', manifest_files)
+ command_suffix = _GetWinLinkRuleNameSuffix(
+ self.msvs_settings.IsEmbedManifest(config_name),
+ self.msvs_settings.IsLinkIncremental(config_name))
+ def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
+ if def_file:
+ implicit_deps.add(def_file)
else:
- ldflags = config.get('ldflags', [])
+ # Respect environment variables related to build, but target-specific
+ # flags can still override them.
+ ldflags = env_ldflags + config.get('ldflags', [])
if is_executable and len(solibs):
- ldflags.append('-Wl,-rpath=\$$ORIGIN/lib/')
- ldflags.append('-Wl,-rpath-link=lib/')
- self.WriteVariableList('ldflags',
- gyp.common.uniquer(map(self.ExpandSpecial,
- ldflags)))
+ rpath = 'lib/'
+ if self.toolset != 'target':
+ rpath += self.toolset
+ ldflags.append('-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ ldflags.append('-Wl,-rpath-link=%s' % rpath)
+ self.WriteVariableList(ninja_file, 'ldflags',
+ gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
+
+ library_dirs = config.get('library_dirs', [])
+ if self.flavor == 'win':
+ library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
+ for l in library_dirs]
+ library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
+ self.flavor)
+ for l in library_dirs]
+ else:
+ library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
+ self.flavor)
+ for l in library_dirs]
libraries = gyp.common.uniquer(map(self.ExpandSpecial,
spec.get('libraries', [])))
if self.flavor == 'mac':
- libraries = self.xcode_settings.AdjustLibraries(libraries)
+ libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
elif self.flavor == 'win':
libraries = self.msvs_settings.AdjustLibraries(libraries)
- self.WriteVariableList('libs', libraries)
- self.target.binary = output
+ self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
+
+ linked_binary = output
if command in ('solink', 'solink_module'):
extra_bindings.append(('soname', os.path.split(output)[1]))
@@ -946,56 +1093,80 @@ class NinjaWriter:
extra_bindings.append(('implibflag',
'/IMPLIB:%s' % self.target.import_lib))
output = [output, self.target.import_lib]
- else:
+ elif not self.is_mac_bundle:
output = [output, output + '.TOC']
+ else:
+ command = command + '_notoc'
if len(solibs):
extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
- self.ninja.build(output, command, link_deps,
+ ninja_file.build(output, command + command_suffix, link_deps,
implicit=list(implicit_deps),
variables=extra_bindings)
+ return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
- if spec['type'] == 'none':
+ extra_link_deps = any(self.target_outputs.get(dep).Linkable()
+ for dep in spec.get('dependencies', [])
+ if dep in self.target_outputs)
+ if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
# TODO(evan): don't call this function for 'none' target types, as
# it doesn't do anything, and we fake out a 'binary' with a stamp file.
self.target.binary = compile_deps
+ self.target.type = 'none'
elif spec['type'] == 'static_library':
self.target.binary = self.ComputeOutput(spec)
- variables = []
- postbuild = self.GetPostbuildCommand(
- spec, self.target.binary, self.target.binary)
- if postbuild:
- variables.append(('postbuilds', postbuild))
- if self.xcode_settings:
- variables.append(('libtool_flags',
- self.xcode_settings.GetLibtoolflags(config_name)))
- if (self.flavor not in ('mac', 'win') and not
+ if (self.flavor not in ('mac', 'openbsd', 'win') and not
self.is_standalone_static_library):
self.ninja.build(self.target.binary, 'alink_thin', link_deps,
- order_only=compile_deps, variables=variables)
+ order_only=compile_deps)
else:
+ variables = []
+ if self.xcode_settings:
+ libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
+ if libtool_flags:
+ variables.append(('libtool_flags', libtool_flags))
if self.msvs_settings:
libflags = self.msvs_settings.GetLibFlags(config_name,
self.GypPathToNinja)
variables.append(('libflags', libflags))
- self.ninja.build(self.target.binary, 'alink', link_deps,
- order_only=compile_deps, variables=variables)
+
+ if self.flavor != 'mac' or len(self.archs) == 1:
+ self.AppendPostbuildVariable(variables, spec,
+ self.target.binary, self.target.binary)
+ self.ninja.build(self.target.binary, 'alink', link_deps,
+ order_only=compile_deps, variables=variables)
+ else:
+ inputs = []
+ for arch in self.archs:
+ output = self.ComputeOutput(spec, arch)
+ self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
+ order_only=compile_deps,
+ variables=variables)
+ inputs.append(output)
+ # TODO: It's not clear if libtool_flags should be passed to the alink
+ # call that combines single-arch .a files into a fat .a file.
+ self.AppendPostbuildVariable(variables, spec,
+ self.target.binary, self.target.binary)
+ self.ninja.build(self.target.binary, 'alink', inputs,
+ # FIXME: test proving order_only=compile_deps isn't
+ # needed.
+ variables=variables)
else:
- self.WriteLink(spec, config_name, config, link_deps)
+ self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
return self.target.binary
- def WriteMacBundle(self, spec, mac_bundle_depends):
+ def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
assert self.is_mac_bundle
package_framework = spec['type'] in ('shared_library', 'loadable_module')
output = self.ComputeMacBundleOutput()
- postbuild = self.GetPostbuildCommand(spec, output, self.target.binary,
- is_command_start=not package_framework)
+ if is_empty:
+ output += '.stamp'
variables = []
- if postbuild:
- variables.append(('postbuilds', postbuild))
- if package_framework:
+ self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
+ is_command_start=not package_framework)
+ if package_framework and not is_empty:
variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
self.ninja.build(output, 'package_framework', mac_bundle_depends,
variables=variables)
@@ -1025,23 +1196,30 @@ class NinjaWriter:
postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
- def GetPostbuildCommand(self, spec, output, output_binary,
- is_command_start=False):
+ def AppendPostbuildVariable(self, variables, spec, output, binary,
+ is_command_start=False):
+ """Adds a 'postbuild' variable if there is a postbuild for |output|."""
+ postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
+ if postbuild:
+ variables.append(('postbuilds', postbuild))
+
+ def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
"""Returns a shell command that runs all the postbuilds, and removes
|output| if any of them fails. If |is_command_start| is False, then the
returned string will start with ' && '."""
if not self.xcode_settings or spec['type'] == 'none' or not output:
return ''
output = QuoteShellArgument(output, self.flavor)
- target_postbuilds = self.xcode_settings.GetTargetPostbuilds(
- self.config_name,
- os.path.normpath(os.path.join(self.base_to_build, output)),
- QuoteShellArgument(
- os.path.normpath(os.path.join(self.base_to_build, output_binary)),
- self.flavor),
- quiet=True)
postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
- postbuilds = target_postbuilds + postbuilds
+ if output_binary is not None:
+ postbuilds = self.xcode_settings.AddImplicitPostbuilds(
+ self.config_name,
+ os.path.normpath(os.path.join(self.base_to_build, output)),
+ QuoteShellArgument(
+ os.path.normpath(os.path.join(self.base_to_build, output_binary)),
+ self.flavor),
+ postbuilds, quiet=True)
+
if not postbuilds:
return ''
# Postbuilds expect to be run in the gyp file's directory, so insert an
@@ -1074,14 +1252,9 @@ class NinjaWriter:
def ComputeMacBundleOutput(self):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
- path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
- return os.path.join(path, self.xcode_settings.GetWrapperName())
-
- def ComputeMacBundleBinaryOutput(self):
- """Return the 'output' (full output path) to the binary in a bundle."""
- assert self.is_mac_bundle
- path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
- return os.path.join(path, self.xcode_settings.GetExecutablePath())
+ path = generator_default_variables['PRODUCT_DIR']
+ return self.ExpandSpecial(
+ os.path.join(path, self.xcode_settings.GetWrapperName()))
def ComputeOutputFileName(self, spec, type=None):
"""Compute the filename of the final output for the current target."""
@@ -1133,12 +1306,9 @@ class NinjaWriter:
else:
raise Exception('Unhandled output type %s' % type)
- def ComputeOutput(self, spec, type=None):
+ def ComputeOutput(self, spec, arch=None):
"""Compute the path for the final output of the spec."""
- assert not self.is_mac_bundle or type
-
- if not type:
- type = spec['type']
+ type = spec['type']
if self.flavor == 'win':
override = self.msvs_settings.GetOutputName(self.config_name,
@@ -1146,13 +1316,13 @@ class NinjaWriter:
if override:
return override
- if self.flavor == 'mac' and type in (
+ if arch is None and self.flavor == 'mac' and type in (
'static_library', 'executable', 'shared_library', 'loadable_module'):
filename = self.xcode_settings.GetExecutablePath()
else:
filename = self.ComputeOutputFileName(spec, type)
- if 'product_dir' in spec:
+ if arch is None and 'product_dir' in spec:
path = os.path.join(spec['product_dir'], filename)
return self.ExpandSpecial(path)
@@ -1164,7 +1334,14 @@ class NinjaWriter:
elif self.flavor == 'win' and self.toolset == 'target':
type_in_output_root += ['shared_library']
- if type in type_in_output_root or self.is_standalone_static_library:
+ if arch is not None:
+ # Make sure partial executables don't end up in a bundle or the regular
+ # output directory.
+ archdir = 'arch'
+ if self.toolset != 'target':
+ archdir = os.path.join('arch', '%s' % self.toolset)
+ return os.path.join(archdir, AddArch(filename, arch))
+ elif type in type_in_output_root or self.is_standalone_static_library:
return filename
elif type == 'shared_library':
libdir = 'lib'
@@ -1174,11 +1351,11 @@ class NinjaWriter:
else:
return self.GypPathToUniqueOutput(filename, qualified=False)
- def WriteVariableList(self, var, values):
+ def WriteVariableList(self, ninja_file, var, values):
assert not isinstance(values, str)
if values is None:
values = []
- self.ninja.variable(var, ' '.join(values))
+ ninja_file.variable(var, ' '.join(values))
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
"""Write out a new ninja "rule" statement for a given command.
@@ -1275,7 +1452,6 @@ def CalculateVariables(default_variables, params):
default_variables['STATIC_LIB_SUFFIX'] = '.lib'
default_variables['SHARED_LIB_PREFIX'] = ''
default_variables['SHARED_LIB_SUFFIX'] = '.dll'
- generator_flags = params.get('generator_flags', {})
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
@@ -1285,19 +1461,7 @@ def CalculateVariables(default_variables, params):
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
- # Set a variable so conditions can be based on msvs_version.
- msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
- default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
- # To determine processor word size on Windows, in addition to checking
- # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
- # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
- # contains the actual word size of the system when running thru WOW64).
- if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
- '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
- default_variables['MSVS_OS_BITS'] = 64
- else:
- default_variables['MSVS_OS_BITS'] = 32
+ gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
else:
operating_system = flavor
if flavor == 'android':
@@ -1309,6 +1473,32 @@ def CalculateVariables(default_variables, params):
default_variables.setdefault('LIB_DIR',
os.path.join('$!PRODUCT_DIR', 'obj'))
+def ComputeOutputDir(params):
+ """Returns the path from the toplevel_dir to the build output directory."""
+ # generator_dir: relative path from pwd to where make puts build files.
+ # Makes migrating from make to ninja easier, ninja doesn't put anything here.
+ generator_dir = os.path.relpath(params['options'].generator_output or '.')
+
+ # output_dir: relative path from generator_dir to the build directory.
+ output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+
+ # Relative path from source root to our output files. e.g. "out"
+ return os.path.normpath(os.path.join(generator_dir, output_dir))
+
+
+def CalculateGeneratorInputInfo(params):
+ """Called by __init__ to initialize generator values based on params."""
+ # E.g. "out/gypfiles"
+ toplevel = params['options'].toplevel_dir
+ qualified_out_dir = os.path.normpath(os.path.join(
+ toplevel, ComputeOutputDir(params), 'gypfiles'))
+
+ global generator_filelist_paths
+ generator_filelist_paths = {
+ 'toplevel': toplevel,
+ 'qualified_out_dir': qualified_out_dir,
+ }
+
def OpenOutput(path, mode='w'):
"""Open |path| for writing, creating directories if necessary."""
@@ -1326,32 +1516,149 @@ def CommandWithWrapper(cmd, wrappers, prog):
return prog
+def GetDefaultConcurrentLinks():
+ """Returns a best-guess for a number of concurrent links."""
+ if sys.platform in ('win32', 'cygwin'):
+ import ctypes
+
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ("dwLength", ctypes.c_ulong),
+ ("dwMemoryLoad", ctypes.c_ulong),
+ ("ullTotalPhys", ctypes.c_ulonglong),
+ ("ullAvailPhys", ctypes.c_ulonglong),
+ ("ullTotalPageFile", ctypes.c_ulonglong),
+ ("ullAvailPageFile", ctypes.c_ulonglong),
+ ("ullTotalVirtual", ctypes.c_ulonglong),
+ ("ullAvailVirtual", ctypes.c_ulonglong),
+ ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX()
+ stat.dwLength = ctypes.sizeof(stat)
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+ mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
+ hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+ return min(mem_limit, hard_cap)
+ elif sys.platform.startswith('linux'):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+ # Allow 8GB per link on Linux because Gold is quite memory hungry
+ return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+ return 1
+ elif sys.platform == 'darwin':
+ try:
+ avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+ # 4GB per ld process allows for some more bloat.
+ return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
+ except:
+ return 1
+ else:
+ # TODO(scottmg): Implement this for other platforms.
+ return 1
+
+
+def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental):
+ """Returns the suffix used to select an appropriate linking rule depending on
+ whether the manifest embedding and/or incremental linking is enabled."""
+ suffix = ''
+ if embed_manifest:
+ suffix += '_embed'
+ if link_incremental:
+ suffix += '_inc'
+ return suffix
+
+
+def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
+ """Adds link rules for Windows platform to |master_ninja|."""
+ def FullLinkCommand(ldcmd, out, binary_type):
+ cmd = ('cmd /c %(ldcmd)s'
+ ' && %(python)s gyp-win-tool manifest-wrapper $arch'
+ ' cmd /c if exist %(out)s.manifest del %(out)s.manifest'
+ ' && %(python)s gyp-win-tool manifest-wrapper $arch'
+ ' $mt -nologo -manifest $manifests')
+ if embed_manifest and not link_incremental:
+ # Embed manifest into a binary. If incremental linking is enabled,
+ # embedding is postponed to the re-linking stage (see below).
+ cmd += ' -outputresource:%(out)s;%(resname)s'
+ else:
+ # Save manifest as an external file.
+ cmd += ' -out:%(out)s.manifest'
+ if link_incremental:
+ # There is no point in generating separate rule for the case when
+ # incremental linking is enabled, but manifest embedding is disabled.
+ # In that case the basic rule should be used (e.g. 'link').
+ # See also implementation of _GetWinLinkRuleNameSuffix().
+ assert embed_manifest
+ # Make .rc file out of manifest, compile it to .res file and re-link.
+ cmd += (' && %(python)s gyp-win-tool manifest-to-rc $arch'
+ ' %(out)s.manifest %(out)s.manifest.rc %(resname)s'
+ ' && %(python)s gyp-win-tool rc-wrapper $arch $rc'
+ ' %(out)s.manifest.rc'
+ ' && %(ldcmd)s %(out)s.manifest.res')
+ resource_name = {
+ 'exe': '1',
+ 'dll': '2',
+ }[binary_type]
+ return cmd % {'python': sys.executable,
+ 'out': out,
+ 'ldcmd': ldcmd,
+ 'resname': resource_name}
+
+ rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental)
+ dlldesc = 'LINK%s(DLL) $dll' % rule_name_suffix.upper()
+ dllcmd = ('%s gyp-win-tool link-wrapper $arch '
+ '$ld /nologo $implibflag /DLL /OUT:$dll '
+ '/PDB:$dll.pdb @$dll.rsp' % sys.executable)
+ dllcmd = FullLinkCommand(dllcmd, '$dll', 'dll')
+ master_ninja.rule('solink' + rule_name_suffix,
+ description=dlldesc, command=dllcmd,
+ rspfile='$dll.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True,
+ pool='link_pool')
+ master_ninja.rule('solink_module' + rule_name_suffix,
+ description=dlldesc, command=dllcmd,
+ rspfile='$dll.rsp',
+ rspfile_content='$libs $in_newline $ldflags',
+ restat=True,
+ pool='link_pool')
+ # Note that ldflags goes at the end so that it has the option of
+ # overriding default settings earlier in the command line.
+ exe_cmd = ('%s gyp-win-tool link-wrapper $arch '
+ '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp' %
+ sys.executable)
+ exe_cmd = FullLinkCommand(exe_cmd, '$out', 'exe')
+ master_ninja.rule('link' + rule_name_suffix,
+ description='LINK%s $out' % rule_name_suffix.upper(),
+ command=exe_cmd,
+ rspfile='$out.rsp',
+ rspfile_content='$in_newline $libs $ldflags',
+ pool='link_pool')
+
+
def GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name):
options = params['options']
flavor = gyp.common.GetFlavor(params)
generator_flags = params.get('generator_flags', {})
- # generator_dir: relative path from pwd to where make puts build files.
- # Makes migrating from make to ninja easier, ninja doesn't put anything here.
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
-
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
- build_dir = os.path.normpath(os.path.join(generator_dir,
- output_dir,
- config_name))
+ build_dir = os.path.normpath(
+ os.path.join(ComputeOutputDir(params), config_name))
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
master_ninja = ninja_syntax.Writer(
OpenOutput(os.path.join(toplevel_build, 'build.ninja')),
width=120)
- case_sensitive_filesystem = not os.path.exists(
- os.path.join(toplevel_build, 'BUILD.NINJA'))
# Put build-time support tools in out/{config_name}.
gyp.common.CopyTool(flavor, toplevel_build)
@@ -1364,17 +1671,20 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set
# to cc/cxx.
if flavor == 'win':
+ # Overridden by local arch choice in the use_deps case.
+ # Chromium's ffmpeg c99conv.py currently looks for a 'cc =' line in
+ # build.ninja so needs something valid here. http://crbug.com/233985
cc = 'cl.exe'
cxx = 'cl.exe'
ld = 'link.exe'
- gyp.msvs_emulation.GenerateEnvironmentFiles(
- toplevel_build, generator_flags, OpenOutput)
ld_host = '$ld'
else:
cc = 'gcc'
cxx = 'g++'
- ld = '$cxx'
- ld_host = '$cxx_host'
+ ld = '$cc'
+ ldxx = '$cxx'
+ ld_host = '$cc_host'
+ ldxx_host = '$cxx_host'
cc_host = None
cxx_host = None
@@ -1385,40 +1695,40 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
make_global_settings = data[build_file].get('make_global_settings', [])
build_to_root = gyp.common.InvertRelativePath(build_dir,
options.toplevel_dir)
- flock = 'flock'
- if flavor == 'mac':
- flock = './gyp-mac-tool flock'
wrappers = {}
- if flavor != 'win':
- wrappers['LINK'] = flock + ' linker.lock'
for key, value in make_global_settings:
if key == 'CC':
cc = os.path.join(build_to_root, value)
if key == 'CXX':
cxx = os.path.join(build_to_root, value)
- if key == 'LD':
- ld = os.path.join(build_to_root, value)
if key == 'CC.host':
cc_host = os.path.join(build_to_root, value)
cc_host_global_setting = value
if key == 'CXX.host':
cxx_host = os.path.join(build_to_root, value)
cxx_host_global_setting = value
- if key == 'LD.host':
- ld_host = os.path.join(build_to_root, value)
if key.endswith('_wrapper'):
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
+ # Support wrappers from environment variables too.
+ for key, value in os.environ.iteritems():
+ if key.lower().endswith('_wrapper'):
+ key_prefix = key[:-len('_wrapper')]
+ key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
+ wrappers[key_prefix] = os.path.join(build_to_root, value)
+
+ if flavor == 'win':
+ cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, OpenOutput)
+ for arch, path in cl_paths.iteritems():
+ master_ninja.variable(
+ 'cl_' + arch, CommandWithWrapper('CC', wrappers,
+ QuoteShellArgument(path, flavor)))
+
cc = GetEnvironFallback(['CC_target', 'CC'], cc)
master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
- ld = GetEnvironFallback(['LD_target', 'LD'], ld)
-
- if not cc_host:
- cc_host = cc
- if not cxx_host:
- cxx_host = cxx
if flavor == 'win':
master_ninja.variable('ld', ld)
@@ -1427,41 +1737,54 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja.variable('rc', 'rc.exe')
master_ninja.variable('asm', 'ml.exe')
master_ninja.variable('mt', 'mt.exe')
- master_ninja.variable('use_dep_database', '1')
else:
master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
+ master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
- master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
- cc_host = GetEnvironFallback(['CC_host'], cc_host)
- cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
- ld_host = GetEnvironFallback(['LD_host'], ld_host)
-
- # The environment variable could be used in 'make_global_settings', like
- # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
- if '$(CC)' in cc_host and cc_host_global_setting:
- cc_host = cc_host_global_setting.replace('$(CC)', cc)
- if '$(CXX)' in cxx_host and cxx_host_global_setting:
- cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
- master_ninja.variable('cc_host',
- CommandWithWrapper('CC.host', wrappers, cc_host))
- master_ninja.variable('cxx_host',
- CommandWithWrapper('CXX.host', wrappers, cxx_host))
- if flavor == 'win':
- master_ninja.variable('ld_host', ld_host)
- else:
- master_ninja.variable('ld_host', CommandWithWrapper(
- 'LINK', wrappers, ld_host))
+ if generator_supports_multiple_toolsets:
+ if not cc_host:
+ cc_host = cc
+ if not cxx_host:
+ cxx_host = cxx
+
+ master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
+ cc_host = GetEnvironFallback(['CC_host'], cc_host)
+ cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
+
+ # The environment variable could be used in 'make_global_settings', like
+ # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
+ if '$(CC)' in cc_host and cc_host_global_setting:
+ cc_host = cc_host_global_setting.replace('$(CC)', cc)
+ if '$(CXX)' in cxx_host and cxx_host_global_setting:
+ cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
+ master_ninja.variable('cc_host',
+ CommandWithWrapper('CC.host', wrappers, cc_host))
+ master_ninja.variable('cxx_host',
+ CommandWithWrapper('CXX.host', wrappers, cxx_host))
+ if flavor == 'win':
+ master_ninja.variable('ld_host', ld_host)
+ else:
+ master_ninja.variable('ld_host', CommandWithWrapper(
+ 'LINK', wrappers, ld_host))
+ master_ninja.variable('ldxx_host', CommandWithWrapper(
+ 'LINK', wrappers, ldxx_host))
+
+ master_ninja.newline()
+ master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
master_ninja.newline()
+ deps = 'msvc' if flavor == 'win' else 'gcc'
+
if flavor != 'win':
master_ninja.rule(
'cc',
description='CC $out',
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'),
- depfile='$out.d')
+ depfile='$out.d',
+ deps=deps)
master_ninja.rule(
'cc_s',
description='CC $out',
@@ -1472,13 +1795,14 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='CXX $out',
command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
'$cflags_pch_cc -c $in -o $out'),
- depfile='$out.d')
+ depfile='$out.d',
+ deps=deps)
else:
- cc_command = ('ninja -t msvc -o $out -e $arch '
+ cc_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cc /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname ')
- cxx_command = ('ninja -t msvc -o $out -e $arch '
+ cxx_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cxx /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname ')
@@ -1486,16 +1810,16 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'cc',
description='CC $out',
command=cc_command,
- depfile='$out.d',
rspfile='$out.rsp',
- rspfile_content='$defines $includes $cflags $cflags_c')
+ rspfile_content='$defines $includes $cflags $cflags_c',
+ deps=deps)
master_ninja.rule(
'cxx',
description='CXX $out',
command=cxx_command,
- depfile='$out.d',
rspfile='$out.rsp',
- rspfile_content='$defines $includes $cflags $cflags_cc')
+ rspfile_content='$defines $includes $cflags $cflags_cc',
+ deps=deps)
master_ninja.rule(
'idl',
description='IDL $in',
@@ -1507,7 +1831,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='RC $in',
# Note: $in must be last otherwise rc.exe complains.
command=('%s gyp-win-tool rc-wrapper '
- '$arch $rc $defines $includes $rcflags /fo$out $in' %
+ '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
sys.executable))
master_ninja.rule(
'asm',
@@ -1549,18 +1873,22 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
restat=True,
command=(mtime_preserving_solink_base % {
'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
- '$libs'}))
+ '$libs'}),
+ pool='link_pool')
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib',
restat=True,
command=(mtime_preserving_solink_base % {
- 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group $libs'}))
+ 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
+ '$libs'}),
+ pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out',
command=('$ld $ldflags -o $out '
- '-Wl,--start-group $in $solibs -Wl,--end-group $libs'))
+ '-Wl,--start-group $in $solibs -Wl,--end-group $libs'),
+ pool='link_pool')
elif flavor == 'win':
master_ninja.rule(
'alink',
@@ -1570,51 +1898,27 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
sys.executable),
rspfile='$out.rsp',
rspfile_content='$in_newline $libflags')
- dlldesc = 'LINK(DLL) $dll'
- dllcmd = ('%s gyp-win-tool link-wrapper $arch '
- '$ld /nologo $implibflag /DLL /OUT:$dll '
- '/PDB:$dll.pdb @$dll.rsp' % sys.executable)
- dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
- 'cmd /c if exist $dll.manifest del $dll.manifest' %
- sys.executable)
- dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
- '$mt -nologo -manifest $manifests -out:$dll.manifest' %
- sys.executable)
- master_ninja.rule('solink', description=dlldesc, command=dllcmd,
- rspfile='$dll.rsp',
- rspfile_content='$libs $in_newline $ldflags',
- restat=True)
- master_ninja.rule('solink_module', description=dlldesc, command=dllcmd,
- rspfile='$dll.rsp',
- rspfile_content='$libs $in_newline $ldflags',
- restat=True)
- # Note that ldflags goes at the end so that it has the option of
- # overriding default settings earlier in the command line.
- master_ninja.rule(
- 'link',
- description='LINK $out',
- command=('%s gyp-win-tool link-wrapper $arch '
- '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && '
- '%s gyp-win-tool manifest-wrapper $arch '
- 'cmd /c if exist $out.manifest del $out.manifest && '
- '%s gyp-win-tool manifest-wrapper $arch '
- '$mt -nologo -manifest $manifests -out:$out.manifest' %
- (sys.executable, sys.executable, sys.executable)),
- rspfile='$out.rsp',
- rspfile_content='$in_newline $libs $ldflags')
+ _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=True)
+ _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=False)
+ _AddWinLinkRules(master_ninja, embed_manifest=False, link_incremental=False)
+ # Do not generate rules for embed_manifest=False and link_incremental=True
+ # because in that case rules for (False, False) should be used (see
+ # implementation of _GetWinLinkRuleNameSuffix()).
else:
master_ninja.rule(
'objc',
description='OBJC $out',
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
'$cflags_pch_objc -c $in -o $out'),
- depfile='$out.d')
+ depfile='$out.d',
+ deps=deps)
master_ninja.rule(
'objcxx',
description='OBJCXX $out',
command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
'$cflags_pch_objcc -c $in -o $out'),
- depfile='$out.d')
+ depfile='$out.d',
+ deps=deps)
master_ninja.rule(
'alink',
description='LIBTOOL-STATIC $out, POSTBUILDS',
@@ -1622,9 +1926,14 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'./gyp-mac-tool filter-libtool libtool $libtool_flags '
'-static -o $out $in'
'$postbuilds')
+ master_ninja.rule(
+ 'lipo',
+ description='LIPO $out, POSTBUILDS',
+ command='rm -f $out && lipo -create $in -output $out$postbuilds')
# Record the public interface of $lib in $lib.TOC. See the corresponding
# comment in the posix section above for details.
+ solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
mtime_preserving_solink_base = (
'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
# Always force dependent targets to relink if this library
@@ -1638,37 +1947,57 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
'mv ${lib}.tmp ${lib}.TOC ; '
'fi; '
'fi'
- % { 'solink': '$ld -shared $ldflags -o $lib %(suffix)s',
+ % { 'solink': solink_base,
'extract_toc':
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
- # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
- # -bundle -single_module here (for osmesa.so).
+ solink_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink',
description='SOLINK $lib, POSTBUILDS',
restat=True,
- command=(mtime_preserving_solink_base % {
- 'suffix': '$in $solibs $libs$postbuilds'}))
+ command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+ 'type': '-shared'},
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_notoc',
+ description='SOLINK $lib, POSTBUILDS',
+ restat=True,
+ command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
+ pool='link_pool')
+
+ solink_module_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
- command=(mtime_preserving_solink_base % {
- 'suffix': '$in $solibs $libs$postbuilds'}))
+ command=mtime_preserving_solink_base % {'suffix': solink_module_suffix,
+ 'type': '-bundle'},
+ pool='link_pool')
+ master_ninja.rule(
+ 'solink_module_notoc',
+ description='SOLINK(module) $lib, POSTBUILDS',
+ restat=True,
+ command=solink_base % {'suffix': solink_module_suffix, 'type': '-bundle'},
+ pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out, POSTBUILDS',
command=('$ld $ldflags -o $out '
- '$in $solibs $libs$postbuilds'))
+ '$in $solibs $libs$postbuilds'),
+ pool='link_pool')
master_ninja.rule(
- 'infoplist',
- description='INFOPLIST $out',
+ 'preprocess_infoplist',
+ description='PREPROCESS INFOPLIST $out',
command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
'plutil -convert xml1 $out $out'))
master_ninja.rule(
+ 'copy_infoplist',
+ description='COPY INFOPLIST $in',
+ command='$env ./gyp-mac-tool copy-info-plist $in $out $keys')
+ master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
command='$env ./gyp-mac-tool $mactool_cmd $in $out')
@@ -1710,6 +2039,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# target_short_names is a map from target short name to a list of Target
# objects.
target_short_names = {}
+
for qualified_target in target_list:
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
build_file, name, toolset = \
@@ -1731,14 +2061,21 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
obj += '.' + toolset
output_file = os.path.join(obj, base_path, name + '.ninja')
- abs_build_dir = os.path.abspath(toplevel_build)
+ ninja_output = StringIO()
writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
- OpenOutput(os.path.join(toplevel_build, output_file)),
+ ninja_output,
+ toplevel_build, output_file,
flavor, toplevel_dir=options.toplevel_dir)
- master_ninja.subninja(output_file)
- target = writer.WriteSpec(
- spec, config_name, generator_flags, case_sensitive_filesystem)
+ target = writer.WriteSpec(spec, config_name, generator_flags)
+
+ if ninja_output.tell() > 0:
+ # Only create files for ninja files that actually have contents.
+ with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
+ ninja_file.write(ninja_output.getvalue())
+ ninja_output.close()
+ master_ninja.subninja(output_file)
+
if target:
if name != target.FinalOutput() and spec['toolset'] == 'target':
target_short_names.setdefault(name, []).append(target)
@@ -1781,6 +2118,10 @@ def CallGenerateOutputForConfig(arglist):
def GenerateOutput(target_list, target_dicts, data, params):
+ # Update target_dicts for iOS device builds.
+ target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
+ target_dicts)
+
user_config = params.get('generator_flags', {}).get('config', None)
if gyp.common.GetFlavor(params) == 'win':
target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
@@ -1799,7 +2140,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
for config_name in config_names:
arglists.append(
(target_list, target_dicts, data, params, config_name))
- pool.map(CallGenerateOutputForConfig, arglists)
+ pool.map(CallGenerateOutputForConfig, arglists)
except KeyboardInterrupt, e:
pool.terminate()
raise e
diff --git a/tools/gyp/pylib/gyp/generator/ninja_test.py b/tools/gyp/pylib/gyp/generator/ninja_test.py
index 90dd153730..52661bcdf0 100644
--- a/tools/gyp/pylib/gyp/generator/ninja_test.py
+++ b/tools/gyp/pylib/gyp/generator/ninja_test.py
@@ -14,31 +14,31 @@ import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase):
- if sys.platform in ('win32', 'cygwin'):
- def test_BinaryNamesWindows(self):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'win')
- spec = { 'target_name': 'wee' }
- self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
- endswith('.exe'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.dll'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.lib'))
+ def test_BinaryNamesWindows(self):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'win')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
+ endswith('.exe'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.dll'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.lib'))
- if sys.platform == 'linux2':
- def test_BinaryNamesLinux(self):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'linux')
- spec = { 'target_name': 'wee' }
- self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
- 'executable'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- startswith('lib'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- startswith('lib'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.so'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.a'))
+ def test_BinaryNamesLinux(self):
+ writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
+ 'build.ninja', 'linux')
+ spec = { 'target_name': 'wee' }
+ self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
+ 'executable'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ startswith('lib'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ startswith('lib'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
+ endswith('.so'))
+ self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
+ endswith('.a'))
if __name__ == '__main__':
unittest.main()
diff --git a/tools/gyp/pylib/gyp/generator/scons.py b/tools/gyp/pylib/gyp/generator/scons.py
deleted file mode 100644
index fe7cb581b3..0000000000
--- a/tools/gyp/pylib/gyp/generator/scons.py
+++ /dev/null
@@ -1,1072 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import gyp
-import gyp.common
-import gyp.SCons as SCons
-import os.path
-import pprint
-import re
-import subprocess
-
-
-# TODO: remove when we delete the last WriteList() call in this module
-WriteList = SCons.WriteList
-
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': '${LIBPREFIX}',
- 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}',
- 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}',
- 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}',
- 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}',
- 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}',
- 'OS': 'linux',
- 'PRODUCT_DIR': '$TOP_BUILDDIR',
- 'SHARED_LIB_DIR': '$LIB_DIR',
- 'LIB_DIR': '$LIB_DIR',
- 'RULE_INPUT_ROOT': '${SOURCE.filebase}',
- 'RULE_INPUT_DIRNAME': '${SOURCE.dir}',
- 'RULE_INPUT_EXT': '${SOURCE.suffix}',
- 'RULE_INPUT_NAME': '${SOURCE.file}',
- 'RULE_INPUT_PATH': '${SOURCE.abspath}',
- 'CONFIGURATION_NAME': '${CONFIG_NAME}',
-}
-
-# Tell GYP how to process the input for us.
-generator_handles_variants = True
-generator_wants_absolute_build_file_paths = True
-
-
-def FixPath(path, prefix):
- if not os.path.isabs(path) and not path[0] == '$':
- path = prefix + path
- return path
-
-
-header = """\
-# This file is generated; do not edit.
-"""
-
-
-_alias_template = """
-if GetOption('verbose'):
- _action = Action([%(action)s])
-else:
- _action = Action([%(action)s], %(message)s)
-_outputs = env.Alias(
- ['_%(target_name)s_action'],
- %(inputs)s,
- _action
-)
-env.AlwaysBuild(_outputs)
-"""
-
-_run_as_template = """
-if GetOption('verbose'):
- _action = Action([%(action)s])
-else:
- _action = Action([%(action)s], %(message)s)
-"""
-
-_run_as_template_suffix = """
-_run_as_target = env.Alias('run_%(target_name)s', target_files, _action)
-env.Requires(_run_as_target, [
- Alias('%(target_name)s'),
-])
-env.AlwaysBuild(_run_as_target)
-"""
-
-_command_template = """
-if GetOption('verbose'):
- _action = Action([%(action)s])
-else:
- _action = Action([%(action)s], %(message)s)
-_outputs = env.Command(
- %(outputs)s,
- %(inputs)s,
- _action
-)
-"""
-
-# This is copied from the default SCons action, updated to handle symlinks.
-_copy_action_template = """
-import shutil
-import SCons.Action
-
-def _copy_files_or_dirs_or_symlinks(dest, src):
- SCons.Node.FS.invalidate_node_memos(dest)
- if SCons.Util.is_List(src) and os.path.isdir(dest):
- for file in src:
- shutil.copy2(file, dest)
- return 0
- elif os.path.islink(src):
- linkto = os.readlink(src)
- os.symlink(linkto, dest)
- return 0
- elif os.path.isfile(src):
- return shutil.copy2(src, dest)
- else:
- return shutil.copytree(src, dest, 1)
-
-def _copy_files_or_dirs_or_symlinks_str(dest, src):
- return 'Copying %s to %s ...' % (src, dest)
-
-GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks,
- _copy_files_or_dirs_or_symlinks_str,
- convert=str)
-"""
-
-_rule_template = """
-%(name)s_additional_inputs = %(inputs)s
-%(name)s_outputs = %(outputs)s
-def %(name)s_emitter(target, source, env):
- return (%(name)s_outputs, source + %(name)s_additional_inputs)
-if GetOption('verbose'):
- %(name)s_action = Action([%(action)s])
-else:
- %(name)s_action = Action([%(action)s], %(message)s)
-env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action,
- emitter=%(name)s_emitter)
-
-_outputs = []
-_processed_input_files = []
-for infile in input_files:
- if (type(infile) == type('')
- and not os.path.isabs(infile)
- and not infile[0] == '$'):
- infile = %(src_dir)r + infile
- if str(infile).endswith('.%(extension)s'):
- _generated = env.%(name)s(infile)
- env.Precious(_generated)
- _outputs.append(_generated)
- %(process_outputs_as_sources_line)s
- else:
- _processed_input_files.append(infile)
-prerequisites.extend(_outputs)
-input_files = _processed_input_files
-"""
-
-_spawn_hack = """
-import re
-import SCons.Platform.posix
-needs_shell = re.compile('["\\'><!^&]')
-def gyp_spawn(sh, escape, cmd, args, env):
- def strip_scons_quotes(arg):
- if arg[0] == '"' and arg[-1] == '"':
- return arg[1:-1]
- return arg
- stripped_args = [strip_scons_quotes(a) for a in args]
- if needs_shell.search(' '.join(stripped_args)):
- return SCons.Platform.posix.exec_spawnvpe([sh, '-c', ' '.join(args)], env)
- else:
- return SCons.Platform.posix.exec_spawnvpe(stripped_args, env)
-"""
-
-
-def EscapeShellArgument(s):
- """Quotes an argument so that it will be interpreted literally by a POSIX
- shell. Taken from
- http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
- """
- return "'" + s.replace("'", "'\\''") + "'"
-
-
-def InvertNaiveSConsQuoting(s):
- """SCons tries to "help" with quoting by naively putting double-quotes around
- command-line arguments containing space or tab, which is broken for all
- but trivial cases, so we undo it. (See quote_spaces() in Subst.py)"""
- if ' ' in s or '\t' in s:
- # Then SCons will put double-quotes around this, so add our own quotes
- # to close its quotes at the beginning and end.
- s = '"' + s + '"'
- return s
-
-
-def EscapeSConsVariableExpansion(s):
- """SCons has its own variable expansion syntax using $. We must escape it for
- strings to be interpreted literally. For some reason this requires four
- dollar signs, not two, even without the shell involved."""
- return s.replace('$', '$$$$')
-
-
-def EscapeCppDefine(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = EscapeShellArgument(s)
- s = InvertNaiveSConsQuoting(s)
- s = EscapeSConsVariableExpansion(s)
- return s
-
-
-def GenerateConfig(fp, config, indent='', src_dir=''):
- """
- Generates SCons dictionary items for a gyp configuration.
-
- This provides the main translation between the (lower-case) gyp settings
- keywords and the (upper-case) SCons construction variables.
- """
- var_mapping = {
- 'ASFLAGS' : 'asflags',
- 'CCFLAGS' : 'cflags',
- 'CFLAGS' : 'cflags_c',
- 'CXXFLAGS' : 'cflags_cc',
- 'CPPDEFINES' : 'defines',
- 'CPPPATH' : 'include_dirs',
- # Add the ldflags value to $LINKFLAGS, but not $SHLINKFLAGS.
- # SCons defines $SHLINKFLAGS to incorporate $LINKFLAGS, so
- # listing both here would case 'ldflags' to get appended to
- # both, and then have it show up twice on the command line.
- 'LINKFLAGS' : 'ldflags',
- }
- postamble='\n%s],\n' % indent
- for scons_var in sorted(var_mapping.keys()):
- gyp_var = var_mapping[scons_var]
- value = config.get(gyp_var)
- if value:
- if gyp_var in ('defines',):
- value = [EscapeCppDefine(v) for v in value]
- if gyp_var in ('include_dirs',):
- if src_dir and not src_dir.endswith('/'):
- src_dir += '/'
- result = []
- for v in value:
- v = FixPath(v, src_dir)
- # Force SCons to evaluate the CPPPATH directories at
- # SConscript-read time, so delayed evaluation of $SRC_DIR
- # doesn't point it to the --generator-output= directory.
- result.append('env.Dir(%r)' % v)
- value = result
- else:
- value = map(repr, value)
- WriteList(fp,
- value,
- prefix=indent,
- preamble='%s%s = [\n ' % (indent, scons_var),
- postamble=postamble)
-
-
-def GenerateSConscript(output_filename, spec, build_file, build_file_data):
- """
- Generates a SConscript file for a specific target.
-
- This generates a SConscript file suitable for building any or all of
- the target's configurations.
-
- A SConscript file may be called multiple times to generate targets for
- multiple configurations. Consequently, it needs to be ready to build
- the target for any requested configuration, and therefore contains
- information about the settings for all configurations (generated into
- the SConscript file at gyp configuration time) as well as logic for
- selecting (at SCons build time) the specific configuration being built.
-
- The general outline of a generated SConscript file is:
-
- -- Header
-
- -- Import 'env'. This contains a $CONFIG_NAME construction
- variable that specifies what configuration to build
- (e.g. Debug, Release).
-
- -- Configurations. This is a dictionary with settings for
- the different configurations (Debug, Release) under which this
- target can be built. The values in the dictionary are themselves
- dictionaries specifying what construction variables should added
- to the local copy of the imported construction environment
- (Append), should be removed (FilterOut), and should outright
- replace the imported values (Replace).
-
- -- Clone the imported construction environment and update
- with the proper configuration settings.
-
- -- Initialize the lists of the targets' input files and prerequisites.
-
- -- Target-specific actions and rules. These come after the
- input file and prerequisite initializations because the
- outputs of the actions and rules may affect the input file
- list (process_outputs_as_sources) and get added to the list of
- prerequisites (so that they're guaranteed to be executed before
- building the target).
-
- -- Call the Builder for the target itself.
-
- -- Arrange for any copies to be made into installation directories.
-
- -- Set up the {name} Alias (phony Node) for the target as the
- primary handle for building all of the target's pieces.
-
- -- Use env.Require() to make sure the prerequisites (explicitly
- specified, but also including the actions and rules) are built
- before the target itself.
-
- -- Return the {name} Alias to the calling SConstruct file
- so it can be added to the list of default targets.
- """
- scons_target = SCons.Target(spec)
-
- gyp_dir = os.path.dirname(output_filename)
- if not gyp_dir:
- gyp_dir = '.'
- gyp_dir = os.path.abspath(gyp_dir)
-
- output_dir = os.path.dirname(output_filename)
- src_dir = build_file_data['_DEPTH']
- src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
- subdir = gyp.common.RelativePath(os.path.dirname(build_file), src_dir)
- src_subdir = '$SRC_DIR/' + subdir
- src_subdir_ = src_subdir + '/'
-
- component_name = os.path.splitext(os.path.basename(build_file))[0]
- target_name = spec['target_name']
-
- if not os.path.exists(gyp_dir):
- os.makedirs(gyp_dir)
- fp = open(output_filename, 'w')
- fp.write(header)
-
- fp.write('\nimport os\n')
- fp.write('\nImport("env")\n')
-
- #
- fp.write('\n')
- fp.write('env = env.Clone(COMPONENT_NAME=%s,\n' % repr(component_name))
- fp.write(' TARGET_NAME=%s)\n' % repr(target_name))
-
- #
- for config in spec['configurations'].itervalues():
- if config.get('scons_line_length'):
- fp.write(_spawn_hack)
- break
-
- #
- indent = ' ' * 12
- fp.write('\n')
- fp.write('configurations = {\n')
- for config_name, config in spec['configurations'].iteritems():
- fp.write(' \'%s\' : {\n' % config_name)
-
- fp.write(' \'Append\' : dict(\n')
- GenerateConfig(fp, config, indent, src_subdir)
- libraries = spec.get('libraries')
- if libraries:
- WriteList(fp,
- map(repr, libraries),
- prefix=indent,
- preamble='%sLIBS = [\n ' % indent,
- postamble='\n%s],\n' % indent)
- fp.write(' ),\n')
-
- fp.write(' \'FilterOut\' : dict(\n' )
- for key, var in config.get('scons_remove', {}).iteritems():
- fp.write(' %s = %s,\n' % (key, repr(var)))
- fp.write(' ),\n')
-
- fp.write(' \'Replace\' : dict(\n' )
- scons_settings = config.get('scons_variable_settings', {})
- for key in sorted(scons_settings.keys()):
- val = pprint.pformat(scons_settings[key])
- fp.write(' %s = %s,\n' % (key, val))
- if 'c++' in spec.get('link_languages', []):
- fp.write(' %s = %s,\n' % ('LINK', repr('$CXX')))
- if config.get('scons_line_length'):
- fp.write(' SPAWN = gyp_spawn,\n')
- fp.write(' ),\n')
-
- fp.write(' \'ImportExternal\' : [\n' )
- for var in config.get('scons_import_variables', []):
- fp.write(' %s,\n' % repr(var))
- fp.write(' ],\n')
-
- fp.write(' \'PropagateExternal\' : [\n' )
- for var in config.get('scons_propagate_variables', []):
- fp.write(' %s,\n' % repr(var))
- fp.write(' ],\n')
-
- fp.write(' },\n')
- fp.write('}\n')
-
- fp.write('\n'
- 'config = configurations[env[\'CONFIG_NAME\']]\n'
- 'env.Append(**config[\'Append\'])\n'
- 'env.FilterOut(**config[\'FilterOut\'])\n'
- 'env.Replace(**config[\'Replace\'])\n')
-
- fp.write('\n'
- '# Scons forces -fPIC for SHCCFLAGS on some platforms.\n'
- '# Disable that so we can control it from cflags in gyp.\n'
- '# Note that Scons itself is inconsistent with its -fPIC\n'
- '# setting. SHCCFLAGS forces -fPIC, and SHCFLAGS does not.\n'
- '# This will make SHCCFLAGS consistent with SHCFLAGS.\n'
- 'env[\'SHCCFLAGS\'] = [\'$CCFLAGS\']\n')
-
- fp.write('\n'
- 'for _var in config[\'ImportExternal\']:\n'
- ' if _var in ARGUMENTS:\n'
- ' env[_var] = ARGUMENTS[_var]\n'
- ' elif _var in os.environ:\n'
- ' env[_var] = os.environ[_var]\n'
- 'for _var in config[\'PropagateExternal\']:\n'
- ' if _var in ARGUMENTS:\n'
- ' env[_var] = ARGUMENTS[_var]\n'
- ' elif _var in os.environ:\n'
- ' env[\'ENV\'][_var] = os.environ[_var]\n')
-
- fp.write('\n'
- "env['ENV']['LD_LIBRARY_PATH'] = env.subst('$LIB_DIR')\n")
-
- #
- #fp.write("\nif env.has_key('CPPPATH'):\n")
- #fp.write(" env['CPPPATH'] = map(env.Dir, env['CPPPATH'])\n")
-
- variants = spec.get('variants', {})
- for setting in sorted(variants.keys()):
- if_fmt = 'if ARGUMENTS.get(%s) not in (None, \'0\'):\n'
- fp.write('\n')
- fp.write(if_fmt % repr(setting.upper()))
- fp.write(' env.AppendUnique(\n')
- GenerateConfig(fp, variants[setting], indent, src_subdir)
- fp.write(' )\n')
-
- #
- scons_target.write_input_files(fp)
-
- fp.write('\n')
- fp.write('target_files = []\n')
- prerequisites = spec.get('scons_prerequisites', [])
- fp.write('prerequisites = %s\n' % pprint.pformat(prerequisites))
-
- actions = spec.get('actions', [])
- for action in actions:
- a = ['cd', src_subdir, '&&'] + action['action']
- message = action.get('message')
- if message:
- message = repr(message)
- inputs = [FixPath(f, src_subdir_) for f in action.get('inputs', [])]
- outputs = [FixPath(f, src_subdir_) for f in action.get('outputs', [])]
- if outputs:
- template = _command_template
- else:
- template = _alias_template
- fp.write(template % {
- 'inputs' : pprint.pformat(inputs),
- 'outputs' : pprint.pformat(outputs),
- 'action' : pprint.pformat(a),
- 'message' : message,
- 'target_name': target_name,
- })
- if int(action.get('process_outputs_as_sources', 0)):
- fp.write('input_files.extend(_outputs)\n')
- fp.write('prerequisites.extend(_outputs)\n')
- fp.write('target_files.extend(_outputs)\n')
-
- rules = spec.get('rules', [])
- for rule in rules:
- name = re.sub('[^a-zA-Z0-9_]', '_', rule['rule_name'])
- message = rule.get('message')
- if message:
- message = repr(message)
- if int(rule.get('process_outputs_as_sources', 0)):
- poas_line = '_processed_input_files.extend(_generated)'
- else:
- poas_line = '_processed_input_files.append(infile)'
- inputs = [FixPath(f, src_subdir_) for f in rule.get('inputs', [])]
- outputs = [FixPath(f, src_subdir_) for f in rule.get('outputs', [])]
- # Skip a rule with no action and no inputs.
- if 'action' not in rule and not rule.get('rule_sources', []):
- continue
- a = ['cd', src_subdir, '&&'] + rule['action']
- fp.write(_rule_template % {
- 'inputs' : pprint.pformat(inputs),
- 'outputs' : pprint.pformat(outputs),
- 'action' : pprint.pformat(a),
- 'extension' : rule['extension'],
- 'name' : name,
- 'message' : message,
- 'process_outputs_as_sources_line' : poas_line,
- 'src_dir' : src_subdir_,
- })
-
- scons_target.write_target(fp, src_subdir)
-
- copies = spec.get('copies', [])
- if copies:
- fp.write(_copy_action_template)
- for copy in copies:
- destdir = None
- files = None
- try:
- destdir = copy['destination']
- except KeyError, e:
- gyp.common.ExceptionAppend(
- e,
- "Required 'destination' key missing for 'copies' in %s." % build_file)
- raise
- try:
- files = copy['files']
- except KeyError, e:
- gyp.common.ExceptionAppend(
- e, "Required 'files' key missing for 'copies' in %s." % build_file)
- raise
- if not files:
- # TODO: should probably add a (suppressible) warning;
- # a null file list may be unintentional.
- continue
- if not destdir:
- raise Exception(
- "Required 'destination' key is empty for 'copies' in %s." % build_file)
-
- fmt = ('\n'
- '_outputs = env.Command(%s,\n'
- ' %s,\n'
- ' GYPCopy(\'$TARGET\', \'$SOURCE\'))\n')
- for f in copy['files']:
- # Remove trailing separators so basename() acts like Unix basename and
- # always returns the last element, whether a file or dir. Without this,
- # only the contents, not the directory itself, are copied (and nothing
- # might be copied if dest already exists, since scons thinks nothing needs
- # to be done).
- dest = os.path.join(destdir, os.path.basename(f.rstrip(os.sep)))
- f = FixPath(f, src_subdir_)
- dest = FixPath(dest, src_subdir_)
- fp.write(fmt % (repr(dest), repr(f)))
- fp.write('target_files.extend(_outputs)\n')
-
- run_as = spec.get('run_as')
- if run_as:
- action = run_as.get('action', [])
- working_directory = run_as.get('working_directory')
- if not working_directory:
- working_directory = gyp_dir
- else:
- if not os.path.isabs(working_directory):
- working_directory = os.path.normpath(os.path.join(gyp_dir,
- working_directory))
- if run_as.get('environment'):
- for (key, val) in run_as.get('environment').iteritems():
- action = ['%s="%s"' % (key, val)] + action
- action = ['cd', '"%s"' % working_directory, '&&'] + action
- fp.write(_run_as_template % {
- 'action' : pprint.pformat(action),
- 'message' : run_as.get('message', ''),
- })
-
- fmt = "\ngyp_target = env.Alias('%s', target_files)\n"
- fp.write(fmt % target_name)
-
- dependencies = spec.get('scons_dependencies', [])
- if dependencies:
- WriteList(fp, dependencies, preamble='dependencies = [\n ',
- postamble='\n]\n')
- fp.write('env.Requires(target_files, dependencies)\n')
- fp.write('env.Requires(gyp_target, dependencies)\n')
- fp.write('for prerequisite in prerequisites:\n')
- fp.write(' env.Requires(prerequisite, dependencies)\n')
- fp.write('env.Requires(gyp_target, prerequisites)\n')
-
- if run_as:
- fp.write(_run_as_template_suffix % {
- 'target_name': target_name,
- })
-
- fp.write('Return("gyp_target")\n')
-
- fp.close()
-
-
-#############################################################################
-# TEMPLATE BEGIN
-
-_wrapper_template = """\
-
-__doc__ = '''
-Wrapper configuration for building this entire "solution,"
-including all the specific targets in various *.scons files.
-'''
-
-import os
-import sys
-
-import SCons.Environment
-import SCons.Util
-
-def GetProcessorCount():
- '''
- Detects the number of CPUs on the system. Adapted form:
- http://codeliberates.blogspot.com/2008/05/detecting-cpuscores-in-python.html
- '''
- # Linux, Unix and Mac OS X:
- if hasattr(os, 'sysconf'):
- if os.sysconf_names.has_key('SC_NPROCESSORS_ONLN'):
- # Linux and Unix or Mac OS X with python >= 2.5:
- return os.sysconf('SC_NPROCESSORS_ONLN')
- else: # Mac OS X with Python < 2.5:
- return int(os.popen2("sysctl -n hw.ncpu")[1].read())
- # Windows:
- if os.environ.has_key('NUMBER_OF_PROCESSORS'):
- return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1)
- return 1 # Default
-
-# Support PROGRESS= to show progress in different ways.
-p = ARGUMENTS.get('PROGRESS')
-if p == 'spinner':
- Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'],
- interval=5,
- file=open('/dev/tty', 'w'))
-elif p == 'name':
- Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w'))
-
-# Set the default -j value based on the number of processors.
-SetOption('num_jobs', GetProcessorCount() + 1)
-
-# Have SCons use its cached dependency information.
-SetOption('implicit_cache', 1)
-
-# Only re-calculate MD5 checksums if a timestamp has changed.
-Decider('MD5-timestamp')
-
-# Since we set the -j value by default, suppress SCons warnings about being
-# unable to support parallel build on versions of Python with no threading.
-default_warnings = ['no-no-parallel-support']
-SetOption('warn', default_warnings + GetOption('warn'))
-
-AddOption('--mode', nargs=1, dest='conf_list', default=[],
- action='append', help='Configuration to build.')
-
-AddOption('--verbose', dest='verbose', default=False,
- action='store_true', help='Verbose command-line output.')
-
-
-#
-sconscript_file_map = %(sconscript_files)s
-
-class LoadTarget:
- '''
- Class for deciding if a given target sconscript is to be included
- based on a list of included target names, optionally prefixed with '-'
- to exclude a target name.
- '''
- def __init__(self, load):
- '''
- Initialize a class with a list of names for possible loading.
-
- Arguments:
- load: list of elements in the LOAD= specification
- '''
- self.included = set([c for c in load if not c.startswith('-')])
- self.excluded = set([c[1:] for c in load if c.startswith('-')])
-
- if not self.included:
- self.included = set(['all'])
-
- def __call__(self, target):
- '''
- Returns True if the specified target's sconscript file should be
- loaded, based on the initialized included and excluded lists.
- '''
- return (target in self.included or
- ('all' in self.included and not target in self.excluded))
-
-if 'LOAD' in ARGUMENTS:
- load = ARGUMENTS['LOAD'].split(',')
-else:
- load = []
-load_target = LoadTarget(load)
-
-sconscript_files = []
-for target, sconscript in sconscript_file_map.iteritems():
- if load_target(target):
- sconscript_files.append(sconscript)
-
-
-target_alias_list= []
-
-conf_list = GetOption('conf_list')
-if conf_list:
- # In case the same --mode= value was specified multiple times.
- conf_list = list(set(conf_list))
-else:
- conf_list = [%(default_configuration)r]
-
-sconsbuild_dir = Dir(%(sconsbuild_dir)s)
-
-
-def FilterOut(self, **kw):
- kw = SCons.Environment.copy_non_reserved_keywords(kw)
- for key, val in kw.items():
- envval = self.get(key, None)
- if envval is None:
- # No existing variable in the environment, so nothing to delete.
- continue
-
- for vremove in val:
- # Use while not if, so we can handle duplicates.
- while vremove in envval:
- envval.remove(vremove)
-
- self[key] = envval
-
- # TODO(sgk): SCons.Environment.Append() has much more logic to deal
- # with various types of values. We should handle all those cases in here
- # too. (If variable is a dict, etc.)
-
-
-non_compilable_suffixes = {
- 'LINUX' : set([
- '.bdic',
- '.css',
- '.dat',
- '.fragment',
- '.gperf',
- '.h',
- '.hh',
- '.hpp',
- '.html',
- '.hxx',
- '.idl',
- '.in',
- '.in0',
- '.in1',
- '.js',
- '.mk',
- '.rc',
- '.sigs',
- '',
- ]),
- 'WINDOWS' : set([
- '.h',
- '.hh',
- '.hpp',
- '.dat',
- '.idl',
- '.in',
- '.in0',
- '.in1',
- ]),
-}
-
-def compilable(env, file):
- base, ext = os.path.splitext(str(file))
- if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]:
- return False
- return True
-
-def compilable_files(env, sources):
- return [x for x in sources if compilable(env, x)]
-
-def GypProgram(env, target, source, *args, **kw):
- source = compilable_files(env, source)
- result = env.Program(target, source, *args, **kw)
- if env.get('INCREMENTAL'):
- env.Precious(result)
- return result
-
-def GypTestProgram(env, target, source, *args, **kw):
- source = compilable_files(env, source)
- result = env.Program(target, source, *args, **kw)
- if env.get('INCREMENTAL'):
- env.Precious(*result)
- return result
-
-def GypLibrary(env, target, source, *args, **kw):
- source = compilable_files(env, source)
- result = env.Library(target, source, *args, **kw)
- return result
-
-def GypLoadableModule(env, target, source, *args, **kw):
- source = compilable_files(env, source)
- result = env.LoadableModule(target, source, *args, **kw)
- return result
-
-def GypStaticLibrary(env, target, source, *args, **kw):
- source = compilable_files(env, source)
- result = env.StaticLibrary(target, source, *args, **kw)
- return result
-
-def GypSharedLibrary(env, target, source, *args, **kw):
- source = compilable_files(env, source)
- result = env.SharedLibrary(target, source, *args, **kw)
- if env.get('INCREMENTAL'):
- env.Precious(result)
- return result
-
-def add_gyp_methods(env):
- env.AddMethod(GypProgram)
- env.AddMethod(GypTestProgram)
- env.AddMethod(GypLibrary)
- env.AddMethod(GypLoadableModule)
- env.AddMethod(GypStaticLibrary)
- env.AddMethod(GypSharedLibrary)
-
- env.AddMethod(FilterOut)
-
- env.AddMethod(compilable)
-
-
-base_env = Environment(
- tools = %(scons_tools)s,
- INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate',
- LIB_DIR='$TOP_BUILDDIR/lib',
- OBJ_DIR='$TOP_BUILDDIR/obj',
- SCONSBUILD_DIR=sconsbuild_dir.abspath,
- SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate',
- SRC_DIR=Dir(%(src_dir)r),
- TARGET_PLATFORM='LINUX',
- TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME',
- LIBPATH=['$LIB_DIR'],
-)
-
-if not GetOption('verbose'):
- base_env.SetDefault(
- ARCOMSTR='Creating library $TARGET',
- ASCOMSTR='Assembling $TARGET',
- CCCOMSTR='Compiling $TARGET',
- CONCATSOURCECOMSTR='ConcatSource $TARGET',
- CXXCOMSTR='Compiling $TARGET',
- LDMODULECOMSTR='Building loadable module $TARGET',
- LINKCOMSTR='Linking $TARGET',
- MANIFESTCOMSTR='Updating manifest for $TARGET',
- MIDLCOMSTR='Compiling IDL $TARGET',
- PCHCOMSTR='Precompiling $TARGET',
- RANLIBCOMSTR='Indexing $TARGET',
- RCCOMSTR='Compiling resource $TARGET',
- SHCCCOMSTR='Compiling $TARGET',
- SHCXXCOMSTR='Compiling $TARGET',
- SHLINKCOMSTR='Linking $TARGET',
- SHMANIFESTCOMSTR='Updating manifest for $TARGET',
- )
-
-add_gyp_methods(base_env)
-
-for conf in conf_list:
- env = base_env.Clone(CONFIG_NAME=conf)
- SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath)
- for sconscript in sconscript_files:
- target_alias = env.SConscript(sconscript, exports=['env'])
- if target_alias:
- target_alias_list.extend(target_alias)
-
-Default(Alias('all', target_alias_list))
-
-help_fmt = '''
-Usage: hammer [SCONS_OPTIONS] [VARIABLES] [TARGET] ...
-
-Local command-line build options:
- --mode=CONFIG Configuration to build:
- --mode=Debug [default]
- --mode=Release
- --verbose Print actual executed command lines.
-
-Supported command-line build variables:
- LOAD=[module,...] Comma-separated list of components to load in the
- dependency graph ('-' prefix excludes)
- PROGRESS=type Display a progress indicator:
- name: print each evaluated target name
- spinner: print a spinner every 5 targets
-
-The following TARGET names can also be used as LOAD= module names:
-
-%%s
-'''
-
-if GetOption('help'):
- def columnar_text(items, width=78, indent=2, sep=2):
- result = []
- colwidth = max(map(len, items)) + sep
- cols = (width - indent) / colwidth
- if cols < 1:
- cols = 1
- rows = (len(items) + cols - 1) / cols
- indent = '%%*s' %% (indent, '')
- sep = indent
- for row in xrange(0, rows):
- result.append(sep)
- for i in xrange(row, len(items), rows):
- result.append('%%-*s' %% (colwidth, items[i]))
- sep = '\\n' + indent
- result.append('\\n')
- return ''.join(result)
-
- load_list = set(sconscript_file_map.keys())
- target_aliases = set(map(str, target_alias_list))
-
- common = load_list and target_aliases
- load_only = load_list - common
- target_only = target_aliases - common
- help_text = [help_fmt %% columnar_text(sorted(list(common)))]
- if target_only:
- fmt = "The following are additional TARGET names:\\n\\n%%s\\n"
- help_text.append(fmt %% columnar_text(sorted(list(target_only))))
- if load_only:
- fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n"
- help_text.append(fmt %% columnar_text(sorted(list(load_only))))
- Help(''.join(help_text))
-"""
-
-# TEMPLATE END
-#############################################################################
-
-
-def GenerateSConscriptWrapper(build_file, build_file_data, name,
- output_filename, sconscript_files,
- default_configuration):
- """
- Generates the "wrapper" SConscript file (analogous to the Visual Studio
- solution) that calls all the individual target SConscript files.
- """
- output_dir = os.path.dirname(output_filename)
- src_dir = build_file_data['_DEPTH']
- src_dir_rel = gyp.common.RelativePath(src_dir, output_dir)
- if not src_dir_rel:
- src_dir_rel = '.'
- scons_settings = build_file_data.get('scons_settings', {})
- sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#')
- scons_tools = scons_settings.get('tools', ['default'])
-
- sconscript_file_lines = ['dict(']
- for target in sorted(sconscript_files.keys()):
- sconscript = sconscript_files[target]
- sconscript_file_lines.append(' %s = %r,' % (target, sconscript))
- sconscript_file_lines.append(')')
-
- fp = open(output_filename, 'w')
- fp.write(header)
- fp.write(_wrapper_template % {
- 'default_configuration' : default_configuration,
- 'name' : name,
- 'scons_tools' : repr(scons_tools),
- 'sconsbuild_dir' : repr(sconsbuild_dir),
- 'sconscript_files' : '\n'.join(sconscript_file_lines),
- 'src_dir' : src_dir_rel,
- })
- fp.close()
-
- # Generate the SConstruct file that invokes the wrapper SConscript.
- dir, fname = os.path.split(output_filename)
- SConstruct = os.path.join(dir, 'SConstruct')
- fp = open(SConstruct, 'w')
- fp.write(header)
- fp.write('SConscript(%s)\n' % repr(fname))
- fp.close()
-
-
-def TargetFilename(target, build_file=None, output_suffix=''):
- """Returns the .scons file name for the specified target.
- """
- if build_file is None:
- build_file, target = gyp.common.ParseQualifiedTarget(target)[:2]
- output_file = os.path.join(os.path.dirname(build_file),
- target + output_suffix + '.scons')
- return output_file
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
-
-  # Due to the way we test gyp on the chromium trybots
- # we need to look for 'scons.py' as well as the more common 'scons'
- # TODO(sbc): update the trybots to have a more normal install
- # of scons.
- scons = 'scons'
- paths = os.environ['PATH'].split(os.pathsep)
- for scons_name in ['scons', 'scons.py']:
- for path in paths:
- test_scons = os.path.join(path, scons_name)
- print 'looking for: %s' % test_scons
- if os.path.exists(test_scons):
- print "found scons: %s" % scons
- scons = test_scons
- break
-
- for config in configurations:
- arguments = [scons, '-C', options.toplevel_dir, '--mode=%s' % config]
- print "Building [%s]: %s" % (config, arguments)
- subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """
- Generates all the output files for the specified targets.
- """
- options = params['options']
-
- if options.generator_output:
- def output_path(filename):
- return filename.replace(params['cwd'], options.generator_output)
- else:
- def output_path(filename):
- return filename
-
- default_configuration = None
-
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- if spec['toolset'] != 'target':
- raise Exception(
- 'Multiple toolsets not supported in scons build (target %s)' %
- qualified_target)
- scons_target = SCons.Target(spec)
- if scons_target.is_ignored:
- continue
-
-    # TODO: assumes the default_configuration of the first
-    # non-Default target is the correct default for all targets.
-    # Need a better model for handling variation between targets.
- if (not default_configuration and
- spec['default_configuration'] != 'Default'):
- default_configuration = spec['default_configuration']
-
- build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2]
- output_file = TargetFilename(target, build_file, options.suffix)
- if options.generator_output:
- output_file = output_path(output_file)
-
- if not spec.has_key('libraries'):
- spec['libraries'] = []
-
- # Add dependent static library targets to the 'libraries' value.
- deps = spec.get('dependencies', [])
- spec['scons_dependencies'] = []
- for d in deps:
- td = target_dicts[d]
- target_name = td['target_name']
- spec['scons_dependencies'].append("Alias('%s')" % target_name)
- if td['type'] in ('static_library', 'shared_library'):
- libname = td.get('product_name', target_name)
- spec['libraries'].append('lib' + libname)
- if td['type'] == 'loadable_module':
- prereqs = spec.get('scons_prerequisites', [])
- # TODO: parameterize with <(SHARED_LIBRARY_*) variables?
- td_target = SCons.Target(td)
- td_target.target_prefix = '${SHLIBPREFIX}'
- td_target.target_suffix = '${SHLIBSUFFIX}'
-
- GenerateSConscript(output_file, spec, build_file, data[build_file])
-
- if not default_configuration:
- default_configuration = 'Default'
-
- for build_file in sorted(data.keys()):
- path, ext = os.path.splitext(build_file)
- if ext != '.gyp':
- continue
- output_dir, basename = os.path.split(path)
- output_filename = path + '_main' + options.suffix + '.scons'
-
- all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file)
- sconscript_files = {}
- for t in all_targets:
- scons_target = SCons.Target(target_dicts[t])
- if scons_target.is_ignored:
- continue
- bf, target = gyp.common.ParseQualifiedTarget(t)[:2]
- target_filename = TargetFilename(target, bf, options.suffix)
- tpath = gyp.common.RelativePath(target_filename, output_dir)
- sconscript_files[target] = tpath
-
- output_filename = output_path(output_filename)
- if sconscript_files:
- GenerateSConscriptWrapper(build_file, data[build_file], basename,
- output_filename, sconscript_files,
- default_configuration)
diff --git a/tools/gyp/pylib/gyp/generator/xcode.py b/tools/gyp/pylib/gyp/generator/xcode.py
index ca3b01eea0..331e78baaa 100644
--- a/tools/gyp/pylib/gyp/generator/xcode.py
+++ b/tools/gyp/pylib/gyp/generator/xcode.py
@@ -72,6 +72,7 @@ generator_additional_non_configuration_keys = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
+ 'mac_xctest_bundle',
'xcode_create_dependents_test_runner',
]
@@ -480,39 +481,6 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
raise
-cached_xcode_version = None
-def InstalledXcodeVersion():
- """Fetches the installed version of Xcode, returns empty string if it is
- unable to figure it out."""
-
- global cached_xcode_version
- if not cached_xcode_version is None:
- return cached_xcode_version
-
- # Default to an empty string
- cached_xcode_version = ''
-
- # Collect the xcodebuild's version information.
- try:
- import subprocess
- cmd = ['/usr/bin/xcodebuild', '-version']
- proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
- xcodebuild_version_info = proc.communicate()[0]
- # Any error, return empty string
- if proc.returncode:
- xcodebuild_version_info = ''
- except OSError:
- # We failed to launch the tool
- xcodebuild_version_info = ''
-
- # Pull out the Xcode version itself.
- match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
- if match_line:
- cached_xcode_version = match_line.group(1)
- # Done!
- return cached_xcode_version
-
-
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
@@ -579,13 +547,13 @@ def ExpandXcodeVariables(string, expansions):
return string
-def EscapeXCodeArgument(s):
- """We must escape the arguments that we give to XCode so that it knows not to
- split on spaces and to respect backslash and quote literals."""
- s = s.replace('\\', '\\\\')
- s = s.replace('"', '\\"')
- return '"' + s + '"'
-
+_xcode_define_re = re.compile(r'([\\\"\' ])')
+def EscapeXcodeDefine(s):
+ """We must escape the defines that we give to XCode so that it knows not to
+ split on spaces and to respect backslash and quote literals. However, we
+  must not quote the define, or Xcode will incorrectly interpret variables,
+ especially $(inherited)."""
+ return re.sub(_xcode_define_re, r'\\\1', s)
def PerformBuild(data, configurations, params):
@@ -675,6 +643,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
'static_library': 'com.apple.product-type.library.static',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
+ 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
}
@@ -684,11 +653,18 @@ def GenerateOutput(target_list, target_dicts, data, params):
}
type = spec['type']
- is_bundle = int(spec.get('mac_bundle', 0))
+ is_xctest = int(spec.get('mac_xctest_bundle', 0))
+ is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
if type != 'none':
type_bundle_key = type
- if is_bundle:
+ if is_xctest:
+ type_bundle_key += '+xctest'
+ assert type == 'loadable_module', (
+ 'mac_xctest_bundle targets must have type loadable_module '
+ '(target %s)' % target_name)
+ elif is_bundle:
type_bundle_key += '+bundle'
+
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
@@ -701,6 +677,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
+ assert not is_xctest, (
+ 'mac_xctest_bundle targets cannot have type none (target "%s")' %
+ target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
@@ -1053,7 +1032,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
-exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
+exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
@@ -1211,9 +1190,15 @@ exit 1
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
+ for library_dir in configuration.get('library_dirs', []):
+ if library_dir not in xcode_standard_library_dirs and (
+ not xcbc.HasBuildSetting(_library_search_paths_var) or
+ library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
+ xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
+
if 'defines' in configuration:
for define in configuration['defines']:
- set_define = EscapeXCodeArgument(define)
+ set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
diff --git a/tools/gyp/pylib/gyp/generator/xcode_test.py b/tools/gyp/pylib/gyp/generator/xcode_test.py
new file mode 100644
index 0000000000..260324a43f
--- /dev/null
+++ b/tools/gyp/pylib/gyp/generator/xcode_test.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the xcode.py file. """
+
+import gyp.generator.xcode as xcode
+import unittest
+import sys
+
+
+class TestEscapeXcodeDefine(unittest.TestCase):
+ if sys.platform == 'darwin':
+ def test_InheritedRemainsUnescaped(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')
+
+ def test_Escaping(self):
+ self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py
index eca0eb93aa..45e791d141 100644
--- a/tools/gyp/pylib/gyp/input.py
+++ b/tools/gyp/pylib/gyp/input.py
@@ -22,6 +22,7 @@ import subprocess
import sys
import threading
import time
+import traceback
from gyp.common import GypError
@@ -57,7 +58,7 @@ def IsPathSection(section):
section = section[:-1]
return section in path_sections or is_path_section_match_re.search(section)
-# base_non_configuraiton_keys is a list of key names that belong in the target
+# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
@@ -69,7 +70,6 @@ base_non_configuration_keys = [
'default_configuration',
'dependencies',
'dependencies_original',
- 'link_languages',
'libraries',
'postbuilds',
'product_dir',
@@ -85,7 +85,6 @@ base_non_configuration_keys = [
'toolset',
'toolsets',
'type',
- 'variants',
# Sections that can be found inside targets or configurations, but that
# should not be propagated from targets into their configurations.
@@ -108,12 +107,14 @@ invalid_configuration_keys = [
'type',
]
-# Controls how the generator want the build file paths.
-absolute_build_file_paths = False
-
# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False
+# Paths for converting filelist paths to output paths: {
+# toplevel,
+# qualified_output_dir,
+# }
+generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
"""Return a list of all build files included into build_file_path.
@@ -223,21 +224,26 @@ def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
raise
+ if not isinstance(build_file_data, dict):
+ raise GypError("%s does not evaluate to a dictionary." % build_file_path)
+
data[build_file_path] = build_file_data
aux_data[build_file_path] = {}
# Scan for includes and merge them in.
- try:
- if is_target:
- LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, variables, includes, check)
- else:
- LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, variables, None, check)
- except Exception, e:
- gyp.common.ExceptionAppend(e,
- 'while reading includes of ' + build_file_path)
- raise
+ if ('skip_includes' not in build_file_data or
+ not build_file_data['skip_includes']):
+ try:
+ if is_target:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, variables, includes, check)
+ else:
+ LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+ aux_data, variables, None, check)
+ except Exception, e:
+ gyp.common.ExceptionAppend(e,
+ 'while reading includes of ' + build_file_path)
+ raise
return build_file_data
@@ -344,10 +350,6 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
else:
variables['DEPTH'] = d.replace('\\', '/')
-  # If the generator needs absolute paths, then do so.
- if absolute_build_file_paths:
- build_file_path = os.path.abspath(build_file_path)
-
if build_file_path in data['target_build_files']:
# Already loaded.
return False
@@ -445,7 +447,8 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
def CallLoadTargetBuildFile(global_flags,
build_file_path, data,
aux_data, variables,
- includes, depth, check):
+ includes, depth, check,
+ generator_input_info):
"""Wrapper around LoadTargetBuildFile for parallel processing.
This wrapper is used when LoadTargetBuildFile is executed in
@@ -463,6 +466,7 @@ def CallLoadTargetBuildFile(global_flags,
data_keys = set(data)
aux_data_keys = set(aux_data)
+ SetGeneratorGlobals(generator_input_info)
result = LoadTargetBuildFile(build_file_path, data,
aux_data, variables,
includes, depth, check, False)
@@ -488,8 +492,12 @@ def CallLoadTargetBuildFile(global_flags,
data_out,
aux_data_out,
dependencies)
+ except GypError, e:
+ sys.stderr.write("gyp: %s\n" % e)
+ return None
except Exception, e:
- print >>sys.stderr, 'Exception: ', e
+ print >>sys.stderr, 'Exception:', e
+ print >>sys.stderr, traceback.format_exc()
return None
@@ -550,12 +558,14 @@ class ParallelState(object):
self.condition.release()
-def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
- variables, includes, depth, check):
+def LoadTargetBuildFilesParallel(build_files, data, aux_data,
+ variables, includes, depth, check,
+ generator_input_info):
parallel_state = ParallelState()
parallel_state.condition = threading.Condition()
- parallel_state.dependencies = [build_file_path]
- parallel_state.scheduled = set([build_file_path])
+ # Make copies of the build_files argument that we can modify while working.
+ parallel_state.dependencies = list(build_files)
+ parallel_state.scheduled = set(build_files)
parallel_state.pending = 0
parallel_state.data = data
parallel_state.aux_data = aux_data
@@ -564,12 +574,6 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
parallel_state.condition.acquire()
while parallel_state.dependencies or parallel_state.pending:
if parallel_state.error:
- print >>sys.stderr, (
- '\n'
- 'Note: an error occurred while running gyp using multiprocessing.\n'
- 'For more verbose output, set GYP_PARALLEL=0 in your environment.\n'
- 'If the error only occurs when GYP_PARALLEL=1, '
- 'please report a bug!')
break
if not parallel_state.dependencies:
parallel_state.condition.wait()
@@ -584,7 +588,6 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
global_flags = {
'path_sections': globals()['path_sections'],
'non_configuration_keys': globals()['non_configuration_keys'],
- 'absolute_build_file_paths': globals()['absolute_build_file_paths'],
'multiple_toolsets': globals()['multiple_toolsets']}
if not parallel_state.pool:
@@ -593,16 +596,20 @@ def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
CallLoadTargetBuildFile,
args = (global_flags, dependency,
data_in, aux_data_in,
- variables, includes, depth, check),
+ variables, includes, depth, check, generator_input_info),
callback = parallel_state.LoadTargetBuildFileCallback)
except KeyboardInterrupt, e:
parallel_state.pool.terminate()
raise e
parallel_state.condition.release()
- if parallel_state.error:
- sys.exit()
+ parallel_state.pool.close()
+ parallel_state.pool.join()
+ parallel_state.pool = None
+
+ if parallel_state.error:
+ sys.exit(1)
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple. For example, if
@@ -785,7 +792,7 @@ def ExpandVariables(input, phase, variables, build_file):
# Find the build file's directory, so commands can be run or file lists
# generated relative to it.
build_file_dir = os.path.dirname(build_file)
- if build_file_dir == '':
+ if build_file_dir == '' and not file_list:
# If build_file is just a leaf filename indicating a file in the
# current directory, build_file_dir might be an empty string. Set
# it to None to signal to subprocess.Popen that it should run the
@@ -802,9 +809,23 @@ def ExpandVariables(input, phase, variables, build_file):
else:
contents_list = contents.split(' ')
replacement = contents_list[0]
- path = replacement
- if not os.path.isabs(path):
- path = os.path.join(build_file_dir, path)
+ if os.path.isabs(replacement):
+ raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
+
+ if not generator_filelist_paths:
+ path = os.path.join(build_file_dir, replacement)
+ else:
+ if os.path.isabs(build_file_dir):
+ toplevel = generator_filelist_paths['toplevel']
+ rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
+ else:
+ rel_build_file_dir = build_file_dir
+ qualified_out_dir = generator_filelist_paths['qualified_out_dir']
+ path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
+ if not os.path.isdir(os.path.dirname(path)):
+ os.makedirs(os.path.dirname(path))
+
+ replacement = gyp.common.RelativePath(path, build_file_dir)
f = gyp.common.WriteOnDiff(path)
for i in contents_list[1:]:
f.write('%s\n' % i)
@@ -843,7 +864,8 @@ def ExpandVariables(input, phase, variables, build_file):
# that don't load quickly, this can be faster than
    # <!(python modulename parameters). Do this in |build_file_dir|.
oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
- os.chdir(build_file_dir)
+ if build_file_dir: # build_file_dir may be None (see above).
+ os.chdir(build_file_dir)
try:
parsed_contents = shlex.split(contents)
@@ -1439,6 +1461,9 @@ class DependencyGraphNode(object):
self.dependencies = []
self.dependents = []
+ def __repr__(self):
+ return '<DependencyGraphNode: %r>' % self.ref
+
def FlattenToList(self):
# flat_list is the sorted list of dependencies - actually, the list items
# are the "ref" attributes of DependencyGraphNodes. Every target will
@@ -1481,6 +1506,27 @@ class DependencyGraphNode(object):
return flat_list
+ def FindCycles(self, path=None):
+ """
+ Returns a list of cycles in the graph, where each cycle is its own list.
+ """
+ if path is None:
+ path = [self]
+
+ results = []
+ for node in self.dependents:
+ if node in path:
+ cycle = [node]
+ for part in path:
+ cycle.append(part)
+ if part == node:
+ break
+ results.append(tuple(cycle))
+ else:
+ results.extend(node.FindCycles([node] + path))
+
+ return list(set(results))
+
def DirectDependencies(self, dependencies=None):
"""Returns a list of just direct dependencies."""
if dependencies == None:
@@ -1556,7 +1602,8 @@ class DependencyGraphNode(object):
return dependencies
- def LinkDependencies(self, targets, dependencies=None, initial=True):
+ def _LinkDependenciesInternal(self, targets, include_shared_libraries,
+ dependencies=None, initial=True):
"""Returns a list of dependency targets that are linked into this target.
This function has a split personality, depending on the setting of
@@ -1566,6 +1613,9 @@ class DependencyGraphNode(object):
When adding a target to the list of dependencies, this function will
recurse into itself with |initial| set to False, to collect dependencies
that are linked into the linkable target for which the list is being built.
+
+ If |include_shared_libraries| is False, the resulting dependencies will not
+ include shared_library targets that are linked into this target.
"""
if dependencies == None:
dependencies = []
@@ -1610,6 +1660,16 @@ class DependencyGraphNode(object):
if not initial and target_type in ('executable', 'loadable_module'):
return dependencies
+ # Shared libraries are already fully linked. They should only be included
+ # in |dependencies| when adjusting static library dependencies (in order to
+ # link against the shared_library's import lib), but should not be included
+ # in |dependencies| when propagating link_settings.
+ # The |include_shared_libraries| flag controls which of these two cases we
+ # are handling.
+ if (not initial and target_type == 'shared_library' and
+ not include_shared_libraries):
+ return dependencies
+
# The target is linkable, add it to the list of link dependencies.
if self.ref not in dependencies:
dependencies.append(self.ref)
@@ -1619,10 +1679,32 @@ class DependencyGraphNode(object):
# this target linkable. Always look at dependencies of the initial
# target, and always look at dependencies of non-linkables.
for dependency in self.dependencies:
- dependency.LinkDependencies(targets, dependencies, False)
+ dependency._LinkDependenciesInternal(targets,
+ include_shared_libraries,
+ dependencies, False)
return dependencies
+ def DependenciesForLinkSettings(self, targets):
+ """
+ Returns a list of dependency targets whose link_settings should be merged
+ into this target.
+ """
+
+ # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
+ # link_settings are propagated. So for now, we will allow it, unless the
+ # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
+ # False. Once chrome is fixed, we can remove this flag.
+ include_shared_libraries = \
+ targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
+ return self._LinkDependenciesInternal(targets, include_shared_libraries)
+
+ def DependenciesToLinkAgainst(self, targets):
+ """
+ Returns a list of dependency targets that are linked into this target.
+ """
+ return self._LinkDependenciesInternal(targets, True)
+
def BuildDependencyList(targets):
# Create a DependencyGraphNode for each target. Put it into a dict for easy
@@ -1713,10 +1795,16 @@ def VerifyNoGYPFileCircularDependencies(targets):
for file in dependency_nodes.iterkeys():
if not file in flat_list:
bad_files.append(file)
+ common_path_prefix = os.path.commonprefix(dependency_nodes)
+ cycles = []
+ for cycle in root_node.FindCycles():
+ simplified_paths = []
+ for node in cycle:
+ assert(node.ref.startswith(common_path_prefix))
+ simplified_paths.append(node.ref[len(common_path_prefix):])
+ cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
raise DependencyGraphNode.CircularException, \
- 'Some files not reachable, cycle in .gyp file dependency graph ' + \
- 'detected involving some or all of: ' + \
- ' '.join(bad_files)
+ 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
@@ -1733,7 +1821,8 @@ def DoDependentSettings(key, flat_list, targets, dependency_nodes):
dependencies = \
dependency_nodes[target].DirectAndImportedDependencies(targets)
elif key == 'link_settings':
- dependencies = dependency_nodes[target].LinkDependencies(targets)
+ dependencies = \
+ dependency_nodes[target].DependenciesForLinkSettings(targets)
else:
raise GypError("DoDependentSettings doesn't know how to determine "
'dependencies for ' + key)
@@ -1806,7 +1895,8 @@ def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
# target. Add them to the dependencies list if they're not already
# present.
- link_dependencies = dependency_nodes[target].LinkDependencies(targets)
+ link_dependencies = \
+ dependency_nodes[target].DependenciesToLinkAgainst(targets)
for dependency in link_dependencies:
if dependency == target:
continue
@@ -2379,6 +2469,8 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
rule_names[rule_name] = rule
rule_extension = rule['extension']
+ if rule_extension.startswith('.'):
+ rule_extension = rule_extension[1:]
if rule_extension in rule_extensions:
raise GypError(('extension %s associated with multiple rules, ' +
'target %s rules %s and %s') %
@@ -2393,7 +2485,6 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
raise GypError(
'rule_sources must not exist in input, target %s rule %s' %
(target, rule_name))
- extension = rule['extension']
rule_sources = []
source_keys = ['sources']
@@ -2403,7 +2494,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
(source_root, source_extension) = os.path.splitext(source)
if source_extension.startswith('.'):
source_extension = source_extension[1:]
- if source_extension == extension:
+ if source_extension == rule_extension:
rule_sources.append(source)
if len(rule_sources) > 0:
@@ -2490,6 +2581,41 @@ def TurnIntIntoStrInList(the_list):
TurnIntIntoStrInList(item)
+def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
+ data):
+ """Return only the targets that are deep dependencies of |root_targets|."""
+ qualified_root_targets = []
+ for target in root_targets:
+ target = target.strip()
+ qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
+ if not qualified_targets:
+ raise GypError("Could not find target %s" % target)
+ qualified_root_targets.extend(qualified_targets)
+
+ wanted_targets = {}
+ for target in qualified_root_targets:
+ wanted_targets[target] = targets[target]
+ for dependency in dependency_nodes[target].DeepDependencies():
+ wanted_targets[dependency] = targets[dependency]
+
+ wanted_flat_list = [t for t in flat_list if t in wanted_targets]
+
+ # Prune unwanted targets from each build_file's data dict.
+ for build_file in data['target_build_files']:
+ if not 'targets' in data[build_file]:
+ continue
+ new_targets = []
+ for target in data[build_file]['targets']:
+ qualified_name = gyp.common.QualifiedTarget(build_file,
+ target['target_name'],
+ target['toolset'])
+ if qualified_name in wanted_targets:
+ new_targets.append(target)
+ data[build_file]['targets'] = new_targets
+
+ return wanted_targets, wanted_flat_list
+
+
def VerifyNoCollidingTargets(targets):
"""Verify that no two targets in the same directory share the same name.
@@ -2517,10 +2643,9 @@ def VerifyNoCollidingTargets(targets):
used[key] = gyp
-def Load(build_files, variables, includes, depth, generator_input_info, check,
- circular_check, parallel):
+def SetGeneratorGlobals(generator_input_info):
# Set up path_sections and non_configuration_keys with the default data plus
- # the generator-specifc data.
+ # the generator-specific data.
global path_sections
path_sections = base_path_sections[:]
path_sections.extend(generator_input_info['path_sections'])
@@ -2529,18 +2654,17 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
non_configuration_keys = base_non_configuration_keys[:]
non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
- # TODO(mark) handle variants if the generator doesn't want them directly.
- generator_handles_variants = \
- generator_input_info['generator_handles_variants']
-
- global absolute_build_file_paths
- absolute_build_file_paths = \
- generator_input_info['generator_wants_absolute_build_file_paths']
-
global multiple_toolsets
multiple_toolsets = generator_input_info[
'generator_supports_multiple_toolsets']
+ global generator_filelist_paths
+ generator_filelist_paths = generator_input_info['generator_filelist_paths']
+
+
+def Load(build_files, variables, includes, depth, generator_input_info, check,
+ circular_check, parallel, root_targets):
+ SetGeneratorGlobals(generator_input_info)
# A generator can have other lists (in addition to sources) be processed
# for rules.
extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
@@ -2554,21 +2678,21 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
# track of the keys corresponding to "target" files.
data = {'target_build_files': set()}
aux_data = {}
- for build_file in build_files:
- # Normalize paths everywhere. This is important because paths will be
- # used as keys to the data dict and for references between input files.
- build_file = os.path.normpath(build_file)
- try:
- if parallel:
- print >>sys.stderr, 'Using parallel processing.'
- LoadTargetBuildFileParallel(build_file, data, aux_data,
- variables, includes, depth, check)
- else:
+ # Normalize paths everywhere. This is important because paths will be
+ # used as keys to the data dict and for references between input files.
+ build_files = set(map(os.path.normpath, build_files))
+ if parallel:
+ LoadTargetBuildFilesParallel(build_files, data, aux_data,
+ variables, includes, depth, check,
+ generator_input_info)
+ else:
+ for build_file in build_files:
+ try:
LoadTargetBuildFile(build_file, data, aux_data,
variables, includes, depth, check, True)
- except Exception, e:
- gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
- raise
+ except Exception, e:
+ gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
+ raise
# Build a dict to access each target's subdict by qualified name.
targets = BuildTargetsDict(data)
@@ -2607,6 +2731,12 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
[dependency_nodes, flat_list] = BuildDependencyList(targets)
+ if root_targets:
+ # Remove, from |targets| and |flat_list|, the targets that are not deep
+ # dependencies of the targets specified in |root_targets|.
+ targets, flat_list = PruneUnwantedTargets(
+ targets, flat_list, dependency_nodes, root_targets, data)
+
# Check that no two targets in the same directory have the same name.
VerifyNoCollidingTargets(flat_list)
diff --git a/tools/gyp/pylib/gyp/input_test.py b/tools/gyp/pylib/gyp/input_test.py
new file mode 100755
index 0000000000..cdbf6b2fad
--- /dev/null
+++ b/tools/gyp/pylib/gyp/input_test.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the input.py file."""
+
+import gyp.input
+import unittest
+import sys
+
+
+class TestFindCycles(unittest.TestCase):
+ def setUp(self):
+ self.nodes = {}
+ for x in ('a', 'b', 'c', 'd', 'e'):
+ self.nodes[x] = gyp.input.DependencyGraphNode(x)
+
+ def _create_dependency(self, dependent, dependency):
+ dependent.dependencies.append(dependency)
+ dependency.dependents.append(dependent)
+
+ def test_no_cycle_empty_graph(self):
+ for label, node in self.nodes.iteritems():
+ self.assertEquals([], node.FindCycles())
+
+ def test_no_cycle_line(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['d'])
+
+ for label, node in self.nodes.iteritems():
+ self.assertEquals([], node.FindCycles())
+
+ def test_no_cycle_dag(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['a'], self.nodes['c'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+
+ for label, node in self.nodes.iteritems():
+ self.assertEquals([], node.FindCycles())
+
+ def test_cycle_self_reference(self):
+ self._create_dependency(self.nodes['a'], self.nodes['a'])
+
+ self.assertEquals([(self.nodes['a'], self.nodes['a'])],
+ self.nodes['a'].FindCycles())
+
+ def test_cycle_two_nodes(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['a'])
+
+ self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
+ self.nodes['a'].FindCycles())
+ self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
+ self.nodes['b'].FindCycles())
+
+ def test_two_cycles(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['a'])
+
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['b'])
+
+ cycles = self.nodes['a'].FindCycles()
+ self.assertTrue(
+ (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
+ self.assertTrue(
+ (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
+ self.assertEquals(2, len(cycles))
+
+ def test_big_cycle(self):
+ self._create_dependency(self.nodes['a'], self.nodes['b'])
+ self._create_dependency(self.nodes['b'], self.nodes['c'])
+ self._create_dependency(self.nodes['c'], self.nodes['d'])
+ self._create_dependency(self.nodes['d'], self.nodes['e'])
+ self._create_dependency(self.nodes['e'], self.nodes['a'])
+
+ self.assertEquals([(self.nodes['a'],
+ self.nodes['b'],
+ self.nodes['c'],
+ self.nodes['d'],
+ self.nodes['e'],
+ self.nodes['a'])],
+ self.nodes['a'].FindCycles())
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/gyp/pylib/gyp/mac_tool.py b/tools/gyp/pylib/gyp/mac_tool.py
index c06e3bebbf..20b3a4865f 100755
--- a/tools/gyp/pylib/gyp/mac_tool.py
+++ b/tools/gyp/pylib/gyp/mac_tool.py
@@ -9,6 +9,7 @@ These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
import fcntl
+import json
import os
import plistlib
import re
@@ -47,22 +48,33 @@ class MacTool(object):
extension = os.path.splitext(source)[1].lower()
if os.path.isdir(source):
# Copy tree.
+ # TODO(thakis): This copies file attributes like mtime, while the
+ # single-file branch below doesn't. This should probably be changed to
+ # be consistent with the single-file branch.
if os.path.exists(dest):
shutil.rmtree(dest)
shutil.copytree(source, dest)
elif extension == '.xib':
return self._CopyXIBFile(source, dest)
+ elif extension == '.storyboard':
+ return self._CopyXIBFile(source, dest)
elif extension == '.strings':
self._CopyStringsFile(source, dest)
else:
- shutil.copyfile(source, dest)
+ shutil.copy(source, dest)
def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
- tools_dir = os.environ.get('DEVELOPER_BIN_DIR', '/usr/bin')
- args = [os.path.join(tools_dir, 'ibtool'), '--errors', '--warnings',
- '--notices', '--output-format', 'human-readable-text', '--compile',
- dest, source]
+
+ # ibtool sometimes crashes with relative paths. See crbug.com/314728.
+ base = os.path.dirname(os.path.realpath(__file__))
+ if os.path.relpath(source):
+ source = os.path.join(base, source)
+ if os.path.relpath(dest):
+ dest = os.path.join(base, dest)
+
+ args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
+ '--output-format', 'human-readable-text', '--compile', dest, source]
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
@@ -87,16 +99,14 @@ class MacTool(object):
# semicolon in dictionary.
# on invalid files. Do the same kind of validation.
import CoreFoundation
- s = open(source).read()
+ s = open(source, 'rb').read()
d = CoreFoundation.CFDataCreate(None, s, len(s))
_, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
if error:
return
- fp = open(dest, 'w')
- args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code',
- 'UTF-16', source]
- subprocess.call(args, stdout=fp)
+ fp = open(dest, 'wb')
+ fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
def _DetectInputEncoding(self, file_name):
@@ -110,28 +120,58 @@ class MacTool(object):
return None
fp.close()
if header.startswith("\xFE\xFF"):
- return "UTF-16BE"
+ return "UTF-16"
elif header.startswith("\xFF\xFE"):
- return "UTF-16LE"
+ return "UTF-16"
elif header.startswith("\xEF\xBB\xBF"):
return "UTF-8"
else:
return None
- def ExecCopyInfoPlist(self, source, dest):
+ def ExecCopyInfoPlist(self, source, dest, *keys):
"""Copies the |source| Info.plist to the destination directory |dest|."""
# Read the source Info.plist into memory.
fd = open(source, 'r')
lines = fd.read()
fd.close()
+ # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+ plist = plistlib.readPlistFromString(lines)
+ if keys:
+ plist = dict(plist.items() + json.loads(keys[0]).items())
+ lines = plistlib.writePlistToString(plist)
+
# Go through all the environment variables and replace them as variables in
# the file.
+ IDENT_RE = re.compile('[/\s]')
for key in os.environ:
if key.startswith('_'):
continue
evar = '${%s}' % key
- lines = string.replace(lines, evar, os.environ[key])
+ evalue = os.environ[key]
+ lines = string.replace(lines, evar, evalue)
+
+ # Xcode supports various suffices on environment variables, which are
+ # all undocumented. :rfc1034identifier is used in the standard project
+ # template these days, and :identifier was used earlier. They are used to
+ # convert non-url characters into things that look like valid urls --
+ # except that the replacement character for :identifier, '_' isn't valid
+ # in a URL either -- oops, hence :rfc1034identifier was born.
+ evar = '${%s:identifier}' % key
+ evalue = IDENT_RE.sub('_', os.environ[key])
+ lines = string.replace(lines, evar, evalue)
+
+ evar = '${%s:rfc1034identifier}' % key
+ evalue = IDENT_RE.sub('-', os.environ[key])
+ lines = string.replace(lines, evar, evalue)
+
+ # Remove any keys with values that haven't been replaced.
+ lines = lines.split('\n')
+ for i in range(len(lines)):
+ if lines[i].strip().startswith("<string>${"):
+ lines[i] = None
+ lines[i - 1] = None
+ lines = '\n'.join(filter(lambda x: x is not None, lines))
# Write out the file with variables replaced.
fd = open(dest, 'w')
@@ -173,8 +213,9 @@ class MacTool(object):
return subprocess.call(cmd_list)
def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out 'libtool: file: foo.o has no symbols'."""
- libtool_re = re.compile(r'^libtool: file: .* has no symbols$')
+ """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+ symbols'."""
+ libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
_, err = libtoolout.communicate()
for line in err.splitlines():
diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py
index bc2afca3e0..3ac153dd86 100644
--- a/tools/gyp/pylib/gyp/msvs_emulation.py
+++ b/tools/gyp/pylib/gyp/msvs_emulation.py
@@ -167,12 +167,17 @@ class MsvsSettings(object):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
+ target_name = self.spec.get('product_prefix', '') + \
+ self.spec.get('product_name', self.spec['target_name'])
+ target_dir = base_to_build + '\\' if base_to_build else ''
replacements = {
- '$(OutDir)\\': base_to_build + '\\' if base_to_build else '',
+ '$(OutDir)\\': target_dir,
+ '$(TargetDir)\\': target_dir,
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(InputPath)': '${source}',
'$(InputName)': '${root}',
'$(ProjectName)': self.spec['target_name'],
+ '$(TargetName)': target_name,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
}
@@ -197,7 +202,8 @@ class MsvsSettings(object):
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
- return [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
+ libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
+ return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
@@ -289,6 +295,15 @@ class MsvsSettings(object):
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
+ def GetMapFileName(self, config, expand_special):
+ """Gets the explicitly overriden map file name for a target or returns None
+ if it's not set."""
+ config = self._TargetConfig(config)
+ map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
+ if map_file:
+ map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
+ return map_file
+
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
@@ -320,8 +335,11 @@ class MsvsSettings(object):
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
- map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O')
+ map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
+ cl('DisableSpecificWarnings', prefix='/wd')
+ cl('StringPooling', map={'true': '/GF'})
+ cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
@@ -345,6 +363,9 @@ class MsvsSettings(object):
cl('AdditionalOptions', prefix='')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
+ if self.vs_version.short_name in ('2013', '2013e'):
+ # New flag required in 2013 to maintain previous PDB behavior.
+ cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
@@ -402,16 +423,23 @@ class MsvsSettings(object):
lib('AdditionalOptions')
return libflags
- def _GetDefFileAsLdflags(self, spec, ldflags, gyp_to_build_path):
- """.def files get implicitly converted to a ModuleDefinitionFile for the
- linker in the VS generator. Emulate that behaviour here."""
- def_file = ''
+ def GetDefFile(self, gyp_to_build_path):
+ """Returns the .def file from sources, if any. Otherwise returns None."""
+ spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
- ldflags.append('/DEF:"%s"' % gyp_to_build_path(def_files[0]))
+ return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
+ return None
+
+ def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
+ """.def files get implicitly converted to a ModuleDefinitionFile for the
+ linker in the VS generator. Emulate that behaviour here."""
+ def_file = self.GetDefFile(gyp_to_build_path)
+ if def_file:
+ ldflags.append('/DEF:"%s"' % def_file)
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, is_executable):
@@ -421,7 +449,7 @@ class MsvsSettings(object):
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
- self._GetDefFileAsLdflags(self.spec, ldflags, gyp_to_build_path)
+ self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
@@ -433,6 +461,10 @@ class MsvsSettings(object):
pdb = self.GetPDBName(config, expand_special)
if pdb:
ldflags.append('/PDB:' + pdb)
+ map_file = self.GetMapFileName(config, expand_special)
+ ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
+ else '/MAP'})
+ ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
@@ -480,17 +512,32 @@ class MsvsSettings(object):
def _GetLdManifestFlags(self, config, name, allow_isolation):
"""Returns the set of flags that need to be added to the link to generate
a default manifest, as well as the name of the generated file."""
- # Add manifest flags that mirror the defaults in VS. Chromium dev builds
- # do not currently use any non-default settings, but we could parse
- # VCManifestTool blocks if Chromium or other projects need them in the
- # future. Of particular note, we do not yet support EmbedManifest because
- # it complicates incremental linking.
+ # The manifest is generated by default.
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
- '''/MANIFESTUAC:"level='asInvoker' uiAccess='false'"'''
]
+
+ config = self._TargetConfig(config)
+ enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
+ default='true')
+ if enable_uac == 'true':
+ execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
+ config, default='0')
+ execution_level_map = {
+ '0': 'asInvoker',
+ '1': 'highestAvailable',
+ '2': 'requireAdministrator'
+ }
+
+ ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
+ default='false')
+ flags.append('''/MANIFESTUAC:"level='%s' uiAccess='%s'"''' %
+ (execution_level_map[execution_level], ui_access))
+ else:
+ flags.append('/MANIFESTUAC:NO')
+
if allow_isolation:
flags.append('/ALLOWISOLATION')
return flags, output_name
@@ -500,9 +547,6 @@ class MsvsSettings(object):
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
- if (self._Setting(
- ('VCManifestTool', 'EmbedManifest'), config, default='') == 'true'):
- print 'gyp/msvs_emulation.py: "EmbedManifest: true" not yet supported.'
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
@@ -516,6 +560,18 @@ class MsvsSettings(object):
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
+ def IsEmbedManifest(self, config):
+ """Returns whether manifest should be linked into binary."""
+ config = self._TargetConfig(config)
+ embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config)
+ return embed == 'true'
+
+ def IsLinkIncremental(self, config):
+ """Returns whether the target should be linked incrementally."""
+ config = self._TargetConfig(config)
+ link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
+ return link_inc != '1'
+
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
@@ -626,11 +682,12 @@ class PrecompiledHeader(object):
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
- def GetObjDependencies(self, sources, objs):
+ def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatability
with make.py on Mac, and xcode_emulation.py."""
+ assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
@@ -639,7 +696,7 @@ class PrecompiledHeader(object):
return [(None, None, self.output_obj)]
return []
- def GetPchBuildCommands(self):
+ def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
@@ -724,6 +781,14 @@ def _FormatAsEnvironmentBlock(envvar_dict):
block += nul
return block
+def _ExtractCLPath(output_of_where):
+ """Gets the path to cl.exe based on the output of calling the environment
+ setup batch file, followed by the equivalent of `where`."""
+ # Take the first line, as that's the first found in the PATH.
+ for line in output_of_where.strip().splitlines():
+ if line.startswith('LOC:'):
+ return line[len('LOC:'):].strip()
+
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
@@ -739,10 +804,16 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
meet your requirement (e.g. for custom toolchains), you can pass
"-G ninja_use_custom_environment_files" to the gyp to suppress file
generation and use custom environment files prepared by yourself."""
+ archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
- return
+ cl_paths = {}
+ for arch in archs:
+ cl_paths[arch] = 'cl.exe'
+ return cl_paths
vs = GetVSVersion(generator_flags)
- for arch in ('x86', 'x64'):
+ cl_paths = {}
+ for arch in archs:
+ # Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
@@ -754,6 +825,15 @@ def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out):
f.write(env_block)
f.close()
+ # Find cl.exe location for this architecture.
+ args = vs.SetupScript(arch)
+ args.extend(('&&',
+ 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
+ popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
+ output, _ = popen.communicate()
+ cl_paths[arch] = _ExtractCLPath(output)
+ return cl_paths
+
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
@@ -769,3 +849,22 @@ def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
+
+# Sets some values in default_variables, which are required for many
+# generators, run on Windows.
+def CalculateCommonVariables(default_variables, params):
+ generator_flags = params.get('generator_flags', {})
+
+ # Set a variable so conditions can be based on msvs_version.
+ msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
+ default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+
+ # To determine processor word size on Windows, in addition to checking
+ # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+ # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+ # contains the actual word size of the system when running thru WOW64).
+ if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
+ '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
+ default_variables['MSVS_OS_BITS'] = 64
+ else:
+ default_variables['MSVS_OS_BITS'] = 32
diff --git a/tools/gyp/pylib/gyp/ninja_syntax.py b/tools/gyp/pylib/gyp/ninja_syntax.py
index 0f3603a8ce..d2948f06c0 100644
--- a/tools/gyp/pylib/gyp/ninja_syntax.py
+++ b/tools/gyp/pylib/gyp/ninja_syntax.py
@@ -34,8 +34,13 @@ class Writer(object):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
+ def pool(self, name, depth):
+ self._line('pool %s' % name)
+ self.variable('depth', depth, indent=1)
+
def rule(self, name, command, description=None, depfile=None,
- generator=False, restat=False, rspfile=None, rspfile_content=None):
+ generator=False, pool=None, restat=False, rspfile=None,
+ rspfile_content=None, deps=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
if description:
@@ -44,12 +49,16 @@ class Writer(object):
self.variable('depfile', depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
+ if pool:
+ self.variable('pool', pool, indent=1)
if restat:
self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
+ if deps:
+ self.variable('deps', deps, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None):
@@ -67,13 +76,12 @@ class Writer(object):
all_inputs.append('||')
all_inputs.extend(order_only)
- self._line('build %s: %s %s' % (' '.join(out_outputs),
- rule,
- ' '.join(all_inputs)))
+ self._line('build %s: %s' % (' '.join(out_outputs),
+ ' '.join([rule] + all_inputs)))
if variables:
if isinstance(variables, dict):
- iterator = variables.iteritems()
+ iterator = iter(variables.items())
else:
iterator = iter(variables)
diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py
index 7b06573638..3424c015d3 100755
--- a/tools/gyp/pylib/gyp/win_tool.py
+++ b/tools/gyp/pylib/gyp/win_tool.py
@@ -9,7 +9,6 @@
These functions are executed via gyp-win-tool when using the ninja generator.
"""
-from ctypes import windll, wintypes
import os
import shutil
import subprocess
@@ -25,31 +24,6 @@ def main(args):
sys.exit(exit_code)
-class LinkLock(object):
- """A flock-style lock to limit the number of concurrent links to one.
-
- Uses a session-local mutex based on the file's directory.
- """
- def __enter__(self):
- name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_')
- self.mutex = windll.kernel32.CreateMutexW(
- wintypes.c_int(0),
- wintypes.c_int(0),
- wintypes.create_unicode_buffer(name))
- assert self.mutex
- result = windll.kernel32.WaitForSingleObject(
- self.mutex, wintypes.c_int(0xFFFFFFFF))
- # 0x80 means another process was killed without releasing the mutex, but
- # that this process has been given ownership. This is fine for our
- # purposes.
- assert result in (0, 0x80), (
- "%s, %s" % (result, windll.kernel32.GetLastError()))
-
- def __exit__(self, type, value, traceback):
- windll.kernel32.ReleaseMutex(self.mutex)
- windll.kernel32.CloseHandle(self.mutex)
-
-
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
@@ -96,15 +70,14 @@ class WinTool(object):
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
- with LinkLock():
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if not line.startswith(' Creating library '):
- print line
- return popen.returncode
+ env = self._GetEnv(arch)
+ popen = subprocess.Popen(args, shell=True, env=env,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out, _ = popen.communicate()
+ for line in out.splitlines():
+ if not line.startswith(' Creating library '):
+ print line
+ return popen.returncode
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
@@ -119,6 +92,16 @@ class WinTool(object):
print line
return popen.returncode
+ def ExecManifestToRc(self, arch, *args):
+ """Creates a resource file pointing a SxS assembly manifest.
+ |args| is tuple containing path to resource file, path to manifest file
+ and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+ manifest_path, resource_path, resource_name = args
+ with open(resource_path, 'wb') as output:
+ output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
+ resource_name,
+ os.path.abspath(manifest_path).replace('\\', '/')))
+
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
*flags):
"""Filter noisy filenames output from MIDL compile step that isn't
diff --git a/tools/gyp/pylib/gyp/xcode_emulation.py b/tools/gyp/pylib/gyp/xcode_emulation.py
index 806f92b57a..f9cec33639 100644
--- a/tools/gyp/pylib/gyp/xcode_emulation.py
+++ b/tools/gyp/pylib/gyp/xcode_emulation.py
@@ -7,6 +7,7 @@ This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
+import copy
import gyp.common
import os.path
import re
@@ -22,9 +23,19 @@ class XcodeSettings(object):
# at class-level for efficiency.
_sdk_path_cache = {}
+ # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
+ # cached at class-level for efficiency.
+ _plist_cache = {}
+
+ # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
+ # cached at class-level for efficiency.
+ _codesigning_key_cache = {}
+
def __init__(self, spec):
self.spec = spec
+ self.isIOS = False
+
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
@@ -33,6 +44,10 @@ class XcodeSettings(object):
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
+ self._ConvertConditionalKeys(configname)
+ if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
+ None):
+ self.isIOS = True
# This is only non-None temporarily during the execution of some methods.
self.configname = None
@@ -40,6 +55,23 @@ class XcodeSettings(object):
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
+ def _ConvertConditionalKeys(self, configname):
+ """Converts or warns on conditional keys. Xcode supports conditional keys,
+ such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
+ with some keys converted while the rest force a warning."""
+ settings = self.xcode_settings[configname]
+ conditional_keys = [key for key in settings if key.endswith(']')]
+ for key in conditional_keys:
+ # If you need more, speak up at http://crbug.com/122592
+ if key.endswith("[sdk=iphoneos*]"):
+ if configname.endswith("iphoneos"):
+ new_key = key.split("[")[0]
+ settings[new_key] = settings[key]
+ else:
+ print 'Warning: Conditional keys not implemented, ignoring:', \
+ ' '.join(conditional_keys)
+ del settings[key]
+
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
@@ -79,7 +111,7 @@ class XcodeSettings(object):
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
- return '.app'
+ return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
@@ -104,6 +136,8 @@ class XcodeSettings(object):
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
+ if self.isIOS:
+ return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
@@ -116,6 +150,8 @@ class XcodeSettings(object):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
+ if self.isIOS:
+ return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
@@ -160,7 +196,7 @@ class XcodeSettings(object):
  """Returns the name of the bundle binary produced by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
- if self.spec['type'] in ('shared_library'):
+ if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
@@ -222,18 +258,32 @@ class XcodeSettings(object):
else:
return self._GetStandaloneBinaryPath()
- def _GetSdkVersionInfoItem(self, sdk, infoitem):
- job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ def GetActiveArchs(self, configname):
+ """Returns the architectures this target should be built for."""
+ # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
+ # CURRENT_ARCH / NATIVE_ARCH env vars?
+ return self.xcode_settings[configname].get('ARCHS', ['i386'])
+
+ def _GetStdout(self, cmdlist):
+ job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
- raise GypError('Error %d running xcodebuild' % job.returncode)
+ raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
return out.rstrip('\n')
- def _SdkPath(self):
- sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx')
+ def _GetSdkVersionInfoItem(self, sdk, infoitem):
+ return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
+
+ def _SdkRoot(self, configname):
+ if configname is None:
+ configname = self.configname
+ return self.GetPerConfigSetting('SDKROOT', configname, default='')
+
+ def _SdkPath(self, configname=None):
+ sdk_root = self._SdkRoot(configname)
+ if sdk_root.startswith('/'):
+ return sdk_root
if sdk_root not in XcodeSettings._sdk_path_cache:
XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
sdk_root, 'Path')
@@ -251,7 +301,7 @@ class XcodeSettings(object):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
- def GetCflags(self, configname):
+ def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
# This functions (and the similar ones below) do not offer complete
@@ -298,6 +348,11 @@ class XcodeSettings(object):
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
+ if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
+ cflags.append('-fstrict-aliasing')
+ elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
+ cflags.append('-fno-strict-aliasing')
+
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
@@ -319,7 +374,10 @@ class XcodeSettings(object):
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
- archs = self._Settings().get('ARCHS', ['i386'])
+ if arch is not None:
+ archs = [arch]
+ else:
+ archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@@ -351,7 +409,10 @@ class XcodeSettings(object):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
- self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
+ if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
+ cflags_c.append('-ansi')
+ else:
+ self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
@@ -404,13 +465,22 @@ class XcodeSettings(object):
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
+ def _AddObjectiveCARCFlags(self, flags):
+ if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
+ flags.append('-fobjc-arc')
+
+ def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
+ if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
+ 'YES', default='NO'):
+ flags.append('-Wobjc-missing-property-synthesis')
+
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
-
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
-
+ self._AddObjectiveCARCFlags(cflags_objc)
+ self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
self.configname = None
return cflags_objc
@@ -419,6 +489,8 @@ class XcodeSettings(object):
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
+ self._AddObjectiveCARCFlags(cflags_objcc)
+ self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
@@ -513,7 +585,7 @@ class XcodeSettings(object):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
- def GetLdflags(self, configname, product_dir, gyp_to_build_path):
+ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
@@ -555,7 +627,10 @@ class XcodeSettings(object):
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
- archs = self._Settings().get('ARCHS', ['i386'])
+ if arch is not None:
+ archs = [arch]
+ else:
+ archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
@@ -566,7 +641,7 @@ class XcodeSettings(object):
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
- if install_name:
+ if install_name and self.spec['type'] != 'loadable_module':
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
@@ -613,19 +688,25 @@ class XcodeSettings(object):
del result[key]
return result
+ def GetPerConfigSetting(self, setting, configname, default=None):
+ if configname in self.xcode_settings:
+ return self.xcode_settings[configname].get(setting, default)
+ else:
+ return self.GetPerTargetSetting(setting, default)
+
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
- first_pass = True
+ is_first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
- if first_pass:
+ if is_first_pass:
result = self.xcode_settings[configname].get(setting, None)
- first_pass = False
+ is_first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
- "(target %s)" % (setting, spec['target_name']))
+ "(target %s)" % (setting, self.spec['target_name']))
if result is None:
return default
return result
@@ -641,7 +722,7 @@ class XcodeSettings(object):
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
- if self._IsBundle():
+ if self.spec['type'] == 'loadable_module' and self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
@@ -683,7 +764,8 @@ class XcodeSettings(object):
self.configname = None
return result
- def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
+ def _GetTargetPostbuilds(self, configname, output, output_binary,
+ quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
@@ -691,7 +773,51 @@ class XcodeSettings(object):
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
- def _AdjustLibrary(self, library):
+ def _GetIOSPostbuilds(self, configname, output_binary):
+ """Return a shell command to codesign the iOS output binary so it can
+ be deployed to a device. This should be run as the very last step of the
+ build."""
+ if not (self.isIOS and self.spec['type'] == "executable"):
+ return []
+
+ identity = self.xcode_settings[configname].get('CODE_SIGN_IDENTITY', '')
+ if identity == '':
+ return []
+ if identity not in XcodeSettings._codesigning_key_cache:
+ proc = subprocess.Popen(['security', 'find-identity', '-p', 'codesigning',
+ '-v'], stdout=subprocess.PIPE)
+ output = proc.communicate()[0].strip()
+ key = None
+ for item in output.split("\n"):
+ if identity in item:
+ assert key == None, (
+ "Multiple codesigning identities for identity: %s" %
+ identity)
+ key = item.split(' ')[1]
+ XcodeSettings._codesigning_key_cache[identity] = key
+ key = XcodeSettings._codesigning_key_cache[identity]
+ if key:
+ # Warn for any unimplemented signing xcode keys.
+ unimpl = ['CODE_SIGN_RESOURCE_RULES_PATH', 'OTHER_CODE_SIGN_FLAGS',
+ 'CODE_SIGN_ENTITLEMENTS']
+ keys = set(self.xcode_settings[configname].keys())
+ unimpl = set(unimpl) & keys
+ if unimpl:
+ print 'Warning: Some codesign keys not implemented, ignoring:', \
+ ' '.join(unimpl)
+ return ['codesign --force --sign %s %s' % (key, output_binary)]
+ return []
+
+ def AddImplicitPostbuilds(self, configname, output, output_binary,
+ postbuilds=[], quiet=False):
+ """Returns a list of shell commands that should run before and after
+ |postbuilds|."""
+ assert output_binary is not None
+ pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
+ post = self._GetIOSPostbuilds(configname, output_binary)
+ return pre + postbuilds + post
+
+ def _AdjustLibrary(self, library, config_name=None):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
@@ -700,15 +826,76 @@ class XcodeSettings(object):
l = '-l' + m.group(1)
else:
l = library
- return l.replace('$(SDKROOT)', self._SdkPath())
+ return l.replace('$(SDKROOT)', self._SdkPath(config_name))
- def AdjustLibraries(self, libraries):
+ def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
- libraries = [ self._AdjustLibrary(library) for library in libraries]
+ libraries = [self._AdjustLibrary(library, config_name)
+ for library in libraries]
return libraries
+ def _BuildMachineOSBuild(self):
+ return self._GetStdout(['sw_vers', '-buildVersion'])
+
+ def _XcodeVersion(self):
+ # `xcodebuild -version` output looks like
+ # Xcode 4.6.3
+ # Build version 4H1503
+ # or like
+ # Xcode 3.2.6
+ # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+ # BuildVersion: 10M2518
+ # Convert that to '0463', '4H1503'.
+ version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
+ version = version_list[0]
+ build = version_list[-1]
+ # Be careful to convert "4.2" to "0420":
+ version = version.split()[-1].replace('.', '')
+ version = (version + '0' * (3 - len(version))).zfill(4)
+ build = build.split()[-1]
+ return version, build
+
+ def _XcodeIOSDeviceFamily(self, configname):
+ family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
+ return [int(x) for x in family.split(',')]
+
+ def GetExtraPlistItems(self, configname=None):
+ """Returns a dictionary with extra items to insert into Info.plist."""
+ if configname not in XcodeSettings._plist_cache:
+ cache = {}
+ cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
+
+ xcode, xcode_build = self._XcodeVersion()
+ cache['DTXcode'] = xcode
+ cache['DTXcodeBuild'] = xcode_build
+
+ sdk_root = self._SdkRoot(configname)
+ cache['DTSDKName'] = sdk_root
+ if xcode >= '0430':
+ cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
+ sdk_root, 'ProductBuildVersion')
+ else:
+ cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
+
+ if self.isIOS:
+ cache['DTPlatformName'] = cache['DTSDKName']
+ if configname.endswith("iphoneos"):
+ cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
+ sdk_root, 'ProductVersion')
+ cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
+ else:
+ cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
+ XcodeSettings._plist_cache[configname] = cache
+
+ # Include extra plist items that are per-target, not per global
+ # XcodeSettings.
+ items = dict(XcodeSettings._plist_cache[configname])
+ if self.isIOS:
+ items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
+ return items
+
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
@@ -760,21 +947,28 @@ class MacPrefixHeader(object):
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
- def GetInclude(self, lang):
+ def _CompiledHeader(self, lang, arch):
+ assert self.compile_headers
+ h = self.compiled_headers[lang]
+ if arch:
+ h += '.' + arch
+ return h
+
+ def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
- return '-include %s' % self.compiled_headers[lang]
+ return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
- def _Gch(self, lang):
+ def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
- return self.compiled_headers[lang] + '.gch'
+ return self._CompiledHeader(lang, arch) + '.gch'
- def GetObjDependencies(self, sources, objs):
+ def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
relative path to the gch file each object file depends on. |compilable[i]|
@@ -792,20 +986,20 @@ class MacPrefixHeader(object):
'.mm': 'mm',
}.get(ext, None)
if lang:
- result.append((source, obj, self._Gch(lang)))
+ result.append((source, obj, self._Gch(lang, arch)))
return result
- def GetPchBuildCommands(self):
+ def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
- (self._Gch('c'), '-x c-header', 'c', self.header),
- (self._Gch('cc'), '-x c++-header', 'cc', self.header),
- (self._Gch('m'), '-x objective-c-header', 'm', self.header),
- (self._Gch('mm'), '-x objective-c++-header', 'mm', self.header),
+ (self._Gch('c', arch), '-x c-header', 'c', self.header),
+ (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
+ (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
+ (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
@@ -871,14 +1065,17 @@ def GetMacBundleResources(product_dir, xcode_settings, resources):
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
- output = output[0:-3] + 'nib'
+ output = os.path.splitext(output)[0] + '.nib'
+ # Compiled storyboard files are referred to by .storyboardc.
+ if output.endswith('.storyboard'):
+ output = os.path.splitext(output)[0] + '.storyboardc'
yield output, res
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
- * |info_plist| is the sourc plist path, relative to the
+ * |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the
build directory,
@@ -957,8 +1154,8 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
- if xcode_settings.GetPerTargetSetting('SDKROOT'):
- env['SDKROOT'] = xcode_settings._SdkPath()
+ if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
+ env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
@@ -1081,3 +1278,35 @@ def GetSpecPostbuildCommands(spec, quiet=False):
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
+
+
+def _HasIOSTarget(targets):
+ """Returns true if any target contains the iOS specific key
+ IPHONEOS_DEPLOYMENT_TARGET."""
+ for target_dict in targets.values():
+ for config in target_dict['configurations'].values():
+ if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
+ return True
+ return False
+
+
+def _AddIOSDeviceConfigurations(targets):
+ """Clone all targets and append -iphoneos to the name. Configure these targets
+ to build for iOS devices."""
+ for target_dict in targets.values():
+ for config_name in target_dict['configurations'].keys():
+ config = target_dict['configurations'][config_name]
+ new_config_name = config_name + '-iphoneos'
+ new_config_dict = copy.deepcopy(config)
+ if target_dict['toolset'] == 'target':
+ new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
+ new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
+ target_dict['configurations'][new_config_name] = new_config_dict
+ return targets
+
+def CloneConfigurationForDeviceAndEmulator(target_dicts):
+ """If |target_dicts| contains any iOS targets, automatically create -iphoneos
+ targets for iOS device builds."""
+ if _HasIOSTarget(target_dicts):
+ return _AddIOSDeviceConfigurations(target_dicts)
+ return target_dicts
diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py
index 47712a7f6e..6e4a1dc69d 100644
--- a/tools/gyp/pylib/gyp/xcodeproj_file.py
+++ b/tools/gyp/pylib/gyp/xcodeproj_file.py
@@ -169,7 +169,7 @@ _quoted = re.compile('___')
# This pattern should match any character that needs to be escaped by
# XCObject._EncodeString. See that function.
-_escaped = re.compile('[\\\\"]|[^ -~]')
+_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
# Used by SourceTreeAndPathFromPath
@@ -557,9 +557,9 @@ class XCObject(object):
# 10 ^J NL is encoded as "\n"
# 13 ^M CR is encoded as "\n" rendering it indistinguishable from
# 10 ^J NL
- # All other nonprintable characters within the ASCII range (0 through 127
- # inclusive) are encoded as "\U001f" referring to the Unicode code point in
- # hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
+ # All other characters within the ASCII control character range (0 through
+ # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
+ # in hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
# Characters above the ASCII range are passed through to the output encoded
# as UTF-8 without any escaping. These mappings are contained in the
# class' _encode_transforms list.
@@ -1483,8 +1483,11 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
'cpp': 'sourcecode.cpp.cpp',
'css': 'text.css',
'cxx': 'sourcecode.cpp.cpp',
+ 'dart': 'sourcecode',
'dylib': 'compiled.mach-o.dylib',
'framework': 'wrapper.framework',
+ 'gyp': 'sourcecode',
+ 'gypi': 'sourcecode',
'h': 'sourcecode.c.h',
'hxx': 'sourcecode.cpp.h',
'icns': 'image.icns',
@@ -1512,8 +1515,15 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
'y': 'sourcecode.yacc',
}
+ prop_map = {
+ 'dart': 'explicitFileType',
+ 'gyp': 'explicitFileType',
+ 'gypi': 'explicitFileType',
+ }
+
if is_dir:
file_type = 'folder'
+ prop_name = 'lastKnownFileType'
else:
basename = posixpath.basename(self._properties['path'])
(root, ext) = posixpath.splitext(basename)
@@ -1528,8 +1538,9 @@ class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
# for unrecognized files not containing text. Xcode seems to choose
# based on content.
file_type = extension_map.get(ext, 'text')
+ prop_name = prop_map.get(ext, 'lastKnownFileType')
- self._properties['lastKnownFileType'] = file_type
+ self._properties[prop_name] = file_type
class PBXVariantGroup(PBXGroup, XCFileLikeElement):
@@ -2227,20 +2238,22 @@ class PBXNativeTarget(XCTarget):
# prefix : the prefix for the file name
     #  suffix : the suffix for the file name
_product_filetypes = {
- 'com.apple.product-type.application': ['wrapper.application',
- '', '.app'],
- 'com.apple.product-type.bundle': ['wrapper.cfbundle',
- '', '.bundle'],
- 'com.apple.product-type.framework': ['wrapper.framework',
- '', '.framework'],
- 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
- 'lib', '.dylib'],
- 'com.apple.product-type.library.static': ['archive.ar',
- 'lib', '.a'],
- 'com.apple.product-type.tool': ['compiled.mach-o.executable',
- '', ''],
- 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
- '', '.so'],
+ 'com.apple.product-type.application': ['wrapper.application',
+ '', '.app'],
+ 'com.apple.product-type.bundle': ['wrapper.cfbundle',
+ '', '.bundle'],
+ 'com.apple.product-type.framework': ['wrapper.framework',
+ '', '.framework'],
+ 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
+ 'lib', '.dylib'],
+ 'com.apple.product-type.library.static': ['archive.ar',
+ 'lib', '.a'],
+ 'com.apple.product-type.tool': ['compiled.mach-o.executable',
+ '', ''],
+ 'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
+ '', '.xctest'],
+ 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
+ '', '.so'],
}
def __init__(self, properties=None, id=None, parent=None,
@@ -2292,6 +2305,11 @@ class PBXNativeTarget(XCTarget):
if force_extension is None:
force_extension = suffix[1:]
+ if self._properties['productType'] == \
+ 'com.apple.product-type-bundle.unit.test':
+ if force_extension is None:
+ force_extension = suffix[1:]
+
if force_extension is not None:
# If it's a wrapper (bundle), set WRAPPER_EXTENSION.
if filetype.startswith('wrapper.'):
diff --git a/tools/gyp/setup.py b/tools/gyp/setup.py
index ed2b41a3c3..75a42558d8 100755
--- a/tools/gyp/setup.py
+++ b/tools/gyp/setup.py
@@ -4,10 +4,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from distutils.core import setup
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.command.install_scripts import install_scripts
+from setuptools import setup
setup(
name='gyp',
@@ -18,9 +15,5 @@ setup(
url='http://code.google.com/p/gyp',
package_dir = {'': 'pylib'},
packages=['gyp', 'gyp.generator'],
-
- scripts = ['gyp'],
- cmdclass = {'install': install,
- 'install_lib': install_lib,
- 'install_scripts': install_scripts},
+ entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
)
diff --git a/tools/gyp/tools/emacs/gyp.el b/tools/gyp/tools/emacs/gyp.el
index f558b53135..3db9f64594 100644
--- a/tools/gyp/tools/emacs/gyp.el
+++ b/tools/gyp/tools/emacs/gyp.el
@@ -43,6 +43,7 @@
(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
+(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
;;; Font-lock support
diff --git a/tools/gyp_node b/tools/gyp_node.py
index 7b495055c1..7b495055c1 100755
--- a/tools/gyp_node
+++ b/tools/gyp_node.py