aboutsummaryrefslogtreecommitdiff
path: root/tools/gyp/pylib/gyp
diff options
context:
space:
mode:
authorRyan Dahl <ry@tinyclouds.org>2011-08-22 17:08:16 -0700
committerRyan Dahl <ry@tinyclouds.org>2011-08-22 17:09:57 -0700
commit80dd8182907314ae99f5572798c86176e2086c47 (patch)
tree3dddb582141ef135a93789a288c25eed543f32f9 /tools/gyp/pylib/gyp
parentc2ae39b8d60407261d1963461087798458b31358 (diff)
downloadandroid-node-v8-80dd8182907314ae99f5572798c86176e2086c47.tar.gz
android-node-v8-80dd8182907314ae99f5572798c86176e2086c47.tar.bz2
android-node-v8-80dd8182907314ae99f5572798c86176e2086c47.zip
Upgrade GYP to r1010
Diffstat (limited to 'tools/gyp/pylib/gyp')
-rw-r--r--tools/gyp/pylib/gyp/MSVSNew.py10
-rw-r--r--tools/gyp/pylib/gyp/MSVSSettings_test.py2
-rw-r--r--tools/gyp/pylib/gyp/easy_xml_test.py2
-rw-r--r--tools/gyp/pylib/gyp/generator/make.py260
-rw-r--r--tools/gyp/pylib/gyp/generator/msvs.py17
-rw-r--r--tools/gyp/pylib/gyp/generator/msvs_test.py35
-rw-r--r--tools/gyp/pylib/gyp/generator/ninja.py626
-rw-r--r--tools/gyp/pylib/gyp/ninja_syntax.py98
8 files changed, 960 insertions, 90 deletions
diff --git a/tools/gyp/pylib/gyp/MSVSNew.py b/tools/gyp/pylib/gyp/MSVSNew.py
index 1277d4a508..9b9b848fe7 100644
--- a/tools/gyp/pylib/gyp/MSVSNew.py
+++ b/tools/gyp/pylib/gyp/MSVSNew.py
@@ -213,20 +213,16 @@ class MSVSSolution:
IndexError: An entry appears multiple times.
"""
# Walk the entry tree and collect all the folders and projects.
- all_entries = []
+ all_entries = set()
entries_to_check = self.entries[:]
while entries_to_check:
- # Pop from the beginning of the list to preserve the user's order.
e = entries_to_check.pop(0)
- # A project or folder can only appear once in the solution's folder tree.
- # This also protects from cycles.
+ # If this entry has been visited, nothing to do.
if e in all_entries:
- #raise IndexError('Entry "%s" appears more than once in solution' %
- # e.name)
continue
- all_entries.append(e)
+ all_entries.add(e)
# If this is a folder, check its entries too.
if isinstance(e, MSVSFolder):
diff --git a/tools/gyp/pylib/gyp/MSVSSettings_test.py b/tools/gyp/pylib/gyp/MSVSSettings_test.py
index 2ae0dd23bc..199f98b1b3 100644
--- a/tools/gyp/pylib/gyp/MSVSSettings_test.py
+++ b/tools/gyp/pylib/gyp/MSVSSettings_test.py
@@ -8,7 +8,7 @@
import StringIO
import unittest
-import MSVSSettings
+import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
diff --git a/tools/gyp/pylib/gyp/easy_xml_test.py b/tools/gyp/pylib/gyp/easy_xml_test.py
index a8f32a0cd5..9e59559818 100644
--- a/tools/gyp/pylib/gyp/easy_xml_test.py
+++ b/tools/gyp/pylib/gyp/easy_xml_test.py
@@ -6,7 +6,7 @@
""" Unit tests for the easy_xml.py file. """
-import easy_xml
+import gyp.easy_xml as easy_xml
import unittest
import StringIO
diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py
index 5ebc5c96c7..b8914785bf 100644
--- a/tools/gyp/pylib/gyp/generator/make.py
+++ b/tools/gyp/pylib/gyp/generator/make.py
@@ -29,7 +29,6 @@ import gyp.system_test
import os.path
import os
import sys
-import stat
# Debugging-related imports -- remove me once we're solid.
import code
@@ -374,6 +373,7 @@ prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
# do_cmd: run a command via the above cmd_foo names, if necessary.
# Should always run for a given target to handle command-line changes.
# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
SPACE_REPLACEMENT + """ for
# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
@@ -389,6 +389,9 @@ $(if $(or $(command_changed),$(prereq_changed)),
)
@$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
@$(if $(2),$(fixup_dep))
+ $(if $(and $(3), $(POSTBUILDS)),
+ @for p in $(POSTBUILDS); do eval $$p; done
+ )
)
endef
@@ -422,12 +425,13 @@ quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use #(3) for the command, since $(2) is used as flag by do_cmd already.
-quiet_cmd_mac_tool = MACTOOL $(3) $<
-cmd_mac_tool = ./gyp-mac-tool $(3) $< "$@"
+# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
+# already.
+quiet_cmd_mac_tool = MACTOOL $(4) $<
+cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(3)
+cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
"""
@@ -676,9 +680,9 @@ class XcodeSettings(object):
return os.path.join(self.GetBundleContentsFolderPath(),
'Resources', 'Info.plist')
- def GetBundleBinaryPath(self):
- """Returns the directory name of the bundle represented by this target. E.g.
- Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
+ def _GetBundleBinaryPath(self):
+ """Returns the name of the bundle binary of this target.
+ E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
path = self.GetBundleContentsFolderPath()
@@ -687,6 +691,53 @@ class XcodeSettings(object):
return os.path.join(path, self.spec.get('product_name',
self.spec['target_name']))
+ def _GetStandaloneExecutableSuffix(self):
+ if 'product_extension' in self.spec:
+ return '.' + self.spec['product_extension']
+ return {
+ 'executable': '',
+ 'static_library': '.a',
+ 'shared_library': '.dylib',
+ 'loadable_module': '.so',
+ }[self.spec['type']]
+
+ def _GetStandaloneExecutablePrefix(self):
+ return self.spec.get('product_prefix', {
+ 'executable': '',
+ 'static_library': 'lib',
+ 'shared_library': 'lib',
+ # Non-bundled loadable_modules are called foo.so for some reason
+ # (that is, .so and no prefix) with the xcode build -- match that.
+ 'loadable_module': '',
+ }[self.spec['type']])
+
+ def _GetStandaloneBinaryPath(self):
+ """Returns the name of the non-bundle binary represented by this target.
+ E.g. hello_world. Only valid for non-bundles."""
+ assert not self._IsBundle()
+ assert self.spec['type'] in (
+ 'executable', 'shared_library', 'static_library', 'loadable_module')
+ target = self.spec['target_name']
+ if self.spec['type'] == 'static_library':
+ if target[:3] == 'lib':
+ target = target[3:]
+ elif self.spec['type'] in ('loadable_module', 'shared_library'):
+ if target[:3] == 'lib':
+ target = target[3:]
+
+ target_prefix = self._GetStandaloneExecutablePrefix()
+ target = self.spec.get('product_name', target)
+ target_ext = self._GetStandaloneExecutableSuffix()
+ return target_prefix + target + target_ext
+
+ def GetExecutablePath(self):
+ """Returns the path to the binary represented by this target. E.g.
+ Chromium.app/Contents/MacOS/Chromium."""
+ if self._IsBundle():
+ return self._GetBundleBinaryPath()
+ else:
+ return self._GetStandaloneBinaryPath()
+
def GetCflags(self, configname):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
@@ -838,6 +889,10 @@ class XcodeSettings(object):
ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
self._Appendf(
ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
+ self._Appendf(
+ ldflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
+ self._Appendf(
+ ldflags, 'SDKROOT', '-isysroot /Developer/SDKs/%s.sdk')
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
ldflags.append('-L' + library_path)
@@ -855,37 +910,47 @@ class XcodeSettings(object):
ldflags.append('-L' + generator_default_variables['PRODUCT_DIR'])
install_name = self.GetPerTargetSetting('LD_DYLIB_INSTALL_NAME')
+ install_base = self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE')
+ default_install_name = \
+ '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
+ if not install_name and install_base:
+ install_name = default_install_name
+
if install_name:
# Hardcode support for the variables used in chromium for now, to unblock
# people using the make build.
if '$' in install_name:
- assert install_name == ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
- '$(WRAPPER_NAME)/$(PRODUCT_NAME)'), (
+ assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
+ '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported yet'
' in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
- install_base = self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE')
# I'm not quite sure what :standardizepath does. Just call normpath(),
- # but don't let @executable_path/../foo collapse to foo
- prefix, rest = '', install_base
- if install_base.startswith('@'):
- prefix, rest = install_base.split('/', 1)
- rest = os.path.normpath(rest) # :standardizepath
- install_base = os.path.join(prefix, rest)
+ # but don't let @executable_path/../foo collapse to foo.
+ if '/' in install_base:
+ prefix, rest = '', install_base
+ if install_base.startswith('@'):
+ prefix, rest = install_base.split('/', 1)
+ rest = os.path.normpath(rest) # :standardizepath
+ install_base = os.path.join(prefix, rest)
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)', install_base)
+ if self._IsBundle():
+ # These are only valid for bundles, hence the |if|.
+ install_name = install_name.replace(
+ '$(WRAPPER_NAME)', self.GetWrapperName())
+ install_name = install_name.replace(
+ '$(PRODUCT_NAME)', self.GetProductName())
+ else:
+ assert '$(WRAPPER_NAME)' not in install_name
+ assert '$(PRODUCT_NAME)' not in install_name
+
install_name = install_name.replace(
- '$(WRAPPER_NAME)', self.GetWrapperName())
- install_name = install_name.replace(
- '$(PRODUCT_NAME)', self.GetProductName())
+ '$(EXECUTABLE_PATH)', self.GetExecutablePath())
install_name = QuoteSpaces(install_name)
ldflags.append('-install_name ' + install_name)
- elif self.GetPerTargetSetting('DYLIB_INSTALL_NAME_BASE'):
- # LD_DYLIB_INSTALL_NAME defaults to
- # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH).
- print 'Warning: DYLIB_INSTALL_NAME_BASE is not fully implemented.'
self.configname = None
return ldflags
@@ -1453,7 +1518,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
if output.endswith('.xib'):
output = output[0:-3] + 'nib'
- self.WriteDoCmd([output], [path], 'mac_tool,,copy-bundle-resource',
+ self.WriteDoCmd([output], [path], 'mac_tool,,,copy-bundle-resource',
part_of_all=True)
bundle_deps.append(output)
@@ -1467,7 +1532,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
dest_plist = os.path.join(path, self.xcode_settings.GetBundlePlistPath())
dest_plist = QuoteSpaces(dest_plist)
self.WriteXcodeEnv(dest_plist, spec) # plists can contain envvars.
- self.WriteDoCmd([dest_plist], [info_plist], 'mac_tool,,copy-info-plist',
+ self.WriteDoCmd([dest_plist], [info_plist], 'mac_tool,,,copy-info-plist',
part_of_all=True)
bundle_deps.append(dest_plist)
@@ -1593,18 +1658,21 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteLn()
- def ComputeOutput(self, spec):
- """Return the 'output' (full output path) of a gyp spec.
+ def ComputeOutputBasename(self, spec):
+ """Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
+ 'libfoobar.so'
"""
assert not self.is_mac_bundle
+ if self.flavor == 'mac' and self.type in (
+ 'static_library', 'executable', 'shared_library', 'loadable_module'):
+ return self.xcode_settings.GetExecutablePath()
+
target = spec['target_name']
target_prefix = ''
target_ext = ''
- path = os.path.join('$(obj).' + self.toolset, self.path)
if self.type == 'static_library':
if target[:3] == 'lib':
target = target[3:]
@@ -1615,31 +1683,37 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
target = target[3:]
target_prefix = 'lib'
target_ext = '.so'
- if self.flavor == 'mac':
- if self.type == 'shared_library':
- target_ext = '.dylib'
- else:
- # Non-bundled loadable_modules are called foo.so for some reason
- # (that is, .so and no prefix) with the xcode build -- match that.
- target_prefix = ''
elif self.type == 'none':
target = '%s.stamp' % target
- elif self.type == 'settings':
- return '' # Doesn't have any output.
- elif self.type == 'executable':
- path = os.path.join('$(builddir)')
- else:
+ elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
- path = spec.get('product_dir', path)
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
- return os.path.join(path, target_prefix + target + target_ext)
+ return target_prefix + target + target_ext
+
+
+ def ComputeOutput(self, spec):
+ """Return the 'output' (full output path) of a gyp spec.
+
+ E.g., the loadable module 'foobar' in directory 'baz' will produce
+ '$(obj)/baz/libfoobar.so'
+ """
+ assert not self.is_mac_bundle
+
+ if self.type == 'settings':
+ return '' # Doesn't have any output.
+
+ path = os.path.join('$(obj).' + self.toolset, self.path)
+ if self.type == 'executable':
+ path = '$(builddir)'
+ path = spec.get('product_dir', path)
+ return os.path.join(path, self.ComputeOutputBasename(spec))
def ComputeMacBundleOutput(self, spec):
@@ -1652,7 +1726,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
def ComputeMacBundleBinaryOutput(self, spec):
"""Return the 'output' (full output path) to the binary in a bundle."""
path = generator_default_variables['PRODUCT_DIR']
- return os.path.join(path, self.xcode_settings.GetBundleBinaryPath())
+ return os.path.join(path, self.xcode_settings.GetExecutablePath())
def ComputeDeps(self, spec):
@@ -1731,6 +1805,21 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
'%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % self.output_binary)
self.WriteLn('%s: LIBS := $(LIBS)' % self.output_binary)
+ postbuilds = []
+ if self.flavor == 'mac':
+ # Postbuild actions. Like actions, but implicitly depend on the target's
+ # output.
+ for postbuild in spec.get('postbuilds', []):
+ postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
+ self.target, postbuild['postbuild_name']))
+ shell_list = postbuild['action']
+ # The first element is the command. If it's a relative path, it's
+ # a script in the source tree relative to the gyp file and needs to be
+ # absolutified. Else, it's in the PATH (e.g. install_name_tool, ln).
+ if os.path.sep in shell_list[0]:
+ shell_list[0] = self.Absolutify(shell_list[0])
+ postbuilds.append('%s' % gyp.common.EncodePOSIXShellList(shell_list))
+
# A bundle directory depends on its dependencies such as bundle resources
# and bundle binary. When all dependencies have been built, the bundle
# needs to be packaged.
@@ -1749,22 +1838,14 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# After the framework is built, package it. Needs to happen before
# postbuilds, since postbuilds depend on this.
if self.type in ('shared_library', 'loadable_module'):
- self.WriteLn('\t@$(call do_cmd,mac_package_framework,0,%s)' %
+ self.WriteLn('\t@$(call do_cmd,mac_package_framework,0,0,%s)' %
self.xcode_settings.GetFrameworkVersion())
- # Postbuild actions. Like actions, but implicitly depend on the output
- # framework.
- for postbuild in spec.get('postbuilds', []):
- self.WriteLn('\t@echo POSTBUILD %s' % postbuild['postbuild_name'])
- shell_list = postbuild['action']
- # The first element is the command. If it's a relative path, it's
- # a script in the source tree relative to the gyp file and needs to be
- # absolutified. Else, it's in the PATH (e.g. install_name_tool, ln).
- if os.path.sep in shell_list[0]:
- shell_list[0] = self.Absolutify(shell_list[0])
- # TODO: Honor V=1 etc. Not using do_cmd because since this is part of
- # the framework rule, there's no need for .d file processing here.
- self.WriteLn('\t@%s' % gyp.common.EncodePOSIXShellList(shell_list))
+ # Bundle postbuilds can depend on the whole bundle, so run them after
+ # the bundle is packaged, not already after the bundle binary is done.
+ for postbuild in postbuilds:
+ self.WriteLn('\t@' + postbuild)
+ postbuilds = [] # Don't write postbuilds for target's output.
# Needed by test/mac/gyptest-rebuild.py.
self.WriteLn('\t@true # No-op, used by tests')
@@ -1775,32 +1856,43 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# on every build (expensive, especially with postbuilds), expliclity
# update the time on the framework directory.
self.WriteLn('\t@touch -c %s' % self.output)
- elif 'postbuilds' in spec:
- print ("Warning: 'postbuild' support for non-bundles "
- "isn't implemented yet (target '%s)'." % self.target)
+
+ if postbuilds:
+ assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
+ 'on the bundle, not the binary (target \'%s\')' % self.target)
+ self.WriteXcodeEnv(self.output_binary, spec) # For postbuilds
+ postbuilds = [EscapeShellArgument(p) for p in postbuilds]
+ self.WriteLn('%s: builddir := $(abs_builddir)' % self.output_binary)
+ self.WriteLn('%s: POSTBUILDS := %s' % (
+ self.output_binary, ' '.join(postbuilds)))
if self.type == 'executable':
self.WriteLn(
'%s: LD_INPUTS := %s' % (self.output_binary, ' '.join(link_deps)))
- self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all)
+ self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
+ postbuilds=postbuilds)
elif self.type == 'static_library':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep)
- self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all)
+ self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
+ postbuilds=postbuilds)
elif self.type == 'shared_library':
self.WriteLn(
'%s: LD_INPUTS := %s' % (self.output_binary, ' '.join(link_deps)))
- self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all)
+ self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
+ postbuilds=postbuilds)
elif self.type == 'loadable_module':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in module input filenames not supported (%s)" % link_dep)
self.WriteDoCmd(
- [self.output_binary], link_deps, 'solink_module', part_of_all)
+ [self.output_binary], link_deps, 'solink_module', part_of_all,
+ postbuilds=postbuilds)
elif self.type == 'none':
# Write a stamp line.
- self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all)
+ self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
+ postbuilds=postbuilds)
elif self.type == 'settings':
# Only used for passing flags around.
pass
@@ -1867,14 +1959,19 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.fp.write("\n\n")
- def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None):
+ def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
+ postbuilds=False):
"""Write a Makefile rule that uses do_cmd.
This makes the outputs dependent on the command line that was run,
as well as support the V= make command line flag.
"""
+ suffix = ''
+ if postbuilds:
+ assert ',' not in command
+ suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
self.WriteMakeRule(outputs, inputs,
- actions = ['$(call do_cmd,%s)' % command],
+ actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
comment = comment,
force = True)
# Add our outputs to the list of targets we read depfiles from.
@@ -2042,6 +2139,18 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
product_name = spec.get('product_name', self.output)
+ # Some postbuilds try to read a build output file at
+ # "${BUILT_PRODUCTS_DIR}/${FULL_PRODUCT_NAME}". Static libraries end up
+ # "$(obj).target", so
+ # BUILT_PRODUCTS_DIR is $(builddir)
+ # FULL_PRODUCT_NAME is $(out).target/path/to/lib.a
+ # Since $(obj) contains out/Debug already, the postbuild
+ # would get out/Debug/out/Debug/obj.target/path/to/lib.a. To prevent this,
+ # remove the "out/Debug" prefix from $(obj).
+ if product_name.startswith('$(obj)'):
+ product_name = (
+ '$(subst $(builddir)/,,$(obj))' + product_name[len('$(obj)'):])
+
built_products_dir = generator_default_variables['PRODUCT_DIR']
srcroot = self.path
if target_relative_path:
@@ -2052,6 +2161,8 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : '$(BUILDTYPE)',
'PRODUCT_NAME' : product_name,
+ # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
+ 'FULL_PRODUCT_NAME' : product_name,
'SRCROOT' : srcroot,
# This is not true for static libraries, but currently the env is only
# written for bundles:
@@ -2060,13 +2171,9 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
}
if self.type in ('executable', 'shared_library'):
env['EXECUTABLE_NAME'] = os.path.basename(self.output_binary)
- # Can't use self.output_binary here because it's not in the products dir.
- # We really care about the final location of the dylib anyway.
- env['EXECUTABLE_PATH'] = StripProductDir(
- self._InstallableTargetInstallPath())
+ if self.type in ('executable', 'shared_library', 'loadable_module'):
+ env['EXECUTABLE_PATH'] = self.xcode_settings.GetExecutablePath()
if self.is_mac_bundle:
- # Overwrite this to point to the binary _in_ the bundle.
- env['EXECUTABLE_PATH'] = self.xcode_settings.GetBundleBinaryPath()
env['CONTENTS_FOLDER_PATH'] = \
self.xcode_settings.GetBundleContentsFolderPath()
env['INFOPLIST_PATH'] = self.xcode_settings.GetBundlePlistPath()
@@ -2312,8 +2419,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if os.path.exists(mactool_path):
os.remove(mactool_path)
CopyMacTool(mactool_path)
- os.chmod(mactool_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
- stat.S_IROTH | stat.S_IXOTH) # Make file executable.
+ os.chmod(mactool_path, 0o755) # Make file executable.
# Find the list of targets that derive from the gyp file(s) being built.
needed_targets = set()
diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py
index c4893b4758..37d5527dfe 100644
--- a/tools/gyp/pylib/gyp/generator/msvs.py
+++ b/tools/gyp/pylib/gyp/generator/msvs.py
@@ -878,7 +878,7 @@ def _GenerateMSVSProject(project, options, version):
spec, options, gyp_dir, sources, excluded_sources))
# Add in files.
- # _VerifySourcesExist(sources, gyp_dir)
+ _VerifySourcesExist(sources, gyp_dir)
p.AddFiles(sources)
_AddToolFilesToMSVS(p, spec)
@@ -1071,7 +1071,17 @@ def _GetLibraries(spec):
libraries = spec.get('libraries', [])
# Strip out -l, as it is not used on windows (but is needed so we can pass
# in libraries that are assumed to be in the default library path).
- return [re.sub('^(\-l)', '', lib) for lib in libraries]
+ # Also remove duplicate entries, leaving only the last duplicate, while
+ # preserving order.
+ found = set()
+ unique_libraries_list = []
+ for entry in reversed(libraries):
+ library = re.sub('^\-l', '', entry)
+ if library not in found:
+ found.add(library)
+ unique_libraries_list.append(library)
+ unique_libraries_list.reverse()
+ return unique_libraries_list
def _GetOutputFilePathAndTool(spec):
@@ -1643,7 +1653,6 @@ def _ShardTargets(target_list, target_dicts):
shards = int(target_dicts[t].get('msvs_shard', 0))
if shards:
targets_to_shard[t] = shards
- print targets_to_shard
# Shard target_list.
new_target_list = []
for t in target_list:
@@ -2721,7 +2730,7 @@ def _GenerateMSBuildProject(project, options, version):
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
extension_to_rule_name)
- # _VerifySourcesExist(sources, gyp_dir)
+ _VerifySourcesExist(sources, gyp_dir)
for (_, configuration) in configurations.iteritems():
_FinalizeMSBuildSettings(spec, configuration)
diff --git a/tools/gyp/pylib/gyp/generator/msvs_test.py b/tools/gyp/pylib/gyp/generator/msvs_test.py
new file mode 100644
index 0000000000..60d25abe09
--- /dev/null
+++ b/tools/gyp/pylib/gyp/generator/msvs_test.py
@@ -0,0 +1,35 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the msvs.py file. """
+
+import gyp.generator.msvs as msvs
+import unittest
+import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+
+ def setUp(self):
+ self.stderr = StringIO.StringIO()
+
+ def test_GetLibraries(self):
+ self.assertEqual(
+ msvs._GetLibraries({}),
+ [])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': []}),
+ [])
+ self.assertEqual(
+ msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
+ ['a.lib'])
+ self.assertEqual(
+ msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
+ '-lb.lib', 'd.lib', 'a.lib']}),
+ ['c.lib', 'b.lib', 'd.lib', 'a.lib'])
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py
new file mode 100644
index 0000000000..8a8e2900ce
--- /dev/null
+++ b/tools/gyp/pylib/gyp/generator/ninja.py
@@ -0,0 +1,626 @@
+#!/usr/bin/python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gyp
+import gyp.common
+import gyp.system_test
+import os.path
+import pprint
+import subprocess
+import sys
+
+import gyp.ninja_syntax as ninja_syntax
+
+generator_default_variables = {
+ 'OS': 'linux',
+
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'STATIC_LIB_PREFIX': '',
+ 'STATIC_LIB_SUFFIX': '.a',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'SHARED_LIB_SUFFIX': '.so',
+ # TODO: intermediate dir should *not* be shared between different targets.
+ # Unfortunately, whatever we provide here gets written into many different
+ # places within the gyp spec so it's difficult to make it target-specific.
+ # Apparently we've made it this far with one global path for the make build,
+ # so we're safe for now.
+ 'INTERMEDIATE_DIR': '$b/geni',
+ 'SHARED_INTERMEDIATE_DIR': '$b/gen',
+ 'PRODUCT_DIR': '$b',
+ 'SHARED_LIB_DIR': '$b/lib',
+ 'LIB_DIR': '$b',
+
+ # Special variables that may be used by gyp 'rule' targets.
+ # We generate definitions for these variables on the fly when processing a
+ # rule.
+ 'RULE_INPUT_ROOT': '$root',
+ 'RULE_INPUT_PATH': '$source',
+ 'RULE_INPUT_EXT': '$ext',
+ 'RULE_INPUT_NAME': '$name',
+}
+
+NINJA_BASE = """\
+builddir = %(builddir)s
+# Short alias for builddir.
+b = %(builddir)s
+
+cc = %(cc)s
+cxx = %(cxx)s
+
+rule cc
+ depfile = $out.d
+ description = CC $out
+ command = $cc -MMD -MF $out.d $defines $includes $cflags $cflags_c $
+ -c $in -o $out
+
+rule cxx
+ depfile = $out.d
+ description = CXX $out
+ command = $cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc $
+ -c $in -o $out
+
+rule alink
+ description = AR $out
+ command = rm -f $out && ar rcsT $out $in
+
+rule solink
+ description = SOLINK $out
+ command = g++ -Wl,--threads -Wl,--thread-count=4 $
+ -shared $ldflags -o $out -Wl,-soname=$soname $
+ -Wl,--whole-archive $in -Wl,--no-whole-archive $libs
+
+rule link
+ description = LINK $out
+ command = g++ -Wl,--threads -Wl,--thread-count=4 $
+ $ldflags -o $out -Wl,-rpath=\$$ORIGIN/lib $
+ -Wl,--start-group $in -Wl,--end-group $libs
+
+rule stamp
+ description = STAMP $out
+ command = touch $out
+
+rule copy
+ description = COPY $in $out
+ command = ln -f $in $out 2>/dev/null || cp -af $in $out
+
+"""
+
+
+def StripPrefix(arg, prefix):
+ if arg.startswith(prefix):
+ return arg[len(prefix):]
+ return arg
+
+
+def QuoteShellArgument(arg):
+ return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
+
+
+def MaybeQuoteShellArgument(arg):
+ if '"' in arg or ' ' in arg:
+ return QuoteShellArgument(arg)
+ return arg
+
+
+# A small discourse on paths as used within the Ninja build:
+#
+# Paths within a given .gyp file are always relative to the directory
+# containing the .gyp file. Call these "gyp paths". This includes
+# sources as well as the starting directory a given gyp rule/action
+# expects to be run from. We call this directory "base_dir" within
+# the per-.gyp-file NinjaWriter code.
+#
+# All paths as written into the .ninja files are relative to the root
+# of the tree. Call these paths "ninja paths". We set up the ninja
+# variable "$b" to be the path to the root of the build output,
+# e.g. out/Debug/. All files we produce (both at gyp and at build
+# time) appear in that output directory.
+#
+# We translate between these two notions of paths with two helper
+# functions:
+#
+# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
+# into the equivalent ninja path.
+#
+# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
+ # an output file; the result can be namespaced such that it is unique
+# to the input file name as well as the output target name.
+
+class NinjaWriter:
+ def __init__(self, target_outputs, base_dir, output_file):
+ self.target_outputs = target_outputs
+ # The root-relative path to the source .gyp file; by gyp
+ # semantics, all input paths are relative to this.
+ self.base_dir = base_dir
+ self.ninja = ninja_syntax.Writer(output_file)
+
+ def GypPathToNinja(self, path):
+ """Translate a gyp path to a ninja path.
+
+ See the above discourse on path conversions."""
+ if path.startswith('$'):
+ # If the path contains a reference to a ninja variable, we know
+ # it's already relative to the source root.
+ return path
+ return os.path.normpath(os.path.join(self.base_dir, path))
+
+ def GypPathToUniqueOutput(self, path, qualified=False):
+ """Translate a gyp path to a ninja path for writing output.
+
+ If qualified is True, qualify the resulting filename with the name
+ of the target. This is necessary when e.g. compiling the same
+ path twice for two separate output targets.
+
+ See the above discourse on path conversions."""
+
+ # It may seem strange to discard components of the path, but we are just
+ # attempting to produce a known-unique output filename; we don't want to
+ # reuse any global directory.
+ genvars = generator_default_variables
+ assert not genvars['SHARED_INTERMEDIATE_DIR'].startswith(
+ genvars['INTERMEDIATE_DIR'])
+ path = StripPrefix(path, genvars['INTERMEDIATE_DIR'])
+ path = StripPrefix(path, genvars['SHARED_INTERMEDIATE_DIR'])
+ path = StripPrefix(path, '/')
+ assert not path.startswith('$')
+
+ # Translate the path following this scheme:
+ # Input: foo/bar.gyp, target targ, references baz/out.o
+ # Output: $b/obj/foo/baz/targ.out.o (if qualified)
+ # $b/obj/foo/baz/out.o (otherwise)
+ #
+ # Why this scheme and not some other one?
+ # 1) for a given input, you can compute all derived outputs by matching
+ # its path, even if the input is brought via a gyp file with '..'.
+ # 2) simple files like libraries and stamps have a simple filename.
+ path_dir, path_basename = os.path.split(path)
+ if qualified:
+ path_basename = self.name + '.' + path_basename
+ return os.path.normpath(os.path.join('$b/obj', self.base_dir, path_dir,
+ path_basename))
+
+ def StampPath(self, name):
+ """Return a path for a stamp file with a particular name.
+
+ Stamp files are used to collapse a dependency on a bunch of files
+ into a single file."""
+ return self.GypPathToUniqueOutput(name + '.stamp', qualified=True)
+
  def WriteSpec(self, spec, config):
    """The main entry point for NinjaWriter: write the build rules for a spec.

    Arguments:
      spec: the gyp dict describing one target.
      config: the dict for the configuration being built (one entry of
          spec['configurations']).

    Returns the path to the build output, or None."""

    if spec['type'] == 'settings':
      # TODO: 'settings' is not actually part of gyp; it was
      # accidentally introduced somehow into just the Linux build files.
      return None

    # Remember the target name; helpers use it to qualify rule names,
    # descriptions, and output filenames.
    self.name = spec['target_name']

    # Compute predepends for all rules.
    # prebuild is the dependencies this target depends on before
    # running any of its internal steps.
    prebuild = []
    if 'dependencies' in spec:
      prebuild_deps = []
      for dep in spec['dependencies']:
        if dep in self.target_outputs:
          prebuild_deps.append(self.target_outputs[dep][0])
      if prebuild_deps:
        # Collapse all cross-target dependencies behind one stamp file so
        # every edge below only has to depend on a single path.
        stamp = self.StampPath('predepends')
        prebuild = self.ninja.build(stamp, 'stamp', prebuild_deps)
        self.ninja.newline()

    # Write out actions, rules, and copies.  These must happen before we
    # compile any sources, so compute a list of predependencies for sources
    # while we do it.
    extra_sources = []
    sources_predepends = self.WriteActionsRulesCopies(spec, extra_sources,
                                                      prebuild)

    # Write out the compilation steps, if any.
    link_deps = []
    sources = spec.get('sources', []) + extra_sources
    if sources:
      link_deps = self.WriteSources(config, sources,
                                    sources_predepends or prebuild)
      # Some actions/rules output 'sources' that are already object files.
      link_deps += [f for f in sources if f.endswith('.o')]

    # The final output of our target depends on the last output of the
    # above steps.
    final_deps = link_deps or sources_predepends or prebuild
    if final_deps:
      return self.WriteTarget(spec, config, final_deps)
+
+ def WriteActionsRulesCopies(self, spec, extra_sources, prebuild):
+ """Write out the Actions, Rules, and Copies steps. Return any outputs
+ of these steps (or a stamp file if there are lots of outputs)."""
+ outputs = []
+
+ if 'actions' in spec:
+ outputs += self.WriteActions(spec['actions'], extra_sources, prebuild)
+ if 'rules' in spec:
+ outputs += self.WriteRules(spec['rules'], extra_sources, prebuild)
+ if 'copies' in spec:
+ outputs += self.WriteCopies(spec['copies'], prebuild)
+
+ # To simplify downstream build edges, ensure we generate a single
+ # stamp file that represents the results of all of the above.
+ if len(outputs) > 1:
+ stamp = self.StampPath('actions_rules_copies')
+ outputs = self.ninja.build(stamp, 'stamp', outputs)
+
+ return outputs
+
+ def WriteActions(self, actions, extra_sources, prebuild):
+ all_outputs = []
+ for action in actions:
+ # First write out a rule for the action.
+ name = action['action_name']
+ if 'message' in action:
+ description = 'ACTION ' + action['message']
+ else:
+ description = 'ACTION %s: %s' % (self.name, action['action_name'])
+ rule_name = self.WriteNewNinjaRule(name, action['action'], description)
+
+ inputs = [self.GypPathToNinja(i) for i in action['inputs']]
+ if int(action.get('process_outputs_as_sources', False)):
+ extra_sources += action['outputs']
+ outputs = [self.GypPathToNinja(o) for o in action['outputs']]
+
+ # Then write out an edge using the rule.
+ self.ninja.build(outputs, rule_name, inputs,
+ order_only=prebuild)
+ all_outputs += outputs
+
+ self.ninja.newline()
+
+ return all_outputs
+
  def WriteRules(self, rules, extra_sources, prebuild):
    """Write a ninja rule plus per-source build edges for each gyp 'rule'.

    Appends to |extra_sources| (in place) the outputs of any rule flagged
    'process_outputs_as_sources'.  Returns the list of all outputs."""
    all_outputs = []
    for rule in rules:
      # First write out a rule for the rule action.
      name = rule['rule_name']
      args = rule['action']
      if 'message' in rule:
        description = 'RULE ' + rule['message']
      else:
        description = 'RULE %s: %s $source' % (self.name, name)
      rule_name = self.WriteNewNinjaRule(name, args, description)

      # TODO: if the command references the outputs directly, we should
      # simplify it to just use $out.

      # Rules can potentially make use of some special variables which
      # must vary per source file.
      # Compute the list of variables we'll need to provide.
      special_locals = ('source', 'root', 'ext', 'name')
      needed_variables = set(['source'])
      for argument in args:
        for var in special_locals:
          if '$' + var in argument:
            needed_variables.add(var)

      # For each source file, write an edge that generates all the outputs.
      for source in rule.get('rule_sources', []):
        basename = os.path.basename(source)
        root, ext = os.path.splitext(basename)
        # Rebind |source| to the ninja-relative path from here on.
        source = self.GypPathToNinja(source)

        # Only '$root' is substituted into the output filenames here; the
        # other special variables are supplied as per-edge ninja bindings
        # below, so ninja itself expands them in the command line.
        outputs = []
        for output in rule['outputs']:
          outputs.append(output.replace('$root', root))

        extra_bindings = []
        for var in needed_variables:
          if var == 'root':
            extra_bindings.append(('root', root))
          elif var == 'source':
            extra_bindings.append(('source', source))
          elif var == 'ext':
            extra_bindings.append(('ext', ext))
          elif var == 'name':
            extra_bindings.append(('name', basename))
          else:
            # Unreachable: needed_variables only holds names handled above.
            # When reached, this assert always fails (var is a string, never
            # None) and reports the unexpected name.
            assert var == None, repr(var)

        inputs = map(self.GypPathToNinja, rule.get('inputs', []))
        self.ninja.build(outputs, rule_name, source,
                         implicit=inputs,
                         order_only=prebuild,
                         variables=extra_bindings)

        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources += outputs

        all_outputs.extend(outputs)

    return all_outputs
+
+ def WriteCopies(self, copies, prebuild):
+ outputs = []
+ for copy in copies:
+ for path in copy['files']:
+ # Normalize the path so trailing slashes don't confuse us.
+ path = os.path.normpath(path)
+ basename = os.path.split(path)[1]
+ src = self.GypPathToNinja(path)
+ dst = self.GypPathToNinja(os.path.join(copy['destination'], basename))
+ outputs += self.ninja.build(dst, 'copy', src,
+ order_only=prebuild)
+
+ return outputs
+
+ def WriteSources(self, config, sources, predepends):
+ """Write build rules to compile all of |sources|."""
+ self.WriteVariableList('defines',
+ ['-D' + MaybeQuoteShellArgument(ninja_syntax.escape(d))
+ for d in config.get('defines', [])])
+ self.WriteVariableList('includes',
+ ['-I' + self.GypPathToNinja(i)
+ for i in config.get('include_dirs', [])])
+ self.WriteVariableList('cflags', config.get('cflags'))
+ self.WriteVariableList('cflags_c', config.get('cflags_c'))
+ self.WriteVariableList('cflags_cc', config.get('cflags_cc'))
+ self.ninja.newline()
+ outputs = []
+ for source in sources:
+ filename, ext = os.path.splitext(source)
+ ext = ext[1:]
+ if ext in ('cc', 'cpp', 'cxx'):
+ command = 'cxx'
+ elif ext in ('c', 's', 'S'):
+ command = 'cc'
+ else:
+ # TODO: should we assert here on unexpected extensions?
+ continue
+ input = self.GypPathToNinja(source)
+ output = self.GypPathToUniqueOutput(filename + '.o', qualified=True)
+ self.ninja.build(output, command, input,
+ order_only=predepends)
+ outputs.append(output)
+ self.ninja.newline()
+ return outputs
+
  def WriteTarget(self, spec, config, final_deps):
    """Write the final build edge (link/alink/solink/stamp) for the target.

    Note: |final_deps| is mutated in place when linkable dependencies are
    folded into the link line.  Also writes a phony short-name alias for
    the target.  Returns the path of the target's primary output."""
    output = self.ComputeOutput(spec)

    output_uses_linker = spec['type'] in ('executable', 'loadable_module',
                                          'shared_library')

    implicit_deps = set()
    if 'dependencies' in spec:
      # Two kinds of dependencies:
      # - Linkable dependencies (like a .a or a .so): add them to the link line.
      # - Non-linkable dependencies (like a rule that generates a file
      #   and writes a stamp file): add them to implicit_deps
      if output_uses_linker:
        extra_deps = set()
        for dep in spec['dependencies']:
          input, linkable = self.target_outputs.get(dep, (None, False))
          if not input:
            continue
          if linkable:
            extra_deps.add(input)
          else:
            # TODO: Chrome-specific HACK.  Chrome runs this lastchange rule on
            # every build, but we don't want to rebuild when it runs.
            if 'lastchange.stamp' not in input:
              implicit_deps.add(input)
        final_deps.extend(list(extra_deps))
    # Map the gyp target type to the ninja rule that produces it.
    command_map = {
      'executable': 'link',
      'static_library': 'alink',
      'loadable_module': 'solink',
      'shared_library': 'solink',
      'none': 'stamp',
      }
    command = command_map[spec['type']]

    if output_uses_linker:
      # Dedupe flags/libs; repeats are common when targets share settings.
      self.WriteVariableList('ldflags',
                             gyp.common.uniquer(config.get('ldflags', [])))
      self.WriteVariableList('libs',
                             gyp.common.uniquer(spec.get('libraries', [])))

    extra_bindings = []
    if command == 'solink':
      extra_bindings.append(('soname', os.path.split(output)[1]))

    self.ninja.build(output, command, final_deps,
                     implicit=list(implicit_deps),
                     variables=extra_bindings)

    # Write a short name to build this target.  This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    self.ninja.build(self.name, 'phony', output)

    return output
+
+ def ComputeOutputFileName(self, spec):
+ """Compute the filename of the final output for the current target."""
+
+ # Compute filename prefix: the product prefix, or a default for
+ # the product type.
+ DEFAULT_PREFIX = {
+ 'loadable_module': 'lib',
+ 'shared_library': 'lib',
+ }
+ prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(spec['type'], ''))
+
+ # Compute filename extension: the product extension, or a default
+ # for the product type.
+ DEFAULT_EXTENSION = {
+ 'static_library': 'a',
+ 'loadable_module': 'so',
+ 'shared_library': 'so',
+ }
+ extension = spec.get('product_extension',
+ DEFAULT_EXTENSION.get(spec['type'], ''))
+ if extension:
+ extension = '.' + extension
+
+ if 'product_name' in spec:
+ # If we were given an explicit name, use that.
+ target = spec['product_name']
+ else:
+ # Otherwise, derive a name from the target name.
+ target = spec['target_name']
+ if prefix == 'lib':
+ # Snip out an extra 'lib' from libs if appropriate.
+ target = StripPrefix(target, 'lib')
+
+ if spec['type'] in ('static_library', 'loadable_module', 'shared_library',
+ 'executable'):
+ return '%s%s%s' % (prefix, target, extension)
+ elif spec['type'] == 'none':
+ return '%s.stamp' % target
+ elif spec['type'] == 'settings':
+ return None
+ else:
+ raise 'Unhandled output type', spec['type']
+
+ def ComputeOutput(self, spec):
+ """Compute the path for the final output of the spec."""
+
+ filename = self.ComputeOutputFileName(spec)
+
+ if 'product_dir' in spec:
+ path = os.path.join(spec['product_dir'], filename)
+ return path
+
+ # Executables and loadable modules go into the output root,
+ # libraries go into shared library dir, and everything else
+ # goes into the normal place.
+ if spec['type'] in ('executable', 'loadable_module'):
+ return os.path.join('$b', filename)
+ elif spec['type'] == 'shared_library':
+ return os.path.join('$b/lib', filename)
+ else:
+ return self.GypPathToUniqueOutput(filename)
+
+ def WriteVariableList(self, var, values):
+ if values is None:
+ values = []
+ self.ninja.variable(var, ' '.join(values))
+
+ def WriteNewNinjaRule(self, name, args, description):
+ """Write out a new ninja "rule" statement for a given command.
+
+ Returns the name of the new rule."""
+
+ # TODO: we shouldn't need to qualify names; we do it because
+ # currently the ninja rule namespace is global, but it really
+ # should be scoped to the subninja.
+ rule_name = ('%s.%s' % (self.name, name)).replace(' ', '_')
+
+ cd = ''
+ args = args[:]
+ if self.base_dir:
+ # gyp dictates that commands are run from the base directory.
+ # cd into the directory before running, and adjust all paths in
+ # the arguments point to the proper locations.
+ cd = 'cd %s; ' % self.base_dir
+ cdup = '../' * len(self.base_dir.split('/'))
+ for i, arg in enumerate(args):
+ arg = arg.replace('$b', cdup + '$b')
+ arg = arg.replace('$source', cdup + '$source')
+ args[i] = arg
+
+ command = cd + gyp.common.EncodePOSIXShellList(args)
+ self.ninja.rule(rule_name, command, description)
+ self.ninja.newline()
+
+ return rule_name
+
+
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  # Honor $CC.target first, then $CC, falling back to plain 'cc'.
  cc = os.environ.get('CC.target', os.environ.get('CC', 'cc'))
  default_variables['LINKER_SUPPORTS_ICF'] = (
      gyp.system_test.TestLinkerSupportsICF(cc_command=cc))
+
+
def OpenOutput(path):
  """Open |path| for writing, creating its parent directories if necessary.

  Returns a file object opened in text ('w') mode."""
  dirname = os.path.dirname(path)
  try:
    if dirname:
      os.makedirs(dirname)
  except OSError:
    # os.makedirs raises OSError when the directory already exists (e.g.
    # another target already created it).  Only swallow that case; re-raise
    # real failures such as permission errors, which the previous blanket
    # 'pass' silently hid (the subsequent open() would then fail with a
    # less helpful message).
    if not os.path.isdir(dirname):
      raise
  return open(path, 'w')
+
+
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point (called by gyp): write the master build.ninja
  plus one sub-.ninja file per target."""
  options = params['options']
  generator_flags = params.get('generator_flags', {})

  if options.generator_output:
    raise NotImplementedError, "--generator_output not implemented for ninja"

  config_name = generator_flags.get('config', None)
  if config_name is None:
    # Guess which config we want to use: pick the first one from the
    # first target.
    config_name = target_dicts[target_list[0]]['default_configuration']

  # builddir: relative path from source root to our output files.
  # e.g. "out/Debug"
  builddir = os.path.join(generator_flags.get('output_dir', 'out'), config_name)

  master_ninja = OpenOutput(os.path.join(options.toplevel_dir, builddir,
                                         'build.ninja'))
  # NINJA_BASE holds the boilerplate rules (cc/cxx/link/...), parameterized
  # on the compiler from the environment.
  master_ninja.write(NINJA_BASE % {
    'builddir': builddir,
    'cc': os.environ.get('CC', 'gcc'),
    'cxx': os.environ.get('CXX', 'g++'),
  })

  # all_targets: the targets requested on the command line's build files;
  # only their outputs go into the catch-all 'all' rule below.
  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
      all_targets.add(target)
  all_outputs = set()

  subninjas = set()
  # target_outputs maps a qualified target to (output path, is_linkable);
  # each NinjaWriter reads it to resolve its dependencies' outputs.
  target_outputs = {}
  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, target, _ = gyp.common.ParseQualifiedTarget(qualified_target)

    # TODO: what is options.depth and how is it different than
    # options.toplevel_dir?
    build_file = gyp.common.RelativePath(build_file, options.depth)

    base_path = os.path.dirname(build_file)
    output_file = os.path.join(builddir, 'obj', base_path, target + '.ninja')
    spec = target_dicts[qualified_target]
    config = spec['configurations'][config_name]

    writer = NinjaWriter(target_outputs, base_path,
                         OpenOutput(os.path.join(options.toplevel_dir,
                                                 output_file)))
    subninjas.add(output_file)

    output = writer.WriteSpec(spec, config)
    if output:
      linkable = spec['type'] in ('static_library', 'shared_library')
      target_outputs[qualified_target] = (output, linkable)

      if qualified_target in all_targets:
        all_outputs.add(output)

  for ninja in subninjas:
    print >>master_ninja, 'subninja', ninja

  if all_outputs:
    # '||' makes these order-only deps: 'all' runs them but carries no
    # file-level dependency of its own.
    print >>master_ninja, 'build all: phony ||' + ' '.join(all_outputs)

  master_ninja.close()
diff --git a/tools/gyp/pylib/gyp/ninja_syntax.py b/tools/gyp/pylib/gyp/ninja_syntax.py
new file mode 100644
index 0000000000..e2dca2dc5b
--- /dev/null
+++ b/tools/gyp/pylib/gyp/ninja_syntax.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python
+
+# This file comes from
+# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
+# Do not edit! Edit the upstream one instead.
+
+"""Python module for generating .ninja files.
+
+Note that this is emphatically not a required piece of Ninja; it's
+just a helpful utility for build-file-generation systems that already
+use Python.
+"""
+
+import textwrap
+
+class Writer(object):
+ def __init__(self, output, width=78):
+ self.output = output
+ self.width = width
+
+ def newline(self):
+ self.output.write('\n')
+
+ def comment(self, text):
+ for line in textwrap.wrap(text, self.width - 2):
+ self.output.write('# ' + line + '\n')
+
+ def variable(self, key, value, indent=0):
+ self._line('%s = %s' % (key, value), indent)
+
+ def rule(self, name, command, description=None, depfile=None):
+ self._line('rule %s' % name)
+ self.variable('command', command, indent=1)
+ if description:
+ self.variable('description', description, indent=1)
+ if depfile:
+ self.variable('depfile', depfile, indent=1)
+
+ def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
+ variables=None):
+ outputs = self._as_list(outputs)
+ all_inputs = self._as_list(inputs)[:]
+
+ if implicit:
+ all_inputs.append('|')
+ all_inputs.extend(self._as_list(implicit))
+ if order_only:
+ all_inputs.append('||')
+ all_inputs.extend(self._as_list(order_only))
+
+ self._line('build %s: %s %s' % (' '.join(outputs),
+ rule,
+ ' '.join(all_inputs)))
+
+ if variables:
+ for key, val in variables:
+ self.variable(key, val, indent=1)
+
+ return outputs
+
+ def _line(self, text, indent=0):
+ """Write 'text' word-wrapped at self.width characters."""
+ leading_space = ' ' * indent
+ while len(text) > self.width:
+ # The text is too wide; wrap if possible.
+
+ # Find the rightmost space that would obey our width constraint.
+ available_space = self.width - len(leading_space) - len(' $')
+ space = text.rfind(' ', 0, available_space)
+ if space < 0:
+ # No such space; just use the first space we can find.
+ space = text.find(' ', available_space)
+ if space < 0:
+ # Give up on breaking.
+ break
+
+ self.output.write(leading_space + text[0:space] + ' $\n')
+ text = text[space+1:]
+
+ # Subsequent lines are continuations, so indent them.
+ leading_space = ' ' * (indent+2)
+
+ self.output.write(leading_space + text + '\n')
+
+ def _as_list(self, input):
+ if input is None:
+ return []
+ if isinstance(input, list):
+ return input
+ return [input]
+
+
def escape(string):
  """Escape a string such that it can be embedded into a Ninja file without
  further interpretation."""
  assert '\n' not in string, 'Ninja syntax does not allow newlines'
  # '$' is Ninja's only metacharacter; doubling it makes it literal.
  return string.replace('$', '$$')