summaryrefslogtreecommitdiff
path: root/deps/npm/node_modules/node-gyp/gyp/pylib
diff options
context:
space:
mode:
Diffstat (limited to 'deps/npm/node_modules/node-gyp/gyp/pylib')
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py17
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py42
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py4
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py2
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py14
-rwxr-xr-xdeps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py89
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py10
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py3
-rwxr-xr-xdeps/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py2
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py88
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py20
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py14
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py115
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py3
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py2
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py2
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py40
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py112
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py15
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py33
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py98
-rwxr-xr-xdeps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py22
-rwxr-xr-xdeps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py18
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py6
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py4
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py10
-rwxr-xr-xdeps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py14
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py24
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py6
-rw-r--r--deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py51
30 files changed, 523 insertions, 357 deletions
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
index 593f0e5b0b..0ec628cc1f 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
@@ -4,22 +4,17 @@
"""New implementation of Visual Studio project generation."""
+import hashlib
import os
import random
import gyp.common
-# hashlib is supplied as of Python 2.5 as the replacement interface for md5
-# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
-# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
-# preserving 2.4 compatibility.
try:
- import hashlib
- _new_md5 = hashlib.md5
-except ImportError:
- import md5
- _new_md5 = md5.new
-
+ cmp
+except NameError:
+ def cmp(x, y):
+ return (x > y) - (x < y)
# Initialize random number generator
random.seed()
@@ -50,7 +45,7 @@ def MakeGuid(name, seed='msvs_new'):
not change when the project for a target is rebuilt.
"""
# Calculate a MD5 signature for the seed and name.
- d = _new_md5(str(seed) + str(name)).hexdigest().upper()
+ d = hashlib.md5(str(seed) + str(name)).hexdigest().upper()
# Convert most of the signature to GUID form (discard the rest)
guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
+ '-' + d[20:32] + '}')
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
index c2fb6f995c..065a339a80 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -14,6 +14,10 @@ The MSBuild schemas were also considered. They are typically found in the
MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
"""
+from __future__ import print_function
+
+from gyp import string_types
+
import sys
import re
@@ -106,11 +110,11 @@ class _String(_Type):
"""A setting that's just a string."""
def ValidateMSVS(self, value):
- if not isinstance(value, basestring):
+ if not isinstance(value, string_types):
raise ValueError('expected string; got %r' % value)
def ValidateMSBuild(self, value):
- if not isinstance(value, basestring):
+ if not isinstance(value, string_types):
raise ValueError('expected string; got %r' % value)
def ConvertToMSBuild(self, value):
@@ -122,11 +126,11 @@ class _StringList(_Type):
"""A settings that's a list of strings."""
def ValidateMSVS(self, value):
- if not isinstance(value, basestring) and not isinstance(value, list):
+ if not isinstance(value, string_types) and not isinstance(value, list):
raise ValueError('expected string list; got %r' % value)
def ValidateMSBuild(self, value):
- if not isinstance(value, basestring) and not isinstance(value, list):
+ if not isinstance(value, string_types) and not isinstance(value, list):
raise ValueError('expected string list; got %r' % value)
def ConvertToMSBuild(self, value):
@@ -400,7 +404,7 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
if unrecognized:
# We don't know this setting. Give a warning.
- print >> stderr, error_msg
+ print(error_msg, file=stderr)
def FixVCMacroSlashes(s):
@@ -433,7 +437,7 @@ def ConvertVCMacrosToMSBuild(s):
'$(PlatformName)': '$(Platform)',
'$(SafeInputName)': '%(Filename)',
}
- for old, new in replace_map.iteritems():
+ for old, new in replace_map.items():
s = s.replace(old, new)
s = FixVCMacroSlashes(s)
return s
@@ -453,17 +457,17 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
dictionaries of settings and their values.
"""
msbuild_settings = {}
- for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
+ for msvs_tool_name, msvs_tool_settings in msvs_settings.items():
if msvs_tool_name in _msvs_to_msbuild_converters:
msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
- for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
+ for msvs_setting, msvs_value in msvs_tool_settings.items():
if msvs_setting in msvs_tool:
# Invoke the translation function.
try:
msvs_tool[msvs_setting](msvs_value, msbuild_settings)
- except ValueError, e:
- print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
- '%s' % (msvs_tool_name, msvs_setting, e))
+ except ValueError as e:
+ print('Warning: while converting %s/%s to MSBuild, '
+ '%s' % (msvs_tool_name, msvs_setting, e), file=stderr)
else:
_ValidateExclusionSetting(msvs_setting,
msvs_tool,
@@ -472,8 +476,8 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
(msvs_tool_name, msvs_setting)),
stderr)
else:
- print >> stderr, ('Warning: unrecognized tool %s while converting to '
- 'MSBuild.' % msvs_tool_name)
+ print('Warning: unrecognized tool %s while converting to '
+ 'MSBuild.' % msvs_tool_name, file=stderr)
return msbuild_settings
@@ -513,13 +517,13 @@ def _ValidateSettings(validators, settings, stderr):
for tool_name in settings:
if tool_name in validators:
tool_validators = validators[tool_name]
- for setting, value in settings[tool_name].iteritems():
+ for setting, value in settings[tool_name].items():
if setting in tool_validators:
try:
tool_validators[setting](value)
- except ValueError, e:
- print >> stderr, ('Warning: for %s/%s, %s' %
- (tool_name, setting, e))
+ except ValueError as e:
+ print('Warning: for %s/%s, %s' %
+ (tool_name, setting, e), file=stderr)
else:
_ValidateExclusionSetting(setting,
tool_validators,
@@ -528,7 +532,7 @@ def _ValidateSettings(validators, settings, stderr):
stderr)
else:
- print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
+ print('Warning: unrecognized tool %s' % (tool_name), file=stderr)
# MSVS and MBuild names of the tools.
@@ -539,6 +543,7 @@ _rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
_masm = _Tool('MASM', 'MASM')
+_armasm = _Tool('ARMASM', 'ARMASM')
_AddTool(_compile)
@@ -548,6 +553,7 @@ _AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
_AddTool(_masm)
+_AddTool(_armasm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
index 6c07e9a893..2264d64015 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -91,7 +91,7 @@ class Writer(object):
if environment and isinstance(environment, dict):
env_list = ['%s="%s"' % (key, val)
- for (key,val) in environment.iteritems()]
+ for (key,val) in environment.items()]
environment = ' '.join(env_list)
else:
environment = ''
@@ -135,7 +135,7 @@ class Writer(object):
def WriteIfChanged(self):
"""Writes the user file."""
configs = ['Configurations']
- for config, spec in sorted(self.configurations.iteritems()):
+ for config, spec in sorted(self.configurations.items()):
configs.append(spec)
content = ['VisualStudioUserFile',
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
index 0b32e91180..c8187eb331 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -235,7 +235,7 @@ def InsertLargePdbShims(target_list, target_dicts, vars):
# Set up the shim to output its PDB to the same location as the final linker
# target.
- for config_name, config in shim_dict.get('configurations').iteritems():
+ for config_name, config in shim_dict.get('configurations').items():
pdb_path = _GetPdbPath(target_dict, config_name, vars)
# A few keys that we don't want to propagate.
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
index d9bfa684fa..13d9777f0e 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -158,7 +158,7 @@ def _RegistryQuery(key, value=None):
text = None
try:
text = _RegistryQueryBase('Sysnative', key, value)
- except OSError, e:
+ except OSError as e:
if e.errno == errno.ENOENT:
text = _RegistryQueryBase('System32', key, value)
else:
@@ -176,12 +176,18 @@ def _RegistryGetValueUsingWinReg(key, value):
contents of the registry key's value, or None on failure. Throws
ImportError if _winreg is unavailable.
"""
- import _winreg
+ try:
+ # Python 2
+ from _winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+ except ImportError:
+ # Python 3
+ from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+
try:
root, subkey = key.split('\\', 1)
assert root == 'HKLM' # Only need HKLM for now.
- with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
- return _winreg.QueryValueEx(hkey, value)[0]
+ with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey:
+ return QueryValueEx(hkey, value)[0]
except WindowsError:
return None
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
index 668f38b60d..dee834013f 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
@@ -4,9 +4,11 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from __future__ import print_function
+
import copy
import gyp.input
-import optparse
+import argparse
import os.path
import re
import shlex
@@ -14,6 +16,13 @@ import sys
import traceback
from gyp.common import GypError
+try:
+ # Python 2
+ string_types = basestring
+except NameError:
+ # Python 3
+ string_types = str
+
# Default debug modes for GYP
debug = {}
@@ -34,8 +43,8 @@ def DebugOutput(mode, message, *args):
pass
if args:
message %= args
- print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
- ctx[1], ctx[2], message)
+ print('%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
+ ctx[1], ctx[2], message))
def FindBuildFiles():
extension = '.gyp'
@@ -207,7 +216,7 @@ def RegenerateFlags(options):
# We always want to ignore the environment when regenerating, to avoid
# duplicate or changed flags in the environment at the time of regeneration.
flags = ['--ignore-environment']
- for name, metadata in options._regeneration_metadata.iteritems():
+ for name, metadata in options._regeneration_metadata.items():
opt = metadata['opt']
value = getattr(options, name)
value_predicate = metadata['type'] == 'path' and FixPath or Noop
@@ -226,24 +235,24 @@ def RegenerateFlags(options):
(action == 'store_false' and not value)):
flags.append(opt)
elif options.use_environment and env_name:
- print >>sys.stderr, ('Warning: environment regeneration unimplemented '
+ print('Warning: environment regeneration unimplemented '
'for %s flag %r env_name %r' % (action, opt,
- env_name))
+ env_name), file=sys.stderr)
else:
- print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
- 'flag %r' % (action, opt))
+ print('Warning: regeneration unimplemented for action %r '
+ 'flag %r' % (action, opt), file=sys.stderr)
return flags
-class RegeneratableOptionParser(optparse.OptionParser):
- def __init__(self):
+class RegeneratableOptionParser(argparse.ArgumentParser):
+ def __init__(self, usage):
self.__regeneratable_options = {}
- optparse.OptionParser.__init__(self)
+ argparse.ArgumentParser.__init__(self, usage=usage)
- def add_option(self, *args, **kw):
+ def add_argument(self, *args, **kw):
"""Add an option to the parser.
- This accepts the same arguments as OptionParser.add_option, plus the
+ This accepts the same arguments as ArgumentParser.add_argument, plus the
following:
regenerate: can be set to False to prevent this option from being included
in regeneration.
@@ -260,7 +269,7 @@ class RegeneratableOptionParser(optparse.OptionParser):
# it as a string.
type = kw.get('type')
if type == 'path':
- kw['type'] = 'string'
+ kw['type'] = str
self.__regeneratable_options[dest] = {
'action': kw.get('action'),
@@ -269,50 +278,50 @@ class RegeneratableOptionParser(optparse.OptionParser):
'opt': args[0],
}
- optparse.OptionParser.add_option(self, *args, **kw)
+ argparse.ArgumentParser.add_argument(self, *args, **kw)
def parse_args(self, *args):
- values, args = optparse.OptionParser.parse_args(self, *args)
+ values, args = argparse.ArgumentParser.parse_known_args(self, *args)
values._regeneration_metadata = self.__regeneratable_options
return values, args
def gyp_main(args):
my_name = os.path.basename(sys.argv[0])
+ usage = 'usage: %(prog)s [options ...] [build_file ...]'
+
- parser = RegeneratableOptionParser()
- usage = 'usage: %s [options ...] [build_file ...]'
- parser.set_usage(usage.replace('%s', '%prog'))
- parser.add_option('--build', dest='configs', action='append',
+ parser = RegeneratableOptionParser(usage=usage.replace('%s', '%(prog)s'))
+ parser.add_argument('--build', dest='configs', action='append',
help='configuration for build after project generation')
- parser.add_option('--check', dest='check', action='store_true',
+ parser.add_argument('--check', dest='check', action='store_true',
help='check format of gyp files')
- parser.add_option('--config-dir', dest='config_dir', action='store',
+ parser.add_argument('--config-dir', dest='config_dir', action='store',
env_name='GYP_CONFIG_DIR', default=None,
help='The location for configuration files like '
'include.gypi.')
- parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
+ parser.add_argument('-d', '--debug', dest='debug', metavar='DEBUGMODE',
action='append', default=[], help='turn on a debugging '
'mode for debugging GYP. Supported modes are "variables", '
'"includes" and "general" or "all" for all of them.')
- parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
+ parser.add_argument('-D', dest='defines', action='append', metavar='VAR=VAL',
env_name='GYP_DEFINES',
help='sets variable VAR to value VAL')
- parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
+ parser.add_argument('--depth', dest='depth', metavar='PATH', type='path',
help='set DEPTH gyp variable to a relative path to PATH')
- parser.add_option('-f', '--format', dest='formats', action='append',
+ parser.add_argument('-f', '--format', dest='formats', action='append',
env_name='GYP_GENERATORS', regenerate=False,
help='output formats to generate')
- parser.add_option('-G', dest='generator_flags', action='append', default=[],
+ parser.add_argument('-G', dest='generator_flags', action='append', default=[],
metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
help='sets generator flag FLAG to VAL')
- parser.add_option('--generator-output', dest='generator_output',
+ parser.add_argument('--generator-output', dest='generator_output',
action='store', default=None, metavar='DIR', type='path',
env_name='GYP_GENERATOR_OUTPUT',
help='puts generated build files under DIR')
- parser.add_option('--ignore-environment', dest='use_environment',
+ parser.add_argument('--ignore-environment', dest='use_environment',
action='store_false', default=True, regenerate=False,
help='do not read options from environment variables')
- parser.add_option('-I', '--include', dest='includes', action='append',
+ parser.add_argument('-I', '--include', dest='includes', action='append',
metavar='INCLUDE', type='path',
help='files to include in all loaded .gyp files')
# --no-circular-check disables the check for circular relationships between
@@ -322,7 +331,7 @@ def gyp_main(args):
# option allows the strict behavior to be used on Macs and the lenient
# behavior to be used elsewhere.
# TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
- parser.add_option('--no-circular-check', dest='circular_check',
+ parser.add_argument('--no-circular-check', dest='circular_check',
action='store_false', default=True, regenerate=False,
help="don't check for circular relationships between files")
# --no-duplicate-basename-check disables the check for duplicate basenames
@@ -331,18 +340,18 @@ def gyp_main(args):
# when duplicate basenames are passed into Make generator on Mac.
# TODO(yukawa): Remove this option when these legacy generators are
# deprecated.
- parser.add_option('--no-duplicate-basename-check',
+ parser.add_argument('--no-duplicate-basename-check',
dest='duplicate_basename_check', action='store_false',
default=True, regenerate=False,
help="don't check for duplicate basenames")
- parser.add_option('--no-parallel', action='store_true', default=False,
+ parser.add_argument('--no-parallel', action='store_true', default=False,
help='Disable multiprocessing')
- parser.add_option('-S', '--suffix', dest='suffix', default='',
+ parser.add_argument('-S', '--suffix', dest='suffix', default='',
help='suffix to add to generated files')
- parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
+ parser.add_argument('--toplevel-dir', dest='toplevel_dir', action='store',
default=None, metavar='DIR', type='path',
help='directory to use as the root of the source tree')
- parser.add_option('-R', '--root-target', dest='root_targets',
+ parser.add_argument('-R', '--root-target', dest='root_targets',
action='append', metavar='TARGET',
help='include only TARGET and its deep dependencies')
@@ -410,7 +419,7 @@ def gyp_main(args):
for option, value in sorted(options.__dict__.items()):
if option[0] == '_':
continue
- if isinstance(value, basestring):
+ if isinstance(value, string_types):
DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
else:
DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
@@ -432,7 +441,7 @@ def gyp_main(args):
build_file_dir = os.path.abspath(os.path.dirname(build_file))
build_file_dir_components = build_file_dir.split(os.path.sep)
components_len = len(build_file_dir_components)
- for index in xrange(components_len - 1, -1, -1):
+ for index in range(components_len - 1, -1, -1):
if build_file_dir_components[index] == 'src':
options.depth = os.path.sep.join(build_file_dir_components)
break
@@ -475,7 +484,7 @@ def gyp_main(args):
if home_dot_gyp != None:
default_include = os.path.join(home_dot_gyp, 'include.gypi')
if os.path.exists(default_include):
- print 'Using overrides found in ' + default_include
+ print('Using overrides found in ' + default_include)
includes.append(default_include)
# Command-line --include files come after the default include.
@@ -536,7 +545,7 @@ def gyp_main(args):
def main(args):
try:
return gyp_main(args)
- except GypError, e:
+ except GypError as e:
sys.stderr.write("gyp: %s\n" % e)
return 1
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
index 501118796f..834a8e6c95 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -2,8 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from __future__ import with_statement
-
import collections
import errno
import filecmp
@@ -363,7 +361,7 @@ def WriteOnDiff(filename):
same = False
try:
same = filecmp.cmp(self.tmp_path, filename, False)
- except OSError, e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
@@ -382,9 +380,9 @@ def WriteOnDiff(filename):
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
- umask = os.umask(077)
+ umask = os.umask(0o77)
os.umask(umask)
- os.chmod(self.tmp_path, 0666 & ~umask)
+ os.chmod(self.tmp_path, 0o666 & ~umask)
if sys.platform == 'win32' and os.path.exists(filename):
# NOTE: on windows (but not cygwin) rename will not replace an
# existing file, so it must be preceded with a remove. Sadly there
@@ -464,7 +462,7 @@ def CopyTool(flavor, out_path):
''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
# Make file executable.
- os.chmod(tool_path, 0755)
+ os.chmod(tool_path, 0o755)
# From Alex Martelli,
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
index 841f31f925..7c3f621f1f 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -5,6 +5,7 @@
import re
import os
import locale
+from functools import reduce
def XmlToString(content, encoding='utf-8', pretty=False):
@@ -80,7 +81,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
# Optionally in second position is a dictionary of the attributes.
rest = specification[1:]
if rest and isinstance(rest[0], dict):
- for at, val in sorted(rest[0].iteritems()):
+ for at, val in sorted(rest[0].items()):
xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
rest = rest[1:]
if rest:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
index b38d8660f7..81fb79d136 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
@@ -39,7 +39,7 @@ class FlockTool(object):
# where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK
# hack instead.
- fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
+ fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
if sys.platform.startswith('aix'):
# Python on AIX is compiled with LARGEFILE support, which changes the
# struct size.
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
index 921c1a6b71..dc17c96524 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -62,6 +62,8 @@ directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
then the "all" target includes "b1" and "b2".
"""
+from __future__ import print_function
+
import gyp.common
import gyp.ninja_syntax as ninja_syntax
import json
@@ -155,7 +157,7 @@ def _AddSources(sources, base_path, base_path_components, result):
continue
result.append(base_path + source)
if debug:
- print 'AddSource', org_source, result[len(result) - 1]
+ print('AddSource', org_source, result[len(result) - 1])
def _ExtractSourcesFromAction(action, base_path, base_path_components,
@@ -185,7 +187,7 @@ def _ExtractSources(target, target_dict, toplevel_dir):
base_path += '/'
if debug:
- print 'ExtractSources', target, base_path
+ print('ExtractSources', target, base_path)
results = []
if 'sources' in target_dict:
@@ -278,7 +280,7 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
the root of the source tree."""
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
if debug:
- print 'gyp file modified', build_file
+ print('gyp file modified', build_file)
return True
# First element of included_files is the file itself.
@@ -291,8 +293,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
_ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
if debug:
- print 'included gyp file modified, gyp_file=', build_file, \
- 'included file=', rel_include_file
+ print('included gyp file modified, gyp_file=', build_file,
+ 'included file=', rel_include_file)
return True
return False
@@ -373,7 +375,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
# If a build file (or any of its included files) is modified we assume all
# targets in the file are modified.
if build_file_in_files[build_file]:
- print 'matching target from modified build file', target_name
+ print('matching target from modified build file', target_name)
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
else:
@@ -381,7 +383,7 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
toplevel_dir)
for source in sources:
if _ToGypPath(os.path.normpath(source)) in files:
- print 'target', target_name, 'matches', source
+ print('target', target_name, 'matches', source)
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
break
@@ -433,7 +435,7 @@ def _DoesTargetDependOnMatchingTargets(target):
for dep in target.deps:
if _DoesTargetDependOnMatchingTargets(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
- print '\t', target.name, 'matches by dep', dep.name
+ print('\t', target.name, 'matches by dep', dep.name)
return True
target.match_status = MATCH_STATUS_DOESNT_MATCH
return False
@@ -445,7 +447,7 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets):
supplied as input to analyzer.
possible_targets: targets to search from."""
found = []
- print 'Targets that matched by dependency:'
+ print('Targets that matched by dependency:')
for target in possible_targets:
if _DoesTargetDependOnMatchingTargets(target):
found.append(target)
@@ -484,12 +486,12 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
(add_if_no_ancestor or target.requires_build)) or
(target.is_static_library and add_if_no_ancestor and
not target.is_or_has_linked_ancestor)):
- print '\t\tadding to compile targets', target.name, 'executable', \
- target.is_executable, 'added_to_compile_targets', \
- target.added_to_compile_targets, 'add_if_no_ancestor', \
- add_if_no_ancestor, 'requires_build', target.requires_build, \
- 'is_static_library', target.is_static_library, \
- 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
+ print('\t\tadding to compile targets', target.name, 'executable',
+ target.is_executable, 'added_to_compile_targets',
+ target.added_to_compile_targets, 'add_if_no_ancestor',
+ add_if_no_ancestor, 'requires_build', target.requires_build,
+ 'is_static_library', target.is_static_library,
+ 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor)
result.add(target)
target.added_to_compile_targets = True
@@ -500,7 +502,7 @@ def _GetCompileTargets(matching_targets, supplied_targets):
supplied_targets: set of targets supplied to analyzer to search from."""
result = set()
for target in matching_targets:
- print 'finding compile targets for match', target.name
+ print('finding compile targets for match', target.name)
_AddCompileTargets(target, supplied_targets, True, result)
return result
@@ -508,46 +510,46 @@ def _GetCompileTargets(matching_targets, supplied_targets):
def _WriteOutput(params, **values):
"""Writes the output, either to stdout or a file is specified."""
if 'error' in values:
- print 'Error:', values['error']
+ print('Error:', values['error'])
if 'status' in values:
- print values['status']
+ print(values['status'])
if 'targets' in values:
values['targets'].sort()
- print 'Supplied targets that depend on changed files:'
+ print('Supplied targets that depend on changed files:')
for target in values['targets']:
- print '\t', target
+ print('\t', target)
if 'invalid_targets' in values:
values['invalid_targets'].sort()
- print 'The following targets were not found:'
+ print('The following targets were not found:')
for target in values['invalid_targets']:
- print '\t', target
+ print('\t', target)
if 'build_targets' in values:
values['build_targets'].sort()
- print 'Targets that require a build:'
+ print('Targets that require a build:')
for target in values['build_targets']:
- print '\t', target
+ print('\t', target)
if 'compile_targets' in values:
values['compile_targets'].sort()
- print 'Targets that need to be built:'
+ print('Targets that need to be built:')
for target in values['compile_targets']:
- print '\t', target
+ print('\t', target)
if 'test_targets' in values:
values['test_targets'].sort()
- print 'Test targets:'
+ print('Test targets:')
for target in values['test_targets']:
- print '\t', target
+ print('\t', target)
output_path = params.get('generator_flags', {}).get(
'analyzer_output_path', None)
if not output_path:
- print json.dumps(values)
+ print(json.dumps(values))
return
try:
f = open(output_path, 'w')
f.write(json.dumps(values) + '\n')
f.close()
except IOError as e:
- print 'Error writing to output file', output_path, str(e)
+ print('Error writing to output file', output_path, str(e))
def _WasGypIncludeFileModified(params, files):
@@ -556,7 +558,7 @@ def _WasGypIncludeFileModified(params, files):
if params['options'].includes:
for include in params['options'].includes:
if _ToGypPath(os.path.normpath(include)) in files:
- print 'Include file modified, assuming all changed', include
+ print('Include file modified, assuming all changed', include)
return True
return False
@@ -638,13 +640,13 @@ class TargetCalculator(object):
set(self._root_targets))]
else:
test_targets = [x for x in test_targets_no_all]
- print 'supplied test_targets'
+ print('supplied test_targets')
for target_name in self._test_target_names:
- print '\t', target_name
- print 'found test_targets'
+ print('\t', target_name)
+ print('found test_targets')
for target in test_targets:
- print '\t', target.name
- print 'searching for matching test targets'
+ print('\t', target.name)
+ print('searching for matching test targets')
matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
matching_test_targets_contains_all = (test_target_names_contains_all and
set(matching_test_targets) &
@@ -654,14 +656,14 @@ class TargetCalculator(object):
# 'all' is subsequentely added to the matching names below.
matching_test_targets = [x for x in (set(matching_test_targets) &
set(test_targets_no_all))]
- print 'matched test_targets'
+ print('matched test_targets')
for target in matching_test_targets:
- print '\t', target.name
+ print('\t', target.name)
matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in matching_test_targets]
if matching_test_targets_contains_all:
matching_target_names.append('all')
- print '\tall'
+ print('\tall')
return matching_target_names
def find_matching_compile_target_names(self):
@@ -677,10 +679,10 @@ class TargetCalculator(object):
if 'all' in self._supplied_target_names():
supplied_targets = [x for x in (set(supplied_targets) |
set(self._root_targets))]
- print 'Supplied test_targets & compile_targets'
+ print('Supplied test_targets & compile_targets')
for target in supplied_targets:
- print '\t', target.name
- print 'Finding compile targets'
+ print('\t', target.name)
+ print('Finding compile targets')
compile_targets = _GetCompileTargets(self._changed_targets,
supplied_targets)
return [gyp.common.ParseQualifiedTarget(target.name)[1]
@@ -699,7 +701,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
if debug:
- print 'toplevel_dir', toplevel_dir
+ print('toplevel_dir', toplevel_dir)
if _WasGypIncludeFileModified(params, config.files):
result_dict = { 'status': all_changed_string,
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
index 5b26cc785a..b7f9842888 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -14,6 +14,8 @@
# variables set potentially clash with other Android build system variables.
# Try to avoid setting global variables where possible.
+from __future__ import print_function
+
import gyp
import gyp.common
import gyp.generator.make as make # Reuse global functions from make backend.
@@ -250,7 +252,7 @@ class AndroidMkWriter(object):
dirs = set()
for out in outputs:
if not out.startswith('$'):
- print ('WARNING: Action for target "%s" writes output to local path '
+ print('WARNING: Action for target "%s" writes output to local path '
'"%s".' % (self.target, out))
dir = os.path.split(out)[0]
if dir:
@@ -355,7 +357,7 @@ class AndroidMkWriter(object):
dirs = set()
for out in outputs:
if not out.startswith('$'):
- print ('WARNING: Rule for target %s writes output to local path %s'
+ print('WARNING: Rule for target %s writes output to local path %s'
% (self.target, out))
dir = os.path.dirname(out)
if dir:
@@ -429,7 +431,7 @@ class AndroidMkWriter(object):
# $(gyp_shared_intermediate_dir). Note that we can't use an assertion
# because some of the gyp tests depend on this.
if not copy['destination'].startswith('$'):
- print ('WARNING: Copy rule for target %s writes output to '
+ print('WARNING: Copy rule for target %s writes output to '
'local path %s' % (self.target, copy['destination']))
# LocalPathify() calls normpath, stripping trailing slashes.
@@ -458,7 +460,7 @@ class AndroidMkWriter(object):
Args:
spec, configs: input from gyp.
"""
- for configname, config in sorted(configs.iteritems()):
+ for configname, config in sorted(configs.items()):
extracted_includes = []
self.WriteLn('\n# Flags passed to both C and C++ files.')
@@ -636,7 +638,7 @@ class AndroidMkWriter(object):
elif self.type == 'none':
target_ext = '.stamp'
elif self.type != 'executable':
- print ("ERROR: What output file should be generated?",
+ print("ERROR: What output file should be generated?",
"type", self.type, "target", target)
if self.type != 'static_library' and self.type != 'shared_library':
@@ -788,7 +790,7 @@ class AndroidMkWriter(object):
static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
if self.type != 'static_library':
- for configname, config in sorted(configs.iteritems()):
+ for configname, config in sorted(configs.items()):
ldflags = list(config.get('ldflags', []))
self.WriteLn('')
self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
@@ -837,7 +839,7 @@ class AndroidMkWriter(object):
settings = spec.get('aosp_build_settings', {})
if settings:
self.WriteLn('### Set directly by aosp_build_settings.')
- for k, v in settings.iteritems():
+ for k, v in settings.items():
if isinstance(v, list):
self.WriteList(v, k)
else:
@@ -956,7 +958,7 @@ def PerformBuild(data, configurations, params):
env = dict(os.environ)
env['ONE_SHOT_MAKEFILE'] = makefile
arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
- print 'Building: %s' % arguments
+ print('Building: %s' % arguments)
subprocess.check_call(arguments, env=env)
@@ -1065,7 +1067,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
write_alias_target=write_alias_targets,
sdk_version=sdk_version)
if android_module in android_modules:
- print ('ERROR: Android module names must be unique. The following '
+ print('ERROR: Android module names must be unique. The following '
'targets both generate Android module name %s.\n %s\n %s' %
(android_module, android_modules[android_module],
qualified_target))
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
index 17f5e6396c..7aabddb633 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -28,6 +28,8 @@ not be able to find the header file directories described in the generated
CMakeLists.txt file.
"""
+from __future__ import print_function
+
import multiprocessing
import os
import signal
@@ -639,8 +641,8 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
if cmake_target_type is None:
- print ('Target %s has unknown target type %s, skipping.' %
- ( target_name, target_type ) )
+ print('Target %s has unknown target type %s, skipping.' %
+ ( target_name, target_type ))
return
SetVariable(output, 'TARGET', target_name)
@@ -863,7 +865,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
elif target_type != 'executable':
- print ('ERROR: What output file should be generated?',
+ print('ERROR: What output file should be generated?',
'type', target_type, 'target', target_name)
product_prefix = spec.get('product_prefix', default_product_prefix)
@@ -1180,11 +1182,11 @@ def PerformBuild(data, configurations, params):
output_dir,
config_name))
arguments = ['cmake', '-G', 'Ninja']
- print 'Generating [%s]: %s' % (config_name, arguments)
+ print('Generating [%s]: %s' % (config_name, arguments))
subprocess.check_call(arguments, cwd=build_dir)
arguments = ['ninja', '-C', build_dir]
- print 'Building [%s]: %s' % (config_name, arguments)
+ print('Building [%s]: %s' % (config_name, arguments))
subprocess.check_call(arguments)
@@ -1212,7 +1214,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
arglists.append((target_list, target_dicts, data,
params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
- except KeyboardInterrupt, e:
+ except KeyboardInterrupt as e:
pool.terminate()
raise e
else:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
new file mode 100644
index 0000000000..575db63c4e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py
@@ -0,0 +1,115 @@
+# Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import gyp.common
+import gyp.xcode_emulation
+import json
+import os
+
+generator_additional_non_configuration_keys = []
+generator_additional_path_sections = []
+generator_extra_sources_for_rules = []
+generator_filelist_paths = None
+generator_supports_multiple_toolsets = True
+generator_wants_sorted_dependencies = False
+
+# Lifted from make.py. The actual values don't matter much.
+generator_default_variables = {
+ 'CONFIGURATION_NAME': '$(BUILDTYPE)',
+ 'EXECUTABLE_PREFIX': '',
+ 'EXECUTABLE_SUFFIX': '',
+ 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
+ 'PRODUCT_DIR': '$(builddir)',
+ 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',
+ 'RULE_INPUT_EXT': '$(suffix $<)',
+ 'RULE_INPUT_NAME': '$(notdir $<)',
+ 'RULE_INPUT_PATH': '$(abspath $<)',
+ 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',
+ 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
+ 'SHARED_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_PREFIX': 'lib',
+ 'STATIC_LIB_SUFFIX': '.a',
+}
+
+
+def IsMac(params):
+ return 'mac' == gyp.common.GetFlavor(params)
+
+
+def CalculateVariables(default_variables, params):
+ default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+
+def AddCommandsForTarget(cwd, target, params, per_config_commands):
+ output_dir = params['generator_flags']['output_dir']
+  for configuration_name, configuration in target['configurations'].items():
+ builddir_name = os.path.join(output_dir, configuration_name)
+
+ if IsMac(params):
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
+ cflags = xcode_settings.GetCflags(configuration_name)
+ cflags_c = xcode_settings.GetCflagsC(configuration_name)
+ cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
+ else:
+ cflags = configuration.get('cflags', [])
+ cflags_c = configuration.get('cflags_c', [])
+ cflags_cc = configuration.get('cflags_cc', [])
+
+ cflags_c = cflags + cflags_c
+ cflags_cc = cflags + cflags_cc
+
+ defines = configuration.get('defines', [])
+ defines = ['-D' + s for s in defines]
+
+ # TODO(bnoordhuis) Handle generated source files.
+ sources = target.get('sources', [])
+ sources = [s for s in sources if s.endswith('.c') or s.endswith('.cc')]
+
+ def resolve(filename):
+ return os.path.abspath(os.path.join(cwd, filename))
+
+ # TODO(bnoordhuis) Handle generated header files.
+ include_dirs = configuration.get('include_dirs', [])
+ include_dirs = [s for s in include_dirs if not s.startswith('$(obj)')]
+ includes = ['-I' + resolve(s) for s in include_dirs]
+
+ defines = gyp.common.EncodePOSIXShellList(defines)
+ includes = gyp.common.EncodePOSIXShellList(includes)
+ cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
+ cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
+
+ commands = per_config_commands.setdefault(configuration_name, [])
+ for source in sources:
+ file = resolve(source)
+ isc = source.endswith('.c')
+ cc = 'cc' if isc else 'c++'
+ cflags = cflags_c if isc else cflags_cc
+ command = ' '.join((cc, defines, includes, cflags,
+ '-c', gyp.common.EncodePOSIXShellArgument(file)))
+ commands.append(dict(command=command, directory=output_dir, file=file))
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ per_config_commands = {}
+  for qualified_target, target in target_dicts.items():
+ build_file, target_name, toolset = (
+ gyp.common.ParseQualifiedTarget(qualified_target))
+ if IsMac(params):
+ settings = data[build_file]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
+ cwd = os.path.dirname(build_file)
+ AddCommandsForTarget(cwd, target, params, per_config_commands)
+
+ output_dir = params['generator_flags']['output_dir']
+  for configuration_name, commands in per_config_commands.items():
+ filename = os.path.join(output_dir,
+ configuration_name,
+ 'compile_commands.json')
+ gyp.common.EnsureDirExists(filename)
+    with open(filename, 'w') as fp:
+      json.dump(commands, fp=fp, indent=0, check_circular=False)
+
+
+def PerformBuild(data, configurations, params):
+ pass
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
index 160eafe2ef..8e4f3168f3 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -96,4 +97,4 @@ def GenerateOutput(target_list, target_dicts, data, params):
f = open(filename, 'w')
json.dump(edges, f)
f.close()
- print 'Wrote json to %s.' % filename
+ print('Wrote json to %s.' % filename)
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
index f4c7c12f59..69b24d947a 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -141,7 +141,7 @@ def GetAllIncludeDirectories(target_list, target_dicts,
compiler_includes_list.append(include_dir)
# Find standard gyp include dirs.
- if config.has_key('include_dirs'):
+ if 'include_dirs' in config:
include_dirs = config['include_dirs']
for shared_intermediate_dir in shared_intermediate_dirs:
for include_dir in include_dirs:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
index 3efdb9966a..78eeaa61b2 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
@@ -88,7 +88,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
if not output_file in output_files:
output_files[output_file] = input_file
- for output_file, input_file in output_files.iteritems():
+ for output_file, input_file in output_files.items():
output = open(output_file, 'w')
pprint.pprint(data[input_file], output)
output.close()
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
index fe801b77ce..37ac255bfa 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -21,6 +21,8 @@
# toplevel Makefile. It may make sense to generate some .mk files on
# the side to keep the files readable.
+from __future__ import print_function
+
import os
import re
import sys
@@ -672,14 +674,13 @@ def _ValidateSourcesForOSX(spec, all_sources):
basenames.setdefault(basename, []).append(source)
error = ''
- for basename, files in basenames.iteritems():
+ for basename, files in basenames.items():
if len(files) > 1:
error += ' %s: %s\n' % (basename, ' '.join(files))
if error:
- print('static library %s has several files with the same basename:\n' %
- spec['target_name'] + error + 'libtool on OS X will generate' +
- ' warnings for them.')
+ print(('static library %s has several files with the same basename:\n' % spec['target_name'])
+ + error + 'libtool on OS X will generate' + ' warnings for them.')
raise GypError('Duplicate basenames in sources section, see list above')
@@ -1381,7 +1382,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
elif self.type == 'none':
target = '%s.stamp' % target
elif self.type != 'executable':
- print ("ERROR: What output file should be generated?",
+ print("ERROR: What output file should be generated?",
"type", self.type, "target", target)
target_prefix = spec.get('product_prefix', target_prefix)
@@ -1546,7 +1547,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
- for i in xrange(len(postbuilds)):
+ for i in range(len(postbuilds)):
if not postbuilds[i].startswith('$'):
postbuilds[i] = EscapeShellArgument(postbuilds[i])
self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
@@ -1638,7 +1639,7 @@ $(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
postbuilds=postbuilds)
else:
- print "WARNING: no output for", self.type, target
+ print("WARNING: no output for", self.type, self.target)
# Add an alias for each target (if there are any outputs).
# Installable target aliases are created below.
@@ -1992,7 +1993,7 @@ def PerformBuild(data, configurations, params):
if options.toplevel_dir and options.toplevel_dir != '.':
arguments += '-C', options.toplevel_dir
arguments.append('BUILDTYPE=' + config)
- print 'Building [%s]: %s' % (config, arguments)
+ print('Building [%s]: %s' % (config, arguments))
subprocess.check_call(arguments)
@@ -2058,6 +2059,14 @@ def GenerateOutput(target_list, target_dicts, data, params):
'srcdir': srcdir,
'copy_archive_args': copy_archive_arguments,
'makedep_args': makedep_arguments,
+ 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
+ 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
+ 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
+ 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
+ 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'),
+ 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'),
+ 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'),
+ 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'),
}
if flavor == 'mac':
flock_command = './gyp-mac-tool flock'
@@ -2078,6 +2087,10 @@ def GenerateOutput(target_list, target_dicts, data, params):
'copy_archive_args': copy_archive_arguments,
'makedep_args': makedep_arguments,
'link_commands': LINK_COMMANDS_OS390,
+ 'CC.target': GetEnvironFallback(('CC_target', 'CC'), 'njsc'),
+ 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), 'njsc++'),
+ 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'njsc'),
+ 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'njsc++'),
})
elif flavor == 'solaris':
header_params.update({
@@ -2103,17 +2116,6 @@ def GenerateOutput(target_list, target_dicts, data, params):
'flock_index': 2,
})
- header_params.update({
- 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
- 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
- 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
- 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
- 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'),
- 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'),
- 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'),
- 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'),
- })
-
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_array = data[build_file].get('make_global_settings', [])
wrappers = {}
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
index 1b383dffe8..aacbe60836 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from __future__ import print_function
+
import copy
import ntpath
import os
@@ -86,6 +88,7 @@ generator_additional_non_configuration_keys = [
'msvs_enable_winrt',
'msvs_requires_importlibrary',
'msvs_enable_winphone',
+ 'msvs_enable_marmasm',
'msvs_application_type_revision',
'msvs_target_platform_version',
'msvs_target_platform_minversion',
@@ -449,7 +452,7 @@ def _AddCustomBuildToolForMSVS(p, spec, primary_input,
'CommandLine': cmd,
})
# Add to the properties of primary input for each config.
- for config_name, c_data in spec['configurations'].iteritems():
+ for config_name, c_data in spec['configurations'].items():
p.AddFileConfig(_FixPath(primary_input),
_ConfigFullName(config_name, c_data), tools=[tool])
@@ -755,8 +758,8 @@ def _EscapeVCProjCommandLineArgListItem(s):
# the VCProj but cause the same problem on the final command-line. Moving
# the item to the end of the list does works, but that's only possible if
# there's only one such item. Let's just warn the user.
- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
- 'quotes in ' + s)
+ print('Warning: MSVS may misinterpret the odd number of ' +
+ 'quotes in ' + s, file=sys.stderr)
return s
@@ -969,13 +972,13 @@ def _ValidateSourcesForMSVSProject(spec, version):
basenames.setdefault(basename, []).append(source)
error = ''
- for basename, files in basenames.iteritems():
+ for basename, files in basenames.items():
if len(files) > 1:
error += ' %s: %s\n' % (basename, ' '.join(files))
if error:
- print('static library %s has several files with the same basename:\n' %
- spec['target_name'] + error + 'MSVC08 cannot handle that.')
+ print('static library %s has several files with the same basename:\n' % spec['target_name']
+ + error + 'MSVC08 cannot handle that.')
raise GypError('Duplicate basenames in sources section, see list above')
@@ -1001,7 +1004,7 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
config_type = _GetMSVSConfigurationType(spec, project.build_file)
- for config_name, config in spec['configurations'].iteritems():
+ for config_name, config in spec['configurations'].items():
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
# MSVC08 and prior version cannot handle duplicate basenames in the same
@@ -1367,10 +1370,10 @@ def _ConvertToolsToExpectedForm(tools):
A list of Tool objects.
"""
tool_list = []
- for tool, settings in tools.iteritems():
+ for tool, settings in tools.items():
# Collapse settings with lists.
settings_fixed = {}
- for setting, value in settings.iteritems():
+ for setting, value in settings.items():
if type(value) == list:
if ((tool == 'VCLinkerTool' and
setting == 'AdditionalDependencies') or
@@ -1545,7 +1548,7 @@ def _IdlFilesHandledNonNatively(spec, sources):
def _GetPrecompileRelatedFiles(spec):
# Gather a list of precompiled header related sources.
precompiled_related = []
- for _, config in spec['configurations'].iteritems():
+ for _, config in spec['configurations'].items():
for k in precomp_keys:
f = config.get(k)
if f:
@@ -1556,7 +1559,7 @@ def _GetPrecompileRelatedFiles(spec):
def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
list_excluded):
exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
- for file_name, excluded_configs in exclusions.iteritems():
+ for file_name, excluded_configs in exclusions.items():
if (not list_excluded and
len(excluded_configs) == len(spec['configurations'])):
# If we're not listing excluded files, then they won't appear in the
@@ -1573,7 +1576,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
# Exclude excluded sources from being built.
for f in excluded_sources:
excluded_configs = []
- for config_name, config in spec['configurations'].iteritems():
+ for config_name, config in spec['configurations'].items():
precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
# Don't do this for ones that are precompiled header related.
if f not in precomped:
@@ -1583,7 +1586,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
# Exclude them now.
for f in excluded_idl:
excluded_configs = []
- for config_name, config in spec['configurations'].iteritems():
+ for config_name, config in spec['configurations'].items():
excluded_configs.append((config_name, config))
exclusions[f] = excluded_configs
return exclusions
@@ -1592,7 +1595,7 @@ def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
def _AddToolFilesToMSVS(p, spec):
# Add in tool files (rules).
tool_files = OrderedSet()
- for _, config in spec['configurations'].iteritems():
+ for _, config in spec['configurations'].items():
for f in config.get('msvs_tool_files', []):
tool_files.add(f)
for f in tool_files:
@@ -1605,7 +1608,7 @@ def _HandlePreCompiledHeaders(p, sources, spec):
# kind (i.e. C vs. C++) as the precompiled header source stub needs
# to have use of precompiled headers disabled.
extensions_excluded_from_precompile = []
- for config_name, config in spec['configurations'].iteritems():
+ for config_name, config in spec['configurations'].items():
source = config.get('msvs_precompiled_source')
if source:
source = _FixPath(source)
@@ -1626,7 +1629,7 @@ def _HandlePreCompiledHeaders(p, sources, spec):
else:
basename, extension = os.path.splitext(source)
if extension in extensions_excluded_from_precompile:
- for config_name, config in spec['configurations'].iteritems():
+ for config_name, config in spec['configurations'].items():
tool = MSVSProject.Tool('VCCLCompilerTool',
{'UsePrecompiledHeader': '0',
'ForcedIncludeFiles': '$(NOINHERIT)'})
@@ -1677,7 +1680,7 @@ def _WriteMSVSUserFile(project_path, version, spec):
return # Nothing to add
# Write out the user file.
user_file = _CreateMSVSUserFile(project_path, version, spec)
- for config_name, c_data in spec['configurations'].iteritems():
+ for config_name, c_data in spec['configurations'].items():
user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
action, environment, working_directory)
user_file.WriteIfChanged()
@@ -1728,7 +1731,7 @@ def _GetPathDict(root, path):
def _DictsToFolders(base_path, bucket, flat):
# Convert to folders recursively.
children = []
- for folder, contents in bucket.iteritems():
+ for folder, contents in bucket.items():
if type(contents) == dict:
folder_children = _DictsToFolders(os.path.join(base_path, folder),
contents, flat)
@@ -1800,7 +1803,7 @@ def _GetPlatformOverridesOfProject(spec):
# Prepare a dict indicating which project configurations are used for which
# solution configurations for this target.
config_platform_overrides = {}
- for config_name, c in spec['configurations'].iteritems():
+ for config_name, c in spec['configurations'].items():
config_fullname = _ConfigFullName(config_name, c)
platform = c.get('msvs_target_platform', _ConfigPlatform(c))
fixed_config_fullname = '%s|%s' % (
@@ -1941,7 +1944,7 @@ def PerformBuild(data, configurations, params):
msvs_version = params['msvs_version']
devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
- for build_file, build_file_dict in data.iteritems():
+ for build_file, build_file_dict in data.items():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
@@ -1951,7 +1954,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
arguments = [devenv, sln_path, '/Build', config]
- print 'Building [%s]: %s' % (config, arguments)
+ print('Building [%s]: %s' % (config, arguments))
rtn = subprocess.check_call(arguments)
@@ -1990,7 +1993,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
configs = set()
for qualified_target in target_list:
spec = target_dicts[qualified_target]
- for config_name, config in spec['configurations'].iteritems():
+ for config_name, config in spec['configurations'].items():
configs.add(_ConfigFullName(config_name, config))
configs = list(configs)
@@ -2033,11 +2036,12 @@ def GenerateOutput(target_list, target_dicts, data, params):
if generator_flags.get('msvs_error_on_missing_sources', False):
raise GypError(error_message)
else:
- print >> sys.stdout, "Warning: " + error_message
+ print("Warning: " + error_message, file=sys.stdout)
def _GenerateMSBuildFiltersFile(filters_path, source_files,
- rule_dependencies, extension_to_rule_name):
+ rule_dependencies, extension_to_rule_name,
+ platforms):
"""Generate the filters file.
This file is used by Visual Studio to organize the presentation of source
@@ -2051,7 +2055,8 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
filter_group = []
source_group = []
_AppendFiltersForMSBuild('', source_files, rule_dependencies,
- extension_to_rule_name, filter_group, source_group)
+ extension_to_rule_name, platforms,
+ filter_group, source_group)
if filter_group:
content = ['Project',
{'ToolsVersion': '4.0',
@@ -2067,7 +2072,7 @@ def _GenerateMSBuildFiltersFile(filters_path, source_files,
def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
- extension_to_rule_name,
+ extension_to_rule_name, platforms,
filter_group, source_group):
"""Creates the list of filters and sources to be added in the filter file.
@@ -2093,11 +2098,12 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
# Recurse and add its dependents.
_AppendFiltersForMSBuild(filter_name, source.contents,
rule_dependencies, extension_to_rule_name,
- filter_group, source_group)
+ platforms, filter_group, source_group)
else:
# It's a source. Create a source entry.
_, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name)
+ extension_to_rule_name,
+ platforms)
source_entry = [element, {'Include': source}]
# Specify the filter it is part of, if any.
if parent_filter_name:
@@ -2106,7 +2112,7 @@ def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
def _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name):
+ extension_to_rule_name, platforms):
"""Returns the group and element type of the source file.
Arguments:
@@ -2132,6 +2138,9 @@ def _MapFileToMsBuildSourceType(source, rule_dependencies,
elif ext == '.asm':
group = 'masm'
element = 'MASM'
+ for platform in platforms:
+ if platform.lower() in ['arm', 'arm64']:
+ element = 'MARMASM'
elif ext == '.idl':
group = 'midl'
element = 'Midl'
@@ -2630,7 +2639,7 @@ def _GetConfigurationCondition(name, settings):
def _GetMSBuildProjectConfigurations(configurations):
group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
- for (name, settings) in sorted(configurations.iteritems()):
+ for (name, settings) in sorted(configurations.items()):
configuration, platform = _GetConfigurationAndPlatform(name, settings)
designation = '%s|%s' % (configuration, platform)
group.append(
@@ -2700,7 +2709,7 @@ def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
def _GetMSBuildConfigurationDetails(spec, build_file):
properties = {}
- for name, settings in spec['configurations'].iteritems():
+ for name, settings in spec['configurations'].items():
msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
condition = _GetConfigurationCondition(name, settings)
character_set = msbuild_attributes.get('CharacterSet')
@@ -2729,9 +2738,9 @@ def _GetMSBuildPropertySheets(configurations):
user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
additional_props = {}
props_specified = False
- for name, settings in sorted(configurations.iteritems()):
+ for name, settings in sorted(configurations.items()):
configuration = _GetConfigurationCondition(name, settings)
- if settings.has_key('msbuild_props'):
+ if 'msbuild_props' in settings:
additional_props[configuration] = _FixPaths(settings['msbuild_props'])
props_specified = True
else:
@@ -2751,7 +2760,7 @@ def _GetMSBuildPropertySheets(configurations):
]
else:
sheets = []
- for condition, props in additional_props.iteritems():
+ for condition, props in additional_props.items():
import_group = [
'ImportGroup',
{'Label': 'PropertySheets',
@@ -2784,7 +2793,7 @@ def _ConvertMSVSBuildAttributes(spec, config, build_file):
elif a == 'ConfigurationType':
msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
else:
- print 'Warning: Do not know how to convert MSVS attribute ' + a
+ print('Warning: Do not know how to convert MSVS attribute ' + a)
return msbuild_attributes
@@ -2878,7 +2887,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
properties = {}
- for (name, configuration) in sorted(configurations.iteritems()):
+ for (name, configuration) in sorted(configurations.items()):
condition = _GetConfigurationCondition(name, configuration)
attributes = _GetMSBuildAttributes(spec, configuration, build_file)
msbuild_settings = configuration['finalized_msbuild_settings']
@@ -2903,7 +2912,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
_AddConditionalProperty(properties, condition, 'ExecutablePath',
new_paths)
tool_settings = msbuild_settings.get('', {})
- for name, value in sorted(tool_settings.iteritems()):
+ for name, value in sorted(tool_settings.items()):
formatted_value = _GetValueFormattedForMSBuild('', name, value)
_AddConditionalProperty(properties, condition, name, formatted_value)
return _GetMSBuildPropertyGroup(spec, None, properties)
@@ -2972,7 +2981,7 @@ def _GetMSBuildPropertyGroup(spec, label, properties):
# NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
for name in reversed(properties_ordered):
values = properties[name]
- for value, conditions in sorted(values.iteritems()):
+ for value, conditions in sorted(values.items()):
if len(conditions) == num_configurations:
# If the value is the same all configurations,
# just add one unconditional entry.
@@ -2985,18 +2994,18 @@ def _GetMSBuildPropertyGroup(spec, label, properties):
def _GetMSBuildToolSettingsSections(spec, configurations):
groups = []
- for (name, configuration) in sorted(configurations.iteritems()):
+ for (name, configuration) in sorted(configurations.items()):
msbuild_settings = configuration['finalized_msbuild_settings']
group = ['ItemDefinitionGroup',
{'Condition': _GetConfigurationCondition(name, configuration)}
]
- for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
+ for tool_name, tool_settings in sorted(msbuild_settings.items()):
# Skip the tool named '' which is a holder of global settings handled
# by _GetMSBuildConfigurationGlobalProperties.
if tool_name:
if tool_settings:
tool = [tool_name]
- for name, value in sorted(tool_settings.iteritems()):
+ for name, value in sorted(tool_settings.items()):
formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
value)
tool.append([name, formatted_value])
@@ -3029,7 +3038,7 @@ def _FinalizeMSBuildSettings(spec, configuration):
for ignored_setting in ignored_settings:
value = configuration.get(ignored_setting)
if value:
- print ('Warning: The automatic conversion to MSBuild does not handle '
+ print('Warning: The automatic conversion to MSBuild does not handle '
'%s. Ignoring setting of %s' % (ignored_setting, str(value)))
defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
@@ -3196,7 +3205,7 @@ def _AddSources2(spec, sources, exclusions, grouped_sources,
{'Condition': condition},
'true'])
# Add precompile if needed
- for config_name, configuration in spec['configurations'].iteritems():
+ for config_name, configuration in spec['configurations'].items():
precompiled_source = configuration.get('msvs_precompiled_source', '')
if precompiled_source != '':
precompiled_source = _FixPath(precompiled_source)
@@ -3225,7 +3234,8 @@ def _AddSources2(spec, sources, exclusions, grouped_sources,
detail.append(['ForcedIncludeFiles', ''])
group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name)
+ extension_to_rule_name,
+ _GetUniquePlatforms(spec))
grouped_sources[group].append([element, {'Include': source}] + detail)
@@ -3308,7 +3318,7 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
rule_dependencies,
- extension_to_rule_name)
+ extension_to_rule_name, _GetUniquePlatforms(spec))
missing_sources = _VerifySourcesExist(sources, project_dir)
for configuration in configurations.itervalues():
@@ -3328,6 +3338,12 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
import_masm_targets_section = [
['Import',
{'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
+ import_marmasm_props_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.props'}]]
+ import_marmasm_targets_section = [
+ ['Import',
+ {'Project': r'$(VCTargetsPath)\BuildCustomizations\marmasm.targets'}]]
macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
content = [
@@ -3347,6 +3363,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
content += _GetMSBuildLocalProperties(project.msbuild_toolset)
content += import_cpp_props_section
content += import_masm_props_section
+ if spec.get('msvs_enable_marmasm'):
+ content += import_marmasm_props_section
content += _GetMSBuildExtensions(props_files_of_rules)
content += _GetMSBuildPropertySheets(configurations)
content += macro_section
@@ -3359,6 +3377,8 @@ def _GenerateMSBuildProject(project, options, version, generator_flags):
content += _GetMSBuildProjectReferences(project)
content += import_cpp_targets_section
content += import_masm_targets_section
+ if spec.get('msvs_enable_marmasm'):
+ content += import_marmasm_targets_section
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
if spec.get('msvs_external_builder'):
@@ -3436,7 +3456,7 @@ def _GenerateActionsForMSBuild(spec, actions_to_add):
"""
sources_handled_by_action = OrderedSet()
actions_spec = []
- for primary_input, actions in actions_to_add.iteritems():
+ for primary_input, actions in actions_to_add.items():
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
index a00573ebf2..aae5b71310 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -472,8 +473,8 @@ class NinjaWriter(object):
if self.flavor != 'mac' or len(self.archs) == 1:
link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
else:
- print "Warning: Actions/rules writing object files don't work with " \
- "multiarch targets, dropping. (target %s)" % spec['target_name']
+ print("Warning: Actions/rules writing object files don't work with " \
+ "multiarch targets, dropping. (target %s)" % spec['target_name'])
elif self.flavor == 'mac' and len(self.archs) > 1:
link_deps = collections.defaultdict(list)
@@ -796,7 +797,7 @@ class NinjaWriter(object):
'XCASSETS_LAUNCH_IMAGE': 'launch-image',
}
settings = self.xcode_settings.xcode_settings[self.config_name]
- for settings_key, arg_name in settings_to_arg.iteritems():
+ for settings_key, arg_name in settings_to_arg.items():
value = settings.get(settings_key)
if value:
extra_arguments[arg_name] = value
@@ -1889,7 +1890,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
# Support wrappers from environment variables too.
- for key, value in os.environ.iteritems():
+ for key, value in os.environ.items():
if key.lower().endswith('_wrapper'):
key_prefix = key[:-len('_wrapper')]
key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
@@ -1905,7 +1906,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
configs, generator_flags)
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
toplevel_build, generator_flags, shared_system_includes, OpenOutput)
- for arch, path in cl_paths.iteritems():
+ for arch, path in cl_paths.items():
if clang_cl:
# If we have selected clang-cl, use that instead.
path = clang_cl
@@ -2376,7 +2377,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
builddir = os.path.join(options.toplevel_dir, 'out', config)
arguments = ['ninja', '-C', builddir]
- print 'Building [%s]: %s' % (config, arguments)
+ print('Building [%s]: %s' % (config, arguments))
subprocess.check_call(arguments)
@@ -2413,7 +2414,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
arglists.append(
(target_list, target_dicts, data, params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
- except KeyboardInterrupt, e:
+ except KeyboardInterrupt as e:
pool.terminate()
raise e
else:
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
index 0e3fb9301e..694a28afb1 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -1,3 +1,4 @@
+from __future__ import print_function
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -128,7 +129,7 @@ class XcodeProject(object):
try:
os.makedirs(self.path)
self.created_dir = True
- except OSError, e:
+ except OSError as e:
if e.errno != errno.EEXIST:
raise
@@ -182,7 +183,7 @@ class XcodeProject(object):
# the tree tree view for UI display.
# Any values set globally are applied to all configurations, then any
# per-configuration values are applied.
- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
+ for xck, xcv in self.build_file_dict.get('xcode_settings', {}).items():
xccl.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in self.build_file_dict:
config_ref = self.project.AddOrGetFileInRootGroup(
@@ -196,7 +197,7 @@ class XcodeProject(object):
if build_file_configuration_named:
xcc = xccl.ConfigurationNamed(config_name)
for xck, xcv in build_file_configuration_named.get('xcode_settings',
- {}).iteritems():
+ {}).items():
xcc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in build_file_configuration_named:
config_ref = self.project.AddOrGetFileInRootGroup(
@@ -272,7 +273,7 @@ class XcodeProject(object):
script = script + "\n".join(
['export %s="%s"' %
(key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
- for (key, val) in command.get('environment').iteritems()]) + "\n"
+ for (key, val) in command.get('environment').items()]) + "\n"
# Some test end up using sockets, files on disk, etc. and can get
# confused if more then one test runs at a time. The generator
@@ -453,7 +454,7 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
same = False
try:
same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
- except OSError, e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
@@ -472,10 +473,10 @@ sys.exit(subprocess.call(sys.argv[1:]))" """
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
- umask = os.umask(077)
+ umask = os.umask(0o77)
os.umask(umask)
- os.chmod(new_pbxproj_path, 0666 & ~umask)
+ os.chmod(new_pbxproj_path, 0o666 & ~umask)
os.rename(new_pbxproj_path, pbxproj_path)
except Exception:
@@ -565,7 +566,7 @@ def EscapeXcodeDefine(s):
def PerformBuild(data, configurations, params):
options = params['options']
- for build_file, build_file_dict in data.iteritems():
+ for build_file, build_file_dict in data.items():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
@@ -576,7 +577,7 @@ def PerformBuild(data, configurations, params):
for config in configurations:
arguments = ['xcodebuild', '-project', xcodeproj_path]
arguments += ['-configuration', config]
- print "Building [%s]: %s" % (config, arguments)
+ print("Building [%s]: %s" % (config, arguments))
subprocess.check_call(arguments)
@@ -624,7 +625,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
- for build_file, build_file_dict in data.iteritems():
+ for build_file, build_file_dict in data.items():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
@@ -736,7 +737,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
- except KeyError, e:
+ except KeyError as e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
@@ -1013,7 +1014,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# target.
makefile.write('all: \\\n')
for concrete_output_index in \
- xrange(0, len(concrete_outputs_by_rule_source)):
+ range(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
@@ -1036,7 +1037,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
- for concrete_output_index in xrange(0, len(concrete_outputs)):
+ for concrete_output_index in range(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
@@ -1055,7 +1056,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
- for prerequisite_index in xrange(0, len(prerequisites)):
+ for prerequisite_index in range(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
@@ -1277,7 +1278,7 @@ exit 1
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
- for xck, xcv in configuration['xcode_settings'].iteritems():
+ for xck, xcv in configuration['xcode_settings'].items():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
@@ -1285,7 +1286,7 @@ exit 1
xcbc.SetBaseConfiguration(config_ref)
build_files = []
- for build_file, build_file_dict in data.iteritems():
+ for build_file, build_file_dict in data.items():
if build_file.endswith('.gyp'):
build_files.append(build_file)
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
index 10f6e0dba1..eb9858f0c8 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+from __future__ import print_function
+
from compiler.ast import Const
from compiler.ast import Dict
from compiler.ast import Discard
@@ -249,10 +251,10 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes,
else:
build_file_data = eval(build_file_contents, {'__builtins__': None},
None)
- except SyntaxError, e:
+ except SyntaxError as e:
e.filename = build_file_path
raise
- except Exception, e:
+ except Exception as e:
gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
raise
@@ -272,7 +274,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes,
else:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
aux_data, None, check)
- except Exception, e:
+ except Exception as e:
gyp.common.ExceptionAppend(e,
'while reading includes of ' + build_file_path)
raise
@@ -309,7 +311,7 @@ def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
subdict_path, include)
# Recurse into subdictionaries.
- for k, v in subdict.iteritems():
+ for k, v in subdict.items():
if type(v) is dict:
LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
None, check)
@@ -474,7 +476,7 @@ def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
try:
LoadTargetBuildFile(dependency, data, aux_data, variables,
includes, depth, check, load_dependencies)
- except Exception, e:
+ except Exception as e:
gyp.common.ExceptionAppend(
e, 'while loading dependencies of %s' % build_file_path)
raise
@@ -495,7 +497,7 @@ def CallLoadTargetBuildFile(global_flags,
signal.signal(signal.SIGINT, signal.SIG_IGN)
# Apply globals so that the worker process behaves the same.
- for key, value in global_flags.iteritems():
+ for key, value in global_flags.items():
globals()[key] = value
SetGeneratorGlobals(generator_input_info)
@@ -517,12 +519,12 @@ def CallLoadTargetBuildFile(global_flags,
return (build_file_path,
build_file_data,
dependencies)
- except GypError, e:
+ except GypError as e:
sys.stderr.write("gyp: %s\n" % e)
return None
- except Exception, e:
- print >>sys.stderr, 'Exception:', e
- print >>sys.stderr, traceback.format_exc()
+ except Exception as e:
+ print('Exception:', e, file=sys.stderr)
+ print(traceback.format_exc(), file=sys.stderr)
return None
@@ -612,7 +614,7 @@ def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
args = (global_flags, dependency,
variables, includes, depth, check, generator_input_info),
callback = parallel_state.LoadTargetBuildFileCallback)
- except KeyboardInterrupt, e:
+ except KeyboardInterrupt as e:
parallel_state.pool.terminate()
raise e
@@ -912,7 +914,7 @@ def ExpandVariables(input, phase, variables, build_file):
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
cwd=build_file_dir)
- except Exception, e:
+ except Exception as e:
raise GypError("%s while executing command '%s' in %s" %
(e, contents, build_file))
@@ -1026,7 +1028,7 @@ def ExpandVariables(input, phase, variables, build_file):
# Convert all strings that are canonically-represented integers into integers.
if type(output) is list:
- for index in xrange(0, len(output)):
+ for index in range(0, len(output)):
if IsStrCanonicalInt(output[index]):
output[index] = int(output[index])
elif IsStrCanonicalInt(output):
@@ -1097,13 +1099,13 @@ def EvalSingleCondition(
if eval(ast_code, {'__builtins__': None}, variables):
return true_dict
return false_dict
- except SyntaxError, e:
+ except SyntaxError as e:
syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
'at character %d.' %
(str(e.args[0]), e.text, build_file, e.offset),
e.filename, e.lineno, e.offset, e.text)
raise syntax_error
- except NameError, e:
+ except NameError as e:
gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
(cond_expr_expanded, build_file))
raise GypError(e)
@@ -1158,7 +1160,7 @@ def ProcessConditionsInDict(the_dict, phase, variables, build_file):
def LoadAutomaticVariablesFromDict(variables, the_dict):
# Any keys with plain string values in the_dict become automatic variables.
# The variable name is the key name with a "_" character prepended.
- for key, value in the_dict.iteritems():
+ for key, value in the_dict.items():
if type(value) in (str, int, list):
variables['_' + key] = value
@@ -1171,7 +1173,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
# the_dict in the_dict's parent dict. If the_dict's parent is not a dict
# (it could be a list or it could be parentless because it is a root dict),
# the_dict_key will be None.
- for key, value in the_dict.get('variables', {}).iteritems():
+ for key, value in the_dict.get('variables', {}).items():
if type(value) not in (str, int, list):
continue
@@ -1180,7 +1182,7 @@ def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
if variable_name in variables:
# If the variable is already set, don't set it.
continue
- if the_dict_key is 'variables' and variable_name in the_dict:
+ if the_dict_key == 'variables' and variable_name in the_dict:
# If the variable is set without a % in the_dict, and the_dict is a
# variables dict (making |variables| a varaibles sub-dict of a
# variables dict), use the_dict's definition.
@@ -1210,7 +1212,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
# list before we process them so that you can reference one
# variable from another. They will be fully expanded by recursion
# in ExpandVariables.
- for key, value in the_dict['variables'].iteritems():
+ for key, value in the_dict['variables'].items():
variables[key] = value
# Handle the associated variables dict first, so that any variable
@@ -1223,7 +1225,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
- for key, value in the_dict.iteritems():
+ for key, value in the_dict.items():
# Skip "variables", which was already processed if present.
if key != 'variables' and type(value) is str:
expanded = ExpandVariables(value, phase, variables, build_file)
@@ -1281,7 +1283,7 @@ def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
# Recurse into child dicts, or process child lists which may result in
# further recursion into descendant dicts.
- for key, value in the_dict.iteritems():
+ for key, value in the_dict.items():
# Skip "variables" and string values, which were already processed if
# present.
if key == 'variables' or type(value) is str:
@@ -1378,12 +1380,12 @@ def QualifyDependencies(targets):
for dep in dependency_sections
for op in ('', '!', '/')]
- for target, target_dict in targets.iteritems():
+ for target, target_dict in targets.items():
target_build_file = gyp.common.BuildFile(target)
toolset = target_dict['toolset']
for dependency_key in all_dependency_sections:
dependencies = target_dict.get(dependency_key, [])
- for index in xrange(0, len(dependencies)):
+ for index in range(0, len(dependencies)):
dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
target_build_file, dependencies[index], toolset)
if not multiple_toolsets:
@@ -1418,13 +1420,13 @@ def ExpandWildcardDependencies(targets, data):
dependency list, must be qualified when this function is called.
"""
- for target, target_dict in targets.iteritems():
+ for target, target_dict in targets.items():
toolset = target_dict['toolset']
target_build_file = gyp.common.BuildFile(target)
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
- # Loop this way instead of "for dependency in" or "for index in xrange"
+ # Loop this way instead of "for dependency in" or "for index in range"
# because the dependencies list will be modified within the loop body.
index = 0
while index < len(dependencies):
@@ -1480,7 +1482,7 @@ def Unify(l):
def RemoveDuplicateDependencies(targets):
"""Makes sure every dependency appears only once in all targets's dependency
lists."""
- for target_name, target_dict in targets.iteritems():
+ for target_name, target_dict in targets.items():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
@@ -1496,7 +1498,7 @@ def Filter(l, item):
def RemoveSelfDependencies(targets):
"""Remove self dependencies from targets that have the prune_self_dependency
variable set."""
- for target_name, target_dict in targets.iteritems():
+ for target_name, target_dict in targets.items():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
@@ -1509,7 +1511,7 @@ def RemoveSelfDependencies(targets):
def RemoveLinkDependenciesFromNoneTargets(targets):
"""Remove dependencies having the 'link_dependency' attribute from the 'none'
targets."""
- for target_name, target_dict in targets.iteritems():
+ for target_name, target_dict in targets.items():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
@@ -1795,14 +1797,14 @@ def BuildDependencyList(targets):
# Create a DependencyGraphNode for each target. Put it into a dict for easy
# access.
dependency_nodes = {}
- for target, spec in targets.iteritems():
+ for target, spec in targets.items():
if target not in dependency_nodes:
dependency_nodes[target] = DependencyGraphNode(target)
# Set up the dependency links. Targets that have no dependencies are treated
# as dependent on root_node.
root_node = DependencyGraphNode(None)
- for target, spec in targets.iteritems():
+ for target, spec in targets.items():
target_node = dependency_nodes[target]
target_build_file = gyp.common.BuildFile(target)
dependencies = spec.get('dependencies')
@@ -1851,14 +1853,14 @@ def VerifyNoGYPFileCircularDependencies(targets):
dependency_nodes[build_file] = DependencyGraphNode(build_file)
# Set up the dependency links.
- for target, spec in targets.iteritems():
+ for target, spec in targets.items():
build_file = gyp.common.BuildFile(target)
build_file_node = dependency_nodes[build_file]
target_dependencies = spec.get('dependencies', [])
for dependency in target_dependencies:
try:
dependency_build_file = gyp.common.BuildFile(dependency)
- except GypError, e:
+ except GypError as e:
gyp.common.ExceptionAppend(
e, 'while computing dependencies of .gyp file %s' % build_file)
raise
@@ -2116,7 +2118,7 @@ def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
def MergeDicts(to, fro, to_file, fro_file):
# I wanted to name the parameter "from" but it's a Python keyword...
- for k, v in fro.iteritems():
+ for k, v in fro.items():
# It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
# copy semantics. Something else may want to merge from the |fro| dict
# later, and having the same dict ref pointed to twice in the tree isn't
@@ -2251,13 +2253,13 @@ def SetUpConfigurations(target, target_dict):
if not 'configurations' in target_dict:
target_dict['configurations'] = {'Default': {}}
if not 'default_configuration' in target_dict:
- concrete = [i for (i, config) in target_dict['configurations'].iteritems()
+ concrete = [i for (i, config) in target_dict['configurations'].items()
if not config.get('abstract')]
target_dict['default_configuration'] = sorted(concrete)[0]
merged_configurations = {}
configs = target_dict['configurations']
- for (configuration, old_configuration_dict) in configs.iteritems():
+ for (configuration, old_configuration_dict) in configs.items():
# Skip abstract configurations (saves work only).
if old_configuration_dict.get('abstract'):
continue
@@ -2265,7 +2267,7 @@ def SetUpConfigurations(target, target_dict):
# Get the inheritance relationship right by making a copy of the target
# dict.
new_configuration_dict = {}
- for (key, target_val) in target_dict.iteritems():
+ for (key, target_val) in target_dict.items():
key_ext = key[-1:]
if key_ext in key_suffixes:
key_base = key[:-1]
@@ -2349,7 +2351,7 @@ def ProcessListFiltersInDict(name, the_dict):
lists = []
del_lists = []
- for key, value in the_dict.iteritems():
+ for key, value in the_dict.items():
operation = key[-1]
if operation != '!' and operation != '/':
continue
@@ -2397,7 +2399,7 @@ def ProcessListFiltersInDict(name, the_dict):
exclude_key = list_key + '!'
if exclude_key in the_dict:
for exclude_item in the_dict[exclude_key]:
- for index in xrange(0, len(the_list)):
+ for index in range(0, len(the_list)):
if exclude_item == the_list[index]:
# This item matches the exclude_item, so set its action to 0
# (exclude).
@@ -2423,7 +2425,7 @@ def ProcessListFiltersInDict(name, the_dict):
raise ValueError('Unrecognized action ' + action + ' in ' + name + \
' key ' + regex_key)
- for index in xrange(0, len(the_list)):
+ for index in range(0, len(the_list)):
list_item = the_list[index]
if list_actions[index] == action_value:
# Even if the regex matches, nothing will change so continue (regex
@@ -2454,7 +2456,7 @@ def ProcessListFiltersInDict(name, the_dict):
# the indices of items that haven't been seen yet don't shift. That means
# that things need to be prepended to excluded_list to maintain them in the
# same order that they existed in the_list.
- for index in xrange(len(list_actions) - 1, -1, -1):
+ for index in range(len(list_actions) - 1, -1, -1):
if list_actions[index] == 0:
# Dump anything with action 0 (exclude). Keep anything with action 1
# (include) or -1 (no include or exclude seen for the item).
@@ -2467,7 +2469,7 @@ def ProcessListFiltersInDict(name, the_dict):
the_dict[excluded_key] = excluded_list
# Now recurse into subdicts and lists that may contain dicts.
- for key, value in the_dict.iteritems():
+ for key, value in the_dict.items():
if type(value) is dict:
ProcessListFiltersInDict(key, value)
elif type(value) is list:
@@ -2524,13 +2526,13 @@ def ValidateSourcesInTarget(target, target_dict, build_file,
basenames.setdefault(basename, []).append(source)
error = ''
- for basename, files in basenames.iteritems():
+ for basename, files in basenames.items():
if len(files) > 1:
error += ' %s: %s\n' % (basename, ' '.join(files))
if error:
- print('static library %s has several files with the same basename:\n' %
- target + error + 'libtool on Mac cannot handle that. Use '
+ print('static library %s has several files with the same basename:\n' % target
+ + error + 'libtool on Mac cannot handle that. Use '
'--no-duplicate-basename-check to disable this validation.')
raise GypError('Duplicate basenames in sources section, see list above')
@@ -2644,7 +2646,7 @@ def ValidateActionsInTarget(target, target_dict, build_file):
def TurnIntIntoStrInDict(the_dict):
"""Given dict the_dict, recursively converts all integers into strings.
"""
- # Use items instead of iteritems because there's no need to try to look at
+ # Use items instead of items because there's no need to try to look at
# reinserted keys and their associated values.
for k, v in the_dict.items():
if type(v) is int:
@@ -2663,7 +2665,7 @@ def TurnIntIntoStrInDict(the_dict):
def TurnIntIntoStrInList(the_list):
"""Given list the_list, recursively converts all integers into strings.
"""
- for index in xrange(0, len(the_list)):
+ for index in range(0, len(the_list)):
item = the_list[index]
if type(item) is int:
the_list[index] = str(item)
@@ -2781,7 +2783,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
try:
LoadTargetBuildFile(build_file, data, aux_data,
variables, includes, depth, check, True)
- except Exception, e:
+ except Exception as e:
gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
raise
@@ -2803,7 +2805,7 @@ def Load(build_files, variables, includes, depth, generator_input_info, check,
RemoveLinkDependenciesFromNoneTargets(targets)
# Apply exclude (!) and regex (/) list filters only for dependency_sections.
- for target_name, target_dict in targets.iteritems():
+ for target_name, target_dict in targets.items():
tmp_dict = {}
for key_base in dependency_sections:
for op in ('', '!', '/'):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
index 4234fbb830..1bc5e3d308 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
@@ -22,38 +22,38 @@ class TestFindCycles(unittest.TestCase):
dependency.dependents.append(dependent)
def test_no_cycle_empty_graph(self):
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
+ for label, node in self.nodes.items():
+ self.assertEqual([], node.FindCycles())
def test_no_cycle_line(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
self._create_dependency(self.nodes['c'], self.nodes['d'])
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
+ for label, node in self.nodes.items():
+ self.assertEqual([], node.FindCycles())
def test_no_cycle_dag(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['a'], self.nodes['c'])
self._create_dependency(self.nodes['b'], self.nodes['c'])
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
+ for label, node in self.nodes.items():
+ self.assertEqual([], node.FindCycles())
def test_cycle_self_reference(self):
self._create_dependency(self.nodes['a'], self.nodes['a'])
- self.assertEquals([[self.nodes['a'], self.nodes['a']]],
+ self.assertEqual([[self.nodes['a'], self.nodes['a']]],
self.nodes['a'].FindCycles())
def test_cycle_two_nodes(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
self._create_dependency(self.nodes['b'], self.nodes['a'])
- self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
+ self.assertEqual([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
self.nodes['a'].FindCycles())
- self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
+ self.assertEqual([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
self.nodes['b'].FindCycles())
def test_two_cycles(self):
@@ -68,7 +68,7 @@ class TestFindCycles(unittest.TestCase):
[self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
self.assertTrue(
[self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
- self.assertEquals(2, len(cycles))
+ self.assertEqual(2, len(cycles))
def test_big_cycle(self):
self._create_dependency(self.nodes['a'], self.nodes['b'])
@@ -77,7 +77,7 @@ class TestFindCycles(unittest.TestCase):
self._create_dependency(self.nodes['d'], self.nodes['e'])
self._create_dependency(self.nodes['e'], self.nodes['a'])
- self.assertEquals([[self.nodes['a'],
+ self.assertEqual([[self.nodes['a'],
self.nodes['b'],
self.nodes['c'],
self.nodes['d'],
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
index eeeaceb0c7..b8b7344eff 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
@@ -8,6 +8,8 @@
These functions are executed via gyp-mac-tool when using the Makefile generator.
"""
+from __future__ import print_function
+
import fcntl
import fnmatch
import glob
@@ -125,7 +127,7 @@ class MacTool(object):
fp = open(file_name, 'rb')
try:
header = fp.read(3)
- except e:
+ except Exception:
fp.close()
return None
fp.close()
@@ -243,7 +245,7 @@ class MacTool(object):
_, err = libtoolout.communicate()
for line in err.splitlines():
if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
+ print(line, file=sys.stderr)
# Unconditionally touch the output .a file on the command line if present
# and the command succeeded. A bit hacky.
if not libtoolout.returncode:
@@ -324,7 +326,7 @@ class MacTool(object):
])
if keys:
keys = json.loads(keys)
- for key, value in keys.iteritems():
+ for key, value in keys.items():
arg_name = '--' + key
if isinstance(value, bool):
if value:
@@ -440,8 +442,7 @@ class MacTool(object):
profiles_dir = os.path.join(
os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
+ print('cannot find mobile provisioning for %s' % bundle_identifier, file=sys.stderr)
sys.exit(1)
provisioning_profiles = None
if profile:
@@ -462,8 +463,7 @@ class MacTool(object):
valid_provisioning_profiles[app_id_pattern] = (
profile_path, profile_data, team_identifier)
if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
+ print('cannot find mobile provisioning for %s' % bundle_identifier, file=sys.stderr)
sys.exit(1)
# If the user has multiple provisioning profiles installed that can be
# used for ${bundle_identifier}, pick the most specific one (ie. the
@@ -487,7 +487,7 @@ class MacTool(object):
def _MergePlist(self, merged_plist, plist):
"""Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
+ for key, value in plist.items():
if isinstance(value, dict):
merged_value = merged_plist.get(key, {})
if isinstance(merged_value, dict):
@@ -597,7 +597,7 @@ class MacTool(object):
the key was not found.
"""
if isinstance(data, str):
- for key, value in substitutions.iteritems():
+ for key, value in substitutions.items():
data = data.replace('$(%s)' % key, value)
return data
if isinstance(data, list):
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
index b1a51b9784..4a50b1b74c 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
@@ -205,7 +205,7 @@ class MsvsSettings(object):
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
- for configname, config in configs.iteritems():
+ for configname, config in configs.items():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
@@ -950,7 +950,7 @@ def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
- for old, new in expansions.iteritems():
+ for old, new in expansions.items():
assert '$(' not in new, new
string = string.replace(old, new)
return string
@@ -994,7 +994,7 @@ def _FormatAsEnvironmentBlock(envvar_dict):
CreateProcess documentation for more details."""
block = ''
nul = '\0'
- for key, value in envvar_dict.iteritems():
+ for key, value in envvar_dict.items():
block += key + '=' + value + nul
block += nul
return block
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py
index fd6b7276be..01fe413434 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py
@@ -161,8 +161,8 @@ class OrderedDict(dict):
for k in self:
yield self[k]
- def iteritems(self):
- 'od.iteritems -> an iterator over the (key, value) items in od'
+ def items(self):
+ 'od.items -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
index 74c98c5a79..94a6f17dab 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
@@ -28,8 +28,12 @@ _deepcopy_dispatch = d = {}
def _deepcopy_atomic(x):
return x
-for x in (type(None), int, long, float,
- bool, str, unicode, type):
+try:
+ types = bool, float, int, str, type, type(None), long, unicode
+except NameError: # Python 3
+ types = bool, float, int, str, type, type(None)
+
+for x in types:
d[x] = _deepcopy_atomic
def _deepcopy_list(x):
@@ -38,7 +42,7 @@ d[list] = _deepcopy_list
def _deepcopy_dict(x):
y = {}
- for key, value in x.iteritems():
+ for key, value in x.items():
y[deepcopy(key)] = deepcopy(value)
return y
d[dict] = _deepcopy_dict
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
index bb6f1ea436..bca0b9e346 100755
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -9,6 +9,8 @@
These functions are executed via gyp-win-tool when using the ninja generator.
"""
+from __future__ import print_function
+
import os
import re
import shutil
@@ -126,7 +128,7 @@ class WinTool(object):
if (not line.startswith(' Creating library ') and
not line.startswith('Generating code') and
not line.startswith('Finished generating code')):
- print line
+ print(line)
return link.returncode
def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
@@ -215,7 +217,7 @@ class WinTool(object):
out, _ = popen.communicate()
for line in out.splitlines():
if line and 'manifest authoring warning 81010002' not in line:
- print line
+ print(line)
return popen.returncode
def ExecManifestToRc(self, arch, *args):
@@ -255,7 +257,7 @@ class WinTool(object):
for x in lines if x.startswith(prefixes))
for line in lines:
if not line.startswith(prefixes) and line not in processing:
- print line
+ print(line)
return popen.returncode
def ExecAsmWrapper(self, arch, *args):
@@ -269,7 +271,7 @@ class WinTool(object):
not line.startswith('Microsoft (R) Macro Assembler') and
not line.startswith(' Assembling: ') and
line):
- print line
+ print(line)
return popen.returncode
def ExecRcWrapper(self, arch, *args):
@@ -283,7 +285,7 @@ class WinTool(object):
if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
not line.startswith('Copyright (C) Microsoft Corporation') and
line):
- print line
+ print(line)
return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir):
@@ -292,7 +294,7 @@ class WinTool(object):
env = self._GetEnv(arch)
# TODO(scottmg): This is a temporary hack to get some specific variables
# through to actions that are set after gyp-time. http://crbug.com/333738.
- for k, v in os.environ.iteritems():
+ for k, v in os.environ.items():
if k not in env:
env[k] = v
args = open(rspfile).read()
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
index 69f7d97cfa..6ae41e293a 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
@@ -7,6 +7,8 @@ This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
+from __future__ import print_function
+
import copy
import gyp.common
import os
@@ -73,7 +75,7 @@ class XcodeArchsDefault(object):
if arch not in expanded_archs:
expanded_archs.append(arch)
except KeyError as e:
- print 'Warning: Ignoring unsupported variable "%s".' % variable
+ print('Warning: Ignoring unsupported variable "%s".' % variable)
elif arch not in expanded_archs:
expanded_archs.append(arch)
return expanded_archs
@@ -168,7 +170,7 @@ class XcodeSettings(object):
# the same for all configs are implicitly per-target settings.
self.xcode_settings = {}
configs = spec['configurations']
- for configname, config in configs.iteritems():
+ for configname, config in configs.items():
self.xcode_settings[configname] = config.get('xcode_settings', {})
self._ConvertConditionalKeys(configname)
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
@@ -194,8 +196,8 @@ class XcodeSettings(object):
new_key = key.split("[")[0]
settings[new_key] = settings[key]
else:
- print 'Warning: Conditional keys not implemented, ignoring:', \
- ' '.join(conditional_keys)
+ print('Warning: Conditional keys not implemented, ignoring:',
+ ' '.join(conditional_keys))
del settings[key]
def _Settings(self):
@@ -213,7 +215,7 @@ class XcodeSettings(object):
def _WarnUnimplemented(self, test_key):
if test_key in self._Settings():
- print 'Warning: Ignoring not yet implemented key "%s".' % test_key
+ print('Warning: Ignoring not yet implemented key "%s".' % test_key)
def IsBinaryOutputFormat(self, configname):
default = "binary" if self.isIOS else "xml"
@@ -842,7 +844,7 @@ class XcodeSettings(object):
if self._IsXCTest():
platform_root = self._XcodePlatformPath(configname)
if platform_root:
- cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/')
+ cflags.append('-F' + platform_root + '/Developer/Library/Frameworks/') # noqa TODO @cclauss
is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
if sdk_root and is_extension:
@@ -889,7 +891,7 @@ class XcodeSettings(object):
result = dict(self.xcode_settings[configname])
first_pass = False
else:
- for key, value in self.xcode_settings[configname].iteritems():
+ for key, value in self.xcode_settings[configname].items():
if key not in result:
continue
elif result[key] != value:
@@ -997,8 +999,8 @@ class XcodeSettings(object):
unimpl = ['OTHER_CODE_SIGN_FLAGS']
unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
if unimpl:
- print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
- ', '.join(sorted(unimpl)))
+ print('Warning: Some codesign keys not implemented, ignoring: %s' % (
+ ', '.join(sorted(unimpl))))
return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
@@ -1597,7 +1599,7 @@ def _TopologicallySortedEnvVarKeys(env):
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
- except gyp.common.CycleError, e:
+ except gyp.common.CycleError as e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
@@ -1637,7 +1639,7 @@ def _AddIOSDeviceConfigurations(targets):
for target_dict in targets.itervalues():
toolset = target_dict['toolset']
configs = target_dict['configurations']
- for config_name, config_dict in dict(configs).iteritems():
+ for config_name, config_dict in dict(configs).items():
iphoneos_config_dict = copy.deepcopy(config_dict)
configs[config_name + '-iphoneos'] = iphoneos_config_dict
configs[config_name + '-iphonesimulator'] = config_dict
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
index 3820d6bf04..5acd82e004 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -28,7 +28,7 @@ def _WriteWorkspace(main_gyp, sources_gyp, params):
workspace_path = os.path.join(options.generator_output, workspace_path)
try:
os.makedirs(workspace_path)
- except OSError, e:
+ except OSError as e:
if e.errno != errno.EEXIST:
raise
output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
@@ -161,7 +161,7 @@ def CreateWrapper(target_list, target_dicts, data, params):
params: Dict of global options for gyp.
"""
orig_gyp = params['build_files'][0]
- for gyp_name, gyp_dict in data.iteritems():
+ for gyp_name, gyp_dict in data.items():
if gyp_name == orig_gyp:
depth = gyp_dict['_DEPTH']
@@ -228,7 +228,7 @@ def CreateWrapper(target_list, target_dicts, data, params):
sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
sources = []
- for target, target_dict in target_dicts.iteritems():
+ for target, target_dict in target_dicts.items():
base = os.path.dirname(target)
files = target_dict.get('sources', []) + \
target_dict.get('mac_bundle_resources', [])
diff --git a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
index d08b7f7770..b0385468c5 100644
--- a/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
+++ b/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
@@ -138,21 +138,18 @@ a project file is output.
"""
import gyp.common
+import hashlib
import posixpath
import re
import struct
import sys
-# hashlib is supplied as of Python 2.5 as the replacement interface for sha
-# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if
-# available, avoiding a deprecation warning under 2.6. Import sha otherwise,
-# preserving 2.4 compatibility.
try:
- import hashlib
- _new_sha1 = hashlib.sha1
-except ImportError:
- import sha
- _new_sha1 = sha.new
+ basestring, cmp, unicode
+except NameError: # Python 3
+ basestring = unicode = str
+ def cmp(x, y):
+ return (x > y) - (x < y)
# See XCObject._EncodeString. This pattern is used to determine when a string
@@ -314,7 +311,7 @@ class XCObject(object):
"""
that = self.__class__(id=self.id, parent=self.parent)
- for key, value in self._properties.iteritems():
+ for key, value in self._properties.items():
is_strong = self._schema[key][2]
if isinstance(value, XCObject):
@@ -324,8 +321,7 @@ class XCObject(object):
that._properties[key] = new_value
else:
that._properties[key] = value
- elif isinstance(value, str) or isinstance(value, unicode) or \
- isinstance(value, int):
+ elif isinstance(value, (basestring, int)):
that._properties[key] = value
elif isinstance(value, list):
if is_strong:
@@ -422,7 +418,7 @@ class XCObject(object):
hash.update(data)
if seed_hash is None:
- seed_hash = _new_sha1()
+ seed_hash = hashlib.sha1()
hash = seed_hash.copy()
@@ -452,7 +448,7 @@ class XCObject(object):
digest_int_count = hash.digest_size / 4
digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
id_ints = [0, 0, 0]
- for index in xrange(0, digest_int_count):
+ for index in range(0, digest_int_count):
id_ints[index % 3] ^= digest_ints[index]
self.id = '%08X%08X%08X' % tuple(id_ints)
@@ -475,7 +471,7 @@ class XCObject(object):
"""Returns a list of all of this object's owned (strong) children."""
children = []
- for property, attributes in self._schema.iteritems():
+ for property, attributes in self._schema.items():
(is_list, property_type, is_strong) = attributes[0:3]
if is_strong and property in self._properties:
if not is_list:
@@ -622,7 +618,7 @@ class XCObject(object):
printable += end_tabs + ')'
elif isinstance(value, dict):
printable = '{' + sep
- for item_key, item_value in sorted(value.iteritems()):
+ for item_key, item_value in sorted(value.items()):
printable += element_tabs + \
self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
@@ -691,7 +687,7 @@ class XCObject(object):
printable_value[0] == '"' and printable_value[-1] == '"':
printable_value = printable_value[1:-1]
printable += printable_key + ' = ' + printable_value + ';' + after_kv
- except TypeError, e:
+ except TypeError as e:
gyp.common.ExceptionAppend(e,
'while printing key "%s"' % key)
raise
@@ -730,7 +726,7 @@ class XCObject(object):
self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
# The remaining elements of an object dictionary are sorted alphabetically.
- for property, value in sorted(self._properties.iteritems()):
+ for property, value in sorted(self._properties.items()):
self._XCKVPrint(file, 3, property, value)
# End the object.
@@ -752,7 +748,7 @@ class XCObject(object):
if properties is None:
return
- for property, value in properties.iteritems():
+ for property, value in properties.items():
# Make sure the property is in the schema.
if not property in self._schema:
raise KeyError(property + ' not in ' + self.__class__.__name__)
@@ -788,8 +784,7 @@ class XCObject(object):
self._properties[property] = value.Copy()
else:
self._properties[property] = value
- elif isinstance(value, str) or isinstance(value, unicode) or \
- isinstance(value, int):
+ elif isinstance(value, (basestring, int)):
self._properties[property] = value
elif isinstance(value, list):
if is_strong:
@@ -865,7 +860,7 @@ class XCObject(object):
# TODO(mark): A stronger verification mechanism is needed. Some
# subclasses need to perform validation beyond what the schema can enforce.
- for property, attributes in self._schema.iteritems():
+ for property, attributes in self._schema.items():
(is_list, property_type, is_strong, is_required) = attributes[0:4]
if is_required and not property in self._properties:
raise KeyError(self.__class__.__name__ + ' requires ' + property)
@@ -875,7 +870,7 @@ class XCObject(object):
overwrite properties that have already been set."""
defaults = {}
- for property, attributes in self._schema.iteritems():
+ for property, attributes in self._schema.items():
(is_list, property_type, is_strong, is_required) = attributes[0:4]
if is_required and len(attributes) >= 5 and \
not property in self._properties:
@@ -1426,7 +1421,7 @@ class XCFileLikeElement(XCHierarchicalElement):
xche = self
while xche != None and isinstance(xche, XCHierarchicalElement):
xche_hashables = xche.Hashables()
- for index in xrange(0, len(xche_hashables)):
+ for index in range(0, len(xche_hashables)):
hashables.insert(index, xche_hashables[index])
xche = xche.parent
return hashables
@@ -2401,7 +2396,7 @@ class PBXNativeTarget(XCTarget):
# The headers phase should come before the resources, sources, and
# frameworks phases, if any.
insert_at = len(self._properties['buildPhases'])
- for index in xrange(0, len(self._properties['buildPhases'])):
+ for index in range(0, len(self._properties['buildPhases'])):
phase = self._properties['buildPhases'][index]
if isinstance(phase, PBXResourcesBuildPhase) or \
isinstance(phase, PBXSourcesBuildPhase) or \
@@ -2422,7 +2417,7 @@ class PBXNativeTarget(XCTarget):
# The resources phase should come before the sources and frameworks
# phases, if any.
insert_at = len(self._properties['buildPhases'])
- for index in xrange(0, len(self._properties['buildPhases'])):
+ for index in range(0, len(self._properties['buildPhases'])):
phase = self._properties['buildPhases'][index]
if isinstance(phase, PBXSourcesBuildPhase) or \
isinstance(phase, PBXFrameworksBuildPhase):
@@ -2844,7 +2839,7 @@ class PBXProject(XCContainerPortal):
# determine the sort order.
return cmp(x_index, y_index)
- for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
+ for other_pbxproject, ref_dict in self._other_pbxprojects.items():
# Build up a list of products in the remote project file, ordered the
# same as the targets that produce them.
remote_products = []
@@ -2889,7 +2884,7 @@ class XCProjectFile(XCObject):
self._XCPrint(file, 0, '{ ')
else:
self._XCPrint(file, 0, '{\n')
- for property, value in sorted(self._properties.iteritems(),
+ for property, value in sorted(self._properties.items(),
cmp=lambda x, y: cmp(x, y)):
if property == 'objects':
self._PrintObjects(file)