Diffstat (limited to 'deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp')
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/.npmignore  3
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/CHANGELOG.md  176
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/LICENSE  24
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/README.md  242
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/addon.gypi  133
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/bin/node-gyp.js  148
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/.npmignore  1
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/AUTHORS  12
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/DEPS  24
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/LICENSE  27
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/OWNERS  1
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/PRESUBMIT.py  137
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/aosp_manifest.xml  466
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/buildbot_run.py  136
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/OWNERS  6
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/README  3
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/cq_config.json  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/codereview.settings  10
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc  12
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp  8
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp.bat  5
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp_main.py  16
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyptest.py  274
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py  340
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py  208
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py  1096
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py  1483
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py  58
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py  147
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py  270
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py  443
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/__init__.py  548
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common.py  608
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common_test.py  72
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py  162
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py  103
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py  54
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py  0
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py  741
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py  1095
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py  1221
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py  99
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py  424
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py  94
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py  56
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py  2220
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py  3494
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py  37
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py  2410
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py  47
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py  1300
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py  23
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input.py  2897
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input_test.py  90
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py  610
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py  1087
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py  160
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py  288
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py  46
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py  314
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py  1629
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py  270
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py  2927
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py  69
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples  81
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples.bat  5
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/setup.py  19
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/README  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/README  5
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec  27
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec  226
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/README  12
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el  63
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp.el  275
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh  7
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp  1105
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified  1107
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/graphviz.py  100
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_gyp.py  155
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_sln.py  169
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_vcproj.py  329
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/Find-VS2017.cs  271
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/build.js  266
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/clean.js  22
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/configure.js  523
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-node-directory.js  61
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-vs2017.js  46
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/install.js  469
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/list.js  33
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/node-gyp.js  215
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/process-release.js  155
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/rebuild.js  14
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/remove.js  52
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.npmignore  5
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.travis.yml  9
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/LICENSE  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/README.md  76
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js  134
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/pipe.js  118
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/reader.js  68
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js  27
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/fstream.js  35
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/abstract.js  85
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/collect.js  70
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js  252
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js  174
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js  150
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js  107
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/get-type.js  33
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js  53
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js  95
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js  95
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js  111
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/reader.js  255
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js  36
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/writer.js  390
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/package.json  62
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/LICENSE  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/README.md  209
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/minimatch.js  923
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/LICENSE  21
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md  129
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js  200
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore  5
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md  21
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md  91
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js  59
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json  77
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml  4
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE  18
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown  62
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js  6
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js  13
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json  93
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js  39
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json  75
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/package.json  63
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.npmignore  1
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.travis.yml  9
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/LICENSE  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/README.md  211
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/bin/nopt.js  54
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/examples/my-program.js  30
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/lib/nopt.js  415
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/package.json  60
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/test/basic.js  273
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/LICENSE  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/README.md  350
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/bin/semver  133
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/package.json  54
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/range.bnf  16
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/semver.js  1203
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.npmignore  5
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.travis.yml  4
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/LICENSE  12
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/README.md  50
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/extracter.js  19
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/packer.js  24
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/reader.js  35
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js  30
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js  169
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry.js  220
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js  191
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header.js  140
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extract.js  94
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js  14
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/header.js  384
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/pack.js  236
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/parse.js  275
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE  25
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE  15
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md  14
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js  209
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json  66
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/package.json  67
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/tar.js  173
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js  53
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz  bin 4096 -> 0 bytes
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js  177
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar  bin 4608 -> 0 bytes
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js  33
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract-move.js  132
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract.js  367
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz  bin 19352 -> 0 bytes
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/header.js  183
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js  886
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack.js  952
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse-discard.js  29
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse.js  359
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js  20
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/package.json  92
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/src/win_delay_load_hook.cc  36
-rwxr-xr-x  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/docker.sh  164
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca-bundle.crt  40
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca.crt  21
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.crt  19
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.key  28
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/simple-proxy.js  24
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-addon.js  28
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-configure-python.js  74
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-download.js  102
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-accessible-sync.js  86
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-node-directory.js  115
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-python.js  339
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-options.js  25
-rw-r--r--  deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-process-release.js  637
206 files changed, 0 insertions, 51855 deletions
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/.npmignore b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/.npmignore
deleted file mode 100644
index 6748492014..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/.npmignore
+++ /dev/null
@@ -1,3 +0,0 @@
-gyp/test
-node_modules
-test/.node-gyp
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/CHANGELOG.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/CHANGELOG.md
deleted file mode 100644
index 9a56013dae..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/CHANGELOG.md
+++ /dev/null
@@ -1,176 +0,0 @@
-v3.6.2 2017-06-01
-=================
-
-* [[`72afdd62cd`](https://github.com/nodejs/node-gyp/commit/72afdd62cd)] - **build**: rename copyNodeLib() to doBuild() (Liu Chao) [#1206](https://github.com/nodejs/node-gyp/pull/1206)
-* [[`bad903ac70`](https://github.com/nodejs/node-gyp/commit/bad903ac70)] - **win**: more robust parsing of SDK version (Refael Ackermann) [#1198](https://github.com/nodejs/node-gyp/pull/1198)
-* [[`241752f381`](https://github.com/nodejs/node-gyp/commit/241752f381)] - Log dist-url. (Ben Noordhuis) [#1170](https://github.com/nodejs/node-gyp/pull/1170)
-* [[`386746c7d1`](https://github.com/nodejs/node-gyp/commit/386746c7d1)] - **configure**: use full path in node_lib_file GYP var (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964)
-* [[`0913b2dd99`](https://github.com/nodejs/node-gyp/commit/0913b2dd99)] - **build, win**: use target_arch to link with node.lib (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964)
-* [[`c307b302f7`](https://github.com/nodejs/node-gyp/commit/c307b302f7)] - **doc**: blorb about setting `npm_config_OPTION_NAME` (Refael Ackermann) [#1185](https://github.com/nodejs/node-gyp/pull/1185)
-
-v3.6.1 2017-04-30
-=================
-
-* [[`49801716c2`](https://github.com/nodejs/node-gyp/commit/49801716c2)] - **test**: fix test-find-python on v0.10.x buildbot. (Ben Noordhuis) [#1172](https://github.com/nodejs/node-gyp/pull/1172)
-* [[`a83a3801fc`](https://github.com/nodejs/node-gyp/commit/a83a3801fc)] - **test**: fix test/test-configure-python on AIX (Richard Lau) [#1131](https://github.com/nodejs/node-gyp/pull/1131)
-* [[`8a767145c9`](https://github.com/nodejs/node-gyp/commit/8a767145c9)] - **gyp**: Revert quote_cmd workaround (Kunal Pathak) [#1153](https://github.com/nodejs/node-gyp/pull/1153)
-* [[`c09cf7671e`](https://github.com/nodejs/node-gyp/commit/c09cf7671e)] - **doc**: add a note for using `configure` on Windows (Vse Mozhet Byt) [#1152](https://github.com/nodejs/node-gyp/pull/1152)
-* [[`da9cb5f411`](https://github.com/nodejs/node-gyp/commit/da9cb5f411)] - Delete superfluous .patch files. (Ben Noordhuis) [#1122](https://github.com/nodejs/node-gyp/pull/1122)
-
-v3.6.0 2017-03-16
-=================
-
-* [[`ae141e1906`](https://github.com/nodejs/node-gyp/commit/ae141e1906)] - **win**: find and setup for VS2017 (Refael Ackermann) [#1130](https://github.com/nodejs/node-gyp/pull/1130)
-* [[`ec5fc36a80`](https://github.com/nodejs/node-gyp/commit/ec5fc36a80)] - Add support to build node.js with chakracore for ARM. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873)
-* [[`a04ea3051a`](https://github.com/nodejs/node-gyp/commit/a04ea3051a)] - Add support to build node.js with chakracore. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873)
-* [[`93d7fa83c8`](https://github.com/nodejs/node-gyp/commit/93d7fa83c8)] - Upgrade semver dependency. (Ben Noordhuis) [#1107](https://github.com/nodejs/node-gyp/pull/1107)
-* [[`ff9a6fadfd`](https://github.com/nodejs/node-gyp/commit/ff9a6fadfd)] - Update link of gyp as Google code is shutting down (Peter Dave Hello) [#1061](https://github.com/nodejs/node-gyp/pull/1061)
-
-
-v3.5.0 2017-01-10
-=================
-
-* [[`762d19a39e`](https://github.com/nodejs/node-gyp/commit/762d19a39e)] - \[doc\] merge History.md and CHANGELOG.md (Rod Vagg)
-* [[`80fc5c3d31`](https://github.com/nodejs/node-gyp/commit/80fc5c3d31)] - Fix deprecated dependency warning (Simone Primarosa) [#1069](https://github.com/nodejs/node-gyp/pull/1069)
-* [[`05c44944fd`](https://github.com/nodejs/node-gyp/commit/05c44944fd)] - Open the build file with universal-newlines mode (Guy Margalit) [#1053](https://github.com/nodejs/node-gyp/pull/1053)
-* [[`37ae7be114`](https://github.com/nodejs/node-gyp/commit/37ae7be114)] - Try python launcher when stock python is python 3. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992)
-* [[`e3778d9907`](https://github.com/nodejs/node-gyp/commit/e3778d9907)] - Add lots of findPython() tests. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992)
-* [[`afc766adf6`](https://github.com/nodejs/node-gyp/commit/afc766adf6)] - Unset executable bit for .bat files (Pavel Medvedev) [#969](https://github.com/nodejs/node-gyp/pull/969)
-* [[`ddac348991`](https://github.com/nodejs/node-gyp/commit/ddac348991)] - Use push on PYTHONPATH and add tests (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
-* [[`b182a19042`](https://github.com/nodejs/node-gyp/commit/b182a19042)] - ***Revert*** "add "path-array" dep" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
-* [[`7c08b85c5a`](https://github.com/nodejs/node-gyp/commit/7c08b85c5a)] - ***Revert*** "**configure**: use "path-array" for PYTHONPATH" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990)
-* [[`9c8d275526`](https://github.com/nodejs/node-gyp/commit/9c8d275526)] - Add --devdir flag. (Ben Noordhuis) [#916](https://github.com/nodejs/node-gyp/pull/916)
-* [[`f6eab1f9e4`](https://github.com/nodejs/node-gyp/commit/f6eab1f9e4)] - **doc**: add windows-build-tools to readme (Felix Rieseberg) [#970](https://github.com/nodejs/node-gyp/pull/970)
-
-v3.4.0 2016-06-28
-=================
-
-* [[`ce5fd04e94`](https://github.com/nodejs/node-gyp/commit/ce5fd04e94)] - **deps**: update minimatch version (delphiactual) [#961](https://github.com/nodejs/node-gyp/pull/961)
-* [[`77383ddd85`](https://github.com/nodejs/node-gyp/commit/77383ddd85)] - Replace fs.accessSync call to fs.statSync (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955)
-* [[`0dba4bda57`](https://github.com/nodejs/node-gyp/commit/0dba4bda57)] - **test**: add simple addon test (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955)
-* [[`c4344b3889`](https://github.com/nodejs/node-gyp/commit/c4344b3889)] - **doc**: add --target option to README (Gibson Fahnestock) [#958](https://github.com/nodejs/node-gyp/pull/958)
-* [[`cc778e9215`](https://github.com/nodejs/node-gyp/commit/cc778e9215)] - Override BUILDING_UV_SHARED, BUILDING_V8_SHARED. (Ben Noordhuis) [#915](https://github.com/nodejs/node-gyp/pull/915)
-* [[`af35b2ad32`](https://github.com/nodejs/node-gyp/commit/af35b2ad32)] - Move VC++ Build Tools to Build Tools landing page. (Andrew Pardoe) [#953](https://github.com/nodejs/node-gyp/pull/953)
-* [[`f31482e226`](https://github.com/nodejs/node-gyp/commit/f31482e226)] - **win**: work around __pfnDliNotifyHook2 type change (Alexis Campailla) [#952](https://github.com/nodejs/node-gyp/pull/952)
-* [[`3df8222fa5`](https://github.com/nodejs/node-gyp/commit/3df8222fa5)] - Allow for npmlog@3.x (Rebecca Turner) [#950](https://github.com/nodejs/node-gyp/pull/950)
-* [[`a4fa07b390`](https://github.com/nodejs/node-gyp/commit/a4fa07b390)] - More verbose error on locating msbuild.exe failure. (Mateusz Jaworski) [#930](https://github.com/nodejs/node-gyp/pull/930)
-* [[`4ee31329e0`](https://github.com/nodejs/node-gyp/commit/4ee31329e0)] - **doc**: add command options to README.md (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937)
-* [[`c8c7ca86b9`](https://github.com/nodejs/node-gyp/commit/c8c7ca86b9)] - Add --silent option for zero output. (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937)
-* [[`ac29d23a7c`](https://github.com/nodejs/node-gyp/commit/ac29d23a7c)] - Upgrade to glob@7.0.3. (Ben Noordhuis) [#943](https://github.com/nodejs/node-gyp/pull/943)
-* [[`15fd56be3d`](https://github.com/nodejs/node-gyp/commit/15fd56be3d)] - Enable V8 deprecation warnings for native modules (Matt Loring) [#920](https://github.com/nodejs/node-gyp/pull/920)
-* [[`7f1c1b960c`](https://github.com/nodejs/node-gyp/commit/7f1c1b960c)] - **gyp**: improvements for android generator (Robert Chiras) [#935](https://github.com/nodejs/node-gyp/pull/935)
-* [[`088082766c`](https://github.com/nodejs/node-gyp/commit/088082766c)] - Update Windows install instructions (Sara Itani) [#867](https://github.com/nodejs/node-gyp/pull/867)
-* [[`625c1515f9`](https://github.com/nodejs/node-gyp/commit/625c1515f9)] - **gyp**: inherit CC/CXX for CC/CXX.host (Johan Bergström) [#908](https://github.com/nodejs/node-gyp/pull/908)
-* [[`3bcb1720e4`](https://github.com/nodejs/node-gyp/commit/3bcb1720e4)] - Add support for the Python launcher on Windows (Patrick Westerhoff) [#894](https://github.com/nodejs/node-gyp/pull/894)
-
-v3.3.1 2016-03-04
-=================
-
-* [[`a981ef847a`](https://github.com/nodejs/node-gyp/commit/a981ef847a)] - **gyp**: fix android generator (Robert Chiras) [#889](https://github.com/nodejs/node-gyp/pull/889)
-
-v3.3.0 2016-02-16
-=================
-
-* [[`818d854a4d`](https://github.com/nodejs/node-gyp/commit/818d854a4d)] - Introduce NODEJS_ORG_MIRROR and IOJS_ORG_MIRROR (Rod Vagg) [#878](https://github.com/nodejs/node-gyp/pull/878)
-* [[`d1e4cc4b62`](https://github.com/nodejs/node-gyp/commit/d1e4cc4b62)] - **(SEMVER-MINOR)** Download headers tarball for ~0.12.10 || ~0.10.42 (Rod Vagg) [#877](https://github.com/nodejs/node-gyp/pull/877)
-* [[`6e28ad1bea`](https://github.com/nodejs/node-gyp/commit/6e28ad1bea)] - Allow for npmlog@2.x (Rebecca Turner) [#861](https://github.com/nodejs/node-gyp/pull/861)
-* [[`07371e5812`](https://github.com/nodejs/node-gyp/commit/07371e5812)] - Use -fPIC for NetBSD. (Marcin Cieślak) [#856](https://github.com/nodejs/node-gyp/pull/856)
-* [[`8c4b0ffa50`](https://github.com/nodejs/node-gyp/commit/8c4b0ffa50)] - **(SEMVER-MINOR)** Add --cafile command line option. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837)
-* [[`b3ad43498e`](https://github.com/nodejs/node-gyp/commit/b3ad43498e)] - **(SEMVER-MINOR)** Make download() function testable. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837)
-
-v3.2.1 2015-12-03
-=================
-
-* [[`ab89b477c4`](https://github.com/nodejs/node-gyp/commit/ab89b477c4)] - Upgrade gyp to b3cef02. (Ben Noordhuis) [#831](https://github.com/nodejs/node-gyp/pull/831)
-* [[`90078ecb17`](https://github.com/nodejs/node-gyp/commit/90078ecb17)] - Define WIN32_LEAN_AND_MEAN conditionally. (Ben Noordhuis) [#824](https://github.com/nodejs/node-gyp/pull/824)
-
-v3.2.0 2015-11-25
-=================
-
-* [[`268f1ca4c7`](https://github.com/nodejs/node-gyp/commit/268f1ca4c7)] - Use result of `which` when searching for python. (Refael Ackermann) [#668](https://github.com/nodejs/node-gyp/pull/668)
-* [[`817ed9bd78`](https://github.com/nodejs/node-gyp/commit/817ed9bd78)] - Add test for python executable search logic. (Ben Noordhuis) [#756](https://github.com/nodejs/node-gyp/pull/756)
-* [[`0e2dfda1f3`](https://github.com/nodejs/node-gyp/commit/0e2dfda1f3)] - Fix test/test-options when run through `npm test`. (Ben Noordhuis) [#755](https://github.com/nodejs/node-gyp/pull/755)
-* [[`9bfa0876b4`](https://github.com/nodejs/node-gyp/commit/9bfa0876b4)] - Add support for AIX (Michael Dawson) [#753](https://github.com/nodejs/node-gyp/pull/753)
-* [[`a8d441a0a2`](https://github.com/nodejs/node-gyp/commit/a8d441a0a2)] - Update README for Windows 10 support. (Jason Williams) [#766](https://github.com/nodejs/node-gyp/pull/766)
-* [[`d1d6015276`](https://github.com/nodejs/node-gyp/commit/d1d6015276)] - Update broken links and switch to HTTPS. (andrew morton)
-
-v3.1.0 2015-11-14
-=================
-
-* [[`9049241f91`](https://github.com/nodejs/node-gyp/commit/9049241f91)] - **gyp**: don't use links at all, just copy the files instead (Nathan Zadoks)
-* [[`8ef90348d1`](https://github.com/nodejs/node-gyp/commit/8ef90348d1)] - **gyp**: apply https://codereview.chromium.org/11361103/ (Nathan Rajlich)
-* [[`a2ed0df84e`](https://github.com/nodejs/node-gyp/commit/a2ed0df84e)] - **gyp**: always install into $PRODUCT_DIR (Nathan Rajlich)
-* [[`cc8b2fa83e`](https://github.com/nodejs/node-gyp/commit/cc8b2fa83e)] - Update gyp to b3cef02. (Imran Iqbal) [#781](https://github.com/nodejs/node-gyp/pull/781)
-* [[`f5d86eb84e`](https://github.com/nodejs/node-gyp/commit/f5d86eb84e)] - Update to tar@2.0.0. (Edgar Muentes) [#797](https://github.com/nodejs/node-gyp/pull/797)
-* [[`2ac7de02c4`](https://github.com/nodejs/node-gyp/commit/2ac7de02c4)] - Fix infinite loop with zero-length options. (Ben Noordhuis) [#745](https://github.com/nodejs/node-gyp/pull/745)
-* [[`101bed639b`](https://github.com/nodejs/node-gyp/commit/101bed639b)] - This platform value came from debian package, and now the value (Jérémy Lal) [#738](https://github.com/nodejs/node-gyp/pull/738)
-
-v3.0.3 2015-09-14
-=================
-
-* [[`ad827cda30`](https://github.com/nodejs/node-gyp/commit/ad827cda30)] - tarballUrl global and && when checking for iojs (Lars-Magnus Skog) [#729](https://github.com/nodejs/node-gyp/pull/729)
-
-v3.0.2 2015-09-12
-=================
-
-* [[`6e8c3bf3c6`](https://github.com/nodejs/node-gyp/commit/6e8c3bf3c6)] - add back support for passing additional cmdline args (Rod Vagg) [#723](https://github.com/nodejs/node-gyp/pull/723)
-* [[`ff82f2f3b9`](https://github.com/nodejs/node-gyp/commit/ff82f2f3b9)] - fixed broken link in docs to Visual Studio 2013 download (simon-p-r) [#722](https://github.com/nodejs/node-gyp/pull/722)
-
-v3.0.1 2015-09-08
-=================
-
-* [[`846337e36b`](https://github.com/nodejs/node-gyp/commit/846337e36b)] - normalise versions for target == this comparison (Rod Vagg) [#716](https://github.com/nodejs/node-gyp/pull/716)
-
-v3.0.0 2015-09-08
-=================
-
-* [[`9720d0373c`](https://github.com/nodejs/node-gyp/commit/9720d0373c)] - remove node_modules from tree (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
-* [[`6dcf220db7`](https://github.com/nodejs/node-gyp/commit/6dcf220db7)] - test version major directly, don't use semver.satisfies() (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
-* [[`938dd18d1c`](https://github.com/nodejs/node-gyp/commit/938dd18d1c)] - refactor for clarity, fix dist-url, add env var dist-url functionality (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
-* [[`9e9df66a06`](https://github.com/nodejs/node-gyp/commit/9e9df66a06)] - use process.release, make aware of io.js & node v4 differences (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711)
-* [[`1ea7ed01f4`](https://github.com/nodejs/node-gyp/commit/1ea7ed01f4)] - **deps**: update graceful-fs dependency to the latest (Sakthipriyan Vairamani) [#714](https://github.com/nodejs/node-gyp/pull/714)
-* [[`0fbc387b35`](https://github.com/nodejs/node-gyp/commit/0fbc387b35)] - Update repository URLs. (Ben Noordhuis) [#715](https://github.com/nodejs/node-gyp/pull/715)
-* [[`bbedb8868b`](https://github.com/nodejs/node-gyp/commit/bbedb8868b)] - **(SEMVER-MAJOR)** **win**: enable delay-load hook by default (Jeremiah Senkpiel) [#708](https://github.com/nodejs/node-gyp/pull/708)
-* [[`85ed107565`](https://github.com/nodejs/node-gyp/commit/85ed107565)] - Merge pull request #664 from othiym23/othiym23/allow-semver-5 (Nathan Rajlich)
-* [[`0c720d234c`](https://github.com/nodejs/node-gyp/commit/0c720d234c)] - allow semver@5 (Forrest L Norvell)
-
-2.0.2 / 2015-07-14
-==================
-
- * Use HTTPS for dist url (#656, @SonicHedgehog)
- * Merge pull request #648 from nevosegal/master
- * Merge pull request #650 from magic890/patch-1
- * Updated Installation section on README
- * Updated link to gyp user documentation
- * Fix download error message spelling (#643, @tomxtobin)
- * Merge pull request #637 from lygstate/master
- * Set NODE_GYP_DIR for addon.gypi to set an absolute path for
- src/win_delay_load_hook.c, and fix the long relative path issue on Win32.
- Fixes #636 (#637, @lygstate).
-
-2.0.1 / 2015-05-28
-==================
-
- * configure: try/catch the semver range.test() call
- * README: update for visual studio 2013 (#510, @samccone)
-
-2.0.0 / 2015-05-24
-==================
-
- * configure: check for python2 executable by default, fallback to python
- * configure: don't clobber existing $PYTHONPATH
- * configure: use "path-array" for PYTHONPATH
- * gyp: fix for non-acsii userprofile name on Windows
- * gyp: always install into $PRODUCT_DIR
- * gyp: apply https://codereview.chromium.org/11361103/
- * gyp: don't use links at all, just copy the files instead
- * gyp: update gyp to e1c8fcf7
- * Updated README.md with updated Windows build info
- * Show URL when a download fails
- * package: add a "license" field
- * move HMODULE m declaration to top
- * Only add "-undefined dynamic_lookup" to loadable_module targets
- * win: optionally allow node.exe/iojs.exe to be renamed
- * Avoid downloading shasums if using tarPath
- * Add target name preprocessor define: `NODE_GYP_MODULE_NAME`
- * Show better error message in case of bad network settings
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/LICENSE
deleted file mode 100644
index 2ea4dc5efb..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2012 Nathan Rajlich <nathan@tootallnate.net>
-
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without
-restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/README.md
deleted file mode 100644
index a5c1325c71..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/README.md
+++ /dev/null
@@ -1,242 +0,0 @@
-node-gyp
-=========
-### Node.js native addon build tool
-
-`node-gyp` is a cross-platform command-line tool written in Node.js for compiling
-native addon modules for Node.js. It bundles the [gyp](https://gyp.gsrc.io)
-project used by the Chromium team and takes away the pain of dealing with the
-various differences in build platforms. It is the replacement for the `node-waf`
-program, which was removed in node `v0.8`. If you have a native addon for node that
-still has a `wscript` file, then you should definitely add a `binding.gyp` file
-to support the latest versions of node.
-
-Multiple target versions of node are supported (i.e. `0.8`, ..., `4`, `5`, `6`,
-etc.), regardless of what version of node is actually installed on your system
-(`node-gyp` downloads the necessary development files or headers for the target version).
-
-#### Features:
-
- * Easy to use, consistent interface
- * Same commands to build your module on every platform
- * Supports multiple target versions of Node
-
-
-Installation
-------------
-
-You can install with `npm`:
-
-``` bash
-$ npm install -g node-gyp
-```
-
-You will also need to install:
-
- * On Unix:
- * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported)
- * `make`
- * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org)
- * On Mac OS X:
- * `python` (`v2.7` recommended, `v3.x.x` is __*not*__ supported) (already installed on Mac OS X)
- * [Xcode](https://developer.apple.com/xcode/download/)
- * You also need to install the `Command Line Tools` via Xcode. You can find this under the menu `Xcode -> Preferences -> Downloads`
- * This step will install `gcc` and the related toolchain containing `make`
- * On Windows:
- * Option 1: Install all the required tools and configurations using Microsoft's [windows-build-tools](https://github.com/felixrieseberg/windows-build-tools) using `npm install --global --production windows-build-tools` from an elevated PowerShell or CMD.exe (run as Administrator).
- * Option 2: Install tools and configuration manually:
- * Visual C++ Build Environment:
- * Option 1: Install [Visual C++ Build Tools](http://landinghub.visualstudio.com/visual-cpp-build-tools) using the **Default Install** option.
-
- * Option 2: Install [Visual Studio 2015](https://www.visualstudio.com/products/visual-studio-community-vs) (or modify an existing installation) and select *Common Tools for Visual C++* during setup. This also works with the free Community and Express for Desktop editions.
-
- > :bulb: [Windows Vista / 7 only] requires [.NET Framework 4.5.1](http://www.microsoft.com/en-us/download/details.aspx?id=40773)
-
- * Install [Python 2.7](https://www.python.org/downloads/) (`v3.x.x` is not supported), and run `npm config set python python2.7` (or see below for further instructions on specifying the proper Python version and path.)
- * Launch cmd, `npm config set msvs_version 2015`
-
- If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips.
-
-If you have multiple Python versions installed, you can specify which Python
-version `node-gyp` should use by setting the `--python` option:
-
-``` bash
-$ node-gyp --python /path/to/python2.7
-```
-
-If `node-gyp` is called by way of `npm` *and* you have multiple versions of
-Python installed, then you can set `npm`'s 'python' config key to the appropriate
-value:
-
-``` bash
-$ npm config set python /path/to/executable/python2.7
-```
-
-Note that OS X is just a flavour of Unix and so needs `python`, `make`, and C/C++.
-An easy way to obtain these is to install Xcode from Apple,
-and then use it to install the command line tools (under Preferences -> Downloads).
-
-How to Use
-----------
-
-To compile your native addon, first go to its root directory:
-
-``` bash
-$ cd my_node_addon
-```
-
-The next step is to generate the appropriate project build files for the current
-platform. Use `configure` for that:
-
-``` bash
-$ node-gyp configure
-```
-
-Auto-detection fails for Visual C++ Build Tools 2015, so `--msvs_version=2015`
-needs to be added (not needed when run by npm as configured above):
-``` bash
-$ node-gyp configure --msvs_version=2015
-```
-
-__Note__: The `configure` step looks for the `binding.gyp` file in the current
-directory to process. See below for instructions on creating the `binding.gyp` file.
-
-Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file
-(on Windows) in the `build/` directory. Next invoke the `build` command:
-
-``` bash
-$ node-gyp build
-```
-
-Now you have your compiled `.node` bindings file! The compiled bindings end up
-in `build/Debug/` or `build/Release/`, depending on the build mode. At this point
-you can require the `.node` file with Node and run your tests!
-
-__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or
-`-d`) switch when running either the `configure`, `build` or `rebuild` command.
-
-
-The "binding.gyp" file
-----------------------
-
-Previously when node had `node-waf` you had to write a `wscript` file. The
-replacement for that is the `binding.gyp` file, which describes the configuration
-to build your module in a JSON-like format. This file gets placed in the root of
-your package, alongside the `package.json` file.
-
-A barebones `gyp` file appropriate for building a node addon looks like:
-
-``` python
-{
- "targets": [
- {
- "target_name": "binding",
- "sources": [ "src/binding.cc" ]
- }
- ]
-}
-```
-
-Some additional resources for addons and writing `gyp` files:
-
- * ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative)
- * ["Hello World" node addon example](https://github.com/nodejs/node/tree/master/test/addons/hello-world)
- * [gyp user documentation](https://gyp.gsrc.io/docs/UserDocumentation.md)
- * [gyp input format reference](https://gyp.gsrc.io/docs/InputFormatReference.md)
- * [*"binding.gyp" files out in the wild* wiki page](https://github.com/nodejs/node-gyp/wiki/%22binding.gyp%22-files-out-in-the-wild)
-
-
-Commands
---------
-
-`node-gyp` responds to the following commands:
-
-| **Command** | **Description**
-|:--------------|:---------------------------------------------------------------
-| `help` | Shows the help dialog
-| `build` | Invokes `make`/`msbuild.exe` and builds the native addon
-| `clean` | Removes the `build` directory if it exists
-| `configure` | Generates project build files for the current platform
-| `rebuild` | Runs `clean`, `configure` and `build` all in a row
-| `install` | Installs node header files for the given version
-| `list` | Lists the currently installed node header versions
-| `remove` | Removes the node header files for the given version
-
-
-Command Options
---------
-
-`node-gyp` accepts the following command options:
-
-| **Command** | **Description**
-|:----------------------------------|:------------------------------------------
-| `-j n`, `--jobs n` | Run make in parallel
-| `--target=v6.2.1` | Node version to build for (default=process.version)
-| `--silly`, `--loglevel=silly` | Log all progress to console
-| `--verbose`, `--loglevel=verbose` | Log most progress to console
-| `--silent`, `--loglevel=silent` | Don't log anything to console
-| `-d`, `--debug` | Make Debug build (default=Release)
-| `--release`, `--no-debug` | Make Release build
-| `-C $dir`, `--directory=$dir` | Run command in different directory
-| `--make=$make` | Override make command (e.g. gmake)
-| `--thin=yes` | Enable thin static libraries
-| `--arch=$arch` | Set target architecture (e.g. ia32)
-| `--tarball=$path` | Get headers from a local tarball
-| `--devdir=$path` | SDK download directory (default=~/.node-gyp)
-| `--ensure` | Don't reinstall headers if already present
-| `--dist-url=$url` | Download header tarball from custom URL
-| `--proxy=$url` | Set HTTP proxy for downloading header tarball
-| `--cafile=$cafile` | Override default CA chain (to download tarball)
-| `--nodedir=$path` | Set the path to the node source code
-| `--python=$path` | Set path to the python (2) binary
-| `--msvs_version=$version` | Set Visual Studio version (win)
-| `--solution=$solution` | Set Visual Studio Solution version (win)
-
-
-Configuration
---------
-
-__`node-gyp` responds to environment variables or `npm` configuration__
-1. Environment variables take the form `npm_config_OPTION_NAME` for any of the
- options listed above (dashes in option names should be replaced by underscores).
- These also work when `node-gyp` is invoked directly:
- `$ export npm_config_devdir=/tmp/.gyp`
- or on Windows
- `> set npm_config_devdir=c:\temp\.gyp`
-2. As `npm` configuration, variables take the form `OPTION_NAME`.
- This way only works when `node-gyp` is executed by `npm`:
- `$ npm config set [--global] devdir /tmp/.gyp`
- `$ npm i buffertools`
-
-
-
-License
--------
-
-(The MIT License)
-
-Copyright (c) 2012 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
-[python-v2.7.10]: https://www.python.org/downloads/release/python-2710/
-[msvc2013]: https://www.microsoft.com/en-gb/download/details.aspx?id=44914
-[win7sdk]: https://www.microsoft.com/en-us/download/details.aspx?id=8279
-[compiler update for the Windows SDK 7.1]: https://www.microsoft.com/en-us/download/details.aspx?id=4422
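The deleted README stops just short of showing the last step it describes, requiring the compiled `.node` file. A minimal sketch of that step, assuming the barebones `binding.gyp` shown above (hence a target named `binding`) and a completed `node-gyp` build; the `load-binding.js` filename is hypothetical:

``` js
// load-binding.js (hypothetical) -- loads the addon compiled by node-gyp.
var path = require('path')

// Per the README above, the compiled bindings end up in build/Release/ or
// build/Debug/ depending on the build mode; try Release first.
var binding
try {
  binding = require(path.join(__dirname, 'build', 'Release', 'binding.node'))
} catch (e) {
  binding = require(path.join(__dirname, 'build', 'Debug', 'binding.node'))
}

module.exports = binding
```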
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/addon.gypi b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/addon.gypi
deleted file mode 100644
index f2f6a7925e..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/addon.gypi
+++ /dev/null
@@ -1,133 +0,0 @@
-{
- 'variables' : {
- 'node_engine_include_dir%': 'deps/v8/include',
- },
- 'target_defaults': {
- 'type': 'loadable_module',
- 'win_delay_load_hook': 'true',
- 'product_prefix': '',
-
- 'conditions': [
- [ 'node_engine=="chakracore"', {
- 'variables': {
- 'node_engine_include_dir%': 'deps/chakrashim/include'
- },
- }]
- ],
-
- 'include_dirs': [
- '<(node_root_dir)/include/node',
- '<(node_root_dir)/src',
- '<(node_root_dir)/deps/uv/include',
- '<(node_root_dir)/<(node_engine_include_dir)'
- ],
- 'defines!': [
- 'BUILDING_UV_SHARED=1', # Inherited from common.gypi.
- 'BUILDING_V8_SHARED=1', # Inherited from common.gypi.
- ],
- 'defines': [
- 'NODE_GYP_MODULE_NAME=>(_target_name)',
- 'USING_UV_SHARED=1',
- 'USING_V8_SHARED=1',
- # Warn when using deprecated V8 APIs.
- 'V8_DEPRECATION_WARNINGS=1'
- ],
-
- 'target_conditions': [
- ['_type=="loadable_module"', {
- 'product_extension': 'node',
- 'defines': [
- 'BUILDING_NODE_EXTENSION'
- ],
- 'xcode_settings': {
- 'OTHER_LDFLAGS': [
- '-undefined dynamic_lookup'
- ],
- },
- }],
-
- ['_type=="static_library"', {
- # set to `1` to *disable* the -T thin archive 'ld' flag.
- # older linkers don't support this flag.
- 'standalone_static_library': '<(standalone_static_library)'
- }],
-
- ['_win_delay_load_hook=="true"', {
- # If the addon specifies `'win_delay_load_hook': 'true'` in its
- # binding.gyp, link a delay-load hook into the DLL. This hook ensures
- # that the addon will work regardless of whether the node/iojs binary
- # is named node.exe, iojs.exe, or something else.
- 'conditions': [
- [ 'OS=="win"', {
- 'sources': [
- '<(node_gyp_dir)/src/win_delay_load_hook.cc',
- ],
- 'msvs_settings': {
- 'VCLinkerTool': {
- 'DelayLoadDLLs': [ 'iojs.exe', 'node.exe' ],
- # Don't print a linker warning when no imports from either .exe
- # are used.
- 'AdditionalOptions': [ '/ignore:4199' ],
- },
- },
- }],
- ],
- }],
- ],
-
- 'conditions': [
- [ 'OS=="mac"', {
- 'defines': [
- '_DARWIN_USE_64_BIT_INODE=1'
- ],
- 'xcode_settings': {
- 'DYLIB_INSTALL_NAME_BASE': '@rpath'
- },
- }],
- [ 'OS=="aix"', {
- 'ldflags': [
- '-Wl,-bimport:<(node_exp_file)'
- ],
- }],
- [ 'OS=="win"', {
- 'conditions': [
- ['node_engine=="chakracore"', {
- 'library_dirs': [ '<(node_root_dir)/$(ConfigurationName)' ],
- 'libraries': [ '<@(node_engine_libs)' ],
- }],
- ],
- 'libraries': [
- '-lkernel32.lib',
- '-luser32.lib',
- '-lgdi32.lib',
- '-lwinspool.lib',
- '-lcomdlg32.lib',
- '-ladvapi32.lib',
- '-lshell32.lib',
- '-lole32.lib',
- '-loleaut32.lib',
- '-luuid.lib',
- '-lodbc32.lib',
- '-lDelayImp.lib',
- '-l"<(node_lib_file)"'
- ],
- 'msvs_disabled_warnings': [
- # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent<T>'
- # needs to have dll-interface to be used by
- # clients of class 'node::ObjectWrap'
- 4251
- ],
- }, {
- # OS!="win"
- 'defines': [
- '_LARGEFILE_SOURCE',
- '_FILE_OFFSET_BITS=64'
- ],
- }],
- [ 'OS in "freebsd openbsd netbsd solaris" or \
- (OS=="linux" and target_arch!="ia32")', {
- 'cflags': [ '-fPIC' ],
- }]
- ]
- }
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/bin/node-gyp.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/bin/node-gyp.js
deleted file mode 100755
index 70d7d50262..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/bin/node-gyp.js
+++ /dev/null
@@ -1,148 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * Set the title.
- */
-
-process.title = 'node-gyp'
-
-/**
- * Module dependencies.
- */
-
-var gyp = require('../')
-var log = require('npmlog')
-var osenv = require('osenv')
-var path = require('path')
-
-/**
- * Process and execute the selected commands.
- */
-
-var prog = gyp()
-var completed = false
-prog.parseArgv(process.argv)
-prog.devDir = prog.opts.devdir
-
-var homeDir = osenv.home()
-if (prog.devDir) {
- prog.devDir = prog.devDir.replace(/^~/, homeDir)
-} else if (homeDir) {
- prog.devDir = path.resolve(homeDir, '.node-gyp')
-} else {
- throw new Error(
- "node-gyp requires that the user's home directory is specified " +
- "in either of the environmental variables HOME or USERPROFILE. " +
- "Overide with: --devdir /path/to/.node-gyp")
-}
-
-if (prog.todo.length === 0) {
- if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) {
- console.log('v%s', prog.version)
- } else {
- console.log('%s', prog.usage())
- }
- return process.exit(0)
-}
-
-log.info('it worked if it ends with', 'ok')
-log.verbose('cli', process.argv)
-log.info('using', 'node-gyp@%s', prog.version)
-log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch)
-
-
-/**
- * Change dir if -C/--directory was passed.
- */
-
-var dir = prog.opts.directory
-if (dir) {
- var fs = require('fs')
- try {
- var stat = fs.statSync(dir)
- if (stat.isDirectory()) {
- log.info('chdir', dir)
- process.chdir(dir)
- } else {
- log.warn('chdir', dir + ' is not a directory')
- }
- } catch (e) {
- if (e.code === 'ENOENT') {
- log.warn('chdir', dir + ' is not a directory')
- } else {
- log.warn('chdir', 'error during chdir() "%s"', e.message)
- }
- }
-}
-
-function run () {
- var command = prog.todo.shift()
- if (!command) {
- // done!
- completed = true
- log.info('ok')
- return
- }
-
- prog.commands[command.name](command.args, function (err) {
- if (err) {
- log.error(command.name + ' error')
- log.error('stack', err.stack)
- errorMessage()
- log.error('not ok')
- return process.exit(1)
- }
- if (command.name == 'list') {
- var versions = arguments[1]
- if (versions.length > 0) {
- versions.forEach(function (version) {
- console.log(version)
- })
- } else {
- console.log('No node development files installed. Use `node-gyp install` to install a version.')
- }
- } else if (arguments.length >= 2) {
- console.log.apply(console, [].slice.call(arguments, 1))
- }
-
- // now run the next command in the queue
- process.nextTick(run)
- })
-}
-
-process.on('exit', function (code) {
- if (!completed && !code) {
- log.error('Completion callback never invoked!')
- issueMessage()
- process.exit(6)
- }
-})
-
-process.on('uncaughtException', function (err) {
- log.error('UNCAUGHT EXCEPTION')
- log.error('stack', err.stack)
- issueMessage()
- process.exit(7)
-})
-
-function errorMessage () {
- // copied from npm's lib/util/error-handler.js
- var os = require('os')
- log.error('System', os.type() + ' ' + os.release())
- log.error('command', process.argv
- .map(JSON.stringify).join(' '))
- log.error('cwd', process.cwd())
- log.error('node -v', process.version)
- log.error('node-gyp -v', 'v' + prog.package.version)
-}
-
-function issueMessage () {
- errorMessage()
- log.error('', [ 'This is a bug in `node-gyp`.'
- , 'Try to update node-gyp and file an Issue if it does not help:'
- , ' <https://github.com/nodejs/node-gyp/issues>'
- ].join('\n'))
-}
-
-// start running the given commands!
-run()
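
The run() loop above drains prog.todo one command at a time, re-entering itself via process.nextTick so the stack unwinds between commands and the 'exit' guard can tell when a completion callback was never invoked. A standalone sketch of that queue pattern, with illustrative task names in place of the real gyp commands:

    // Minimal sketch of the sequential command queue used above.
    // Each entry's fn takes (args, callback); the next command only
    // starts once the previous callback has fired.
    var todo = [
      { name: 'configure', args: [], fn: function (args, cb) { cb(null) } },
      { name: 'build', args: [], fn: function (args, cb) { cb(null) } }
    ]

    function run () {
      var command = todo.shift()
      if (!command) {
        console.log('ok') // queue drained, mirroring the final log.info('ok')
        return
      }
      command.fn(command.args, function (err) {
        if (err) {
          console.error(command.name + ' error: ' + err.message)
          return process.exit(1)
        }
        process.nextTick(run) // unwind the stack before the next command
      })
    }

    run()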
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/.npmignore b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/.npmignore
deleted file mode 100644
index 0d20b6487c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-*.pyc
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/AUTHORS b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/AUTHORS
deleted file mode 100644
index fecf84a1c4..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/AUTHORS
+++ /dev/null
@@ -1,12 +0,0 @@
-# Names should be added to this file like so:
-# Name or Organization <email address>
-
-Google Inc.
-Bloomberg Finance L.P.
-Yandex LLC
-
-Steven Knight <knight@baldmt.com>
-Ryan Norton <rnorton10@gmail.com>
-David J. Sankel <david@sankelsoftware.com>
-Eric N. Vander Weele <ericvw@gmail.com>
-Tom Freudenberg <th.freudenberg@gmail.com>
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/DEPS b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/DEPS
deleted file mode 100644
index 2e1120f274..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/DEPS
+++ /dev/null
@@ -1,24 +0,0 @@
-# DEPS file for gclient use in buildbot execution of gyp tests.
-#
-# (You don't need to use gclient for normal GYP development work.)
-
-vars = {
- "chrome_trunk": "http://src.chromium.org/svn/trunk",
- "googlecode_url": "http://%s.googlecode.com/svn",
-}
-
-deps = {
-}
-
-deps_os = {
- "win": {
- "third_party/cygwin":
- Var("chrome_trunk") + "/deps/third_party/cygwin@66844",
-
- "third_party/python_26":
- Var("chrome_trunk") + "/tools/third_party/python_26@89111",
-
- "src/third_party/pefile":
- (Var("googlecode_url") % "pefile") + "/trunk@63",
- },
-}
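
For reference, gclient evaluates the DEPS file above as Python, so the Var() references are plain string substitution: the cygwin entry resolves to http://src.chromium.org/svn/trunk/deps/third_party/cygwin@66844, and the pefile entry to http://pefile.googlecode.com/svn/trunk@63.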
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/LICENSE
deleted file mode 100644
index ab6b011a10..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2009 Google Inc. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/OWNERS b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/OWNERS
deleted file mode 100644
index 72e8ffc0db..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-*
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/PRESUBMIT.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/PRESUBMIT.py
deleted file mode 100644
index dde025383c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/PRESUBMIT.py
+++ /dev/null
@@ -1,137 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-
-"""Top-level presubmit script for GYP.
-
-See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
-for more details about the presubmit API built into gcl.
-"""
-
-
-PYLINT_BLACKLIST = [
- # TODO: fix me.
- # From SCons, not done in google style.
- 'test/lib/TestCmd.py',
- 'test/lib/TestCommon.py',
- 'test/lib/TestGyp.py',
-]
-
-
-PYLINT_DISABLED_WARNINGS = [
- # TODO: fix me.
- # Many tests include modules they don't use.
- 'W0611',
- # Possible unbalanced tuple unpacking with sequence.
- 'W0632',
- # Attempting to unpack a non-sequence.
- 'W0633',
- # Include order doesn't properly include local files?
- 'F0401',
- # Some use of built-in names.
- 'W0622',
- # Some unused variables.
- 'W0612',
- # Operator not preceded/followed by space.
- 'C0323',
- 'C0322',
- # Unnecessary semicolon.
- 'W0301',
- # Unused argument.
- 'W0613',
- # String has no effect (docstring in wrong place).
- 'W0105',
- # map/filter on lambda could be replaced by comprehension.
- 'W0110',
- # Use of eval.
- 'W0123',
- # Comma not followed by space.
- 'C0324',
- # Access to a protected member.
- 'W0212',
- # Bad indent.
- 'W0311',
- # Line too long.
- 'C0301',
- # Undefined variable.
- 'E0602',
-  # No exception type specified.
- 'W0702',
- # No member of that name.
- 'E1101',
- # Dangerous default {}.
- 'W0102',
- # Cyclic import.
- 'R0401',
- # Others, too many to sort.
- 'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
- 'R0201', 'E0101', 'C0321',
- # ************* Module copy
- # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
- 'W0104',
-]
-
-
-def CheckChangeOnUpload(input_api, output_api):
- report = []
- report.extend(input_api.canned_checks.PanProjectChecks(
- input_api, output_api))
- return report
-
-
-def CheckChangeOnCommit(input_api, output_api):
- report = []
-
- # Accept any year number from 2009 to the current year.
- current_year = int(input_api.time.strftime('%Y'))
- allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
- years_re = '(' + '|'.join(allowed_years) + ')'
-
- # The (c) is deprecated, but tolerate it until it's removed from all files.
- license = (
- r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
- r'.*? Use of this source code is governed by a BSD-style license that '
- r'can be\n'
- r'.*? found in the LICENSE file\.\n'
- ) % {
- 'year': years_re,
- }
-
- report.extend(input_api.canned_checks.PanProjectChecks(
- input_api, output_api, license_header=license))
- report.extend(input_api.canned_checks.CheckTreeIsOpen(
- input_api, output_api,
- 'http://gyp-status.appspot.com/status',
- 'http://gyp-status.appspot.com/current'))
-
- import os
- import sys
- old_sys_path = sys.path
- try:
- sys.path = ['pylib', 'test/lib'] + sys.path
- blacklist = PYLINT_BLACKLIST
- if sys.platform == 'win32':
- blacklist = [os.path.normpath(x).replace('\\', '\\\\')
- for x in PYLINT_BLACKLIST]
- report.extend(input_api.canned_checks.RunPylint(
- input_api,
- output_api,
- black_list=blacklist,
- disabled_warnings=PYLINT_DISABLED_WARNINGS))
- finally:
- sys.path = old_sys_path
- return report
-
-
-TRYBOTS = [
- 'linux_try',
- 'mac_try',
- 'win_try',
-]
-
-
-def GetPreferredTryMasters(_, change):
- return {
- 'client.gyp': { t: set(['defaulttests']) for t in TRYBOTS },
- }
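
Worked through, the commit check above is straightforward: with a current year of 2017 (illustrative), years_re becomes '(2017|2016|2015|2014|2013|2012|2011|2010|2009)', so any Google copyright header dated 2009 through the present passes. Likewise, GetPreferredTryMasters simply fans the three trybots out to the default test set: {'client.gyp': {'linux_try': set(['defaulttests']), 'mac_try': set(['defaulttests']), 'win_try': set(['defaulttests'])}}.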
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/aosp_manifest.xml b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/aosp_manifest.xml
deleted file mode 100644
index bd73b303c6..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/aosp_manifest.xml
+++ /dev/null
@@ -1,466 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<manifest>
- <remote name="aosp"
- fetch=".."
- review="https://android-review.googlesource.com/" />
- <default revision="master"
- remote="aosp"
- sync-j="4" />
-
- <project groups="device,flo" name="device/asus/deb" revision="0ce3a783d549d023ddc553a04fed717ffb2ff533" />
- <project groups="device,flo" name="device/asus/flo" revision="55ea79b11f9f82b2aa03f44a3429112fc5c06d07" />
- <project groups="device,flo" name="device/asus/flo-kernel" revision="6d74123947016999ae62d9c3067ae97782fdba21" />
- <project groups="device,grouper" name="device/asus/grouper" revision="78fe48f44e90ef3a7eceab5465dbad63cd16ce88" />
- <project groups="device,grouper" name="device/asus/tilapia" revision="e5033bc80764067cbb1c9dc3970f0718e35ae8c7" />
- <project name="device/common" revision="6a2995683de147791e516aae2ccb31fdfbe2ad30" />
- <project groups="pdk" name="device/generic/armv7-a-neon" revision="8bcf4b7a6380b26c2b42dae00dd8443de2a8e12c" />
- <project groups="pdk" name="device/generic/common" revision="11c092a6cbfcf6207f07a9a8e3398e747e7f5461" />
- <project groups="pdk" name="device/generic/goldfish" revision="638ee524f83053613c47ddea22c4bf98a0175c2f" />
- <project groups="pdk" name="device/generic/mini-emulator-armv7-a-neon" revision="2a7ade61377b7906187ab46b5859c896baa0ab0e" />
- <project groups="pdk" name="device/generic/mini-emulator-mips" revision="2ff06dda649ba43507a911057f7854a3373ef7d6" />
- <project groups="pdk" name="device/generic/mini-emulator-x86" revision="a2f05b8c5259c232be5b029b2d5e721ba3f70917" />
- <project groups="pdk" name="device/generic/mips" revision="dd06e7883227cc68bb1206584c8e3a768e49d02d" />
- <project name="device/generic/qemu" revision="bd2543e810f3fa56e9dcfe301b893832534c85db" />
- <project groups="pdk" name="device/generic/x86" revision="f111878fb41e2bdf4eb092d1edf0eb53cc5d0153" />
- <project groups="device" name="device/google/accessory/arduino" revision="abc5159a3ca9dbb5c7e364a1eab99901a4440ac5" />
- <project groups="device" name="device/google/accessory/demokit" revision="7dfe7f89a3b174709c773fe319531006e46440d9" />
- <project groups="device,hammerhead" name="device/lge/hammerhead" revision="ec229bf178b891cc18552833f2de743acf390a7c" />
- <project groups="device,hammerhead" name="device/lge/hammerhead-kernel" revision="a1dc58be96e7a71496e3e89079ac704930f982f2" />
- <project groups="device,mako" name="device/lge/mako" revision="7e5f0f313819ffa3b45cd4208ab552f446c33936" />
- <project groups="device,mako" name="device/lge/mako-kernel" revision="b7de901b8cb86036e9b92b3b6f188b45a524b125" />
- <project groups="pdk" name="device/sample" revision="096f9eb5763fd2766fcbbe4f6b9da51c87f61797" />
- <project groups="device,manta" name="device/samsung/manta" revision="78fe248ddb214aca2215df46be83882dc50c9283" />
- <project groups="pdk" name="platform/abi/cpp" path="abi/cpp" revision="a0f99286d0909f7a30b0bee742bec2a0b62c4dd0" />
- <project name="platform/art" path="art" revision="36b111c7d3d635e262114dabde4c26952c7dcbe6" />
- <project groups="pdk" name="platform/bionic" path="bionic" revision="36bacd237de931c48714d1a8aa4aa9522283e407" />
- <project name="platform/bootable/bootloader/legacy" path="bootable/bootloader/legacy" revision="3c491d6efb8ff2534a6934702760a6273f197918" />
- <project name="platform/bootable/diskinstaller" path="bootable/diskinstaller" revision="ca40959a8caafa0df6a5c3d845e2afe6b252093f" />
- <project groups="pdk" name="platform/bootable/recovery" path="bootable/recovery" revision="974fe112ae6df95ca6d49688d6e3e459d87e16de" />
- <project groups="pdk" name="platform/build" path="build" revision="d23798bfdc9bb34909177c3c5f06f0c97cc9897e" >
- <copyfile dest="Makefile" src="core/root.mk"/>
- </project>
- <project groups="cts" name="platform/cts" path="cts" revision="e15e8f846e19816e18ee3293c5b99f78463be28e" />
- <project name="platform/dalvik" path="dalvik" revision="fb5b0d5bc46bce9c8ed6b1150498d6e145811a7d" />
- <project name="platform/developers/build" path="developers/build" revision="75c5c41b06f045c3304b1b19d8250f04a8da8f10" />
- <project name="platform/developers/demos" path="developers/demos" revision="64526120cd8da89bcb9a48acf95307d2c172a6e8" />
- <project name="platform/developers/docs" path="developers/docs" revision="c0b835ddd9acc27176dc9a0f7d1aa2faf5d51806" />
- <project name="platform/developers/samples/android" path="developers/samples/android" revision="dea82fa23f038d66bd9cfdff2afb8ef22add1c4f" />
- <project name="platform/development" path="development" revision="0efeb2c66bff9b36feecd9315d14d2afb46e4669" />
- <project name="platform/docs/source.android.com" path="docs/source.android.com" revision="c4795fa0df2c5fb4832ae65482944e8e5400e4f6" />
- <project groups="pdk" name="platform/external/aac" path="external/aac" revision="35f30c5ab8089f38681d2fdd416c00aebef5a7ff" />
- <project name="platform/external/android-clat" path="external/android-clat" revision="18921713780edb45ceef327d5fcf3387818300f3" />
- <project name="platform/external/android-mock" path="external/android-mock" revision="4fe497660c2e939300dc5b743d662aef458b1726" />
- <project name="platform/external/ant-glob" path="external/ant-glob" revision="0f189400fd2a36bf11bfb058e7f3917eb7ed163a" />
- <project name="platform/external/antlr" path="external/antlr" revision="47997265eeb7d954a32ece693bbe6dab740872dd" />
- <project name="platform/external/apache-harmony" path="external/apache-harmony" revision="6942e08fdbbd8402c9deabb0f60c8c871194b244" />
- <project name="platform/external/apache-http" path="external/apache-http" revision="85ed0e10781c3c57343300a02556dd5131c450aa" />
- <project name="platform/external/apache-qp" path="external/apache-qp" revision="64ea622b23e6612eb8e7dcae6bfd4314beb022a8" />
- <project name="platform/external/apache-xml" path="external/apache-xml" revision="00ee83ff1bd827a852065986ed0da7a3ded57a55" />
- <project name="platform/external/arduino" path="external/arduino" revision="d06daf9bbc46838400461eb8e15842974e38d82a" />
- <project groups="pdk" name="platform/external/bison" path="external/bison" revision="c2418b886165add7f5a31fc5609f0ce2d004a90e" />
- <project name="platform/external/blktrace" path="external/blktrace" revision="d345431f16b8f76f30a58193ff2b26d5853e1109" />
- <project groups="pdk" name="platform/external/bluetooth/bluedroid" path="external/bluetooth/bluedroid" revision="3b4040093ddf0e0025d0dd034aa65078bb695514" />
- <project name="platform/external/bouncycastle" path="external/bouncycastle" revision="234720ebe66540a53cff98b2448dddbc884bd09f" />
- <project groups="pdk" name="platform/external/bsdiff" path="external/bsdiff" revision="6f503758fad2cbcf8359e8f0af32e4d79a2a48ae" />
- <project groups="pdk" name="platform/external/bzip2" path="external/bzip2" revision="1cb636bd8e9e5cdfd5d5b2909a122f6e80db62de" />
- <project name="platform/external/ceres-solver" path="external/ceres-solver" revision="399f7d09e0c45af54b77b4ab9508d6f23759b927" />
- <project groups="pdk" name="platform/external/checkpolicy" path="external/checkpolicy" revision="c66ac590eebc731f6021f267ebea208e87d8f04f" />
- <project name="platform/external/chromium" path="external/chromium" revision="f294081d501ad98b7d7f50bc73f291063caf2c5f" />
- <project name="platform/external/chromium-libpac" path="external/chromium-libpac" revision="09cf45bf5a650fe1abd50b9d61c2670a62f62767" />
- <project groups="pdk" name="platform/external/chromium-trace" path="external/chromium-trace" revision="8252ae6b83ea65cf871e7981e981da07379f5a0f" />
- <project name="platform/external/chromium_org" path="external/chromium_org" revision="43165a58c6167882aabb62f470c4e4d21f807d79" />
- <project name="platform/external/chromium_org/sdch/open-vcdiff" path="external/chromium_org/sdch/open-vcdiff" revision="6d634da5463d9bc5fc88f86aec1d2ac4fe6f612e" />
- <project name="platform/external/chromium_org/testing/gtest" path="external/chromium_org/testing/gtest" revision="65df883d09205766c521f2e6c126f4070a423141" />
- <project name="platform/external/chromium_org/third_party/WebKit" path="external/chromium_org/third_party/WebKit" revision="a25b4978c2c50d573391a6d56a0e8ad35f52ffc8" />
- <project name="platform/external/chromium_org/third_party/angle" path="external/chromium_org/third_party/angle" revision="8b77c2b2231f7d895979f6341e1ad1964a654ce4" />
- <project name="platform/external/chromium_org/third_party/boringssl/src" path="external/chromium_org/third_party/boringssl/src" revision="85fb7432d3c851200342dd982b211f8dac860687" />
- <project name="platform/external/chromium_org/third_party/brotli/src" path="external/chromium_org/third_party/brotli/src" revision="96f298ac43a9216b251d6c3264d8f5ada89e107f" />
- <project name="platform/external/chromium_org/third_party/eyesfree/src/android/java/src/com/googlecode/eyesfree/braille" path="external/chromium_org/third_party/eyesfree/src/android/java/src/com/googlecode/eyesfree/braille" revision="bb4c72f1deb0b8b2b0468b0bf1050462ebcf6135" />
- <project name="platform/external/chromium_org/third_party/freetype" path="external/chromium_org/third_party/freetype" revision="dc263f2ee2786739da036911ed8b29c07a639ab9" />
- <project name="platform/external/chromium_org/third_party/icu" path="external/chromium_org/third_party/icu" revision="85e5871666cade1bb4b53f0cebfae53bc7d8d1f2" />
- <project name="platform/external/chromium_org/third_party/leveldatabase/src" path="external/chromium_org/third_party/leveldatabase/src" revision="d4e10f2a91f5de7bd17adcdbd80c54b19ab336fe" />
- <project name="platform/external/chromium_org/third_party/libaddressinput/src" path="external/chromium_org/third_party/libaddressinput/src" revision="7127f6844fac19d7610e34f4f7e03398fcd95531" />
- <project name="platform/external/chromium_org/third_party/libjingle/source/talk" path="external/chromium_org/third_party/libjingle/source/talk" revision="8fd7b6a4d9e6757c5e1ff50147e6089979bf6701" />
- <project name="platform/external/chromium_org/third_party/libphonenumber/src/phonenumbers" path="external/chromium_org/third_party/libphonenumber/src/phonenumbers" revision="de6af28b9f9f34a31ffb7772b7510fd215a0814e" />
- <project name="platform/external/chromium_org/third_party/libphonenumber/src/resources" path="external/chromium_org/third_party/libphonenumber/src/resources" revision="8f194ead1ebd76ebb28b7e2dfc0a7baddc62bb22" />
- <project name="platform/external/chromium_org/third_party/libsrtp" path="external/chromium_org/third_party/libsrtp" revision="5eddd5b3436aa8b2c7eb1f3c6db154281c6b91c5" />
- <project name="platform/external/chromium_org/third_party/libvpx" path="external/chromium_org/third_party/libvpx" revision="c20d6540c47e427470c5a56b35fea3c5e9098748" />
- <project name="platform/external/chromium_org/third_party/libyuv" path="external/chromium_org/third_party/libyuv" revision="6e77b766a9eb7889c1a10cab978705ffe03ff3e7" />
- <project name="platform/external/chromium_org/third_party/mesa/src" path="external/chromium_org/third_party/mesa/src" revision="e70a8ff30d20e1bf6bb5c06b5cd7bd4ea9ae20e1" />
- <project name="platform/external/chromium_org/third_party/openmax_dl" path="external/chromium_org/third_party/openmax_dl" revision="83d0254a412b93e81b06a354b90fb627408b4ec8" />
- <project name="platform/external/chromium_org/third_party/openssl" path="external/chromium_org/third_party/openssl" revision="c2a9402712e13e15fcae2b17ec0cbecb816ef52e" />
- <project name="platform/external/chromium_org/third_party/opus/src" path="external/chromium_org/third_party/opus/src" revision="e383b38591b010ab08ebddf1fd8d821796bd961a" />
- <project name="platform/external/chromium_org/third_party/ots" path="external/chromium_org/third_party/ots" revision="4d6e4ddc4b0db2023b1380236c33aa04a7e9e927" />
- <project name="platform/external/chromium_org/third_party/sfntly/cpp/src" path="external/chromium_org/third_party/sfntly/cpp/src" revision="2bac2ec7167835b214bfe42e762cd2ce6cf8cf1a" />
- <project name="platform/external/chromium_org/third_party/skia" path="external/chromium_org/third_party/skia" revision="2d75d0865c7bac54bf5e234855609d0f628388b7" />
- <project name="platform/external/chromium_org/third_party/smhasher/src" path="external/chromium_org/third_party/smhasher/src" revision="09e3094b8ab52bb1ad9ab8c8351d99df50327b67" />
- <project name="platform/external/chromium_org/third_party/usrsctp/usrsctplib" path="external/chromium_org/third_party/usrsctp/usrsctplib" revision="ed9a6fb519aa7606cab965b2c4218756e849ddb6" />
- <project name="platform/external/chromium_org/third_party/webrtc" path="external/chromium_org/third_party/webrtc" revision="8b45a80ec9c21b148a5674d3a23ca5fa70981f71" />
- <project name="platform/external/chromium_org/third_party/yasm/source/patched-yasm" path="external/chromium_org/third_party/yasm/source/patched-yasm" revision="0f308c9bc9aa3258a0e90285b9d4e69bbb5b0a73" />
- <project name="platform/external/chromium_org/tools/grit" path="external/chromium_org/tools/grit" revision="4ad93ed16c8ae7742fd7c34c83036b8d03c21fb9" />
- <project name="platform/external/chromium_org/tools/gyp" path="external/chromium_org/tools/gyp" revision="9c42a79388ce87185ad04cb02047c1e56ac5e066" />
- <project name="platform/external/chromium_org/v8" path="external/chromium_org/v8" revision="db865e6839e98cc9d07609bf81bb6610117ba6ff" />
- <project groups="pdk" name="platform/external/clang" path="external/clang" revision="070ed154a0a8bc2c0fd4fb9b8a86a0f1218e6dfa" />
- <project groups="pdk" name="platform/external/compiler-rt" path="external/compiler-rt" revision="c185902e393cd71823258016ead1b315ed062b24" />
- <project name="platform/external/conscrypt" path="external/conscrypt" revision="26163c268a6d2625384b87e907afad8ef19f9a47" />
- <project name="platform/external/dexmaker" path="external/dexmaker" revision="2b528c4b156f2de5c641875b98e59e0b09ebaccd" />
- <project name="platform/external/dhcpcd" path="external/dhcpcd" revision="03baf5eab896198b5060d287af3fd60d360bf48f" />
- <project groups="pdk" name="platform/external/dnsmasq" path="external/dnsmasq" revision="7674911bc9d10adf57c2c2d15d0c641b48e4afe6" />
- <project name="platform/external/doclava" path="external/doclava" revision="b9d279d8f9c29a3044d13482846efb21f27b5df4" />
- <project groups="pdk" name="platform/external/e2fsprogs" path="external/e2fsprogs" revision="721f3bc56989b5f4101e646a02d598ddb4a7ff6e" />
- <project name="platform/external/easymock" path="external/easymock" revision="c9a234086537e5fd820b110bbd99e3cdc695004c" />
- <project name="platform/external/eclipse-basebuilder" path="external/eclipse-basebuilder" revision="6134da6347cc997e0cf2921aaadfb46f21c05d85" />
- <project name="platform/external/eclipse-windowbuilder" path="external/eclipse-windowbuilder" revision="a5f3ee137e94737538ec3bdf9b3716765d178c17" />
- <project name="platform/external/eigen" path="external/eigen" revision="b015e75e8c7ba1ab4ddb91e9372a57e76f3fd159" />
- <project name="platform/external/elfutils" path="external/elfutils" revision="38ecac0276825a9463803485440646582e477e78" />
- <project name="platform/external/embunit" path="external/embunit" revision="336b7c65098af0d1be69f2db55f4e75342d73b3f" />
- <project name="platform/external/emma" path="external/emma" revision="daacd02a6b9f7a3e82bdf1cc5b84db85ed59edb1" />
- <project name="platform/external/esd" path="external/esd" revision="224a67f2683a7ee997179fc5dd16115e39987b0f" />
- <project groups="pdk" name="platform/external/expat" path="external/expat" revision="907ec055718996baf36961e7f47f8447e49b3865" />
- <project name="platform/external/eyes-free" path="external/eyes-free" revision="16bd4c7a4d1bfe229068b637614dad7c48dd2ceb" />
- <project name="platform/external/f2fs-tools" path="external/f2fs-tools" revision="00dc8a1c6c87acf687e64e66cfc2fd7ca28e646e" />
- <project name="platform/external/fdlibm" path="external/fdlibm" revision="c831c726067e0d8a05362e710e2405f0eff81e07" />
- <project name="platform/external/fio" path="external/fio" revision="6f4e805b805f1ab3025482e471147bb51efa99bd" />
- <project groups="pdk" name="platform/external/flac" path="external/flac" revision="7f32dd57579bdff88e46e1e403154be0b99165da" />
- <project groups="pdk" name="platform/external/freetype" path="external/freetype" revision="899c67b6cfcd2010784fbf08c5415af16c526e0c" />
- <project name="platform/external/fsck_msdos" path="external/fsck_msdos" revision="17a1471db8c528cd9d44ec4385d2eb3614138856" />
- <project name="platform/external/ganymed-ssh2" path="external/ganymed-ssh2" revision="d3724dabc1cfbacd105fe6c422b4dcba80e4fb2d" />
- <project groups="pdk" name="platform/external/gcc-demangle" path="external/gcc-demangle" revision="9241386b62c353302c2f9eccda0672685b252b4d" />
- <project name="platform/external/genext2fs" path="external/genext2fs" revision="e11a9c7fe6f1cef99aad2f25afaea37b72fe9f93" />
- <project name="platform/external/giflib" path="external/giflib" revision="621696a283c0ce34956417f760f1005fadcd12ae" />
- <project name="platform/external/google-diff-match-patch" path="external/google-diff-match-patch" revision="cecbe12841337860291c2d6a5728b681ec5fca2a" />
- <project name="platform/external/google-fonts/carrois-gothic-sc" path="external/google-fonts/carrois-gothic-sc" revision="0062a10458d4c357f3082d66bcb129d11913aaae" />
- <project name="platform/external/google-fonts/coming-soon" path="external/google-fonts/coming-soon" revision="2c5cb418c690815545bbb0316eae5fd33b9fc859" />
- <project name="platform/external/google-fonts/dancing-script" path="external/google-fonts/dancing-script" revision="7b6623bd54cee3e48ae8a4f477f616366643cc78" />
- <project name="platform/external/grub" path="external/grub" revision="33a4e7e4cfa81dc21d37091515891859ef3ab934" />
- <project groups="pdk" name="platform/external/gtest" path="external/gtest" revision="fa3c26b862ca17c0d2db67606226b49d1648b4bf" />
- <project name="platform/external/guava" path="external/guava" revision="5e6db342fc75b1945298142530f2d1d1861bce73" />
- <project name="platform/external/hamcrest" path="external/hamcrest" revision="ba28ac1e0386f26d9a45be5ed16fc9c598b27e70" />
- <project name="platform/external/harfbuzz" path="external/harfbuzz" revision="7a08026033b424da3b7022ebcce35f033949df8b" />
- <project name="platform/external/harfbuzz_ng" path="external/harfbuzz_ng" revision="3e537b48a7b56c742ecf3c2ed24ff15fcb73f575" />
- <project name="platform/external/hyphenation" path="external/hyphenation" revision="bfa84834dfeb7fe8d058c2e7e07b5981451ddf82" />
- <project name="platform/external/icu" path="external/icu" revision="3c09e2ebbdae6000f3bd471c34d055bc1913f7e4" />
- <project groups="pdk" name="platform/external/icu4c" path="external/icu4c" revision="e5311394ca22b280da41cd17059288dab3fb1ea6" />
- <project groups="pdk" name="platform/external/iproute2" path="external/iproute2" revision="5d4c86892885ae1bc12e0e157b35ef44e8ba81bd" />
- <project name="platform/external/ipsec-tools" path="external/ipsec-tools" revision="f4cb1ee4b00abbfb6f968dc25818c23b4b47e584" />
- <project name="platform/external/iptables" path="external/iptables" revision="e3928b77f18db0fdc615693017c6c15eb71bf4e0" />
- <project name="platform/external/iputils" path="external/iputils" revision="1c7c426ab377c3a005a36d612ebbb16de86fb7d4" />
- <project name="platform/external/jack" path="external/jack" revision="5ceb2025ac5d25ed48183ac2d3dac4691fe761fb" />
- <project name="platform/external/javasqlite" path="external/javasqlite" revision="b8501bdeb0b7e39a0d82f2a96ad382c05a763b22" />
- <project name="platform/external/javassist" path="external/javassist" revision="9566207cff5871c672fac1f0d4332d93292036d7" />
- <project name="platform/external/jdiff" path="external/jdiff" revision="e4694302d6a3786c64d954e0b3cf42786283bd3c" />
- <project name="platform/external/jemalloc" path="external/jemalloc" revision="615fe54259e545c33275753a316c2bfd1198b4f0" />
- <project groups="pdk" name="platform/external/jhead" path="external/jhead" revision="871af5c305ce1d3087e58fae091c60c359f5fa45" />
- <project name="platform/external/jmdns" path="external/jmdns" revision="f4eb7466d5c09098f9dc54137ed3235e3c43fc9f" />
- <project name="platform/external/jmonkeyengine" path="external/jmonkeyengine" revision="a6b44658eb1c55295f132a36233a11aa2bd8f9cf" />
- <project groups="pdk" name="platform/external/jpeg" path="external/jpeg" revision="213197252c8c4825f6572c651126c22067025fe9" />
- <project name="platform/external/jsilver" path="external/jsilver" revision="739060b01245f1dc5f1800949b3c30c291253cff" />
- <project name="platform/external/jsr305" path="external/jsr305" revision="a82868820d6350811b9ddfde4bf8ed5016084269" />
- <project name="platform/external/junit" path="external/junit" revision="8f312e0c3d6dff30d015d2c85fdaae0a39220fd6" />
- <project name="platform/external/kernel-headers" path="external/kernel-headers" revision="8b663ef01dcaadfe1dec7ba826e5cd1cf0bb2c91" />
- <project name="platform/external/libcap-ng" path="external/libcap-ng" revision="1d1011a3c5049a7f9eef99d22f3704e4367579cc" />
- <project name="platform/external/libcxx" path="external/libcxx" revision="a9aa30b5d18422fce29a42ce1a704bc5f28febde" />
- <project name="platform/external/libcxxabi" path="external/libcxxabi" revision="87a9be28aceed80250cd1d1a47eb8afa0ee67b51" />
- <project name="platform/external/libcxxrt" path="external/libcxxrt" revision="d1ee2b2a4946a073596514462d7629373d22fb27" />
- <project name="platform/external/libexif" path="external/libexif" revision="25d371312cee1452a2adcf8b7f6cad6267bda32d" />
- <project name="platform/external/libffi" path="external/libffi" revision="385ba8b006b9995456d3c9283fd20dded90809cc" />
- <project groups="pdk" name="platform/external/libgsm" path="external/libgsm" revision="50761abed8f4734970874165b386cfd4d9599db4" />
- <project groups="pdk" name="platform/external/liblzf" path="external/liblzf" revision="6946aa575b0949d045722794850896099d937cbb" />
- <project name="platform/external/libmtp" path="external/libmtp" revision="7075348937f6a8c9d9211942fcb6c376f4227776" />
- <project groups="pdk" name="platform/external/libnfc-nci" path="external/libnfc-nci" revision="46abb3dcf960058e48d1444b6a11cc7e84912339" />
- <project groups="pdk" name="platform/external/libnfc-nxp" path="external/libnfc-nxp" revision="15d81f71a668b3092549c6b7f83694bf680d9c49" />
- <project name="platform/external/libnl" path="external/libnl" revision="99debfa4c01b49c9b470884cc56f81fcdee0fa1f" />
- <project groups="pdk" name="platform/external/libnl-headers" path="external/libnl-headers" revision="52c926a9de955fa2d987bf8c5d4a1304b5a2a611" />
- <project name="platform/external/libogg" path="external/libogg" revision="ec0b24fb1468abe37be4164a6feb16568e036bde" />
- <project name="platform/external/libpcap" path="external/libpcap" revision="9dab0cd7430a4d23e0a7752fb13b941692171c3d" />
- <project name="platform/external/libphonenumber" path="external/libphonenumber" revision="485e6d5c6e48a1fc43cc0a090e687c723dac056c" />
- <project groups="pdk" name="platform/external/libpng" path="external/libpng" revision="48b7ba25a15a9eae83d366c02475539725d035d0" />
- <project name="platform/external/libppp" path="external/libppp" revision="706e567fc5ff6b79738a5f470e5aa7b2cae76459" />
- <project name="platform/external/libseccomp-helper" path="external/libseccomp-helper" revision="e87019943a8b5a7cd0880910f671c37b240d5754" />
- <project groups="pdk" name="platform/external/libselinux" path="external/libselinux" revision="da4208c8808e6a62fcfe848343abd3e2f3b339cc" />
- <project groups="pdk" name="platform/external/libsepol" path="external/libsepol" revision="d26204e7d0a3be178a97d4920b82007e05a2a632" />
- <project name="platform/external/libssh2" path="external/libssh2" revision="2bb40f2445cab3ba588efb29e1835cdba2b27248" />
- <project name="platform/external/libunwind" path="external/libunwind" revision="b3436a3feed4dcb22dafc8f7818b742cacaddd1d" />
- <project name="platform/external/libusb" path="external/libusb" revision="2801917fe150393d4f4a354165fe89550ae22613" />
- <project name="platform/external/libusb-compat" path="external/libusb-compat" revision="94867ba54eb7faa8efca81cf2214d00bb9143d27" />
- <project name="platform/external/libvorbis" path="external/libvorbis" revision="de559619fd4dd0d2d9608436696fd44bdf74eba8" />
- <project groups="pdk" name="platform/external/libvpx" path="external/libvpx" revision="d64f247f64fbb814c9ecf06a56bcb0948bfca21f" />
- <project name="platform/external/libxml2" path="external/libxml2" revision="399e808f940777d18efe377bd34f738dc84729e0" />
- <project name="platform/external/libxslt" path="external/libxslt" revision="98f5140c33273d3bd67ca03566f8417406001016" />
- <project groups="libyuv" name="platform/external/libyuv" path="external/libyuv" revision="482a582884351288fb701532359652970b1ba7c0" />
- <project name="platform/external/linux-tools-perf" path="external/linux-tools-perf" revision="3e1937964f6c183eb6a0000e2dca27fc3a419ca2" />
- <project name="platform/external/littlemock" path="external/littlemock" revision="328b01eada8965cd38feea884d4080c31e3763b0" />
- <project groups="pdk" name="platform/external/llvm" path="external/llvm" revision="c4c1f81ae1b07138df50e0459631abf3082bda9c" />
- <project name="platform/external/ltrace" path="external/ltrace" revision="82ae18484c7b6a8af05354caf6de3a7f1ac5fcf9" />
- <project name="platform/external/lzma" path="external/lzma" revision="19cf4f773361c09e47a2ffe1613d66cbf632227f" />
- <project name="platform/external/marisa-trie" path="external/marisa-trie" revision="629ed059b1e85cd8e4de363d8b3dc53c15c3e08a" />
- <project name="platform/external/markdown" path="external/markdown" revision="6f2e3554ae38cc90518d32e02cb57d05988270a6" />
- <project groups="pdk" name="platform/external/mdnsresponder" path="external/mdnsresponder" revision="b25c2507ecc3f674e3b4f0a770acf9ad8fd874d0" />
- <project name="platform/external/mesa3d" path="external/mesa3d" revision="97d3f36a59ea448fa77e47a90bf04f1254670542" />
- <project name="platform/external/messageformat" path="external/messageformat" revision="180a28770171075aa484729a69d14c7cf0c93fcf" />
- <project groups="pdk" name="platform/external/mksh" path="external/mksh" revision="2a54bce0ae98f53f0b867e949b26d081691e1493" />
- <project name="platform/external/mockito" path="external/mockito" revision="4d0dcd53b27a243baf72ee0b127b188a058b318d" />
- <project name="platform/external/mockwebserver" path="external/mockwebserver" revision="2f7659c426de53122ee7922b0981058a900124a7" />
- <project name="platform/external/mp4parser" path="external/mp4parser" revision="16051e950485c6b62127c0446a760111de1a0cb9" />
- <project name="platform/external/mtpd" path="external/mtpd" revision="5ea8006691664b7e6d46d6a6dc889eac91b7fe37" />
- <project name="platform/external/naver-fonts" path="external/naver-fonts" revision="3bba7d2430bc3ec8105678a27f03fb080f0f8384" />
- <project name="platform/external/netcat" path="external/netcat" revision="444644cfa9a2f3002863caa168fb2d6b34dfd1e8" />
- <project name="platform/external/netperf" path="external/netperf" revision="38e47cd883738cb84bdb47a7d263f14f14062d7b" />
- <project name="platform/external/neven" path="external/neven" revision="504ee5ccaabd8bce4da3430b0f4e9714ac2a8e6c" />
- <project name="platform/external/nfacct" path="external/nfacct" revision="6f7aae0264821b44e9fe80fb5596c525d3e2f475" />
- <project name="platform/external/nist-pkits" path="external/nist-pkits" revision="b7a53ad5a587926cb880d9bb6f3d51657596474c" />
- <project name="platform/external/nist-sip" path="external/nist-sip" revision="b23dbfce7ea84c39cea75b612868a5832cb9af2b" />
- <project name="platform/external/noto-fonts" path="external/noto-fonts" revision="90372d894b5d9c9f2a111315d2eb3b8de1979ee4" />
- <project name="platform/external/oauth" path="external/oauth" revision="bc170f58de82000ed6460f111686a850a1890c07" />
- <project name="platform/external/objenesis" path="external/objenesis" revision="2a7655c0d503fcf5989098f65bf89eae78c32e5a" />
- <project name="platform/external/okhttp" path="external/okhttp" revision="4909663c795d974d0d4b0e2d1ebd6e179486c897" />
- <project name="platform/external/open-vcdiff" path="external/open-vcdiff" revision="6d29f2f083baf8250db94ed0b4807e513a84163d" />
- <project name="platform/external/opencv" path="external/opencv" revision="4a99e243b42afcb885d036bb451eb3c2739275b6" />
- <project name="platform/external/openfst" path="external/openfst" revision="b7434caa51427a0f5ab5c807e1a92d6ca2af8884" />
- <project name="platform/external/openssh" path="external/openssh" revision="3c335c9fb9c12375ad62748fa1d1e5ebe4710c94" />
- <project groups="pdk" name="platform/external/openssl" path="external/openssl" revision="cfe73257599ae4baae3ffb50c2c841d9249d2d16" />
- <project name="platform/external/oprofile" path="external/oprofile" revision="3722f1053f4cab90c4daf61451713a2d61d79c71" />
- <project name="platform/external/owasp/sanitizer" path="external/owasp/sanitizer" revision="6a304233f9f2010821a5a1dd40e2832b68353a3c" />
- <project name="platform/external/pcre" path="external/pcre" revision="993a14b71c8e7af03eb929d44a444137393a5324" />
- <project name="platform/external/pixman" path="external/pixman" revision="afd5bbd8074cedec8544d07920fa06786d5a4f08" />
- <project name="platform/external/ppp" path="external/ppp" revision="8b58d9bd02e2c55f547fafbe9ba55b1160665761" />
- <project groups="pdk-java" name="platform/external/proguard" path="external/proguard" revision="3fd19dba2bdc0c4b64afda4d75836e1dcf7abf97" />
- <project groups="pdk" name="platform/external/protobuf" path="external/protobuf" revision="95d99df4574c28debcf9646056a0350ff44bc7c9" />
- <project name="platform/external/qemu" path="external/qemu" revision="539e1f25ecbfe80814dba2ea77feb22087b9d53b" />
- <project name="platform/external/qemu-pc-bios" path="external/qemu-pc-bios" revision="20349dae98d7de09a7e390d4a706c64f1db6edc2" />
- <project name="platform/external/regex-re2" path="external/regex-re2" revision="0d4c52358a1af421705c54bd8a9fdd8a30558a2e" />
- <project name="platform/external/replicaisland" path="external/replicaisland" revision="99e2e54c5d036048caf09bb05eea0969de093104" />
- <project name="platform/external/robolectric" path="external/robolectric" revision="6bf395c984ed3f69711663b006aeffbb0f7e8a90" />
- <project groups="pdk" name="platform/external/safe-iop" path="external/safe-iop" revision="aa0725fb1da35e47676b6da30009322eb5ed59be" />
- <project groups="pdk" name="platform/external/scrypt" path="external/scrypt" revision="dde037b82e5cd6215244e3240dbaad417928eafa" />
- <project groups="pdk" name="platform/external/sepolicy" path="external/sepolicy" revision="21ada26daea538397029396099dce865267bae2f" />
- <project name="platform/external/sfntly" path="external/sfntly" revision="6723e5241a45c6de224c96384a595a1bf5bc5449" />
- <project name="platform/external/sil-fonts" path="external/sil-fonts" revision="795a2f4339f8a82d6cff187e2a77bb01d5911aac" />
- <project name="platform/external/skia" path="external/skia" revision="d6f2c76fdb9b0469261fa2db0b29ed48c7ac38b5" />
- <project name="platform/external/smack" path="external/smack" revision="d7955ce24d294fb2014c59d11fca184471056f44" />
- <project name="platform/external/smali" path="external/smali" revision="5fd395796e215a80c722815bf180728948868f18" />
- <project groups="pdk" name="platform/external/sonivox" path="external/sonivox" revision="c0723d864b10fbd6c5cbbfa65e886c5e9eb3aafd" />
- <project groups="pdk" name="platform/external/speex" path="external/speex" revision="eaa4765b8cc6a6dd5ee0d26dc1b61a1044817f32" />
- <project groups="pdk" name="platform/external/sqlite" path="external/sqlite" revision="50af37d784661b2d54c8e043de52ffc4f02a1a50" />
- <project name="platform/external/srec" path="external/srec" revision="540e7ee8dbf1d7ee72ef45c92efbebcb89bf6d1a" />
- <project name="platform/external/srtp" path="external/srtp" revision="98bd63b48a31b4633cdfdc8138577dfa6d8dd2a6" />
- <project groups="pdk" name="platform/external/stlport" path="external/stlport" revision="dc05ca5be2319f74b41cb429ea50f30fceff4ace" />
- <project name="platform/external/strace" path="external/strace" revision="a2adbed6e2d3ce85ebb167e16ae370681a8b5188" />
- <project name="platform/external/stressapptest" path="external/stressapptest" revision="0956427aa995561acb4471764158ae057a36dad5" />
- <project name="platform/external/svox" path="external/svox" revision="ad0a55bd0e13a27ed11034346eee9c47e3684ef2" />
- <project name="platform/external/syspatch" path="external/syspatch" revision="358a4f86b8c2cb3d3f879a37f6773dd09d4b77b0" />
- <project name="platform/external/tagsoup" path="external/tagsoup" revision="a97828cb3f8f3a1af8470e55d3c5cd62d6a7cb4c" />
- <project name="platform/external/tcpdump" path="external/tcpdump" revision="de49cdcfddf36f2b41ef3278e98a8a550a189952" />
- <project name="platform/external/timezonepicker-support" path="external/timezonepicker-support" revision="99e91a76fd74bad10266623d67cdb98d011f709e" />
- <project groups="pdk" name="platform/external/tinyalsa" path="external/tinyalsa" revision="653e7a4015341c87b4d55ec9a94ec7bdee044f6f" />
- <project groups="pdk" name="platform/external/tinycompress" path="external/tinycompress" revision="aeee2c6a19b9d3765f72bc79555005786a424233" />
- <project groups="pdk" name="platform/external/tinyxml" path="external/tinyxml" revision="f065a8058659c0e6c5a5ccddcdb4faf0fe645cd0" />
- <project groups="pdk" name="platform/external/tinyxml2" path="external/tinyxml2" revision="c74b546f5af36968ffa56d7fd4529f4273b96f48" />
- <project groups="pdk" name="platform/external/tremolo" path="external/tremolo" revision="0fec2aefa8143c83df43752bb0218dfa371cc57e" />
- <project groups="pdk" name="platform/external/valgrind" path="external/valgrind" revision="893257d6c86a18cc5cf6c92528b7027f327dca70" />
- <project name="platform/external/vixl" path="external/vixl" revision="e1ab25cde167109efb28fa6a86d5c2c80b762d58" />
- <project name="platform/external/webp" path="external/webp" revision="0db01fc3411621bec473d50db0071fd2a225962e" />
- <project groups="pdk" name="platform/external/webrtc" path="external/webrtc" revision="d62aeac391d16d4953a12120c0ff614ccde02a30" />
- <project groups="pdk" name="platform/external/wpa_supplicant_8" path="external/wpa_supplicant_8" revision="88ef20ce4facae68a3e6b05429bb9f3f73a93996" />
- <project name="platform/external/xdelta3" path="external/xdelta3" revision="52d9c642e6a307c43881f20a4ed1c10e947234ba" />
- <project name="platform/external/xmlwriter" path="external/xmlwriter" revision="e95d92246ee35273dde2bee8b00485cc14c12be5" />
- <project name="platform/external/xmp_toolkit" path="external/xmp_toolkit" revision="42ea4dc6d1fc2206a7778029070ed9213e3b0fbf" />
- <project groups="pdk" name="platform/external/yaffs2" path="external/yaffs2" revision="a2cff2275e1b501ff478b03757d6e4f05fddc2db" />
- <project groups="pdk" name="platform/external/zlib" path="external/zlib" revision="8d977782c1cfe9d75cc9a464439c2ff1e27e1665" />
- <project name="platform/external/zxing" path="external/zxing" revision="7620644768ffc235607b3a94671e49518c18686f" />
- <project groups="pdk" name="platform/frameworks/av" path="frameworks/av" revision="a018cd4926460f8f5ab30a9a11df9775572d8620" />
- <project name="platform/frameworks/base" path="frameworks/base" revision="6a58309e734086a21580dd8d9175ac1817ca3ab2" />
- <project groups="pdk" name="platform/frameworks/compile/libbcc" path="frameworks/compile/libbcc" revision="3fc91521640692f844aece8b1743c4df702d1c66" />
- <project groups="pdk" name="platform/frameworks/compile/mclinker" path="frameworks/compile/mclinker" revision="e673be8f0526f9cbc83093fb579c0f76de9e4e3c" />
- <project groups="pdk" name="platform/frameworks/compile/slang" path="frameworks/compile/slang" revision="c957dd47b0a0705a686896b26cd1859d25824552" />
- <project name="platform/frameworks/ex" path="frameworks/ex" revision="3696df848aa7c574f913c97c3bf415b634934048" />
- <project name="platform/frameworks/mff" path="frameworks/mff" revision="b9669b8540a1e5c953374d53b115514335e23c27" />
- <project name="platform/frameworks/ml" path="frameworks/ml" revision="b020ad88ca28ada76a596b5dcc7e6c2854fcc132" />
- <project name="platform/frameworks/multidex" path="frameworks/multidex" revision="590a07e63868f0a1da311ff22b4a9f35eb48a865" />
- <project groups="pdk" name="platform/frameworks/native" path="frameworks/native" revision="e8878921db4a51ff5d4e75d9c8958d889a048603" />
- <project name="platform/frameworks/opt/calendar" path="frameworks/opt/calendar" revision="03b18577f8f8f799e87a62b8e03889ddacf6daa2" />
- <project name="platform/frameworks/opt/carddav" path="frameworks/opt/carddav" revision="f08aa2df132dd8dc32a0013d3750137d9dd9280a" />
- <project name="platform/frameworks/opt/colorpicker" path="frameworks/opt/colorpicker" revision="720a40ae24d526268b3c0f2dd8497b5df2cc6f23" />
- <project name="platform/frameworks/opt/datetimepicker" path="frameworks/opt/datetimepicker" revision="8a1c55baaf5ced7a98b196c689ccdd59238f6e58" />
- <project name="platform/frameworks/opt/emoji" path="frameworks/opt/emoji" revision="709f713ebcd62c61defc270d945810efca179621" />
- <project name="platform/frameworks/opt/inputmethodcommon" path="frameworks/opt/inputmethodcommon" revision="df9dd39c2047992a43b64e13bb0fc348a1630f3b" />
- <project name="platform/frameworks/opt/mailcommon" path="frameworks/opt/mailcommon" revision="1537812900e59f875cfea0483f0ae261b16d3e4b" />
- <project name="platform/frameworks/opt/mms" path="frameworks/opt/mms" revision="64817e848552fd0a429a3e026b7b1562103c56bb" />
- <project name="platform/frameworks/opt/net/voip" path="frameworks/opt/net/voip" revision="0f722c7f09ce67e058eb1cfaabf1d85f1abdf797" />
- <project name="platform/frameworks/opt/photoviewer" path="frameworks/opt/photoviewer" revision="8c32972911bf73babdb01d30267f57255e242d78" />
- <project groups="pdk" name="platform/frameworks/opt/telephony" path="frameworks/opt/telephony" revision="93faaed9056491c551ef7046e9e1de7d6397e95c" />
- <project name="platform/frameworks/opt/timezonepicker" path="frameworks/opt/timezonepicker" revision="3820b87bfbc86d066e9093e78254e1f3728ad77d" />
- <project name="platform/frameworks/opt/vcard" path="frameworks/opt/vcard" revision="5907243e6cf0603adf266ebfa7ee5ee465b9c596" />
- <project name="platform/frameworks/opt/widget" path="frameworks/opt/widget" revision="466e0e0307b3f6aa4f4be3d9419b5996bd389da5" />
- <project groups="pdk" name="platform/frameworks/rs" path="frameworks/rs" revision="ad0544fdf918e64cec05d1c98588880f10b09220" />
- <project name="platform/frameworks/support" path="frameworks/support" revision="f05c07d3528765076adc16337a1f68f1700955dc" />
- <project name="platform/frameworks/testing" path="frameworks/testing" revision="5c8e0271db889518f5969b142a37faa01a4ee54d" />
- <project name="platform/frameworks/volley" path="frameworks/volley" revision="0e406003b5d434d8f16d7d6ad97d446060b788e6" />
- <project name="platform/frameworks/webview" path="frameworks/webview" revision="6ed700e171cb2ee3303c08a1db2abc0e56fd307a" />
- <project name="platform/frameworks/wilhelm" path="frameworks/wilhelm" revision="a62c3572e60ae0446632de15418a65089cccf551" />
- <project name="platform/hardware/akm" path="hardware/akm" revision="32838ef838d1341aa8b77022869b801fb0bbb26c" />
- <project groups="pdk" name="platform/hardware/broadcom/libbt" path="hardware/broadcom/libbt" revision="55ddd0cce019e88829f92b2fe4e17d5869daa9b9" />
- <project groups="broadcom_wlan" name="platform/hardware/broadcom/wlan" path="hardware/broadcom/wlan" revision="47a3b8f496e6d2a836ac6b7268e5626c969542ec" />
- <project groups="invensense" name="platform/hardware/invensense" path="hardware/invensense" revision="0f5bc7cd710fac85377621a8b9a4c364af80605f" />
- <project groups="pdk" name="platform/hardware/libhardware" path="hardware/libhardware" revision="3e618a6aa10c783d1536f20edfc3347939cfa18e" />
- <project groups="pdk" name="platform/hardware/libhardware_legacy" path="hardware/libhardware_legacy" revision="4c20a09e8684657448f0bc97a2da4e56c94d484e" />
- <project groups="qcom" name="platform/hardware/qcom/audio" path="hardware/qcom/audio" revision="d47ff224c7b24933c701acae8d5e4c98a1bc80af" />
- <project groups="qcom" name="platform/hardware/qcom/bt" path="hardware/qcom/bt" revision="cf314a462ba06f4bd3352d5d4630edcf6edbbe97" />
- <project groups="qcom" name="platform/hardware/qcom/camera" path="hardware/qcom/camera" revision="fbf72e519ec5fe2f2720b1a3d119e2d69e172e34" />
- <project groups="qcom" name="platform/hardware/qcom/display" path="hardware/qcom/display" revision="0a611c6ae11b65fec5ada5ecaa0893541db34156" />
- <project groups="qcom" name="platform/hardware/qcom/keymaster" path="hardware/qcom/keymaster" revision="70d36107318e1d3f7abf62a56279b3f9da3ff000" />
- <project groups="qcom" name="platform/hardware/qcom/media" path="hardware/qcom/media" revision="1208a868bcb0ffaa650a7e68b51031254c775d39" />
- <project groups="qcom_msm8960" name="platform/hardware/qcom/msm8960" path="hardware/qcom/msm8960" revision="ca38ed098b05a79d20e852348f27d7c40a53f801" />
- <project groups="qcom_msm8x74" name="platform/hardware/qcom/msm8x74" path="hardware/qcom/msm8x74" revision="0c6844ea9ee14fd7bbfd6af0bcc6b6b682f46d1c" />
- <project groups="qcom" name="platform/hardware/qcom/power" path="hardware/qcom/power" revision="ff9f4538c09399030fa73e3e65a167852cb91e8f" />
- <project groups="qcom" name="platform/hardware/qcom/sensors" path="hardware/qcom/sensors" revision="07c5bcdb36158e22d33bac02eecd83d4ff1fb2f8" />
- <project groups="qcom_wlan" name="platform/hardware/qcom/wlan" path="hardware/qcom/wlan" revision="daa321b0ad8c10b454dc28d7e6dadc72196a8c7a" />
- <project groups="pdk" name="platform/hardware/ril" path="hardware/ril" revision="eb2a93458204a928edfe36f043ddb48cf5575143" />
- <project groups="exynos5" name="platform/hardware/samsung_slsi/exynos5" path="hardware/samsung_slsi/exynos5" revision="d7bd354358ecfb1e52afb3da4fc586c0822c696a" />
- <project name="platform/hardware/ti/omap3" path="hardware/ti/omap3" revision="949aad363a9cc794f9ac8fd42338ae1678e50bc1" />
- <project groups="omap4" name="platform/hardware/ti/omap4xxx" path="hardware/ti/omap4xxx" revision="c32caab84ff9edc1489ed6c8079c7d252caafc4d" />
- <project name="platform/libcore" path="libcore" revision="d343e35535a99bad32eea0defc8a3e9c23c9967f" />
- <project groups="pdk-java" name="platform/libnativehelper" path="libnativehelper" revision="b37e11d07dec2d49b576709ae8e0568a9daabd07" />
- <project name="platform/ndk" path="ndk" revision="f584f76882baf374166cf12b99cd5f3dbdf3b6b9" />
- <project name="platform/packages/apps/BasicSmsReceiver" path="packages/apps/BasicSmsReceiver" revision="80327793c4b4ebf4a6a53b72e46c477afe18f135" />
- <project name="platform/packages/apps/Bluetooth" path="packages/apps/Bluetooth" revision="7efa9db2129c99475684a2e44c4fb89cce3134bc" />
- <project name="platform/packages/apps/Browser" path="packages/apps/Browser" revision="fe4083510dc773911651456f150bf5432f81a6c0" />
- <project name="platform/packages/apps/Calculator" path="packages/apps/Calculator" revision="6c7521bb685c9b7b7c36f2077612d4b1a0e808d4" />
- <project name="platform/packages/apps/Calendar" path="packages/apps/Calendar" revision="2d72f6bed6a0eeaddbda08393063fe873c1c7922" />
- <project name="platform/packages/apps/Camera" path="packages/apps/Camera" revision="b0e357d548fb8d10896200add2b932199a96a2ea" />
- <project name="platform/packages/apps/Camera2" path="packages/apps/Camera2" revision="ece4866dc575b956801f6dab2d6c4923e272c5fa" />
- <project name="platform/packages/apps/CellBroadcastReceiver" path="packages/apps/CellBroadcastReceiver" revision="21d8baf492007cc01545905de33ecefe5d947843" />
- <project name="platform/packages/apps/CertInstaller" path="packages/apps/CertInstaller" revision="483a188feda6e9d311aef437d28f30e1fb6afeb0" />
- <project name="platform/packages/apps/Contacts" path="packages/apps/Contacts" revision="24a4f48dc5c768188143648e267889477e4185e8" />
- <project name="platform/packages/apps/ContactsCommon" path="packages/apps/ContactsCommon" revision="6ce4a3bc083a7dbcc7ffa2bebff242638d7f8e61" />
- <project name="platform/packages/apps/DeskClock" path="packages/apps/DeskClock" revision="d3bfe9223f3e70271813f48b8ef5500c3a90c0b3" />
- <project name="platform/packages/apps/Dialer" path="packages/apps/Dialer" revision="5cb300ef50e9942eef746319dd1b1b6e7c2c05e2" />
- <project name="platform/packages/apps/Email" path="packages/apps/Email" revision="22766dcf6a44416b2972c053739472317017257d" />
- <project name="platform/packages/apps/Exchange" path="packages/apps/Exchange" revision="ab03a7f9b197b6ffcc390dd5fb589067a5161148" />
- <project name="platform/packages/apps/Gallery" path="packages/apps/Gallery" revision="9595006a3347c08e6b8e31d679903bb8f77a343d" />
- <project name="platform/packages/apps/Gallery2" path="packages/apps/Gallery2" revision="9cde04ed08f3a5201a007d78b3c89f43fb3003e0" />
- <project name="platform/packages/apps/HTMLViewer" path="packages/apps/HTMLViewer" revision="7498890092c388dc59ca932e09ec79dd568b1a19" />
- <project name="platform/packages/apps/InCallUI" path="packages/apps/InCallUI" revision="d968d1a28dae45229b1be9f05bef8df13821e94d" />
- <project name="platform/packages/apps/KeyChain" path="packages/apps/KeyChain" revision="e6243f79f3ce6daeb2d8d879e6e1a684ffc1b2fc" />
- <project name="platform/packages/apps/Launcher2" path="packages/apps/Launcher2" revision="31569f6dbd44d443ff54c460b733e62fc37d2319" />
- <project name="platform/packages/apps/Launcher3" path="packages/apps/Launcher3" revision="3a9f3a7806a0153865415d6207c6812915d3f6b1" />
- <project name="platform/packages/apps/LegacyCamera" path="packages/apps/LegacyCamera" revision="d9b5d8941d1ec47ff391da2b8cc8ec90f902062f" />
- <project name="platform/packages/apps/Mms" path="packages/apps/Mms" revision="e770738ea4389afddb0b4e6c69749f9456ed0f48" />
- <project name="platform/packages/apps/Music" path="packages/apps/Music" revision="bfca689bb6605cfcd1e0c1781c707735efb7444e" />
- <project name="platform/packages/apps/MusicFX" path="packages/apps/MusicFX" revision="aaa2f99caac6f088b23de55fe2eb1e8ee305b1fb" />
- <project name="platform/packages/apps/Nfc" path="packages/apps/Nfc" revision="f62a9a00a13ba333e88cb9e8ce2553d6acf708ad" />
- <project name="platform/packages/apps/OneTimeInitializer" path="packages/apps/OneTimeInitializer" revision="01e429c08e51291315890de9677151a7e0b6ad35" />
- <project name="platform/packages/apps/PackageInstaller" path="packages/apps/PackageInstaller" revision="212398024b4491276ef00cf7fcd829c89200b6ba" />
- <project name="platform/packages/apps/Phone" path="packages/apps/Phone" revision="bf4ec5b1258628bfa6a82aa0d80f348a77bbf194" />
- <project name="platform/packages/apps/PhoneCommon" path="packages/apps/PhoneCommon" revision="16f62c5ab5c21981e63d678187ad4b44e686332b" />
- <project name="platform/packages/apps/Protips" path="packages/apps/Protips" revision="325232e344b257a3c236ead2adc60c7378f226c0" />
- <project name="platform/packages/apps/Provision" path="packages/apps/Provision" revision="78ca0db658fe6253d506916e36319e620476f809" />
- <project name="platform/packages/apps/QuickSearchBox" path="packages/apps/QuickSearchBox" revision="cfb7af6652a7fbcc0f86341edfe14dc316c9ff37" />
- <project name="platform/packages/apps/Settings" path="packages/apps/Settings" revision="2abbacb7d46657e5863eb2ef0035521ffc41a0a8" />
- <project name="platform/packages/apps/SmartCardService" path="packages/apps/SmartCardService" revision="29eae320a4bd222b5ff1c092f84f1aebba88d0b7" />
- <project name="platform/packages/apps/SoundRecorder" path="packages/apps/SoundRecorder" revision="b0e671faf142fa0b933b4f1cd7d186b1f37ebe46" />
- <project name="platform/packages/apps/SpareParts" path="packages/apps/SpareParts" revision="4db997871e3f4c3f84660815096e5276b47c5c91" />
- <project name="platform/packages/apps/SpeechRecorder" path="packages/apps/SpeechRecorder" revision="536aa74ff3a77186bef29dc9333a34688fa59d13" />
- <project name="platform/packages/apps/Stk" path="packages/apps/Stk" revision="115b75461f8e1fb432fe1a892549ca1c96cef497" />
- <project name="platform/packages/apps/Tag" path="packages/apps/Tag" revision="f830b07335bd2dd794b84507b5390f7d893fe428" />
- <project name="platform/packages/apps/TvSettings" path="packages/apps/TvSettings" revision="24e45eaf3b4badaf02e449e7f6d07c72e743f521" />
- <project name="platform/packages/apps/UnifiedEmail" path="packages/apps/UnifiedEmail" revision="d4537c907920f4470b70e91c187ef7a0b31632db" />
- <project name="platform/packages/apps/VideoEditor" path="packages/apps/VideoEditor" revision="a49ea28e1628f507ae3a564215664c29c5fa1215" />
- <project name="platform/packages/apps/VoiceDialer" path="packages/apps/VoiceDialer" revision="72df4532dfca9a82e8aef55fcdfce3026d3d3312" />
- <project name="platform/packages/experimental" path="packages/experimental" revision="588c7cda9c62fb77d23bc089a63cba8a96bc9ffb" />
- <project name="platform/packages/inputmethods/LatinIME" path="packages/inputmethods/LatinIME" revision="159474f2ae5d13308ca1b92b8a5ccd809ec6a450" />
- <project name="platform/packages/inputmethods/OpenWnn" path="packages/inputmethods/OpenWnn" revision="59aefa242169b7a51c2381daee58ff22fd1834ce" />
- <project name="platform/packages/inputmethods/PinyinIME" path="packages/inputmethods/PinyinIME" revision="49aebad1c1cfbbcaa9288ffed5161e79e57c3679" />
- <project name="platform/packages/providers/ApplicationsProvider" path="packages/providers/ApplicationsProvider" revision="3347f31bd268ca3153abe5def9361f625bd73efd" />
- <project name="platform/packages/providers/CalendarProvider" path="packages/providers/CalendarProvider" revision="20360f2fdd7ad2de1234b7ed61e3ea120f0dc635" />
- <project name="platform/packages/providers/ContactsProvider" path="packages/providers/ContactsProvider" revision="6ac2395324c0e7539434b7c68ec738f867d7ed37" />
- <project name="platform/packages/providers/DownloadProvider" path="packages/providers/DownloadProvider" revision="90e7485d68095b5fc5044dd1bc6cd4dfc485eaa3" />
- <project name="platform/packages/providers/MediaProvider" path="packages/providers/MediaProvider" revision="501b93fb00db86fe4fb53dc000f6f11587afe4b0" />
- <project name="platform/packages/providers/PartnerBookmarksProvider" path="packages/providers/PartnerBookmarksProvider" revision="96d0a80af45923767baf449fc8c735c2f71d64ae" />
- <project name="platform/packages/providers/TelephonyProvider" path="packages/providers/TelephonyProvider" revision="91e705bc7662192ea33f2bac6b0a6c79fc9bc7ab" />
- <project name="platform/packages/providers/UserDictionaryProvider" path="packages/providers/UserDictionaryProvider" revision="361f35b7b1fe758d93e0952536a298b2ed045a89" />
- <project name="platform/packages/screensavers/Basic" path="packages/screensavers/Basic" revision="4b5d9d8bea733c4e5876541831f27bf40588b516" />
- <project name="platform/packages/screensavers/PhotoTable" path="packages/screensavers/PhotoTable" revision="a5e0fee8e923cfc8682eb4431bc3997ed15f649a" />
- <project name="platform/packages/screensavers/WebView" path="packages/screensavers/WebView" revision="6e0a80f6faed6191acc8ce1b6c79eada09e9e042" />
- <project name="platform/packages/services/Telephony" path="packages/services/Telephony" revision="aa156251eb0414b8c6546c98769789dc28b38140" />
- <project name="platform/packages/wallpapers/Basic" path="packages/wallpapers/Basic" revision="2e1d8404b87caf13cde644959f28213f2db09843" />
- <project name="platform/packages/wallpapers/Galaxy4" path="packages/wallpapers/Galaxy4" revision="34b31b45e75b2e73a770fef1a2f9a862b10f1a57" />
- <project name="platform/packages/wallpapers/HoloSpiral" path="packages/wallpapers/HoloSpiral" revision="63b75996a7cfb713a6a6feb5c774ba4b46c7d6eb" />
- <project name="platform/packages/wallpapers/LivePicker" path="packages/wallpapers/LivePicker" revision="8082f92e76774607d62412e8e1191dd940f055ba" />
- <project name="platform/packages/wallpapers/MagicSmoke" path="packages/wallpapers/MagicSmoke" revision="f01ea4c07914010d52a42130acb7e67d4306fbda" />
- <project name="platform/packages/wallpapers/MusicVisualization" path="packages/wallpapers/MusicVisualization" revision="72fbcf3a8e4ebee42c36a5887432ca823ef0e4e5" />
- <project name="platform/packages/wallpapers/NoiseField" path="packages/wallpapers/NoiseField" revision="7d3e52a18a1255baffd7c0675a465f1b85b99f56" />
- <project name="platform/packages/wallpapers/PhaseBeam" path="packages/wallpapers/PhaseBeam" revision="0da76f35378677f1102e0be218ce1993c0e528b6" />
- <project groups="pdk" name="platform/pdk" path="pdk" revision="d440d4219412981df7ef90bed65acf29b2e7ea6a" />
- <project name="platform/prebuilts/android-emulator" path="prebuilts/android-emulator" revision="d6a246c24accff42eb433f5e39d14cb24faf1e58" />
- <project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/3.1" path="prebuilts/clang/darwin-x86/3.1" revision="426233405bef3c7c825095ab14256c3773894b9b" />
- <project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/3.2" path="prebuilts/clang/darwin-x86/3.2" revision="af856d77b3cbb1f6afccdc531bee991403c28907" />
- <project groups="darwin,arm" name="platform/prebuilts/clang/darwin-x86/arm/3.3" path="prebuilts/clang/darwin-x86/arm/3.3" revision="54acc51e28850485e380b55916868a4e1ff17998" />
- <project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/host/3.4" path="prebuilts/clang/darwin-x86/host/3.4" revision="a798fe00dbd92ad4e5f7123a2e2bc1d805db04f6" />
- <project groups="pdk,darwin" name="platform/prebuilts/clang/darwin-x86/host/3.5" path="prebuilts/clang/darwin-x86/host/3.5" revision="ce812d27fb78972e71482e93241b9770ca54845d" />
- <project groups="darwin,mips" name="platform/prebuilts/clang/darwin-x86/mips/3.3" path="prebuilts/clang/darwin-x86/mips/3.3" revision="da3dad928542362835082b2eda44e4dc315d65bb" />
- <project groups="darwin,x86" name="platform/prebuilts/clang/darwin-x86/x86/3.3" path="prebuilts/clang/darwin-x86/x86/3.3" revision="f67a83f35e30f92b312fbee852184c3f6dc38f34" />
- <project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/3.1" path="prebuilts/clang/linux-x86/3.1" revision="e95b4ce22c825da44d14299e1190ea39a5260bde" />
- <project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/3.2" path="prebuilts/clang/linux-x86/3.2" revision="471afab478649078ad7c75ec6b252481a59e19b8" />
- <project groups="linux,arm" name="platform/prebuilts/clang/linux-x86/arm/3.3" path="prebuilts/clang/linux-x86/arm/3.3" revision="2f6d2db9e2af3507d132cf5d286a42fe1d47f7bc" />
- <project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/host/3.4" path="prebuilts/clang/linux-x86/host/3.4" revision="fae26a039f79d780ddedcad07f164d9e6c05fc87" />
- <project groups="pdk,linux" name="platform/prebuilts/clang/linux-x86/host/3.5" path="prebuilts/clang/linux-x86/host/3.5" revision="485ffdc99707f81f4201e85bbbb937f23e1e04aa" />
- <project groups="linux,mips" name="platform/prebuilts/clang/linux-x86/mips/3.3" path="prebuilts/clang/linux-x86/mips/3.3" revision="51f8e2760628588fe268438d612d942c30d13fb2" />
- <project groups="linux,x86" name="platform/prebuilts/clang/linux-x86/x86/3.3" path="prebuilts/clang/linux-x86/x86/3.3" revision="017a8a67f92a66b29ab17772e50642a7b9d0f8e6" />
- <project name="platform/prebuilts/devtools" path="prebuilts/devtools" revision="be724be535ea50585d8c625b768ccb63aacd2926" />
- <project groups="pdk" name="platform/prebuilts/eclipse" path="prebuilts/eclipse" revision="cf9f78f8cf41b16edf9f712598a42743d5cea4af" />
- <project groups="notdefault,eclipse" name="platform/prebuilts/eclipse-build-deps" path="prebuilts/eclipse-build-deps" revision="ceb739d6a7c10f5fb5a6cf6e1f702453b1361ad3" />
- <project groups="notdefault,eclipse" name="platform/prebuilts/eclipse-build-deps-sources" path="prebuilts/eclipse-build-deps-sources" revision="8b7d8f6033ffe2d22905d10cf6d57d5bdcbe519b" />
- <project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.8" path="prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.8" revision="a261d38eaebb7ff406a6bb60237b36fc61714d46" />
- <project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.9" path="prebuilts/gcc/darwin-x86/aarch64/aarch64-linux-android-4.9" revision="32d722d66d7a935a8b6f8e6ab2d5d8bf0e9e0986" />
- <project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/arm/arm-eabi-4.8" path="prebuilts/gcc/darwin-x86/arm/arm-eabi-4.8" revision="6d08ca9f45ff685648fd13c75bf5cac4b11c19bb" />
- <project groups="pdk,darwin,arm" name="platform/prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.8" path="prebuilts/gcc/darwin-x86/arm/arm-linux-androideabi-4.8" revision="264394c23b2686ce52cd4ffb116ced127aa7f8fc" />
- <project groups="pdk,darwin" name="platform/prebuilts/gcc/darwin-x86/host/headers" path="prebuilts/gcc/darwin-x86/host/headers" revision="4ac4f7cc41cf3c9e36fc3d6cf37fd1cfa9587a68" />
- <project groups="pdk,darwin" name="platform/prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1" path="prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1" revision="8834958755acc291d126ba7ee38ac731d04f9c9e" />
- <project groups="pdk,darwin,mips" name="platform/prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.8" path="prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.8" revision="3b5bef47de8017ff39ef5bfbe801e3fa6b272fab" />
- <project name="platform/prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.9" path="prebuilts/gcc/darwin-x86/mips/mips64el-linux-android-4.9" revision="367a6529b0cc9f5ac5ca69226f583420563fd473" />
- <project groups="pdk,darwin,mips" name="platform/prebuilts/gcc/darwin-x86/mips/mipsel-linux-android-4.8" path="prebuilts/gcc/darwin-x86/mips/mipsel-linux-android-4.8" revision="ba97180acd4251d3acf08530faa4a724af74abd3" />
- <project groups="pdk,darwin,x86" name="platform/prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.8" path="prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.8" revision="c3c37a54f07d51a50e17d63dbf1d92da343f45ce" />
- <project name="platform/prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.9" path="prebuilts/gcc/darwin-x86/x86/x86_64-linux-android-4.9" revision="a7c5a1df753fd3a24494d5e1fe00211048be5c1d" />
- <project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.8" path="prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.8" revision="7334f0a7a872700d0aaf00bea75917c077c45530" />
- <project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9" path="prebuilts/gcc/linux-x86/aarch64/aarch64-linux-android-4.9" revision="a3f0180676c6b6cd9c664704f86855d3404ae4dd" />
- <project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-eabi-4.8" path="prebuilts/gcc/linux-x86/arm/arm-eabi-4.8" revision="26e93f6af47f7bd3a9beb5c102a5f45e19bfa38a" />
- <project groups="pdk,linux,arm" name="platform/prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.8" path="prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.8" revision="d9735fc81434f2af2c44d86ca57740c673c8d9bc" />
- <project groups="pdk,linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.6" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.6" revision="eb5c9f0ae36bf964f6855bde54e1b387e2c26bb6" />
- <project groups="pdk,linux" name="platform/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8" path="prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.8" revision="1b0544da652fda90a41a1f69889d6b137ce20fb9" />
- <project name="platform/prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8" path="prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8" revision="2725a175a32032fb9a63e247c176ecc3d448ea27" />
- <project groups="pdk,linux,mips" name="platform/prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.8" path="prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.8" revision="38586de6b44714b4adcf21119fe6b267e33f3ca6" />
- <project name="platform/prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.9" path="prebuilts/gcc/linux-x86/mips/mips64el-linux-android-4.9" revision="eabc7ae8ed527ee3d4517196732fa3f3e8939a28" />
- <project groups="pdk,linux,mips" name="platform/prebuilts/gcc/linux-x86/mips/mipsel-linux-android-4.8" path="prebuilts/gcc/linux-x86/mips/mipsel-linux-android-4.8" revision="c06b9b305c365163c99d4ffba49ac37ce2716024" />
- <project groups="pdk,linux,x86" name="platform/prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.8" path="prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.8" revision="e08fa7e57a573a9baa5ccd8d4b8d73cc871f9b48" />
- <project name="platform/prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9" path="prebuilts/gcc/linux-x86/x86/x86_64-linux-android-4.9" revision="e99278016e6285363bc20d1b35d4b9b5c4e8b0a0" />
- <project name="platform/prebuilts/gradle-plugin" path="prebuilts/gradle-plugin" revision="e7814a3cbb96742ff74505a1fc152cb534fbf2f9" />
- <project name="platform/prebuilts/maven_repo/android" path="prebuilts/maven_repo/android" revision="0dbe3df0f057de9e83e599b9be2ca866c673130d" />
- <project groups="pdk" name="platform/prebuilts/misc" path="prebuilts/misc" revision="3cc2e316acf9da501479bbfd85159407239994d2" />
- <project groups="pdk" name="platform/prebuilts/ndk" path="prebuilts/ndk" revision="7a8bc5c36d519c41de61765ff94245f56c4bed7a" />
- <project groups="darwin" name="platform/prebuilts/python/darwin-x86/2.7.5" path="prebuilts/python/darwin-x86/2.7.5" revision="2bdd4fd418614c7c0101147d02199d0e47c4980e" />
- <project groups="linux" name="platform/prebuilts/python/linux-x86/2.7.5" path="prebuilts/python/linux-x86/2.7.5" revision="6fbc8802b3b68d24a4ee83f164b22490cf702ff2" />
- <project groups="pdk" name="platform/prebuilts/qemu-kernel" path="prebuilts/qemu-kernel" revision="5f91f38eac40a8465f3a7e4aa298a75afcf2936a" />
- <project name="platform/prebuilts/runtime" path="prebuilts/runtime" revision="56e663b8ec9cd0df9ce5afdc7b7d56460faf44c8" />
- <project groups="pdk" name="platform/prebuilts/sdk" path="prebuilts/sdk" revision="52043ca65e06bc84779dd8d3e55e72ad04bcef59" />
- <project groups="pdk,tools" name="platform/prebuilts/tools" path="prebuilts/tools" revision="130c3d0a1a484d617531d75ddd50714f68213cbb" />
- <project name="platform/sdk" path="sdk" revision="1af9ef83f5f6c6fd9202d5bdd8d4248a4eb855aa" />
- <project groups="pdk" name="platform/system/core" path="system/core" revision="cddc97cb3a927d179a42e0fec77f0d267fcd74d1" />
- <project groups="pdk" name="platform/system/extras" path="system/extras" revision="97ed949ec7bef088ca3d06fb7b5f3bdad9a5103c" />
- <project name="platform/system/keymaster" path="system/keymaster" revision="7a70abbf29293b30bb1e7ed3a58deb40f8774a53" />
- <project groups="pdk" name="platform/system/media" path="system/media" revision="77f0f32b32adc5ba1134e7a68e4d907c4f695eb6" />
- <project groups="pdk" name="platform/system/netd" path="system/netd" revision="f5d949ef0991737af9daa7ba702cc2ec638e435b" />
- <project groups="pdk" name="platform/system/security" path="system/security" revision="0387a7fd23021b904612101b727a2060847f6169" />
- <project groups="pdk" name="platform/system/vold" path="system/vold" revision="c0c2867518eed4539444434c95fad8185a6ac08e" />
- <project groups="notdefault,tools" name="platform/tools/adt/eclipse" path="tools/adt/eclipse" revision="ede2ed86419bb4c78428f1ac09825b1a247d8e24" />
- <project groups="notdefault,tools" name="platform/tools/adt/idea" path="tools/adt/idea" revision="50a5da1af3e851df7aff37c291541000685bcad4" />
- <project groups="notdefault,tools" name="platform/tools/base" path="tools/base" revision="4dc06057ba77596807e2d28c715719f240f71549" />
- <project groups="notdefault,tools" name="platform/tools/build" path="tools/build" revision="69c4b95102b4b9862bfba68b3eaf5b7537a705ee" />
- <project groups="notdefault,tools" name="platform/tools/emulator" path="tools/emulator" revision="c427e5d5227ba9413307670a5d758d9ced394a7e" />
- <project groups="tools" name="platform/tools/external/fat32lib" path="tools/external/fat32lib" revision="3880776e41ff7def06e351720f2d162f88b58a03" />
- <project groups="tools" name="platform/tools/external/gradle" path="tools/external/gradle" revision="842b7a27df8606faa29b0875a13270701eb78dd8" />
- <project groups="notdefault,tools" name="platform/tools/idea" path="tools/idea" revision="12356153d01fcde14dd3a06948cfded92c20d068" />
- <project groups="notdefault,motodev" name="platform/tools/motodev" path="tools/motodev" revision="69989786cefbde82527960a1e100ec9afba46a98" />
- <project groups="notdefault,tools" name="platform/tools/studio/cloud" path="tools/studio/cloud" revision="839eb097c5fc73f91a722f1457a2e8e11eb4e1a5" />
- <project groups="notdefault,tools" name="platform/tools/swt" path="tools/swt" revision="aaf3131b0e4b15d39156a6e94e5da06b0183d61d" />
-</manifest>
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/buildbot_run.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/buildbot_run.py
deleted file mode 100755
index 9a2b71f1b3..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/buildbot_run.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Argument-less script to select what to run on the buildbots."""
-
-import os
-import shutil
-import subprocess
-import sys
-
-
-BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__))
-TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
-ROOT_DIR = os.path.dirname(TRUNK_DIR)
-CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
-CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
-OUT_DIR = os.path.join(TRUNK_DIR, 'out')
-
-
-def CallSubProcess(*args, **kwargs):
- """Wrapper around subprocess.call which treats errors as build exceptions."""
- with open(os.devnull) as devnull_fd:
- retcode = subprocess.call(stdin=devnull_fd, *args, **kwargs)
- if retcode != 0:
- print '@@@STEP_EXCEPTION@@@'
- sys.exit(1)
-
-
-def PrepareCmake():
- """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
- if os.environ['BUILDBOT_CLOBBER'] == '1':
- print '@@@BUILD_STEP Clobber CMake checkout@@@'
- shutil.rmtree(CMAKE_DIR)
-
- # We always build CMake 2.8.8, so no need to do anything
- # if the directory already exists.
- if os.path.isdir(CMAKE_DIR):
- return
-
- print '@@@BUILD_STEP Initialize CMake checkout@@@'
- os.mkdir(CMAKE_DIR)
-
- print '@@@BUILD_STEP Sync CMake@@@'
- CallSubProcess(
- ['git', 'clone',
- '--depth', '1',
- '--single-branch',
- '--branch', 'v2.8.8',
- '--',
- 'git://cmake.org/cmake.git',
- CMAKE_DIR],
- cwd=CMAKE_DIR)
-
- print '@@@BUILD_STEP Build CMake@@@'
- CallSubProcess(
- ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
- cwd=CMAKE_DIR)
-
- CallSubProcess(['make', 'cmake'], cwd=CMAKE_DIR)
-
-
-def GypTestFormat(title, format=None, msvs_version=None, tests=[]):
- """Run the gyp tests for a given format, emitting annotator tags.
-
- See annotator docs at:
- https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations
- Args:
- title: name of the build step to emit.
- format: gyp format to test; defaults to title.
- msvs_version: if set, exported to the test run as GYP_MSVS_VERSION.
- tests: optional list of specific test files to run.
- Returns:
- 0 for success, 1 for failure.
- """
- if not format:
- format = title
-
- print '@@@BUILD_STEP ' + title + '@@@'
- sys.stdout.flush()
- env = os.environ.copy()
- if msvs_version:
- env['GYP_MSVS_VERSION'] = msvs_version
- command = ' '.join(
- [sys.executable, 'gyp/gyptest.py',
- '--all',
- '--passed',
- '--format', format,
- '--path', CMAKE_BIN_DIR,
- '--chdir', 'gyp'] + tests)
- retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True)
- if retcode:
- # Emit failure tag, and keep going.
- print '@@@STEP_FAILURE@@@'
- return 1
- return 0
-
-
-def GypBuild():
- # Dump out/ directory.
- print '@@@BUILD_STEP cleanup@@@'
- print 'Removing %s...' % OUT_DIR
- shutil.rmtree(OUT_DIR, ignore_errors=True)
- print 'Done.'
-
- retcode = 0
- if sys.platform.startswith('linux'):
- retcode += GypTestFormat('ninja')
- retcode += GypTestFormat('make')
- PrepareCmake()
- retcode += GypTestFormat('cmake')
- elif sys.platform == 'darwin':
- retcode += GypTestFormat('ninja')
- retcode += GypTestFormat('xcode')
- retcode += GypTestFormat('make')
- elif sys.platform == 'win32':
- retcode += GypTestFormat('ninja')
- if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
- retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja',
- msvs_version='2013',
- tests=[
- r'test\generator-output\gyptest-actions.py',
- r'test\generator-output\gyptest-relocate.py',
- r'test\generator-output\gyptest-rules.py'])
- retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013')
- else:
- raise Exception('Unknown platform')
- if retcode:
- # TODO(bradnelson): once the annotator supports a postscript (section for
- # after the build proper that could be used for cumulative failures),
- # use that instead of this. This isolates the final return value so
- # that it isn't misattributed to the last stage.
- print '@@@BUILD_STEP failures@@@'
- sys.exit(retcode)
-
-
-if __name__ == '__main__':
- GypBuild()
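The @@@...@@@ strings scattered through the script above are buildbot annotator tags: the bot scans the build's stdout for them to open named steps (@@@BUILD_STEP title@@@) and to record outcomes (@@@STEP_FAILURE@@@, @@@STEP_EXCEPTION@@@). A minimal Python 2 sketch of the same protocol; the helper name and the stand-in step outcome are illustrative, not part of the script:

    import sys

    def build_step(title):
      # Open a new named step; flush so the tag reaches the bot before
      # any of the step's own output does.
      print '@@@BUILD_STEP %s@@@' % title
      sys.stdout.flush()

    build_step('compile')
    failed = False  # stand-in for the real step's outcome
    if failed:
      print '@@@STEP_FAILURE@@@'  # mark this step failed but keep going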
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/OWNERS b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/OWNERS
deleted file mode 100644
index b269c198b4..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-set noparent
-bradnelson@chromium.org
-bradnelson@google.com
-iannucci@chromium.org
-scottmg@chromium.org
-thakis@chromium.org
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/README b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/README
deleted file mode 100644
index 9428497883..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/README
+++ /dev/null
@@ -1,3 +0,0 @@
-cq_config.json describes the trybots that must pass in order
-to land a change through the commit queue.
-Comments are here as the file is strictly JSON.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/cq_config.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/cq_config.json
deleted file mode 100644
index 656c21e54f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/buildbot/commit_queue/cq_config.json
+++ /dev/null
@@ -1,15 +0,0 @@
-{
- "trybots": {
- "launched": {
- "tryserver.nacl": {
- "gyp-presubmit": ["defaulttests"],
- "gyp-linux": ["defaulttests"],
- "gyp-mac": ["defaulttests"],
- "gyp-win32": ["defaulttests"],
- "gyp-win64": ["defaulttests"]
- }
- },
- "triggered": {
- }
- }
-}
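In this config, masters (here tryserver.nacl) map to builders and the test filters each one runs; the "launched" set holds the bots the commit queue itself starts and must see pass, while "triggered" (empty here) would hold bots started indirectly. A small Python 2 sketch of walking that shape, assuming the file sits in the current directory:

    import json

    with open('cq_config.json') as f:
      cq = json.load(f)

    # Enumerate every trybot the commit queue launches directly.
    for master, builders in sorted(cq['trybots']['launched'].items()):
      for builder, tests in sorted(builders.items()):
        print '%s/%s runs %s' % (master, builder, ', '.join(tests))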
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/codereview.settings b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/codereview.settings
deleted file mode 100644
index faf37f1145..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/codereview.settings
+++ /dev/null
@@ -1,10 +0,0 @@
-# This file is used by gcl to get repository specific information.
-CODE_REVIEW_SERVER: codereview.chromium.org
-CC_LIST: gyp-developer@googlegroups.com
-VIEW_VC: https://chromium.googlesource.com/external/gyp/+/
-TRY_ON_UPLOAD: False
-TRYSERVER_PROJECT: gyp
-TRYSERVER_PATCHLEVEL: 1
-TRYSERVER_ROOT: gyp
-TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl
-PROJECT: gyp
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
deleted file mode 100644
index 8bca510815..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2013 Google Inc. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is
-// then used during the final link for modules that have large PDBs. Otherwise,
-// the linker will generate a pdb with a page size of 1KB, which imposes a limit
-// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler
-// (rather than the linker), this limit is avoided. With this in place PDBs may
-// grow to 2GB.
-//
-// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp
deleted file mode 100755
index 1b8b9bdfb0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-# Copyright 2013 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-set -e
-base=$(dirname "$0")
-exec python "${base}/gyp_main.py" "$@"
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp.bat b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp.bat
deleted file mode 100644
index ad797c3081..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-@rem Copyright (c) 2009 Google Inc. All rights reserved.
-@rem Use of this source code is governed by a BSD-style license that can be
-@rem found in the LICENSE file.
-
-@python "%~dp0gyp_main.py" %*
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp_main.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp_main.py
deleted file mode 100755
index 25a6eba94a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyp_main.py
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-# Make sure we're using the version of pylib in this repo, not one installed
-# elsewhere on the system.
-sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
-import gyp
-
-if __name__ == '__main__':
- sys.exit(gyp.script_main())
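The sys.path.insert(0, ...) above is the whole point of this wrapper: it pins "import gyp" to the pylib directory bundled next to the script, rather than whatever gyp may be installed system-wide. The same vendoring pattern in isolation, as a Python 2 sketch (using __file__, a close stand-in for the sys.argv[0] used above):

    import os
    import sys

    # Put the vendored package directory ahead of every other entry on
    # the import path, so the bundled copy always wins.
    here = os.path.dirname(os.path.abspath(__file__))
    sys.path.insert(0, os.path.join(here, 'pylib'))
    print sys.path[0]  # .../pylib -- searched before site-packages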
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyptest.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyptest.py
deleted file mode 100755
index 8e4fc47d5c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/gyptest.py
+++ /dev/null
@@ -1,274 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-__doc__ = """
-gyptest.py -- test runner for GYP tests.
-"""
-
-import os
-import optparse
-import shlex
-import subprocess
-import sys
-
-class CommandRunner(object):
- """
- Executor class for commands, including "commands" implemented by
- Python functions.
- """
- verbose = True
- active = True
-
- def __init__(self, dictionary={}):
- self.subst_dictionary(dictionary)
-
- def subst_dictionary(self, dictionary):
- self._subst_dictionary = dictionary
-
- def subst(self, string, dictionary=None):
- """
- Substitutes (via the format operator) the values in the specified
- dictionary into the specified command.
-
- The command can be a (function, args) tuple. In all cases, we
- perform substitution on strings and don't worry if something isn't
- a string. (It's probably a Python function to be executed.)
- """
- if dictionary is None:
- dictionary = self._subst_dictionary
- if dictionary:
- try:
- string = string % dictionary
- except TypeError:
- pass
- return string
-
- def display(self, command, stdout=None, stderr=None):
- if not self.verbose:
- return
- if type(command) == type(()):
- func = command[0]
- args = command[1:]
- s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args)))
- elif type(command) == type([]):
- # TODO: quote arguments containing spaces
- # TODO: handle meta characters?
- s = ' '.join(command)
- else:
- s = self.subst(command)
- if not s.endswith('\n'):
- s += '\n'
- sys.stdout.write(s)
- sys.stdout.flush()
-
- def execute(self, command, stdout=None, stderr=None):
- """
- Executes a single command.
- """
- if not self.active:
- return 0
- if type(command) == type(''):
- command = self.subst(command)
- cmdargs = shlex.split(command)
- if cmdargs[0] == 'cd':
- command = (os.chdir,) + tuple(cmdargs[1:])
- if type(command) == type(()):
- func = command[0]
- args = command[1:]
- return func(*args)
- else:
- if stdout is sys.stdout:
- # Same as passing sys.stdout, except python2.4 doesn't fail on it.
- subout = None
- else:
- # Open pipe for anything else so Popen works on python2.4.
- subout = subprocess.PIPE
- if stderr is sys.stderr:
- # Same as passing sys.stderr, except python2.4 doesn't fail on it.
- suberr = None
- elif stderr is None:
- # Merge with stdout if stderr isn't specified.
- suberr = subprocess.STDOUT
- else:
- # Open pipe for anything else so Popen works on python2.4.
- suberr = subprocess.PIPE
- p = subprocess.Popen(command,
- shell=(sys.platform == 'win32'),
- stdout=subout,
- stderr=suberr)
- p.wait()
- if stdout is None:
- self.stdout = p.stdout.read()
- elif stdout is not sys.stdout:
- stdout.write(p.stdout.read())
- if stderr not in (None, sys.stderr):
- stderr.write(p.stderr.read())
- return p.returncode
-
- def run(self, command, display=None, stdout=None, stderr=None):
- """
- Runs a single command, displaying it first.
- """
- if display is None:
- display = command
- self.display(display)
- return self.execute(command, stdout, stderr)
-
-
-class Unbuffered(object):
- def __init__(self, fp):
- self.fp = fp
- def write(self, arg):
- self.fp.write(arg)
- self.fp.flush()
- def __getattr__(self, attr):
- return getattr(self.fp, attr)
-
-sys.stdout = Unbuffered(sys.stdout)
-sys.stderr = Unbuffered(sys.stderr)
-
-
-def is_test_name(f):
- return f.startswith('gyptest') and f.endswith('.py')
-
-
-def find_all_gyptest_files(directory):
- result = []
- for root, dirs, files in os.walk(directory):
- if '.svn' in dirs:
- dirs.remove('.svn')
- result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
- result.sort()
- return result
-
-
-def main(argv=None):
- if argv is None:
- argv = sys.argv
-
- usage = "gyptest.py [-ahlnq] [-f formats] [test ...]"
- parser = optparse.OptionParser(usage=usage)
- parser.add_option("-a", "--all", action="store_true",
- help="run all tests")
- parser.add_option("-C", "--chdir", action="store", default=None,
- help="chdir to the specified directory")
- parser.add_option("-f", "--format", action="store", default='',
- help="run tests with the specified formats")
- parser.add_option("-G", '--gyp_option', action="append", default=[],
- help="Add -G options to the gyp command line")
- parser.add_option("-l", "--list", action="store_true",
- help="list available tests and exit")
- parser.add_option("-n", "--no-exec", action="store_true",
- help="no execute, just print the command line")
- parser.add_option("--passed", action="store_true",
- help="report passed tests")
- parser.add_option("--path", action="append", default=[],
- help="additional $PATH directory")
- parser.add_option("-q", "--quiet", action="store_true",
- help="quiet, don't print test command lines")
- opts, args = parser.parse_args(argv[1:])
-
- if opts.chdir:
- os.chdir(opts.chdir)
-
- if opts.path:
- extra_path = [os.path.abspath(p) for p in opts.path]
- extra_path = os.pathsep.join(extra_path)
- os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
-
- if not args:
- if not opts.all:
- sys.stderr.write('Specify -a to get all tests.\n')
- return 1
- args = ['test']
-
- tests = []
- for arg in args:
- if os.path.isdir(arg):
- tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
- else:
- if not is_test_name(os.path.basename(arg)):
- print >>sys.stderr, arg, 'is not a valid gyp test name.'
- sys.exit(1)
- tests.append(arg)
-
- if opts.list:
- for test in tests:
- print test
- sys.exit(0)
-
- CommandRunner.verbose = not opts.quiet
- CommandRunner.active = not opts.no_exec
- cr = CommandRunner()
-
- os.environ['PYTHONPATH'] = os.path.abspath('test/lib')
- if not opts.quiet:
- sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH'])
-
- passed = []
- failed = []
- no_result = []
-
- if opts.format:
- format_list = opts.format.split(',')
- else:
- # TODO: not duplicate this mapping from pylib/gyp/__init__.py
- format_list = {
- 'aix5': ['make'],
- 'freebsd7': ['make'],
- 'freebsd8': ['make'],
- 'openbsd5': ['make'],
- 'cygwin': ['msvs'],
- 'win32': ['msvs', 'ninja'],
- 'linux2': ['make', 'ninja'],
- 'linux3': ['make', 'ninja'],
- 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'],
- }[sys.platform]
-
- for format in format_list:
- os.environ['TESTGYP_FORMAT'] = format
- if not opts.quiet:
- sys.stdout.write('TESTGYP_FORMAT=%s\n' % format)
-
- gyp_options = []
- for option in opts.gyp_option:
- gyp_options += ['-G', option]
- if gyp_options and not opts.quiet:
- sys.stdout.write('Extra Gyp options: %s\n' % gyp_options)
-
- for test in tests:
- status = cr.run([sys.executable, test] + gyp_options,
- stdout=sys.stdout,
- stderr=sys.stderr)
- if status == 2:
- no_result.append(test)
- elif status:
- failed.append(test)
- else:
- passed.append(test)
-
- if not opts.quiet:
- def report(description, tests):
- if tests:
- if len(tests) == 1:
- sys.stdout.write("\n%s the following test:\n" % description)
- else:
- fmt = "\n%s the following %d tests:\n"
- sys.stdout.write(fmt % (description, len(tests)))
- sys.stdout.write("\t" + "\n\t".join(tests) + "\n")
-
- if opts.passed:
- report("Passed", passed)
- report("Failed", failed)
- report("No result from", no_result)
-
- if failed:
- return 1
- else:
- return 0
-
-
-if __name__ == "__main__":
- sys.exit(main())
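CommandRunner above accepts three command shapes: a string (run through %-substitution against the runner's dictionary, then shell-split), an argv list, and a (function, args...) tuple called in-process, which is how a leading "cd" gets rewritten into os.chdir. A condensed Python 2 sketch of that dispatch convention, not the class itself:

    import os

    def run(command, subst=None):
      # Strings get %-substitution from the dictionary; tuples are
      # (function, args...) and are invoked directly in-process.
      if isinstance(command, str):
        print command % (subst or {})
        return 0
      return command[0](*command[1:])

    run('echo %(who)s', {'who': 'gyp'})  # prints: echo gyp
    run((os.chdir, os.getcwd()))         # a "command" implemented in Python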
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
deleted file mode 100644
index 593f0e5b0b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
+++ /dev/null
@@ -1,340 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""New implementation of Visual Studio project generation."""
-
-import os
-import random
-
-import gyp.common
-
-# hashlib is supplied as of Python 2.5 as the replacement interface for md5
-# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if
-# available, avoiding a deprecation warning under 2.6. Import md5 otherwise,
-# preserving 2.4 compatibility.
-try:
- import hashlib
- _new_md5 = hashlib.md5
-except ImportError:
- import md5
- _new_md5 = md5.new
-
-
-# Initialize random number generator
-random.seed()
-
-# GUIDs for project types
-ENTRY_TYPE_GUIDS = {
- 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}',
- 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}',
-}
-
-#------------------------------------------------------------------------------
-# Helper functions
-
-
-def MakeGuid(name, seed='msvs_new'):
- """Returns a GUID for the specified target name.
-
- Args:
- name: Target name.
- seed: Seed for MD5 hash.
- Returns:
- A GUID-like string calculated from the name and seed.
-
- This generates something which looks like a GUID, but depends only on the
- name and seed. This means the same name/seed will always generate the same
- GUID, so that projects and solutions which refer to each other can
- determine the GUID to refer to explicitly. It also means that the GUID will
- not change when the project for a target is rebuilt.
- """
- # Calculate an MD5 signature for the seed and name.
- d = _new_md5(str(seed) + str(name)).hexdigest().upper()
- # Convert most of the signature to GUID form (discard the rest)
- guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20]
- + '-' + d[20:32] + '}')
- return guid
-
-#------------------------------------------------------------------------------
-
-
-class MSVSSolutionEntry(object):
- def __cmp__(self, other):
- # Sort by name then guid (so things are in order on vs2008).
- return cmp((self.name, self.get_guid()), (other.name, other.get_guid()))
-
-
-class MSVSFolder(MSVSSolutionEntry):
- """Folder in a Visual Studio project or solution."""
-
- def __init__(self, path, name = None, entries = None,
- guid = None, items = None):
- """Initializes the folder.
-
- Args:
- path: Full path to the folder.
- name: Name of the folder.
- entries: List of folder entries to nest inside this folder. May contain
- Folder or Project objects. May be None, if the folder is empty.
- guid: GUID to use for folder, if not None.
- items: List of solution items to include in the folder project. May be
- None, if the folder does not directly contain items.
- """
- if name:
- self.name = name
- else:
- # Use last layer.
- self.name = os.path.basename(path)
-
- self.path = path
- self.guid = guid
-
- # Copy passed lists (or set to empty lists)
- self.entries = sorted(list(entries or []))
- self.items = list(items or [])
-
- self.entry_type_guid = ENTRY_TYPE_GUIDS['folder']
-
- def get_guid(self):
- if self.guid is None:
- # Use consistent guids for folders (so things don't regenerate).
- self.guid = MakeGuid(self.path, seed='msvs_folder')
- return self.guid
-
-
-#------------------------------------------------------------------------------
-
-
-class MSVSProject(MSVSSolutionEntry):
- """Visual Studio project."""
-
- def __init__(self, path, name = None, dependencies = None, guid = None,
- spec = None, build_file = None, config_platform_overrides = None,
- fixpath_prefix = None):
- """Initializes the project.
-
- Args:
- path: Absolute path to the project file.
- name: Name of project. If None, the name will be the same as the base
- name of the project file.
- dependencies: List of other Project objects this project is dependent
- upon, if not None.
- guid: GUID to use for project, if not None.
- spec: Dictionary specifying how to build this project.
- build_file: Filename of the .gyp file that the vcproj file comes from.
- config_platform_overrides: optional dict of configuration platforms to
- use in place of the default for this target.
- fixpath_prefix: the path used to adjust the behavior of _fixpath
- """
- self.path = path
- self.guid = guid
- self.spec = spec
- self.build_file = build_file
- # Use project filename if name not specified
- self.name = name or os.path.splitext(os.path.basename(path))[0]
-
- # Copy passed lists (or set to empty lists)
- self.dependencies = list(dependencies or [])
-
- self.entry_type_guid = ENTRY_TYPE_GUIDS['project']
-
- if config_platform_overrides:
- self.config_platform_overrides = config_platform_overrides
- else:
- self.config_platform_overrides = {}
- self.fixpath_prefix = fixpath_prefix
- self.msbuild_toolset = None
-
- def set_dependencies(self, dependencies):
- self.dependencies = list(dependencies or [])
-
- def get_guid(self):
- if self.guid is None:
- # Set GUID from path
- # TODO(rspangler): This is fragile.
- # 1. We can't just use the project filename sans path, since there could
- # be multiple projects with the same base name (for example,
- # foo/unittest.vcproj and bar/unittest.vcproj).
- # 2. The path needs to be relative to $SOURCE_ROOT, so that the project
- # GUID is the same whether it's included from base/base.sln or
- # foo/bar/baz/baz.sln.
- # 3. The GUID needs to be the same each time this builder is invoked, so
- # that we don't need to rebuild the solution when the project changes.
- # 4. We should be able to handle pre-built project files by reading the
- # GUID from the files.
- self.guid = MakeGuid(self.name)
- return self.guid
-
- def set_msbuild_toolset(self, msbuild_toolset):
- self.msbuild_toolset = msbuild_toolset
-
-#------------------------------------------------------------------------------
-
-
-class MSVSSolution(object):
- """Visual Studio solution."""
-
- def __init__(self, path, version, entries=None, variants=None,
- websiteProperties=True):
- """Initializes the solution.
-
- Args:
- path: Path to solution file.
- version: Format version to emit.
- entries: List of entries in solution. May contain Folder or Project
- objects. May be None, if the folder is empty.
- variants: List of build variant strings. If None, a default list will
- be used.
- websiteProperties: Flag to decide if the website properties section
- is generated.
- """
- self.path = path
- self.websiteProperties = websiteProperties
- self.version = version
-
- # Copy passed lists (or set to empty lists)
- self.entries = list(entries or [])
-
- if variants:
- # Copy passed list
- self.variants = variants[:]
- else:
- # Use default
- self.variants = ['Debug|Win32', 'Release|Win32']
- # TODO(rspangler): Need to be able to handle a mapping of solution config
- # to project config. Should we be able to handle variants being a dict,
- # or add a separate variant_map variable? If it's a dict, we can't
- # guarantee the order of variants since dict keys aren't ordered.
-
-
- # TODO(rspangler): Automatically write to disk for now; should delay until
- # node-evaluation time.
- self.Write()
-
-
- def Write(self, writer=gyp.common.WriteOnDiff):
- """Writes the solution file to disk.
-
- Raises:
- IndexError: An entry appears multiple times.
- """
- # Walk the entry tree and collect all the folders and projects.
- all_entries = set()
- entries_to_check = self.entries[:]
- while entries_to_check:
- e = entries_to_check.pop(0)
-
- # If this entry has been visited, nothing to do.
- if e in all_entries:
- continue
-
- all_entries.add(e)
-
- # If this is a folder, check its entries too.
- if isinstance(e, MSVSFolder):
- entries_to_check += e.entries
-
- all_entries = sorted(all_entries)
-
- # Open file and print header
- f = writer(self.path)
- f.write('Microsoft Visual Studio Solution File, '
- 'Format Version %s\r\n' % self.version.SolutionVersion())
- f.write('# %s\r\n' % self.version.Description())
-
- # Project entries
- sln_root = os.path.split(self.path)[0]
- for e in all_entries:
- relative_path = gyp.common.RelativePath(e.path, sln_root)
- # msbuild does not accept an empty folder_name.
- # use '.' in case relative_path is empty.
- folder_name = relative_path.replace('/', '\\') or '.'
- f.write('Project("%s") = "%s", "%s", "%s"\r\n' % (
- e.entry_type_guid, # Entry type GUID
- e.name, # Folder name
- folder_name, # Folder name (again)
- e.get_guid(), # Entry GUID
- ))
-
- # TODO(rspangler): Need a way to configure this stuff
- if self.websiteProperties:
- f.write('\tProjectSection(WebsiteProperties) = preProject\r\n'
- '\t\tDebug.AspNetCompiler.Debug = "True"\r\n'
- '\t\tRelease.AspNetCompiler.Debug = "False"\r\n'
- '\tEndProjectSection\r\n')
-
- if isinstance(e, MSVSFolder):
- if e.items:
- f.write('\tProjectSection(SolutionItems) = preProject\r\n')
- for i in e.items:
- f.write('\t\t%s = %s\r\n' % (i, i))
- f.write('\tEndProjectSection\r\n')
-
- if isinstance(e, MSVSProject):
- if e.dependencies:
- f.write('\tProjectSection(ProjectDependencies) = postProject\r\n')
- for d in e.dependencies:
- f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid()))
- f.write('\tEndProjectSection\r\n')
-
- f.write('EndProject\r\n')
-
- # Global section
- f.write('Global\r\n')
-
- # Configurations (variants)
- f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n')
- for v in self.variants:
- f.write('\t\t%s = %s\r\n' % (v, v))
- f.write('\tEndGlobalSection\r\n')
-
- # Sort config guids for easier diffing of solution changes.
- config_guids = []
- config_guids_overrides = {}
- for e in all_entries:
- if isinstance(e, MSVSProject):
- config_guids.append(e.get_guid())
- config_guids_overrides[e.get_guid()] = e.config_platform_overrides
- config_guids.sort()
-
- f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n')
- for g in config_guids:
- for v in self.variants:
- nv = config_guids_overrides[g].get(v, v)
- # Pick which project configuration to build for this solution
- # configuration.
- f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % (
- g, # Project GUID
- v, # Solution build configuration
- nv, # Project build config for that solution config
- ))
-
- # Enable project in this solution configuration.
- f.write('\t\t%s.%s.Build.0 = %s\r\n' % (
- g, # Project GUID
- v, # Solution build configuration
- nv, # Project build config for that solution config
- ))
- f.write('\tEndGlobalSection\r\n')
-
- # TODO(rspangler): Should be able to configure this stuff too (though I've
- # never seen this be any different)
- f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n')
- f.write('\t\tHideSolutionNode = FALSE\r\n')
- f.write('\tEndGlobalSection\r\n')
-
- # Folder mappings
- # Omit this section if there are no folders
- if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
- f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
- for e in all_entries:
- if not isinstance(e, MSVSFolder):
- continue # Does not apply to projects, only folders
- for subentry in e.entries:
- f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
- f.write('\tEndGlobalSection\r\n')
-
- f.write('EndGlobal\r\n')
-
- f.close()
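The property MakeGuid is built around is determinism: hash the seed plus the name with MD5, uppercase the digest, and slice it into the 8-4-4-4-12 GUID layout, so regenerating a solution never churns project GUIDs. Reproducing that computation standalone as a Python 2 sketch (mirroring the code above, modern hashlib only; the target name is illustrative):

    import hashlib

    def make_guid(name, seed='msvs_new'):
      # Same seed and name always yield the same digest, hence the
      # same GUID across regenerations.
      d = hashlib.md5(str(seed) + str(name)).hexdigest().upper()
      return '{%s-%s-%s-%s-%s}' % (
          d[:8], d[8:12], d[12:16], d[16:20], d[20:32])

    assert make_guid('base_unittests') == make_guid('base_unittests')
    print make_guid('base_unittests')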
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
deleted file mode 100644
index db1ceede34..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
+++ /dev/null
@@ -1,208 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio project reader/writer."""
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-#------------------------------------------------------------------------------
-
-
-class Tool(object):
- """Visual Studio tool."""
-
- def __init__(self, name, attrs=None):
- """Initializes the tool.
-
- Args:
- name: Tool name.
- attrs: Dict of tool attributes; may be None.
- """
- self._attrs = attrs or {}
- self._attrs['Name'] = name
-
- def _GetSpecification(self):
- """Creates an element for the tool.
-
- Returns:
- A new xml.dom.Element for the tool.
- """
- return ['Tool', self._attrs]
-
-class Filter(object):
- """Visual Studio filter - that is, a virtual folder."""
-
- def __init__(self, name, contents=None):
- """Initializes the folder.
-
- Args:
- name: Filter (folder) name.
- contents: List of filenames and/or Filter objects contained.
- """
- self.name = name
- self.contents = list(contents or [])
-
-
-#------------------------------------------------------------------------------
-
-
-class Writer(object):
- """Visual Studio XML project writer."""
-
- def __init__(self, project_path, version, name, guid=None, platforms=None):
- """Initializes the project.
-
- Args:
- project_path: Path to the project file.
- version: Format version to emit.
- name: Name of the project.
- guid: GUID to use for project, if not None.
- platforms: Array of string, the supported platforms. If null, ['Win32']
- """
- self.project_path = project_path
- self.version = version
- self.name = name
- self.guid = guid
-
- # Default to Win32 for platforms.
- if not platforms:
- platforms = ['Win32']
-
- # Initialize the specifications of the various sections.
- self.platform_section = ['Platforms']
- for platform in platforms:
- self.platform_section.append(['Platform', {'Name': platform}])
- self.tool_files_section = ['ToolFiles']
- self.configurations_section = ['Configurations']
- self.files_section = ['Files']
-
- # Keep a dict keyed on filename to speed up access.
- self.files_dict = dict()
-
- def AddToolFile(self, path):
- """Adds a tool file to the project.
-
- Args:
- path: Relative path from project to tool file.
- """
- self.tool_files_section.append(['ToolFile', {'RelativePath': path}])
-
- def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
- """Returns the specification for a configuration.
-
- Args:
- config_type: Type of configuration node.
- config_name: Configuration name.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
- Returns:
- """
- # Handle defaults
- if not attrs:
- attrs = {}
- if not tools:
- tools = []
-
- # Add configuration node and its attributes
- node_attrs = attrs.copy()
- node_attrs['Name'] = config_name
- specification = [config_type, node_attrs]
-
- # Add tool nodes and their attributes
- if tools:
- for t in tools:
- if isinstance(t, Tool):
- specification.append(t._GetSpecification())
- else:
- specification.append(Tool(t)._GetSpecification())
- return specification
-
-
- def AddConfig(self, name, attrs=None, tools=None):
- """Adds a configuration to the project.
-
- Args:
- name: Configuration name.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
- """
- spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools)
- self.configurations_section.append(spec)
-
- def _AddFilesToNode(self, parent, files):
- """Adds files and/or filters to the parent node.
-
- Args:
- parent: Destination node
- files: A list of Filter objects and/or relative paths to files.
-
- Will call itself recursively, if the files list contains Filter objects.
- """
- for f in files:
- if isinstance(f, Filter):
- node = ['Filter', {'Name': f.name}]
- self._AddFilesToNode(node, f.contents)
- else:
- node = ['File', {'RelativePath': f}]
- self.files_dict[f] = node
- parent.append(node)
-
- def AddFiles(self, files):
- """Adds files to the project.
-
- Args:
- files: A list of Filter objects and/or relative paths to files.
-
- This makes a copy of the file/filter tree at the time of this call. If you
- later add files to a Filter object which was passed into a previous call
- to AddFiles(), it will not be reflected in this project.
- """
- self._AddFilesToNode(self.files_section, files)
- # TODO(rspangler) This also doesn't handle adding files to an existing
- # filter. That is, it doesn't merge the trees.
-
- def AddFileConfig(self, path, config, attrs=None, tools=None):
- """Adds a configuration to a file.
-
- Args:
- path: Relative path to the file.
- config: Name of configuration to add.
- attrs: Dict of configuration attributes; may be None.
- tools: List of tools (strings or Tool objects); may be None.
-
- Raises:
- ValueError: Relative path does not match any file added via AddFiles().
- """
- # Find the file node with the right relative path
- parent = self.files_dict.get(path)
- if not parent:
- raise ValueError('AddFileConfig: file "%s" not in project.' % path)
-
- # Add the config to the file node
- spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
- tools)
- parent.append(spec)
-
- def WriteIfChanged(self):
- """Writes the project file."""
- # First create XML content definition
- content = [
- 'VisualStudioProject',
- {'ProjectType': 'Visual C++',
- 'Version': self.version.ProjectVersion(),
- 'Name': self.name,
- 'ProjectGUID': self.guid,
- 'RootNamespace': self.name,
- 'Keyword': 'Win32Proj'
- },
- self.platform_section,
- self.tool_files_section,
- self.configurations_section,
- ['References'], # empty section
- self.files_section,
- ['Globals'] # empty section
- ]
- easy_xml.WriteXmlIfChanged(content, self.project_path,
- encoding="Windows-1252")
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
deleted file mode 100644
index 4985756bdd..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ /dev/null
@@ -1,1096 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-r"""Code to validate and convert settings of the Microsoft build tools.
-
-This file contains code to validate and convert settings of the Microsoft
-build tools. The functions ConvertToMSBuildSettings(), ValidateMSVSSettings(),
-and ValidateMSBuildSettings() are the entry points.
-
-This file was created by comparing the projects created by Visual Studio 2008
-and Visual Studio 2010 for all available settings through the user interface.
-The MSBuild schemas were also considered. They are typically found in the
-MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild
-"""
-
-import sys
-import re
-
-# Dictionaries of settings validators. The key is the tool name, the value is
-# a dictionary mapping setting names to validation functions.
-_msvs_validators = {}
-_msbuild_validators = {}
-
-
-# A dictionary of settings converters. The key is the tool name, the value is
-# a dictionary mapping setting names to conversion functions.
-_msvs_to_msbuild_converters = {}
-
-
-# Tool name mapping from MSVS to MSBuild.
-_msbuild_name_of_tool = {}
-
-
-class _Tool(object):
- """Represents a tool used by MSVS or MSBuild.
-
- Attributes:
- msvs_name: The name of the tool in MSVS.
- msbuild_name: The name of the tool in MSBuild.
- """
-
- def __init__(self, msvs_name, msbuild_name):
- self.msvs_name = msvs_name
- self.msbuild_name = msbuild_name
-
-
-def _AddTool(tool):
- """Adds a tool to the four dictionaries used to process settings.
-
- This only defines the tool. Each setting also needs to be added.
-
- Args:
- tool: The _Tool object to be added.
- """
- _msvs_validators[tool.msvs_name] = {}
- _msbuild_validators[tool.msbuild_name] = {}
- _msvs_to_msbuild_converters[tool.msvs_name] = {}
- _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name
-
-
-def _GetMSBuildToolSettings(msbuild_settings, tool):
- """Returns an MSBuild tool dictionary. Creates it if needed."""
- return msbuild_settings.setdefault(tool.msbuild_name, {})
-
-
-class _Type(object):
- """Type of settings (Base class)."""
-
- def ValidateMSVS(self, value):
- """Verifies that the value is legal for MSVS.
-
- Args:
- value: the value to check for this type.
-
- Raises:
- ValueError if value is not valid for MSVS.
- """
-
- def ValidateMSBuild(self, value):
- """Verifies that the value is legal for MSBuild.
-
- Args:
- value: the value to check for this type.
-
- Raises:
- ValueError if value is not valid for MSBuild.
- """
-
- def ConvertToMSBuild(self, value):
- """Returns the MSBuild equivalent of the MSVS value given.
-
- Args:
- value: the MSVS value to convert.
-
- Returns:
- the MSBuild equivalent.
-
- Raises:
- ValueError if value is not valid.
- """
- return value
-
-
-class _String(_Type):
- """A setting that's just a string."""
-
- def ValidateMSVS(self, value):
- if not isinstance(value, basestring):
- raise ValueError('expected string; got %r' % value)
-
- def ValidateMSBuild(self, value):
- if not isinstance(value, basestring):
- raise ValueError('expected string; got %r' % value)
-
- def ConvertToMSBuild(self, value):
- # Convert the macros
- return ConvertVCMacrosToMSBuild(value)
-
-
-class _StringList(_Type):
- """A settings that's a list of strings."""
-
- def ValidateMSVS(self, value):
- if not isinstance(value, basestring) and not isinstance(value, list):
- raise ValueError('expected string list; got %r' % value)
-
- def ValidateMSBuild(self, value):
- if not isinstance(value, basestring) and not isinstance(value, list):
- raise ValueError('expected string list; got %r' % value)
-
- def ConvertToMSBuild(self, value):
- # Convert the macros
- if isinstance(value, list):
- return [ConvertVCMacrosToMSBuild(i) for i in value]
- else:
- return ConvertVCMacrosToMSBuild(value)
-
-
-class _Boolean(_Type):
- """Boolean settings, can have the values 'false' or 'true'."""
-
- def _Validate(self, value):
- if value != 'true' and value != 'false':
- raise ValueError('expected bool; got %r' % value)
-
- def ValidateMSVS(self, value):
- self._Validate(value)
-
- def ValidateMSBuild(self, value):
- self._Validate(value)
-
- def ConvertToMSBuild(self, value):
- self._Validate(value)
- return value
-
-
-class _Integer(_Type):
- """Integer settings."""
-
- def __init__(self, msbuild_base=10):
- _Type.__init__(self)
- self._msbuild_base = msbuild_base
-
- def ValidateMSVS(self, value):
-    # Try to convert; this will raise ValueError if invalid.
- self.ConvertToMSBuild(value)
-
- def ValidateMSBuild(self, value):
-    # Try to convert; this will raise ValueError if invalid.
- int(value, self._msbuild_base)
-
- def ConvertToMSBuild(self, value):
-    msbuild_format = '%d' if self._msbuild_base == 10 else '0x%04x'
- return msbuild_format % int(value)
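-
-# Illustrative sketch (editor's addition): _Integer renders base-10 settings
-# with '%d' and base-16 settings as zero-padded hex, which is how the
-# resource compiler's Culture value is converted in the unit tests.
-#
-#   _Integer().ConvertToMSBuild('33')                   # -> '33'
-#   _Integer(msbuild_base=16).ConvertToMSBuild('1003')  # -> '0x03eb'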
-
-
-class _Enumeration(_Type):
- """Type of settings that is an enumeration.
-
- In MSVS, the values are indexes like '0', '1', and '2'.
- MSBuild uses text labels that are more representative, like 'Win32'.
-
- Constructor args:
- label_list: an array of MSBuild labels that correspond to the MSVS index.
- In the rare cases where MSVS has skipped an index value, None is
- used in the array to indicate the unused spot.
- new: an array of labels that are new to MSBuild.
- """
-
- def __init__(self, label_list, new=None):
- _Type.__init__(self)
- self._label_list = label_list
- self._msbuild_values = set(value for value in label_list
- if value is not None)
- if new is not None:
- self._msbuild_values.update(new)
-
- def ValidateMSVS(self, value):
- # Try to convert. It will raise an exception if not valid.
- self.ConvertToMSBuild(value)
-
- def ValidateMSBuild(self, value):
- if value not in self._msbuild_values:
- raise ValueError('unrecognized enumerated value %s' % value)
-
- def ConvertToMSBuild(self, value):
- index = int(value)
- if index < 0 or index >= len(self._label_list):
- raise ValueError('index value (%d) not in expected range [0, %d)' %
- (index, len(self._label_list)))
- label = self._label_list[index]
- if label is None:
- raise ValueError('converted value for %s not specified.' % value)
- return label
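-
-# Illustrative sketch (editor's addition): an MSVS index is mapped to the
-# MSBuild label at that position, mirroring the RuntimeLibrary table
-# registered further down in this file.
-#
-#   e = _Enumeration(['MultiThreaded',          # /MT
-#                     'MultiThreadedDebug',     # /MTd
-#                     'MultiThreadedDLL',       # /MD
-#                     'MultiThreadedDebugDLL']) # /MDd
-#   e.ConvertToMSBuild('2')   # -> 'MultiThreadedDLL'
-#   e.ConvertToMSBuild('9')   # raises ValueError (out of range)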
-
-
-# Instantiate the various generic types.
-_boolean = _Boolean()
-_integer = _Integer()
-# For now, we don't do any special validation on these types:
-_string = _String()
-_file_name = _String()
-_folder_name = _String()
-_file_list = _StringList()
-_folder_list = _StringList()
-_string_list = _StringList()
-# Some boolean settings went from numerical values to boolean. The
-# mapping is 0: default, 1: false, 2: true.
-_newly_boolean = _Enumeration(['', 'false', 'true'])
-
-
-def _Same(tool, name, setting_type):
- """Defines a setting that has the same name in MSVS and MSBuild.
-
- Args:
-    tool: a _Tool object giving the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
- _Renamed(tool, name, name, setting_type)
-
-
-def _Renamed(tool, msvs_name, msbuild_name, setting_type):
- """Defines a setting for which the name has changed.
-
- Args:
-    tool: a _Tool object giving the names of the tool for MSVS and MSBuild.
- msvs_name: the name of the MSVS setting.
- msbuild_name: the name of the MSBuild setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(value, msbuild_settings):
- msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
- msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)
-
- _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
- _msbuild_validators[tool.msbuild_name][msbuild_name] = (
- setting_type.ValidateMSBuild)
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
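-
-# Illustrative sketch (editor's addition): once a tool has been registered
-# with _AddTool, a _Renamed directive installs a converter that rewrites the
-# setting under its MSBuild name.  'WarnAsError' is registered for the
-# compiler tool near the end of this file.
-#
-#   translate = _msvs_to_msbuild_converters['VCCLCompilerTool']['WarnAsError']
-#   msbuild_settings = {}
-#   translate('true', msbuild_settings)
-#   # msbuild_settings == {'ClCompile': {'TreatWarningAsError': 'true'}}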
-
-
-def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
- _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
- setting_type)
-
-
-def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
- msbuild_settings_name, setting_type):
- """Defines a setting that may have moved to a new section.
-
- Args:
-    tool: a _Tool object giving the names of the tool for MSVS and MSBuild.
- msvs_settings_name: the MSVS name of the setting.
- msbuild_tool_name: the name of the MSBuild tool to place the setting under.
- msbuild_settings_name: the MSBuild name of the setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(value, msbuild_settings):
- tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
- tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)
-
- _msvs_validators[tool.msvs_name][msvs_settings_name] = (
- setting_type.ValidateMSVS)
- validator = setting_type.ValidateMSBuild
- _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate
-
-
-def _MSVSOnly(tool, name, setting_type):
- """Defines a setting that is only found in MSVS.
-
- Args:
-    tool: a _Tool object giving the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(unused_value, unused_msbuild_settings):
- # Since this is for MSVS only settings, no translation will happen.
- pass
-
- _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
- _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
-
-
-def _MSBuildOnly(tool, name, setting_type):
- """Defines a setting that is only found in MSBuild.
-
- Args:
-    tool: a _Tool object giving the names of the tool for MSVS and MSBuild.
- name: the name of the setting.
- setting_type: the type of this setting.
- """
-
- def _Translate(value, msbuild_settings):
- # Let msbuild-only properties get translated as-is from msvs_settings.
- tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
- tool_settings[name] = value
-
- _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
- _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate
-
-
-def _ConvertedToAdditionalOption(tool, msvs_name, flag):
- """Defines a setting that's handled via a command line option in MSBuild.
-
- Args:
-    tool: a _Tool object giving the names of the tool for MSVS and MSBuild.
-    msvs_name: the name of the MSVS setting that, if 'true', becomes a flag.
-    flag: the flag to insert at the end of AdditionalOptions.
- """
-
- def _Translate(value, msbuild_settings):
- if value == 'true':
- tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
- if 'AdditionalOptions' in tool_settings:
- new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
- else:
- new_flags = flag
-      tool_settings['AdditionalOptions'] = new_flags
-
-  _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-def _CustomGeneratePreprocessedFile(tool, msvs_name):
- def _Translate(value, msbuild_settings):
- tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
- if value == '0':
- tool_settings['PreprocessToFile'] = 'false'
- tool_settings['PreprocessSuppressLineNumbers'] = 'false'
- elif value == '1': # /P
- tool_settings['PreprocessToFile'] = 'true'
- tool_settings['PreprocessSuppressLineNumbers'] = 'false'
- elif value == '2': # /EP /P
- tool_settings['PreprocessToFile'] = 'true'
- tool_settings['PreprocessSuppressLineNumbers'] = 'true'
- else:
- raise ValueError('value must be one of [0, 1, 2]; got %s' % value)
- # Create a bogus validator that looks for '0', '1', or '2'
- msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
- _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
- msbuild_validator = _boolean.ValidateMSBuild
- msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
- msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
- msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
- _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate
-
-
-fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
-fix_vc_macro_slashes_regex = re.compile(
- r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
-)
-
-# Regular expression to detect keys that were generated by exclusion lists
-_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
-
-
-def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
- """Verify that 'setting' is valid if it is generated from an exclusion list.
-
- If the setting appears to be generated from an exclusion list, the root name
- is checked.
-
- Args:
- setting: A string that is the setting name to validate
- settings: A dictionary where the keys are valid settings
- error_msg: The message to emit in the event of error
- stderr: The stream receiving the error messages.
- """
- # This may be unrecognized because it's an exclusion list. If the
- # setting name has the _excluded suffix, then check the root name.
- unrecognized = True
- m = re.match(_EXCLUDED_SUFFIX_RE, setting)
- if m:
- root_setting = m.group(1)
- unrecognized = root_setting not in settings
-
- if unrecognized:
- # We don't know this setting. Give a warning.
- print >> stderr, error_msg
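-
-# Illustrative sketch (editor's addition): a key such as
-# 'AdditionalDependencies_excluded' (generated from an exclusion list) is
-# accepted as long as its root name is a known setting.
-#
-#   _ValidateExclusionSetting('AdditionalDependencies_excluded',
-#                             {'AdditionalDependencies': None},
-#                             'unused warning text')   # no warning printed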
-
-
-def FixVCMacroSlashes(s):
- """Replace macros which have excessive following slashes.
-
- These macros are known to have a built-in trailing slash. Furthermore, many
- scripts hiccup on processing paths with extra slashes in the middle.
-
- This list is probably not exhaustive. Add as needed.
- """
- if '$' in s:
- s = fix_vc_macro_slashes_regex.sub(r'\1', s)
- return s
-
-
-def ConvertVCMacrosToMSBuild(s):
- """Convert the the MSVS macros found in the string to the MSBuild equivalent.
-
- This list is probably not exhaustive. Add as needed.
- """
- if '$' in s:
- replace_map = {
- '$(ConfigurationName)': '$(Configuration)',
- '$(InputDir)': '%(RelativeDir)',
- '$(InputExt)': '%(Extension)',
- '$(InputFileName)': '%(Filename)%(Extension)',
- '$(InputName)': '%(Filename)',
- '$(InputPath)': '%(Identity)',
- '$(ParentName)': '$(ProjectFileName)',
- '$(PlatformName)': '$(Platform)',
- '$(SafeInputName)': '%(Filename)',
- }
- for old, new in replace_map.iteritems():
- s = s.replace(old, new)
- s = FixVCMacroSlashes(s)
- return s
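-
-# Illustrative sketch (editor's addition) of the two macro fix-ups above:
-#
-#   FixVCMacroSlashes('$(IntDir)\\foo.obj')       # -> '$(IntDir)foo.obj'
-#   ConvertVCMacrosToMSBuild('$(InputName).obj')  # -> '%(Filename).obj'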
-
-
-def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
- """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
-
- Args:
- msvs_settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
-
- Returns:
- A dictionary of MSBuild settings. The key is either the MSBuild tool name
- or the empty string (for the global settings). The values are themselves
- dictionaries of settings and their values.
- """
- msbuild_settings = {}
- for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
- if msvs_tool_name in _msvs_to_msbuild_converters:
- msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
- for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
- if msvs_setting in msvs_tool:
- # Invoke the translation function.
- try:
- msvs_tool[msvs_setting](msvs_value, msbuild_settings)
- except ValueError, e:
- print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
- '%s' % (msvs_tool_name, msvs_setting, e))
- else:
- _ValidateExclusionSetting(msvs_setting,
- msvs_tool,
- ('Warning: unrecognized setting %s/%s '
- 'while converting to MSBuild.' %
- (msvs_tool_name, msvs_setting)),
- stderr)
- else:
- print >> stderr, ('Warning: unrecognized tool %s while converting to '
- 'MSBuild.' % msvs_tool_name)
- return msbuild_settings
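-
-# Illustrative sketch (editor's addition), mirroring the minimal case in the
-# unit tests.  The conversion tables are populated by the directives further
-# down, so this only works once the module has been fully imported.
-#
-#   ConvertToMSBuildSettings({'VCLinkerTool': {'ErrorReporting': '1'}})
-#   # -> {'Link': {'LinkErrorReporting': 'PromptImmediately'}}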
-
-
-def ValidateMSVSSettings(settings, stderr=sys.stderr):
- """Validates that the names of the settings are valid for MSVS.
-
- Args:
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
- _ValidateSettings(_msvs_validators, settings, stderr)
-
-
-def ValidateMSBuildSettings(settings, stderr=sys.stderr):
- """Validates that the names of the settings are valid for MSBuild.
-
- Args:
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
- _ValidateSettings(_msbuild_validators, settings, stderr)
-
-
-def _ValidateSettings(validators, settings, stderr):
- """Validates that the settings are valid for MSBuild or MSVS.
-
-  The tool and setting names are checked against the validator tables, and
-  each recognized setting's value is run through its validator.
-
- Args:
- validators: A dictionary of tools and their validators.
- settings: A dictionary. The key is the tool name. The values are
- themselves dictionaries of settings and their values.
- stderr: The stream receiving the error messages.
- """
- for tool_name in settings:
- if tool_name in validators:
- tool_validators = validators[tool_name]
- for setting, value in settings[tool_name].iteritems():
- if setting in tool_validators:
- try:
- tool_validators[setting](value)
- except ValueError, e:
- print >> stderr, ('Warning: for %s/%s, %s' %
- (tool_name, setting, e))
- else:
- _ValidateExclusionSetting(setting,
- tool_validators,
- ('Warning: unrecognized setting %s/%s' %
- (tool_name, setting)),
- stderr)
-
- else:
- print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
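-
-# Illustrative sketch (editor's addition): validation never raises; problems
-# are reported as warnings on the given stream.
-#
-#   import StringIO
-#   log = StringIO.StringIO()
-#   ValidateMSVSSettings({'NotATool': {}}, stderr=log)
-#   log.getvalue()   # -> 'Warning: unrecognized tool NotATool\n'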
-
-
-# MSVS and MSBuild names of the tools.
-_compile = _Tool('VCCLCompilerTool', 'ClCompile')
-_link = _Tool('VCLinkerTool', 'Link')
-_midl = _Tool('VCMIDLTool', 'Midl')
-_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
-_lib = _Tool('VCLibrarianTool', 'Lib')
-_manifest = _Tool('VCManifestTool', 'Manifest')
-_masm = _Tool('MASM', 'MASM')
-
-
-_AddTool(_compile)
-_AddTool(_link)
-_AddTool(_midl)
-_AddTool(_rc)
-_AddTool(_lib)
-_AddTool(_manifest)
-_AddTool(_masm)
-# Add sections only found in the MSBuild settings.
-_msbuild_validators[''] = {}
-_msbuild_validators['ProjectReference'] = {}
-_msbuild_validators['ManifestResourceCompile'] = {}
-
-# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
-# ClCompile in MSBuild.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
-# the schema of the MSBuild ClCompile settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I
-_Same(_compile, 'AdditionalOptions', _string_list)
-_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI
-_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa
-_Same(_compile, 'BrowseInformationFile', _file_name)
-_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS
-_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za
-_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd
-_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT
-_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false'
-_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx
-_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except
-_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope
-_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI
-_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU
-_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc
-_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X
-_Same(_compile, 'MinimalRebuild', _boolean) # /Gm
-_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl
-_Same(_compile, 'OmitFramePointers', _boolean) # /Oy
-_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D
-_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd
-_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR
-_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes
-_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc
-_Same(_compile, 'StringPooling', _boolean) # /GF
-_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t
-_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u
-_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U
-_Same(_compile, 'UseFullPaths', _boolean) # /FC
-_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL
-_Same(_compile, 'XMLDocumentationFileName', _file_name)
-
-_Same(_compile, 'AssemblerOutput',
- _Enumeration(['NoListing',
- 'AssemblyCode', # /FA
- 'All', # /FAcs
- 'AssemblyAndMachineCode', # /FAc
- 'AssemblyAndSourceCode'])) # /FAs
-_Same(_compile, 'BasicRuntimeChecks',
- _Enumeration(['Default',
- 'StackFrameRuntimeCheck', # /RTCs
- 'UninitializedLocalUsageCheck', # /RTCu
- 'EnableFastChecks'])) # /RTC1
-_Same(_compile, 'BrowseInformation',
- _Enumeration(['false',
- 'true', # /FR
- 'true'])) # /Fr
-_Same(_compile, 'CallingConvention',
- _Enumeration(['Cdecl', # /Gd
- 'FastCall', # /Gr
- 'StdCall', # /Gz
- 'VectorCall'])) # /Gv
-_Same(_compile, 'CompileAs',
- _Enumeration(['Default',
- 'CompileAsC', # /TC
- 'CompileAsCpp'])) # /TP
-_Same(_compile, 'DebugInformationFormat',
- _Enumeration(['', # Disabled
- 'OldStyle', # /Z7
- None,
- 'ProgramDatabase', # /Zi
- 'EditAndContinue'])) # /ZI
-_Same(_compile, 'EnableEnhancedInstructionSet',
- _Enumeration(['NotSet',
- 'StreamingSIMDExtensions', # /arch:SSE
- 'StreamingSIMDExtensions2', # /arch:SSE2
- 'AdvancedVectorExtensions', # /arch:AVX (vs2012+)
- 'NoExtensions', # /arch:IA32 (vs2012+)
- # This one only exists in the new msbuild format.
- 'AdvancedVectorExtensions2', # /arch:AVX2 (vs2013r2+)
- ]))
-_Same(_compile, 'ErrorReporting',
- _Enumeration(['None', # /errorReport:none
- 'Prompt', # /errorReport:prompt
- 'Queue'], # /errorReport:queue
-                     new=['Send'])) # /errorReport:send
-_Same(_compile, 'ExceptionHandling',
- _Enumeration(['false',
- 'Sync', # /EHsc
- 'Async'], # /EHa
- new=['SyncCThrow'])) # /EHs
-_Same(_compile, 'FavorSizeOrSpeed',
- _Enumeration(['Neither',
- 'Speed', # /Ot
- 'Size'])) # /Os
-_Same(_compile, 'FloatingPointModel',
- _Enumeration(['Precise', # /fp:precise
- 'Strict', # /fp:strict
- 'Fast'])) # /fp:fast
-_Same(_compile, 'InlineFunctionExpansion',
- _Enumeration(['Default',
- 'OnlyExplicitInline', # /Ob1
- 'AnySuitable'], # /Ob2
- new=['Disabled'])) # /Ob0
-_Same(_compile, 'Optimization',
- _Enumeration(['Disabled', # /Od
- 'MinSpace', # /O1
- 'MaxSpeed', # /O2
- 'Full'])) # /Ox
-_Same(_compile, 'RuntimeLibrary',
- _Enumeration(['MultiThreaded', # /MT
- 'MultiThreadedDebug', # /MTd
- 'MultiThreadedDLL', # /MD
- 'MultiThreadedDebugDLL'])) # /MDd
-_Same(_compile, 'StructMemberAlignment',
- _Enumeration(['Default',
- '1Byte', # /Zp1
- '2Bytes', # /Zp2
- '4Bytes', # /Zp4
- '8Bytes', # /Zp8
- '16Bytes'])) # /Zp16
-_Same(_compile, 'WarningLevel',
- _Enumeration(['TurnOffAllWarnings', # /W0
- 'Level1', # /W1
- 'Level2', # /W2
- 'Level3', # /W3
- 'Level4'], # /W4
- new=['EnableAllWarnings'])) # /Wall
-
-# Options found in MSVS that have been renamed in MSBuild.
-_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking',
- _boolean) # /Gy
-_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions',
- _boolean) # /Oi
-_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C
-_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo
-_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp
-_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile',
- _file_name) # Used with /Yc and /Yu
-_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile',
- _file_name) # /Fp
-_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader',
- _Enumeration(['NotUsing', # VS recognized '' for this value too.
- 'Create', # /Yc
- 'Use'])) # /Yu
-_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX
-
-_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J')
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean)
-_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_compile, 'BuildingInIDE', _boolean)
-_MSBuildOnly(_compile, 'CompileAsManaged',
- _Enumeration([], new=['false',
- 'true'])) # /clr
-_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch
-_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP
-_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi
-_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors
-_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we
-_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu
-
-# Defines a setting that needs very customized processing
-_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile')
-
-
-# Directives for converting MSVS VCLinkerTool to MSBuild Link.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
-# the schema of the MSBuild Link settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_link, 'AdditionalDependencies', _file_list)
-_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
-# /MANIFESTDEPENDENCY:
-_Same(_link, 'AdditionalManifestDependencies', _file_list)
-_Same(_link, 'AdditionalOptions', _string_list)
-_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE
-_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION
-_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE
-_Same(_link, 'BaseAddress', _string) # /BASE
-_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK
-_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD
-_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN
-_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE
-_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC
-_Same(_link, 'EntryPointSymbol', _string) # /ENTRY
-_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE
-_Same(_link, 'FunctionOrder', _file_name) # /ORDER
-_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG
-_Same(_link, 'GenerateMapFile', _boolean) # /MAP
-_Same(_link, 'HeapCommitSize', _string)
-_Same(_link, 'HeapReserveSize', _string) # /HEAP
-_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
-_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL
-_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB
-_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER
-_Same(_link, 'KeyFile', _file_name) # /KEYFILE
-_Same(_link, 'ManifestFile', _file_name) # /ManifestFile
-_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS
-_Same(_link, 'MapFileName', _file_name)
-_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT
-_Same(_link, 'MergeSections', _string) # /MERGE
-_Same(_link, 'MidlCommandFile', _file_name) # /MIDL
-_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF
-_Same(_link, 'OutputFile', _file_name) # /OUT
-_Same(_link, 'PerUserRedirection', _boolean)
-_Same(_link, 'Profile', _boolean) # /PROFILE
-_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD
-_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB
-_Same(_link, 'RegisterOutput', _boolean)
-_Same(_link, 'SetChecksum', _boolean) # /RELEASE
-_Same(_link, 'StackCommitSize', _string)
-_Same(_link, 'StackReserveSize', _string) # /STACK
-_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED
-_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD
-_Same(_link, 'SuppressStartupBanner', _boolean) # /NOLOGO
-_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD
-_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY
-_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT
-_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID
-_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true'
-_Same(_link, 'Version', _string) # /VERSION
-
-_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF
-_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED
-_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE
-_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF
-_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE
-_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE
-
-_subsystem_enumeration = _Enumeration(
- ['NotSet',
- 'Console', # /SUBSYSTEM:CONSOLE
- 'Windows', # /SUBSYSTEM:WINDOWS
- 'Native', # /SUBSYSTEM:NATIVE
- 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION
- 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
- 'EFI ROM', # /SUBSYSTEM:EFI_ROM
- 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER
- 'WindowsCE'], # /SUBSYSTEM:WINDOWSCE
- new=['POSIX']) # /SUBSYSTEM:POSIX
-
-_target_machine_enumeration = _Enumeration(
- ['NotSet',
- 'MachineX86', # /MACHINE:X86
- None,
- 'MachineARM', # /MACHINE:ARM
- 'MachineEBC', # /MACHINE:EBC
- 'MachineIA64', # /MACHINE:IA64
- None,
- 'MachineMIPS', # /MACHINE:MIPS
- 'MachineMIPS16', # /MACHINE:MIPS16
- 'MachineMIPSFPU', # /MACHINE:MIPSFPU
- 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16
- None,
- None,
- None,
- 'MachineSH4', # /MACHINE:SH4
- None,
- 'MachineTHUMB', # /MACHINE:THUMB
- 'MachineX64']) # /MACHINE:X64
-
-_Same(_link, 'AssemblyDebug',
- _Enumeration(['',
- 'true', # /ASSEMBLYDEBUG
- 'false'])) # /ASSEMBLYDEBUG:DISABLE
-_Same(_link, 'CLRImageType',
- _Enumeration(['Default',
- 'ForceIJWImage', # /CLRIMAGETYPE:IJW
-                'ForcePureILImage', # /CLRIMAGETYPE:PURE
-                'ForceSafeILImage'])) # /CLRIMAGETYPE:SAFE
-_Same(_link, 'CLRThreadAttribute',
- _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE
- 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA
- 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA
-_Same(_link, 'DataExecutionPrevention',
- _Enumeration(['',
- 'false', # /NXCOMPAT:NO
- 'true'])) # /NXCOMPAT
-_Same(_link, 'Driver',
- _Enumeration(['NotSet',
- 'Driver', # /Driver
- 'UpOnly', # /DRIVER:UPONLY
- 'WDM'])) # /DRIVER:WDM
-_Same(_link, 'LinkTimeCodeGeneration',
- _Enumeration(['Default',
- 'UseLinkTimeCodeGeneration', # /LTCG
- 'PGInstrument', # /LTCG:PGInstrument
- 'PGOptimization', # /LTCG:PGOptimize
- 'PGUpdate'])) # /LTCG:PGUpdate
-_Same(_link, 'ShowProgress',
- _Enumeration(['NotSet',
- 'LinkVerbose', # /VERBOSE
- 'LinkVerboseLib'], # /VERBOSE:Lib
- new=['LinkVerboseICF', # /VERBOSE:ICF
- 'LinkVerboseREF', # /VERBOSE:REF
- 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH
- 'LinkVerboseCLR'])) # /VERBOSE:CLR
-_Same(_link, 'SubSystem', _subsystem_enumeration)
-_Same(_link, 'TargetMachine', _target_machine_enumeration)
-_Same(_link, 'UACExecutionLevel',
- _Enumeration(['AsInvoker', # /level='asInvoker'
- 'HighestAvailable', # /level='highestAvailable'
- 'RequireAdministrator'])) # /level='requireAdministrator'
-_Same(_link, 'MinimumRequiredVersion', _string)
-_Same(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX
-
-
-# Options found in MSVS that have been renamed in MSBuild.
-_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
- _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE
- 'PromptImmediately', # /ERRORREPORT:PROMPT
- 'QueueForNextLogin'], # /ERRORREPORT:QUEUE
- new=['SendErrorReport'])) # /ERRORREPORT:SEND
-_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
- _file_list) # /NODEFAULTLIB
-_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY
-_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET
-
-_Moved(_link, 'GenerateManifest', '', _boolean)
-_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
-_Moved(_link, 'LinkIncremental', '', _newly_boolean)
-_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
-_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
-_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH
-_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false'
-_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS
-_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND
-_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND
-_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false'
-_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN
-_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION
-_MSBuildOnly(_link, 'ForceFileOutput',
- _Enumeration([], new=['Enabled', # /FORCE
- # /FORCE:MULTIPLE
- 'MultiplyDefinedSymbolOnly',
- 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED
-_MSBuildOnly(_link, 'CreateHotPatchableImage',
- _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN
- 'X86Image', # /FUNCTIONPADMIN:5
- 'X64Image', # /FUNCTIONPADMIN:6
- 'ItaniumImage'])) # /FUNCTIONPADMIN:16
-_MSBuildOnly(_link, 'CLRSupportLastError',
- _Enumeration([], new=['Enabled', # /CLRSupportLastError
- 'Disabled', # /CLRSupportLastError:NO
- # /CLRSupportLastError:SYSTEMDLL
- 'SystemDlls']))
-
-
-# Directives for converting VCResourceCompilerTool to ResourceCompile.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
-# the schema of the MSBuild ResourceCompile settings.
-
-_Same(_rc, 'AdditionalOptions', _string_list)
-_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I
-_Same(_rc, 'Culture', _Integer(msbuild_base=16))
-_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X
-_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D
-_Same(_rc, 'ResourceOutputFileName', _string) # /fo
-_Same(_rc, 'ShowProgress', _boolean) # /v
-# There is no UI in Visual Studio 2008 to set the following properties.
-# However, they are found in CL and other tools. Include them here for
-# completeness, as they are very likely to have the same usage pattern.
-_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n
-_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)
-
-
-# Directives for converting VCMIDLTool to Midl.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
-# the schema of the MSBuild Midl settings.
-
-_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I
-_Same(_midl, 'AdditionalOptions', _string_list)
-_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt
-_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation
-_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check
-_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum
-_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref
-_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data
-_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf
-_Same(_midl, 'GenerateTypeLibrary', _boolean)
-_Same(_midl, 'HeaderFileName', _file_name) # /h
-_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir
-_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid
-_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203
-_Same(_midl, 'OutputDirectory', _string) # /out
-_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D
-_Same(_midl, 'ProxyFileName', _file_name) # /proxy
-_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o
-_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_midl, 'TypeLibraryName', _file_name) # /tlb
-_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U
-_Same(_midl, 'WarnAsError', _boolean) # /WX
-
-_Same(_midl, 'DefaultCharType',
- _Enumeration(['Unsigned', # /char unsigned
- 'Signed', # /char signed
- 'Ascii'])) # /char ascii7
-_Same(_midl, 'TargetEnvironment',
- _Enumeration(['NotSet',
- 'Win32', # /env win32
- 'Itanium', # /env ia64
- 'X64'])) # /env x64
-_Same(_midl, 'EnableErrorChecks',
- _Enumeration(['EnableCustom',
- 'None', # /error none
- 'All'])) # /error all
-_Same(_midl, 'StructMemberAlignment',
- _Enumeration(['NotSet',
- '1', # Zp1
- '2', # Zp2
- '4', # Zp4
- '8'])) # Zp8
-_Same(_midl, 'WarningLevel',
- _Enumeration(['0', # /W0
- '1', # /W1
- '2', # /W2
- '3', # /W3
- '4'])) # /W4
-
-_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata
-_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
- _boolean) # /robust
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config
-_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub
-_MSBuildOnly(_midl, 'GenerateClientFiles',
- _Enumeration([], new=['Stub', # /client stub
- 'None'])) # /client none
-_MSBuildOnly(_midl, 'GenerateServerFiles',
-             _Enumeration([], new=['Stub', # /server stub
-                                   'None'])) # /server none
-_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL
-_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub
-_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn
-_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_midl, 'TypeLibFormat',
- _Enumeration([], new=['NewFormat', # /newtlb
- 'OldFormat'])) # /oldtlb
-
-
-# Directives for converting VCLibrarianTool to Lib.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
-# the schema of the MSBuild Lib settings.
-
-_Same(_lib, 'AdditionalDependencies', _file_list)
-_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH
-_Same(_lib, 'AdditionalOptions', _string_list)
-_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT
-_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE
-_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB
-_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB
-_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF
-_Same(_lib, 'OutputFile', _file_name) # /OUT
-_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO
-_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
-_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG
-_Same(_lib, 'TargetMachine', _target_machine_enumeration)
-
-# TODO(jeanluc) _link defines the same value that gets moved to
-# ProjectReference. We may want to validate that they are consistent.
-_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
-
-_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false'
-_MSBuildOnly(_lib, 'ErrorReporting',
- _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT
- 'QueueForNextLogin', # /ERRORREPORT:QUEUE
- 'SendErrorReport', # /ERRORREPORT:SEND
- 'NoErrorReport'])) # /ERRORREPORT:NONE
-_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
-_MSBuildOnly(_lib, 'Name', _file_name) # /NAME
-_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE
-_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
-_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX
-_MSBuildOnly(_lib, 'Verbose', _boolean)
-
-
-# Directives for converting VCManifestTool to Mt.
-# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
-# the schema of the MSBuild Manifest settings.
-
-# Options that have the same name in MSVS and MSBuild
-_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest
-_Same(_manifest, 'AdditionalOptions', _string_list)
-_Same(_manifest, 'AssemblyIdentity', _string) # /identity:
-_Same(_manifest, 'ComponentFileName', _file_name) # /dll
-_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs
-_Same(_manifest, 'InputResourceManifests', _string) # /inputresource
-_Same(_manifest, 'OutputManifestFile', _file_name) # /out
-_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs
-_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements
-_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo
-_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb:
-_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate
-_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
-_Same(_manifest, 'VerboseOutput', _boolean) # /verbose
-
-# Options that have moved location.
-_MovedAndRenamed(_manifest, 'ManifestResourceFile',
- 'ManifestResourceCompile',
- 'ResourceOutputFileName',
- _file_name)
-_Moved(_manifest, 'EmbedManifest', '', _boolean)
-
-# MSVS options not found in MSBuild.
-_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
-_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
-_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)
-
-# MSBuild options not found in MSVS.
-_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
-_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category
-_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
- _file_name) # /managedassemblyname
-_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource
-_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency
-_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)
-
-
-# Directives for MASM.
-# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
-# MSBuild MASM settings.
-
-# Options that have the same name in MSVS and MSBuild.
-_Same(_masm, 'UseSafeExceptionHandlers', _boolean) # /safeseh
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
deleted file mode 100755
index bf6ea6b802..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
+++ /dev/null
@@ -1,1483 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the MSVSSettings.py file."""
-
-import StringIO
-import unittest
-import gyp.MSVSSettings as MSVSSettings
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
- def setUp(self):
- self.stderr = StringIO.StringIO()
-
- def _ExpectedWarnings(self, expected):
- """Compares recorded lines to expected warnings."""
- self.stderr.seek(0)
- actual = self.stderr.read().split('\n')
- actual = [line for line in actual if line]
- self.assertEqual(sorted(expected), sorted(actual))
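-
-  # Editor's note (not in the original tests): each test below routes
-  # warnings into self.stderr and then asserts on the captured lines, e.g.
-  #
-  #   MSVSSettings.ValidateMSVSSettings({'foo': {}}, self.stderr)
-  #   self._ExpectedWarnings(['Warning: unrecognized tool foo'])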
-
- def testValidateMSVSSettings_tool_names(self):
- """Tests that only MSVS tool names are allowed."""
- MSVSSettings.ValidateMSVSSettings(
- {'VCCLCompilerTool': {},
- 'VCLinkerTool': {},
- 'VCMIDLTool': {},
- 'foo': {},
- 'VCResourceCompilerTool': {},
- 'VCLibrarianTool': {},
- 'VCManifestTool': {},
- 'ClCompile': {}},
- self.stderr)
- self._ExpectedWarnings([
- 'Warning: unrecognized tool foo',
- 'Warning: unrecognized tool ClCompile'])
-
- def testValidateMSVSSettings_settings(self):
- """Tests that for invalid MSVS settings."""
- MSVSSettings.ValidateMSVSSettings(
- {'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': ['string1', 'string2'],
- 'AdditionalUsingDirectories': 'folder1;folder2',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': '0',
- 'BasicRuntimeChecks': '5',
- 'BrowseInformation': 'fdkslj',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'CallingConvention': '-1',
- 'CompileAs': '1',
- 'DebugInformationFormat': '2',
- 'DefaultCharIsUnsigned': 'true',
- 'Detect64BitPortabilityProblems': 'true',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'string1;string2',
- 'EnableEnhancedInstructionSet': '1',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'EnableIntrinsicFunctions': 'true',
- 'EnablePREfast': 'true',
- 'Enableprefast': 'bogus',
- 'ErrorReporting': '1',
- 'ExceptionHandling': '1',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': '1',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': '1',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2',
- 'ForcedUsingFiles': 'file1;file2',
- 'GeneratePreprocessedFile': '1',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': '1',
- 'KeepComments': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFile': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMP': 'true',
- 'Optimization': '1',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderThrough': 'a_file_name',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': '1',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1',
- 'SuppressStartupBanner': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'UseFullPaths': 'true',
- 'UsePrecompiledHeader': '1',
- 'UseUnicodeResponseFiles': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '1',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name',
- 'ZZXYZ': 'bogus'},
- 'VCLinkerTool': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalDependencies_excluded': 'file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalManifestDependencies': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AddModuleNamesToAssembly': 'file1;file2',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '2',
- 'AssemblyLinkResource': 'file1;file2',
- 'BaseAddress': 'a string1',
- 'CLRImageType': '2',
- 'CLRThreadAttribute': '2',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '2',
- 'DelayLoadDLLs': 'file1;file2',
- 'DelaySign': 'true',
- 'Driver': '2',
- 'EmbedManagedResourceFile': 'file1;file2',
- 'EnableCOMDATFolding': '2',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a string1',
- 'ErrorReporting': '2',
- 'FixedBaseAddress': '2',
- 'ForceSymbolReferences': 'file1;file2',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateManifest': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a string1',
- 'HeapReserveSize': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreDefaultLibraryNames': 'file1;file2',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreImportLibrary': 'true',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': '2',
- 'LinkIncremental': '2',
- 'LinkLibraryDependencies': 'true',
- 'LinkTimeCodeGeneration': '2',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a string1',
- 'MidlCommandFile': 'a_file_name',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OptimizeForWindows98': '1',
- 'OptimizeReferences': '2',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': '2',
- 'RegisterOutput': 'true',
- 'ResourceOnlyDLL': 'true',
- 'SetChecksum': 'true',
- 'ShowProgress': '2',
- 'StackCommitSize': 'a string1',
- 'StackReserveSize': 'a string1',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': '2',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNet': 'true',
- 'TargetMachine': '2',
- 'TerminalServerAware': '2',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': '2',
- 'UACUIAccess': 'true',
- 'UseLibraryDependencyInputs': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'Version': 'a string1'},
- 'VCMIDLTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'CPreprocessOptions': 'a string1',
- 'DefaultCharType': '1',
- 'DLLDataFileName': 'a_file_name',
- 'EnableErrorChecks': '1',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'MkTypLibCompatible': 'true',
- 'notgood': 'bogus',
- 'OutputDirectory': 'a string1',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'StructMemberAlignment': '1',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': '1',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'ValidateParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '1'},
- 'VCResourceCompilerTool': {
- 'AdditionalOptions': 'a string1',
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'Culture': '1003',
- 'IgnoreStandardIncludePath': 'true',
- 'notgood2': 'bogus',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ResourceOutputFileName': 'a string1',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'UndefinePreprocessorDefinitions': 'string1;string2'},
- 'VCLibrarianTool': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'ExportNamedFunctions': 'string1;string2',
- 'ForceSymbolReferences': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2',
- 'LinkLibraryDependencies': 'true',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'UseUnicodeResponseFiles': 'true'},
- 'VCManifestTool': {
- 'AdditionalManifestFiles': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AssemblyIdentity': 'a string1',
- 'ComponentFileName': 'a_file_name',
- 'DependencyInformationFile': 'a_file_name',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'a string1',
- 'ManifestResourceFile': 'a_file_name',
- 'OutputManifestFile': 'a_file_name',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'truel',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'UseFAT32Workaround': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'VerboseOutput': 'true'}},
- self.stderr)
- self._ExpectedWarnings([
- 'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
- 'index value (5) not in expected range [0, 4)',
- 'Warning: for VCCLCompilerTool/BrowseInformation, '
- "invalid literal for int() with base 10: 'fdkslj'",
- 'Warning: for VCCLCompilerTool/CallingConvention, '
- 'index value (-1) not in expected range [0, 4)',
- 'Warning: for VCCLCompilerTool/DebugInformationFormat, '
- 'converted value for 2 not specified.',
- 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
- 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
- 'Warning: for VCLinkerTool/TargetMachine, '
- 'converted value for 2 not specified.',
- 'Warning: unrecognized setting VCMIDLTool/notgood',
- 'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
-        'Warning: for VCManifestTool/UpdateFileHashes, '
-        "expected bool; got 'truel'"])
-
- def testValidateMSBuildSettings_settings(self):
- """Tests that for invalid MSBuild settings."""
- MSVSSettings.ValidateMSBuildSettings(
- {'ClCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': ['string1', 'string2'],
- 'AdditionalUsingDirectories': 'folder1;folder2',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': 'NoListing',
- 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
- 'BrowseInformation': 'false',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'BuildingInIDE': 'true',
- 'CallingConvention': 'Cdecl',
- 'CompileAs': 'CompileAsC',
- 'CompileAsManaged': 'true',
- 'CreateHotpatchableImage': 'true',
- 'DebugInformationFormat': 'ProgramDatabase',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'string1;string2',
- 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnablePREfast': 'true',
- 'Enableprefast': 'bogus',
- 'ErrorReporting': 'Prompt',
- 'ExceptionHandling': 'SyncCThrow',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': 'Neither',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': 'Precise',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2',
- 'ForcedUsingFiles': 'file1;file2',
- 'FunctionLevelLinking': 'false',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': 'OnlyExplicitInline',
- 'IntrinsicFunctions': 'false',
- 'MinimalRebuild': 'true',
- 'MultiProcessorCompilation': 'true',
- 'ObjectFileName': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMPSupport': 'true',
- 'Optimization': 'Disabled',
- 'PrecompiledHeader': 'NotUsing',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderOutputFile': 'a_file_name',
- 'PreprocessKeepComments': 'true',
- 'PreprocessorDefinitions': 'string1;string2',
- 'PreprocessOutputPath': 'a string1',
- 'PreprocessSuppressLineNumbers': 'false',
- 'PreprocessToFile': 'false',
- 'ProcessorNumber': '33',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': 'MultiThreaded',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1Byte',
- 'SuppressStartupBanner': 'true',
- 'TrackerLogDirectory': 'a_folder',
- 'TreatSpecificWarningsAsErrors': 'string1;string2',
- 'TreatWarningAsError': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'UseFullPaths': 'true',
- 'UseUnicodeForAssemblerListing': 'true',
- 'WarningLevel': 'TurnOffAllWarnings',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name',
- 'ZZXYZ': 'bogus'},
- 'Link': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalManifestDependencies': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AddModuleNamesToAssembly': 'file1;file2',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '',
- 'AssemblyLinkResource': 'file1;file2',
- 'BaseAddress': 'a string1',
- 'BuildingInIDE': 'true',
- 'CLRImageType': 'ForceIJWImage',
- 'CLRSupportLastError': 'Enabled',
- 'CLRThreadAttribute': 'MTAThreadingAttribute',
- 'CLRUnmanagedCodeCheck': 'true',
- 'CreateHotPatchableImage': 'X86Image',
- 'DataExecutionPrevention': 'false',
- 'DelayLoadDLLs': 'file1;file2',
- 'DelaySign': 'true',
- 'Driver': 'NotSet',
- 'EmbedManagedResourceFile': 'file1;file2',
- 'EnableCOMDATFolding': 'false',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a string1',
- 'FixedBaseAddress': 'false',
- 'ForceFileOutput': 'Enabled',
- 'ForceSymbolReferences': 'file1;file2',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a string1',
- 'HeapReserveSize': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreSpecificDefaultLibraries': 'a_file_list',
- 'ImageHasSafeExceptionHandlers': 'true',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': 'false',
- 'LinkDLL': 'true',
- 'LinkErrorReporting': 'SendErrorReport',
- 'LinkStatus': 'true',
- 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a string1',
- 'MidlCommandFile': 'a_file_name',
- 'MinimumRequiredVersion': 'a string1',
- 'ModuleDefinitionFile': 'a_file_name',
- 'MSDOSStubFileName': 'a_file_name',
- 'NoEntryPoint': 'true',
- 'OptimizeReferences': 'false',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'PreventDllBinding': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': 'false',
- 'RegisterOutput': 'true',
- 'SectionAlignment': '33',
- 'SetChecksum': 'true',
- 'ShowProgress': 'LinkVerboseREF',
- 'SpecifySectionAttributes': 'a string1',
- 'StackCommitSize': 'a string1',
- 'StackReserveSize': 'a string1',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': 'Console',
- 'SupportNobindOfDelayLoadedDLL': 'true',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNET': 'true',
- 'TargetMachine': 'MachineX86',
- 'TerminalServerAware': 'false',
- 'TrackerLogDirectory': 'a_folder',
- 'TreatLinkerWarningAsErrors': 'true',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': 'AsInvoker',
- 'UACUIAccess': 'true',
- 'Version': 'a string1'},
- 'ResourceCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'Culture': '0x236',
- 'IgnoreStandardIncludePath': 'true',
- 'NullTerminateStrings': 'true',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ResourceOutputFileName': 'a string1',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'TrackerLogDirectory': 'a_folder',
- 'UndefinePreprocessorDefinitions': 'string1;string2'},
- 'Midl': {
- 'AdditionalIncludeDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'ApplicationConfigurationMode': 'true',
- 'ClientStubFile': 'a_file_name',
- 'CPreprocessOptions': 'a string1',
- 'DefaultCharType': 'Signed',
- 'DllDataFileName': 'a_file_name',
- 'EnableErrorChecks': 'EnableCustom',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateClientFiles': 'Stub',
- 'GenerateServerFiles': 'None',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'LocaleID': '33',
- 'MkTypLibCompatible': 'true',
- 'OutputDirectory': 'a string1',
- 'PreprocessorDefinitions': 'string1;string2',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'ServerStubFile': 'a_file_name',
- 'StructMemberAlignment': 'NotSet',
- 'SuppressCompilerWarnings': 'true',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': 'Itanium',
- 'TrackerLogDirectory': 'a_folder',
- 'TypeLibFormat': 'NewFormat',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'string1;string2',
- 'ValidateAllParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '1'},
- 'Lib': {
- 'AdditionalDependencies': 'file1;file2',
- 'AdditionalLibraryDirectories': 'folder1;folder2',
- 'AdditionalOptions': 'a string1',
- 'DisplayLibrary': 'a string1',
- 'ErrorReporting': 'PromptImmediately',
- 'ExportNamedFunctions': 'string1;string2',
- 'ForceSymbolReferences': 'a string1',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2',
- 'LinkTimeCodeGeneration': 'true',
- 'MinimumRequiredVersion': 'a string1',
- 'ModuleDefinitionFile': 'a_file_name',
- 'Name': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'RemoveObjects': 'file1;file2',
- 'SubSystem': 'Console',
- 'SuppressStartupBanner': 'true',
- 'TargetMachine': 'MachineX86i',
- 'TrackerLogDirectory': 'a_folder',
- 'TreatLibWarningAsErrors': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'Verbose': 'true'},
- 'Manifest': {
- 'AdditionalManifestFiles': 'file1;file2',
- 'AdditionalOptions': 'a string1',
- 'AssemblyIdentity': 'a string1',
- 'ComponentFileName': 'a_file_name',
- 'EnableDPIAwareness': 'fal',
- 'GenerateCatalogFiles': 'truel',
- 'GenerateCategoryTags': 'true',
- 'InputResourceManifests': 'a string1',
- 'ManifestFromManagedAssembly': 'a_file_name',
- 'notgood3': 'bogus',
- 'OutputManifestFile': 'a_file_name',
- 'OutputResourceManifests': 'a string1',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressDependencyElement': 'true',
- 'SuppressStartupBanner': 'true',
- 'TrackerLogDirectory': 'a_folder',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'VerboseOutput': 'true'},
- 'ProjectReference': {
- 'LinkLibraryDependencies': 'true',
- 'UseLibraryDependencyInputs': 'true'},
- 'ManifestResourceCompile': {
- 'ResourceOutputFileName': 'a_file_name'},
- '': {
- 'EmbedManifest': 'true',
- 'GenerateManifest': 'true',
- 'IgnoreImportLibrary': 'true',
- 'LinkIncremental': 'false'}},
- self.stderr)
- self._ExpectedWarnings([
- 'Warning: unrecognized setting ClCompile/Enableprefast',
- 'Warning: unrecognized setting ClCompile/ZZXYZ',
- 'Warning: unrecognized setting Manifest/notgood3',
- 'Warning: for Manifest/GenerateCatalogFiles, '
- "expected bool; got 'truel'",
- 'Warning: for Lib/TargetMachine, unrecognized enumerated value '
- 'MachineX86i',
- "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"])
-
- def testConvertToMSBuildSettings_empty(self):
- """Tests an empty conversion."""
- msvs_settings = {}
- expected_msbuild_settings = {}
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
- def testConvertToMSBuildSettings_minimal(self):
- """Tests a minimal conversion."""
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/foo',
- 'BasicRuntimeChecks': '0',
- },
- 'VCLinkerTool': {
- 'LinkTimeCodeGeneration': '1',
- 'ErrorReporting': '1',
- 'DataExecutionPrevention': '2',
- },
- }
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/foo',
- 'BasicRuntimeChecks': 'Default',
- },
- 'Link': {
- 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
- 'LinkErrorReporting': 'PromptImmediately',
- 'DataExecutionPrevention': 'true',
- },
- }
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
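- # (Annotation, not in the original test: the conversion maps numeric MSVS
- # enum indices to symbolic MSBuild names, e.g. BasicRuntimeChecks '0'
- # becomes 'Default' and VCLinkerTool/ErrorReporting '1' becomes
- # Link/LinkErrorReporting 'PromptImmediately'.)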
-
- def testConvertToMSBuildSettings_warnings(self):
- """Tests conversion that generates warnings."""
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': '1',
- 'AdditionalOptions': '2',
- # These are incorrect values:
- 'BasicRuntimeChecks': '12',
- 'BrowseInformation': '21',
- 'UsePrecompiledHeader': '13',
- 'GeneratePreprocessedFile': '14'},
- 'VCLinkerTool': {
- # These are incorrect values:
- 'Driver': '10',
- 'LinkTimeCodeGeneration': '31',
- 'ErrorReporting': '21',
- 'FixedBaseAddress': '6'},
- 'VCResourceCompilerTool': {
- # Custom
- 'Culture': '1003'}}
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': '1',
- 'AdditionalOptions': '2'},
- 'Link': {},
- 'ResourceCompile': {
- # Custom
- 'Culture': '0x03eb'}}
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([
- 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
- 'MSBuild, index value (12) not in expected range [0, 4)',
- 'Warning: while converting VCCLCompilerTool/BrowseInformation to '
- 'MSBuild, index value (21) not in expected range [0, 3)',
- 'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
- 'MSBuild, index value (13) not in expected range [0, 3)',
- 'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
- 'MSBuild, value must be one of [0, 1, 2]; got 14',
-
- 'Warning: while converting VCLinkerTool/Driver to '
- 'MSBuild, index value (10) not in expected range [0, 4)',
- 'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
- 'MSBuild, index value (31) not in expected range [0, 5)',
- 'Warning: while converting VCLinkerTool/ErrorReporting to '
- 'MSBuild, index value (21) not in expected range [0, 3)',
- 'Warning: while converting VCLinkerTool/FixedBaseAddress to '
- 'MSBuild, index value (6) not in expected range [0, 3)',
- ])
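- # (Annotation, not in the original test: Culture values survive the
- # conversion as numbers but change radix, e.g. the decimal MSVS locale
- # ID 1003 becomes MSBuild's hex form '0x03eb'.)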
-
- def testConvertToMSBuildSettings_full_synthetic(self):
- """Tests conversion of all the MSBuild settings."""
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': '0',
- 'BasicRuntimeChecks': '1',
- 'BrowseInformation': '2',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'CallingConvention': '0',
- 'CompileAs': '1',
- 'DebugInformationFormat': '4',
- 'DefaultCharIsUnsigned': 'true',
- 'Detect64BitPortabilityProblems': 'true',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'd1;d2;d3',
- 'EnableEnhancedInstructionSet': '0',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'EnableIntrinsicFunctions': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': '1',
- 'ExceptionHandling': '2',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': '0',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': '1',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2;file3',
- 'ForcedUsingFiles': 'file1;file2;file3',
- 'GeneratePreprocessedFile': '1',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': '2',
- 'KeepComments': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFile': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMP': 'true',
- 'Optimization': '3',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderThrough': 'a_file_name',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': '0',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1',
- 'SuppressStartupBanner': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'UseFullPaths': 'true',
- 'UsePrecompiledHeader': '1',
- 'UseUnicodeResponseFiles': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '2',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name'},
- 'VCLinkerTool': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
- 'AdditionalManifestDependencies': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AddModuleNamesToAssembly': 'file1;file2;file3',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '0',
- 'AssemblyLinkResource': 'file1;file2;file3',
- 'BaseAddress': 'a_string',
- 'CLRImageType': '1',
- 'CLRThreadAttribute': '2',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '0',
- 'DelayLoadDLLs': 'file1;file2;file3',
- 'DelaySign': 'true',
- 'Driver': '1',
- 'EmbedManagedResourceFile': 'file1;file2;file3',
- 'EnableCOMDATFolding': '0',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a_string',
- 'ErrorReporting': '0',
- 'FixedBaseAddress': '1',
- 'ForceSymbolReferences': 'file1;file2;file3',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateManifest': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a_string',
- 'HeapReserveSize': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreDefaultLibraryNames': 'file1;file2;file3',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreImportLibrary': 'true',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': '2',
- 'LinkIncremental': '1',
- 'LinkLibraryDependencies': 'true',
- 'LinkTimeCodeGeneration': '2',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a_string',
- 'MidlCommandFile': 'a_file_name',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OptimizeForWindows98': '1',
- 'OptimizeReferences': '0',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': '1',
- 'RegisterOutput': 'true',
- 'ResourceOnlyDLL': 'true',
- 'SetChecksum': 'true',
- 'ShowProgress': '0',
- 'StackCommitSize': 'a_string',
- 'StackReserveSize': 'a_string',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': '2',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNet': 'true',
- 'TargetMachine': '3',
- 'TerminalServerAware': '2',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': '1',
- 'UACUIAccess': 'true',
- 'UseLibraryDependencyInputs': 'false',
- 'UseUnicodeResponseFiles': 'true',
- 'Version': 'a_string'},
- 'VCResourceCompilerTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'Culture': '1003',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ResourceOutputFileName': 'a_string',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
- 'VCMIDLTool': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'CPreprocessOptions': 'a_string',
- 'DefaultCharType': '0',
- 'DLLDataFileName': 'a_file_name',
- 'EnableErrorChecks': '2',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'MkTypLibCompatible': 'true',
- 'OutputDirectory': 'a_string',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'StructMemberAlignment': '3',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': '1',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'ValidateParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '4'},
- 'VCLibrarianTool': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'ExportNamedFunctions': 'd1;d2;d3',
- 'ForceSymbolReferences': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
- 'LinkLibraryDependencies': 'true',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'UseUnicodeResponseFiles': 'true'},
- 'VCManifestTool': {
- 'AdditionalManifestFiles': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AssemblyIdentity': 'a_string',
- 'ComponentFileName': 'a_file_name',
- 'DependencyInformationFile': 'a_file_name',
- 'EmbedManifest': 'true',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'a_string',
- 'ManifestResourceFile': 'my_name',
- 'OutputManifestFile': 'a_file_name',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'UseFAT32Workaround': 'true',
- 'UseUnicodeResponseFiles': 'true',
- 'VerboseOutput': 'true'}}
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string /J',
- 'AdditionalUsingDirectories': 'folder1;folder2;folder3',
- 'AssemblerListingLocation': 'a_file_name',
- 'AssemblerOutput': 'NoListing',
- 'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
- 'BrowseInformation': 'true',
- 'BrowseInformationFile': 'a_file_name',
- 'BufferSecurityCheck': 'true',
- 'CallingConvention': 'Cdecl',
- 'CompileAs': 'CompileAsC',
- 'DebugInformationFormat': 'EditAndContinue',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'd1;d2;d3',
- 'EnableEnhancedInstructionSet': 'NotSet',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': 'Prompt',
- 'ExceptionHandling': 'Async',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': 'Neither',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': 'Strict',
- 'ForceConformanceInForLoopScope': 'true',
- 'ForcedIncludeFiles': 'file1;file2;file3',
- 'ForcedUsingFiles': 'file1;file2;file3',
- 'FunctionLevelLinking': 'true',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': 'AnySuitable',
- 'IntrinsicFunctions': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFileName': 'a_file_name',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMPSupport': 'true',
- 'Optimization': 'Full',
- 'PrecompiledHeader': 'Create',
- 'PrecompiledHeaderFile': 'a_file_name',
- 'PrecompiledHeaderOutputFile': 'a_file_name',
- 'PreprocessKeepComments': 'true',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'PreprocessSuppressLineNumbers': 'false',
- 'PreprocessToFile': 'true',
- 'ProgramDataBaseFileName': 'a_file_name',
- 'RuntimeLibrary': 'MultiThreaded',
- 'RuntimeTypeInfo': 'true',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '1Byte',
- 'SuppressStartupBanner': 'true',
- 'TreatWarningAsError': 'true',
- 'TreatWChar_tAsBuiltInType': 'true',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'UseFullPaths': 'true',
- 'WarningLevel': 'Level2',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': 'a_file_name'},
- 'Link': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalManifestDependencies': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AddModuleNamesToAssembly': 'file1;file2;file3',
- 'AllowIsolation': 'true',
- 'AssemblyDebug': '',
- 'AssemblyLinkResource': 'file1;file2;file3',
- 'BaseAddress': 'a_string',
- 'CLRImageType': 'ForceIJWImage',
- 'CLRThreadAttribute': 'STAThreadingAttribute',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '',
- 'DelayLoadDLLs': 'file1;file2;file3',
- 'DelaySign': 'true',
- 'Driver': 'Driver',
- 'EmbedManagedResourceFile': 'file1;file2;file3',
- 'EnableCOMDATFolding': '',
- 'EnableUAC': 'true',
- 'EntryPointSymbol': 'a_string',
- 'FixedBaseAddress': 'false',
- 'ForceSymbolReferences': 'file1;file2;file3',
- 'FunctionOrder': 'a_file_name',
- 'GenerateDebugInformation': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': 'a_string',
- 'HeapReserveSize': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
- 'ImportLibrary': 'a_file_name',
- 'KeyContainer': 'a_file_name',
- 'KeyFile': 'a_file_name',
- 'LargeAddressAware': 'true',
- 'LinkErrorReporting': 'NoErrorReport',
- 'LinkTimeCodeGeneration': 'PGInstrument',
- 'ManifestFile': 'a_file_name',
- 'MapExports': 'true',
- 'MapFileName': 'a_file_name',
- 'MergedIDLBaseFileName': 'a_file_name',
- 'MergeSections': 'a_string',
- 'MidlCommandFile': 'a_file_name',
- 'ModuleDefinitionFile': 'a_file_name',
- 'NoEntryPoint': 'true',
- 'OptimizeReferences': '',
- 'OutputFile': 'a_file_name',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': 'a_file_name',
- 'ProgramDatabaseFile': 'a_file_name',
- 'RandomizedBaseAddress': 'false',
- 'RegisterOutput': 'true',
- 'SetChecksum': 'true',
- 'ShowProgress': 'NotSet',
- 'StackCommitSize': 'a_string',
- 'StackReserveSize': 'a_string',
- 'StripPrivateSymbols': 'a_file_name',
- 'SubSystem': 'Windows',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'true',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNET': 'true',
- 'TargetMachine': 'MachineARM',
- 'TerminalServerAware': 'true',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'TypeLibraryResourceID': '33',
- 'UACExecutionLevel': 'HighestAvailable',
- 'UACUIAccess': 'true',
- 'Version': 'a_string'},
- 'ResourceCompile': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'Culture': '0x03eb',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ResourceOutputFileName': 'a_string',
- 'ShowProgress': 'true',
- 'SuppressStartupBanner': 'true',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
- 'Midl': {
- 'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'CPreprocessOptions': 'a_string',
- 'DefaultCharType': 'Unsigned',
- 'DllDataFileName': 'a_file_name',
- 'EnableErrorChecks': 'All',
- 'ErrorCheckAllocations': 'true',
- 'ErrorCheckBounds': 'true',
- 'ErrorCheckEnumRange': 'true',
- 'ErrorCheckRefPointers': 'true',
- 'ErrorCheckStubData': 'true',
- 'GenerateStublessProxies': 'true',
- 'GenerateTypeLibrary': 'true',
- 'HeaderFileName': 'a_file_name',
- 'IgnoreStandardIncludePath': 'true',
- 'InterfaceIdentifierFileName': 'a_file_name',
- 'MkTypLibCompatible': 'true',
- 'OutputDirectory': 'a_string',
- 'PreprocessorDefinitions': 'd1;d2;d3',
- 'ProxyFileName': 'a_file_name',
- 'RedirectOutputAndErrors': 'a_file_name',
- 'StructMemberAlignment': '4',
- 'SuppressStartupBanner': 'true',
- 'TargetEnvironment': 'Win32',
- 'TypeLibraryName': 'a_file_name',
- 'UndefinePreprocessorDefinitions': 'd1;d2;d3',
- 'ValidateAllParameters': 'true',
- 'WarnAsError': 'true',
- 'WarningLevel': '4'},
- 'Lib': {
- 'AdditionalDependencies': 'file1;file2;file3',
- 'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
- 'AdditionalOptions': 'a_string',
- 'ExportNamedFunctions': 'd1;d2;d3',
- 'ForceSymbolReferences': 'a_string',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
- 'ModuleDefinitionFile': 'a_file_name',
- 'OutputFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'UseUnicodeResponseFiles': 'true'},
- 'Manifest': {
- 'AdditionalManifestFiles': 'file1;file2;file3',
- 'AdditionalOptions': 'a_string',
- 'AssemblyIdentity': 'a_string',
- 'ComponentFileName': 'a_file_name',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'a_string',
- 'OutputManifestFile': 'a_file_name',
- 'RegistrarScriptFile': 'a_file_name',
- 'ReplacementsFile': 'a_file_name',
- 'SuppressStartupBanner': 'true',
- 'TypeLibraryFile': 'a_file_name',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'a_file_name',
- 'VerboseOutput': 'true'},
- 'ManifestResourceCompile': {
- 'ResourceOutputFileName': 'my_name'},
- 'ProjectReference': {
- 'LinkLibraryDependencies': 'true',
- 'UseLibraryDependencyInputs': 'false'},
- '': {
- 'EmbedManifest': 'true',
- 'GenerateManifest': 'true',
- 'IgnoreImportLibrary': 'true',
- 'LinkIncremental': 'false'}}
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
- def testConvertToMSBuildSettings_actual(self):
- """Tests the conversion of an actual project.
-
- A VS2008 project with most of the options defined was created through the
- VS2008 IDE. It was then converted to VS2010. The tool settings found in
- the .vcproj and .vcxproj files were converted to the two dictionaries
- msvs_settings and expected_msbuild_settings.
-
- Note that for many settings, the VS2010 converter adds macros like
- %(AdditionalIncludeDirectories) to make sure that inherited values are
- included. Since the Gyp projects we generate do not use inheritance,
- we removed these macros. They were:
- ClCompile:
- AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
- AdditionalOptions: ' %(AdditionalOptions)'
- AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
- DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
- ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
- ForcedUsingFiles: ';%(ForcedUsingFiles)',
- PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
- UndefinePreprocessorDefinitions:
- ';%(UndefinePreprocessorDefinitions)',
- Link:
- AdditionalDependencies: ';%(AdditionalDependencies)',
- AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
- AdditionalManifestDependencies:
- ';%(AdditionalManifestDependencies)',
- AdditionalOptions: ' %(AdditionalOptions)',
- AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
- AssemblyLinkResource: ';%(AssemblyLinkResource)',
- DelayLoadDLLs: ';%(DelayLoadDLLs)',
- EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
- ForceSymbolReferences: ';%(ForceSymbolReferences)',
- IgnoreSpecificDefaultLibraries:
- ';%(IgnoreSpecificDefaultLibraries)',
- ResourceCompile:
- AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
- AdditionalOptions: ' %(AdditionalOptions)',
- PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
- Manifest:
- AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
- AdditionalOptions: ' %(AdditionalOptions)',
- InputResourceManifests: ';%(InputResourceManifests)',
- """
- msvs_settings = {
- 'VCCLCompilerTool': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/more',
- 'AdditionalUsingDirectories': 'test',
- 'AssemblerListingLocation': '$(IntDir)\\a',
- 'AssemblerOutput': '1',
- 'BasicRuntimeChecks': '3',
- 'BrowseInformation': '1',
- 'BrowseInformationFile': '$(IntDir)\\e',
- 'BufferSecurityCheck': 'false',
- 'CallingConvention': '1',
- 'CompileAs': '1',
- 'DebugInformationFormat': '4',
- 'DefaultCharIsUnsigned': 'true',
- 'Detect64BitPortabilityProblems': 'true',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'abc',
- 'EnableEnhancedInstructionSet': '1',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'EnableIntrinsicFunctions': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': '2',
- 'ExceptionHandling': '2',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': '2',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': '1',
- 'ForceConformanceInForLoopScope': 'false',
- 'ForcedIncludeFiles': 'def',
- 'ForcedUsingFiles': 'ge',
- 'GeneratePreprocessedFile': '2',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': '1',
- 'KeepComments': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFile': '$(IntDir)\\b',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMP': 'true',
- 'Optimization': '3',
- 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
- 'PrecompiledHeaderThrough': 'StdAfx.hd',
- 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
- 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
- 'RuntimeLibrary': '3',
- 'RuntimeTypeInfo': 'false',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '3',
- 'SuppressStartupBanner': 'false',
- 'TreatWChar_tAsBuiltInType': 'false',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'wer',
- 'UseFullPaths': 'true',
- 'UsePrecompiledHeader': '0',
- 'UseUnicodeResponseFiles': 'false',
- 'WarnAsError': 'true',
- 'WarningLevel': '3',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': '$(IntDir)\\c'},
- 'VCLinkerTool': {
- 'AdditionalDependencies': 'zx',
- 'AdditionalLibraryDirectories': 'asd',
- 'AdditionalManifestDependencies': 's2',
- 'AdditionalOptions': '/mor2',
- 'AddModuleNamesToAssembly': 'd1',
- 'AllowIsolation': 'false',
- 'AssemblyDebug': '1',
- 'AssemblyLinkResource': 'd5',
- 'BaseAddress': '23423',
- 'CLRImageType': '3',
- 'CLRThreadAttribute': '1',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '0',
- 'DelayLoadDLLs': 'd4',
- 'DelaySign': 'true',
- 'Driver': '2',
- 'EmbedManagedResourceFile': 'd2',
- 'EnableCOMDATFolding': '1',
- 'EnableUAC': 'false',
- 'EntryPointSymbol': 'f5',
- 'ErrorReporting': '2',
- 'FixedBaseAddress': '1',
- 'ForceSymbolReferences': 'd3',
- 'FunctionOrder': 'fssdfsd',
- 'GenerateDebugInformation': 'true',
- 'GenerateManifest': 'false',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': '13',
- 'HeapReserveSize': '12',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreDefaultLibraryNames': 'flob;flok',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreImportLibrary': 'true',
- 'ImportLibrary': 'f4',
- 'KeyContainer': 'f7',
- 'KeyFile': 'f6',
- 'LargeAddressAware': '2',
- 'LinkIncremental': '0',
- 'LinkLibraryDependencies': 'false',
- 'LinkTimeCodeGeneration': '1',
- 'ManifestFile':
- '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
- 'MapExports': 'true',
- 'MapFileName': 'd5',
- 'MergedIDLBaseFileName': 'f2',
- 'MergeSections': 'f5',
- 'MidlCommandFile': 'f1',
- 'ModuleDefinitionFile': 'sdsd',
- 'OptimizeForWindows98': '2',
- 'OptimizeReferences': '2',
- 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
- 'ProgramDatabaseFile': 'Flob.pdb',
- 'RandomizedBaseAddress': '1',
- 'RegisterOutput': 'true',
- 'ResourceOnlyDLL': 'true',
- 'SetChecksum': 'false',
- 'ShowProgress': '1',
- 'StackCommitSize': '15',
- 'StackReserveSize': '14',
- 'StripPrivateSymbols': 'd3',
- 'SubSystem': '1',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'false',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNet': 'true',
- 'TargetMachine': '1',
- 'TerminalServerAware': '1',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'f3',
- 'TypeLibraryResourceID': '12',
- 'UACExecutionLevel': '2',
- 'UACUIAccess': 'true',
- 'UseLibraryDependencyInputs': 'true',
- 'UseUnicodeResponseFiles': 'false',
- 'Version': '333'},
- 'VCResourceCompilerTool': {
- 'AdditionalIncludeDirectories': 'f3',
- 'AdditionalOptions': '/more3',
- 'Culture': '3084',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
- 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
- 'ShowProgress': 'true'},
- 'VCManifestTool': {
- 'AdditionalManifestFiles': 'sfsdfsd',
- 'AdditionalOptions': 'afdsdafsd',
- 'AssemblyIdentity': 'sddfdsadfsa',
- 'ComponentFileName': 'fsdfds',
- 'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
- 'EmbedManifest': 'false',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'asfsfdafs',
- 'ManifestResourceFile':
- '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
- 'OutputManifestFile': '$(TargetPath).manifestdfs',
- 'RegistrarScriptFile': 'sdfsfd',
- 'ReplacementsFile': 'sdffsd',
- 'SuppressStartupBanner': 'false',
- 'TypeLibraryFile': 'sfsd',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'sfsd',
- 'UseFAT32Workaround': 'true',
- 'UseUnicodeResponseFiles': 'false',
- 'VerboseOutput': 'true'}}
- expected_msbuild_settings = {
- 'ClCompile': {
- 'AdditionalIncludeDirectories': 'dir1',
- 'AdditionalOptions': '/more /J',
- 'AdditionalUsingDirectories': 'test',
- 'AssemblerListingLocation': '$(IntDir)a',
- 'AssemblerOutput': 'AssemblyCode',
- 'BasicRuntimeChecks': 'EnableFastChecks',
- 'BrowseInformation': 'true',
- 'BrowseInformationFile': '$(IntDir)e',
- 'BufferSecurityCheck': 'false',
- 'CallingConvention': 'FastCall',
- 'CompileAs': 'CompileAsC',
- 'DebugInformationFormat': 'EditAndContinue',
- 'DisableLanguageExtensions': 'true',
- 'DisableSpecificWarnings': 'abc',
- 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
- 'EnableFiberSafeOptimizations': 'true',
- 'EnablePREfast': 'true',
- 'ErrorReporting': 'Queue',
- 'ExceptionHandling': 'Async',
- 'ExpandAttributedSource': 'true',
- 'FavorSizeOrSpeed': 'Size',
- 'FloatingPointExceptions': 'true',
- 'FloatingPointModel': 'Strict',
- 'ForceConformanceInForLoopScope': 'false',
- 'ForcedIncludeFiles': 'def',
- 'ForcedUsingFiles': 'ge',
- 'FunctionLevelLinking': 'true',
- 'GenerateXMLDocumentationFiles': 'true',
- 'IgnoreStandardIncludePath': 'true',
- 'InlineFunctionExpansion': 'OnlyExplicitInline',
- 'IntrinsicFunctions': 'true',
- 'MinimalRebuild': 'true',
- 'ObjectFileName': '$(IntDir)b',
- 'OmitDefaultLibName': 'true',
- 'OmitFramePointers': 'true',
- 'OpenMPSupport': 'true',
- 'Optimization': 'Full',
- 'PrecompiledHeader': 'NotUsing', # Actual conversion gives ''
- 'PrecompiledHeaderFile': 'StdAfx.hd',
- 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
- 'PreprocessKeepComments': 'true',
- 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
- 'PreprocessSuppressLineNumbers': 'true',
- 'PreprocessToFile': 'true',
- 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
- 'RuntimeLibrary': 'MultiThreadedDebugDLL',
- 'RuntimeTypeInfo': 'false',
- 'ShowIncludes': 'true',
- 'SmallerTypeCheck': 'true',
- 'StringPooling': 'true',
- 'StructMemberAlignment': '4Bytes',
- 'SuppressStartupBanner': 'false',
- 'TreatWarningAsError': 'true',
- 'TreatWChar_tAsBuiltInType': 'false',
- 'UndefineAllPreprocessorDefinitions': 'true',
- 'UndefinePreprocessorDefinitions': 'wer',
- 'UseFullPaths': 'true',
- 'WarningLevel': 'Level3',
- 'WholeProgramOptimization': 'true',
- 'XMLDocumentationFileName': '$(IntDir)c'},
- 'Link': {
- 'AdditionalDependencies': 'zx',
- 'AdditionalLibraryDirectories': 'asd',
- 'AdditionalManifestDependencies': 's2',
- 'AdditionalOptions': '/mor2',
- 'AddModuleNamesToAssembly': 'd1',
- 'AllowIsolation': 'false',
- 'AssemblyDebug': 'true',
- 'AssemblyLinkResource': 'd5',
- 'BaseAddress': '23423',
- 'CLRImageType': 'ForceSafeILImage',
- 'CLRThreadAttribute': 'MTAThreadingAttribute',
- 'CLRUnmanagedCodeCheck': 'true',
- 'DataExecutionPrevention': '',
- 'DelayLoadDLLs': 'd4',
- 'DelaySign': 'true',
- 'Driver': 'UpOnly',
- 'EmbedManagedResourceFile': 'd2',
- 'EnableCOMDATFolding': 'false',
- 'EnableUAC': 'false',
- 'EntryPointSymbol': 'f5',
- 'FixedBaseAddress': 'false',
- 'ForceSymbolReferences': 'd3',
- 'FunctionOrder': 'fssdfsd',
- 'GenerateDebugInformation': 'true',
- 'GenerateMapFile': 'true',
- 'HeapCommitSize': '13',
- 'HeapReserveSize': '12',
- 'IgnoreAllDefaultLibraries': 'true',
- 'IgnoreEmbeddedIDL': 'true',
- 'IgnoreSpecificDefaultLibraries': 'flob;flok',
- 'ImportLibrary': 'f4',
- 'KeyContainer': 'f7',
- 'KeyFile': 'f6',
- 'LargeAddressAware': 'true',
- 'LinkErrorReporting': 'QueueForNextLogin',
- 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
- 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
- 'MapExports': 'true',
- 'MapFileName': 'd5',
- 'MergedIDLBaseFileName': 'f2',
- 'MergeSections': 'f5',
- 'MidlCommandFile': 'f1',
- 'ModuleDefinitionFile': 'sdsd',
- 'NoEntryPoint': 'true',
- 'OptimizeReferences': 'true',
- 'OutputFile': '$(OutDir)$(ProjectName)2.exe',
- 'PerUserRedirection': 'true',
- 'Profile': 'true',
- 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
- 'ProgramDatabaseFile': 'Flob.pdb',
- 'RandomizedBaseAddress': 'false',
- 'RegisterOutput': 'true',
- 'SetChecksum': 'false',
- 'ShowProgress': 'LinkVerbose',
- 'StackCommitSize': '15',
- 'StackReserveSize': '14',
- 'StripPrivateSymbols': 'd3',
- 'SubSystem': 'Console',
- 'SupportUnloadOfDelayLoadedDLL': 'true',
- 'SuppressStartupBanner': 'false',
- 'SwapRunFromCD': 'true',
- 'SwapRunFromNET': 'true',
- 'TargetMachine': 'MachineX86',
- 'TerminalServerAware': 'false',
- 'TurnOffAssemblyGeneration': 'true',
- 'TypeLibraryFile': 'f3',
- 'TypeLibraryResourceID': '12',
- 'UACExecutionLevel': 'RequireAdministrator',
- 'UACUIAccess': 'true',
- 'Version': '333'},
- 'ResourceCompile': {
- 'AdditionalIncludeDirectories': 'f3',
- 'AdditionalOptions': '/more3',
- 'Culture': '0x0c0c',
- 'IgnoreStandardIncludePath': 'true',
- 'PreprocessorDefinitions': '_UNICODE;UNICODE2',
- 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
- 'ShowProgress': 'true'},
- 'Manifest': {
- 'AdditionalManifestFiles': 'sfsdfsd',
- 'AdditionalOptions': 'afdsdafsd',
- 'AssemblyIdentity': 'sddfdsadfsa',
- 'ComponentFileName': 'fsdfds',
- 'GenerateCatalogFiles': 'true',
- 'InputResourceManifests': 'asfsfdafs',
- 'OutputManifestFile': '$(TargetPath).manifestdfs',
- 'RegistrarScriptFile': 'sdfsfd',
- 'ReplacementsFile': 'sdffsd',
- 'SuppressStartupBanner': 'false',
- 'TypeLibraryFile': 'sfsd',
- 'UpdateFileHashes': 'true',
- 'UpdateFileHashesSearchPath': 'sfsd',
- 'VerboseOutput': 'true'},
- 'ProjectReference': {
- 'LinkLibraryDependencies': 'false',
- 'UseLibraryDependencyInputs': 'true'},
- '': {
- 'EmbedManifest': 'false',
- 'GenerateManifest': 'false',
- 'IgnoreImportLibrary': 'true',
- 'LinkIncremental': ''
- },
- 'ManifestResourceCompile': {
- 'ResourceOutputFileName':
- '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
- }
- actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
- msvs_settings,
- self.stderr)
- self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
- self._ExpectedWarnings([])
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
deleted file mode 100644
index 74e529a17f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio project reader/writer."""
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-
-class Writer(object):
- """Visual Studio XML tool file writer."""
-
- def __init__(self, tool_file_path, name):
- """Initializes the tool file.
-
- Args:
- tool_file_path: Path to the tool file.
- name: Name of the tool file.
- """
- self.tool_file_path = tool_file_path
- self.name = name
- self.rules_section = ['Rules']
-
- def AddCustomBuildRule(self, name, cmd, description,
- additional_dependencies,
- outputs, extensions):
- """Adds a rule to the tool file.
-
- Args:
- name: Name of the rule.
- description: Description of the rule.
- cmd: Command line of the rule.
- additional_dependencies: other files which may trigger the rule.
- outputs: outputs of the rule.
- extensions: extensions handled by the rule.
- """
- rule = ['CustomBuildRule',
- {'Name': name,
- 'ExecutionDescription': description,
- 'CommandLine': cmd,
- 'Outputs': ';'.join(outputs),
- 'FileExtensions': ';'.join(extensions),
- 'AdditionalDependencies':
- ';'.join(additional_dependencies)
- }]
- self.rules_section.append(rule)
-
- def WriteIfChanged(self):
- """Writes the tool file."""
- content = ['VisualStudioToolFile',
- {'Version': '8.00',
- 'Name': self.name
- },
- self.rules_section
- ]
- easy_xml.WriteXmlIfChanged(content, self.tool_file_path,
- encoding="Windows-1252")
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
deleted file mode 100644
index 6c07e9a893..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Visual Studio user preferences file writer."""
-
-import os
-import re
-import socket # for gethostname
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-
-
-#------------------------------------------------------------------------------
-
-def _FindCommandInPath(command):
- """If there are no slashes in the command given, this function
- searches the PATH environment variable for the command and converts it
- to an absolute path. We have to do this because MSVS is looking
- for an actual file to launch a debugger on, not just a command
- line. Note that this happens at GYP time, so anything needing to
- be built needs to have a full path."""
- if '/' in command or '\\' in command:
- # If the command already has path elements (either relative or
- # absolute), then assume it is constructed properly.
- return command
- else:
- # Search through the path list and find an existing file that
- # we can access.
- paths = os.environ.get('PATH','').split(os.pathsep)
- for path in paths:
- item = os.path.join(path, command)
- if os.path.isfile(item) and os.access(item, os.X_OK):
- return item
- return command
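-
-# Illustrative behavior (annotation, not part of the original file):
-#   _FindCommandInPath('devenv.exe') -> an absolute path when the executable
-#   is found on PATH; _FindCommandInPath('bin\\tool.exe') is returned
-#   unchanged because it already contains path elements.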
-
-def _QuoteWin32CommandLineArgs(args):
- new_args = []
- for arg in args:
- # Replace all double-quotes with double-double-quotes to escape
- # them for cmd shell, and then quote the whole thing if there
- # are any.
- if arg.find('"') != -1:
- arg = '""'.join(arg.split('"'))
- arg = '"%s"' % arg
-
- # Otherwise, if there are any spaces, quote the whole arg.
- elif re.search(r'[ \t\n]', arg):
- arg = '"%s"' % arg
- new_args.append(arg)
- return new_args
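-
-# Illustrative behavior (annotation, not part of the original file):
-#   _QuoteWin32CommandLineArgs(['prog', 'a b', 'say "hi"'])
-#   -> ['prog', '"a b"', '"say ""hi"""']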
-
-class Writer(object):
- """Visual Studio XML user user file writer."""
-
- def __init__(self, user_file_path, version, name):
- """Initializes the user file.
-
- Args:
- user_file_path: Path to the user file.
- version: Version info.
- name: Name of the user file.
- """
- self.user_file_path = user_file_path
- self.version = version
- self.name = name
- self.configurations = {}
-
- def AddConfig(self, name):
- """Adds a configuration to the project.
-
- Args:
- name: Configuration name.
- """
- self.configurations[name] = ['Configuration', {'Name': name}]
-
- def AddDebugSettings(self, config_name, command, environment = {},
- working_directory=""):
- """Adds a DebugSettings node to the user file for a particular config.
-
- Args:
- config_name: name of the configuration the settings apply to.
- command: command line to run. First element in the list is the
- executable. All elements of the command will be quoted if
- necessary.
- environment: dict of environment variables to set. (optional)
- working_directory: working directory for the command. (optional)
- """
- command = _QuoteWin32CommandLineArgs(command)
-
- abs_command = _FindCommandInPath(command[0])
-
- if environment and isinstance(environment, dict):
- env_list = ['%s="%s"' % (key, val)
- for (key,val) in environment.iteritems()]
- environment = ' '.join(env_list)
- else:
- environment = ''
-
- n_cmd = ['DebugSettings',
- {'Command': abs_command,
- 'WorkingDirectory': working_directory,
- 'CommandArguments': " ".join(command[1:]),
- 'RemoteMachine': socket.gethostname(),
- 'Environment': environment,
- 'EnvironmentMerge': 'true',
- # Currently these are all "dummy" values that we're just setting
- # in the default manner that MSVS does it. We could use some of
- # these to add additional capabilities, I suppose, but they might
- # not have parity with other platforms then.
- 'Attach': 'false',
- 'DebuggerType': '3', # 'auto' debugger
- 'Remote': '1',
- 'RemoteCommand': '',
- 'HttpUrl': '',
- 'PDBPath': '',
- 'SQLDebugging': '',
- 'DebuggerFlavor': '0',
- 'MPIRunCommand': '',
- 'MPIRunArguments': '',
- 'MPIRunWorkingDirectory': '',
- 'ApplicationCommand': '',
- 'ApplicationArguments': '',
- 'ShimCommand': '',
- 'MPIAcceptMode': '',
- 'MPIAcceptFilter': ''
- }]
-
- # Find the config, and add it if it doesn't exist.
- if config_name not in self.configurations:
- self.AddConfig(config_name)
-
- # Add the DebugSettings onto the appropriate config.
- self.configurations[config_name].append(n_cmd)
-
- def WriteIfChanged(self):
- """Writes the user file."""
- configs = ['Configurations']
- for config, spec in sorted(self.configurations.iteritems()):
- configs.append(spec)
-
- content = ['VisualStudioUserFile',
- {'Version': self.version.ProjectVersion(),
- 'Name': self.name
- },
- configs]
- easy_xml.WriteXmlIfChanged(content, self.user_file_path,
- encoding="Windows-1252")
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
deleted file mode 100644
index 0b32e91180..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions shared amongst the Windows generators."""
-
-import copy
-import os
-
-
-# A dictionary mapping supported target types to extensions.
-TARGET_TYPE_EXT = {
- 'executable': 'exe',
- 'loadable_module': 'dll',
- 'shared_library': 'dll',
- 'static_library': 'lib',
-}
-
-
-def _GetLargePdbShimCcPath():
- """Returns the path of the large_pdb_shim.cc file."""
- this_dir = os.path.abspath(os.path.dirname(__file__))
- src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
- win_data_dir = os.path.join(src_dir, 'data', 'win')
- large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
- return large_pdb_shim_cc
-
-
-def _DeepCopySomeKeys(in_dict, keys):
- """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
-
- Arguments:
- in_dict: The dictionary to copy.
- keys: The keys to be copied. If a key is in this list and doesn't exist in
- |in_dict| this is not an error.
- Returns:
- The partially deep-copied dictionary.
- """
- d = {}
- for key in keys:
- if key not in in_dict:
- continue
- d[key] = copy.deepcopy(in_dict[key])
- return d
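-
-# Illustrative behavior (annotation, not part of the original file):
-#   _DeepCopySomeKeys({'a': [1], 'b': 2}, ['a', 'c']) -> {'a': [1]}
-# (a deep copy, with the missing key 'c' silently skipped).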
-
-
-def _SuffixName(name, suffix):
- """Add a suffix to the end of a target.
-
- Arguments:
- name: name of the target (foo#target)
- suffix: the suffix to be added
- Returns:
- Target name with suffix added (foo_suffix#target)
- """
- parts = name.rsplit('#', 1)
- parts[0] = '%s_%s' % (parts[0], suffix)
- return '#'.join(parts)
-
-
-def _ShardName(name, number):
- """Add a shard number to the end of a target.
-
- Arguments:
- name: name of the target (foo#target)
- number: shard number
- Returns:
- Target name with shard added (foo_1#target)
- """
- return _SuffixName(name, str(number))
-
-
-def ShardTargets(target_list, target_dicts):
- """Shard some targets apart to work around the linkers limits.
-
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- Returns:
- Tuple of the new sharded versions of the inputs.
- """
- # Gather the targets to shard, and how many pieces.
- targets_to_shard = {}
- for t in target_dicts:
- shards = int(target_dicts[t].get('msvs_shard', 0))
- if shards:
- targets_to_shard[t] = shards
- # Shard target_list.
- new_target_list = []
- for t in target_list:
- if t in targets_to_shard:
- for i in range(targets_to_shard[t]):
- new_target_list.append(_ShardName(t, i))
- else:
- new_target_list.append(t)
- # Shard target_dict.
- new_target_dicts = {}
- for t in target_dicts:
- if t in targets_to_shard:
- for i in range(targets_to_shard[t]):
- name = _ShardName(t, i)
- new_target_dicts[name] = copy.copy(target_dicts[t])
- new_target_dicts[name]['target_name'] = _ShardName(
- new_target_dicts[name]['target_name'], i)
- sources = new_target_dicts[name].get('sources', [])
- new_sources = []
- for pos in range(i, len(sources), targets_to_shard[t]):
- new_sources.append(sources[pos])
- new_target_dicts[name]['sources'] = new_sources
- else:
- new_target_dicts[t] = target_dicts[t]
- # Shard dependencies.
- for t in new_target_dicts:
- for deptype in ('dependencies', 'dependencies_original'):
- dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
- new_dependencies = []
- for d in dependencies:
- if d in targets_to_shard:
- for i in range(targets_to_shard[d]):
- new_dependencies.append(_ShardName(d, i))
- else:
- new_dependencies.append(d)
- new_target_dicts[t][deptype] = new_dependencies
-
- return (new_target_list, new_target_dicts)
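-
-# Illustrative sketch (annotation, not part of the original file): a target
-# 'a.gyp:lib' with 'msvs_shard': 2 and sources [s0, s1, s2, s3] becomes
-# 'a.gyp:lib_0' with sources [s0, s2] and 'a.gyp:lib_1' with sources [s1, s3],
-# and any dependency on 'a.gyp:lib' is rewritten to depend on both shards.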
-
-
-def _GetPdbPath(target_dict, config_name, vars):
- """Returns the path to the PDB file that will be generated by a given
- configuration.
-
- The lookup proceeds as follows:
- - Look for an explicit path in the VCLinkerTool configuration block.
- - Look for an 'msvs_large_pdb_path' variable.
- - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
- specified.
- - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
-
- Arguments:
- target_dict: The target dictionary to be searched.
- config_name: The name of the configuration of interest.
- vars: A dictionary of common GYP variables with generator-specific values.
- Returns:
- The path of the corresponding PDB file.
- """
- config = target_dict['configurations'][config_name]
- msvs = config.setdefault('msvs_settings', {})
-
- linker = msvs.get('VCLinkerTool', {})
-
- pdb_path = linker.get('ProgramDatabaseFile')
- if pdb_path:
- return pdb_path
-
- variables = target_dict.get('variables', {})
- pdb_path = variables.get('msvs_large_pdb_path', None)
- if pdb_path:
- return pdb_path
-
-
- pdb_base = target_dict.get('product_name', target_dict['target_name'])
- pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
- pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
-
- return pdb_path
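-
-# Illustrative fallback (annotation, not part of the original file): with no
-# explicit ProgramDatabaseFile and no 'msvs_large_pdb_path' variable, an
-# 'executable' target named 'foo' resolves to '<(PRODUCT_DIR)/foo.exe.pdb'.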
-
-
-def InsertLargePdbShims(target_list, target_dicts, vars):
- """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
-
- This is a workaround for targets with PDBs greater than 1GB in size, the
- limit for the 1KB pagesize PDBs created by the linker by default.
-
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- vars: A dictionary of common GYP variables with generator-specific values.
- Returns:
- Tuple of the shimmed version of the inputs.
- """
- # Determine which targets need shimming.
- targets_to_shim = []
- for t in target_dicts:
- target_dict = target_dicts[t]
-
- # We only want to shim targets that have msvs_large_pdb enabled.
- if not int(target_dict.get('msvs_large_pdb', 0)):
- continue
- # This is intended for executable, shared_library and loadable_module
- # targets where every configuration is set up to produce a PDB output.
- # If any of these conditions is not true then the shim logic will fail
- # below.
- targets_to_shim.append(t)
-
- large_pdb_shim_cc = _GetLargePdbShimCcPath()
-
- for t in targets_to_shim:
- target_dict = target_dicts[t]
- target_name = target_dict.get('target_name')
-
- base_dict = _DeepCopySomeKeys(target_dict,
- ['configurations', 'default_configuration', 'toolset'])
-
- # This is the dict for copying the source file (part of the GYP tree)
- # to the intermediate directory of the project. This is necessary because
- # we can't always build a relative path to the shim source file (on Windows
- # GYP and the project may be on different drives), and Ninja hates absolute
- # paths (it ends up generating the .obj and .obj.d alongside the source
- # file, polluting GYP's tree).
- copy_suffix = 'large_pdb_copy'
- copy_target_name = target_name + '_' + copy_suffix
- full_copy_target_name = _SuffixName(t, copy_suffix)
- shim_cc_basename = os.path.basename(large_pdb_shim_cc)
- shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
- shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
- copy_dict = copy.deepcopy(base_dict)
- copy_dict['target_name'] = copy_target_name
- copy_dict['type'] = 'none'
- copy_dict['sources'] = [ large_pdb_shim_cc ]
- copy_dict['copies'] = [{
- 'destination': shim_cc_dir,
- 'files': [ large_pdb_shim_cc ]
- }]
-
- # This is the dict for the PDB generating shim target. It depends on the
- # copy target.
- shim_suffix = 'large_pdb_shim'
- shim_target_name = target_name + '_' + shim_suffix
- full_shim_target_name = _SuffixName(t, shim_suffix)
- shim_dict = copy.deepcopy(base_dict)
- shim_dict['target_name'] = shim_target_name
- shim_dict['type'] = 'static_library'
- shim_dict['sources'] = [ shim_cc_path ]
- shim_dict['dependencies'] = [ full_copy_target_name ]
-
- # Set up the shim to output its PDB to the same location as the final linker
- # target.
- for config_name, config in shim_dict.get('configurations').iteritems():
- pdb_path = _GetPdbPath(target_dict, config_name, vars)
-
- # A few keys that we don't want to propagate.
- for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
- config.pop(key, None)
-
- msvs = config.setdefault('msvs_settings', {})
-
- # Update the compiler directives in the shim target.
- compiler = msvs.setdefault('VCCLCompilerTool', {})
- compiler['DebugInformationFormat'] = '3'
- compiler['ProgramDataBaseFileName'] = pdb_path
-
- # Set the explicit PDB path in the appropriate configuration of the
- # original target.
- config = target_dict['configurations'][config_name]
- msvs = config.setdefault('msvs_settings', {})
- linker = msvs.setdefault('VCLinkerTool', {})
- linker['GenerateDebugInformation'] = 'true'
- linker['ProgramDatabaseFile'] = pdb_path
-
- # Add the new targets. They must go to the beginning of the list so that
- # the dependency generation works as expected in ninja.
- target_list.insert(0, full_copy_target_name)
- target_list.insert(0, full_shim_target_name)
- target_dicts[full_copy_target_name] = copy_dict
- target_dicts[full_shim_target_name] = shim_dict
-
- # Update the original target to depend on the shim target.
- target_dict.setdefault('dependencies', []).append(full_shim_target_name)
-
- return (target_list, target_dicts)
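-
-# Net effect (annotation, not part of the original file): a shimmed target
-# ends up with the dependency chain
-#   original -> <name>_large_pdb_shim -> <name>_large_pdb_copy
-# where the copy target stages large-pdb-shim.cc into SHARED_INTERMEDIATE_DIR
-# and the shim is a static_library compiled with DebugInformationFormat '3'
-# (/Zi), whose PDB lands at the same path the final link step will use.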
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
deleted file mode 100644
index d9bfa684fa..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ /dev/null
@@ -1,443 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Handle version information related to Visual Stuio."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-import gyp
-import glob
-
-
-class VisualStudioVersion(object):
- """Information regarding a version of Visual Studio."""
-
- def __init__(self, short_name, description,
- solution_version, project_version, flat_sln, uses_vcxproj,
- path, sdk_based, default_toolset=None):
- self.short_name = short_name
- self.description = description
- self.solution_version = solution_version
- self.project_version = project_version
- self.flat_sln = flat_sln
- self.uses_vcxproj = uses_vcxproj
- self.path = path
- self.sdk_based = sdk_based
- self.default_toolset = default_toolset
-
- def ShortName(self):
- return self.short_name
-
- def Description(self):
- """Get the full description of the version."""
- return self.description
-
- def SolutionVersion(self):
- """Get the version number of the sln files."""
- return self.solution_version
-
- def ProjectVersion(self):
- """Get the version number of the vcproj or vcxproj files."""
- return self.project_version
-
- def FlatSolution(self):
- return self.flat_sln
-
- def UsesVcxproj(self):
- """Returns true if this version uses a vcxproj file."""
- return self.uses_vcxproj
-
- def ProjectExtension(self):
- """Returns the file extension for the project."""
- return self.uses_vcxproj and '.vcxproj' or '.vcproj'
-
- def Path(self):
- """Returns the path to Visual Studio installation."""
- return self.path
-
- def ToolPath(self, tool):
- """Returns the path to a given compiler tool. """
- return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
-
- def DefaultToolset(self):
- """Returns the msbuild toolset version that will be used in the absence
- of a user override."""
- return self.default_toolset
-
- def SetupScript(self, target_arch):
- """Returns a command (with arguments) to be used to set up the
- environment."""
- # Check if we are running in the SDK command line environment and use
- # the setup script from the SDK if so. |target_arch| should be either
- # 'x86' or 'x64'.
- assert target_arch in ('x86', 'x64')
- sdk_dir = os.environ.get('WindowsSDKDir')
- if self.sdk_based and sdk_dir:
- return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
- '/' + target_arch]
- else:
- # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
- # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
- # isn't always.
- if target_arch == 'x86':
- if self.short_name >= '2013' and self.short_name[-1] != 'e' and (
- os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- # VS2013 and later, non-Express have a x64-x86 cross that we want
- # to prefer.
- return [os.path.normpath(
- os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
- # Otherwise, the standard x86 compiler.
- return [os.path.normpath(
- os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
- else:
- assert target_arch == 'x64'
- arg = 'x86_amd64'
- # Use the 64-on-64 compiler if we're not using an express
- # edition and we're running on a 64bit OS.
- if self.short_name[-1] != 'e' and (
- os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
- arg = 'amd64'
- return [os.path.normpath(
- os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
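-
-# Illustrative behavior (annotation, not part of the original file): for a
-# non-Express VS2013 install on 64-bit Windows, SetupScript('x64') returns
-# ['<path to VS>\\VC\\vcvarsall.bat', 'amd64'], while an Express edition
-# ('2013e') falls back to the 'x86_amd64' cross-compiler argument.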
-
-
-def _RegistryQueryBase(sysdir, key, value):
- """Use reg.exe to read a particular key.
-
- While ideally we might use the win32 module, we would like gyp to work
- with any Python installation; cygwin Python, for instance, lacks this module.
-
- Arguments:
- sysdir: The system subdirectory to attempt to launch reg.exe from.
- key: The registry key to read from.
- value: The particular value to read.
- Return:
- stdout from reg.exe, or None for failure.
- """
- # Skip if not on Windows or Python Win32 setup issue
- if sys.platform not in ('win32', 'cygwin'):
- return None
- # Setup params to pass to and attempt to launch reg.exe
- cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'),
- 'query', key]
- if value:
- cmd.extend(['/v', value])
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid
- # Note that the error text may be in [1] in some cases
- text = p.communicate()[0]
- # Check return code from reg.exe; officially 0==success and 1==error
- if p.returncode:
- return None
- return text
-
-
-def _RegistryQuery(key, value=None):
- r"""Use reg.exe to read a particular key through _RegistryQueryBase.
-
- First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
- that fails, it falls back to System32. Sysnative is available on Vista and
- up and available on Windows Server 2003 and XP through KB patch 942589. Note
- that Sysnative will always fail under 64-bit Python, since it is a virtual
- directory visible only to 32-bit processes; a 64-bit process already sees
- the real System32, so the fallback works in the first place.
-
- KB 942589 - http://support.microsoft.com/kb/942589/en-us.
-
- Arguments:
- key: The registry key.
- value: The particular registry value to read (optional).
- Return:
- stdout from reg.exe, or None for failure.
- """
- text = None
- try:
- text = _RegistryQueryBase('Sysnative', key, value)
- except OSError, e:
- if e.errno == errno.ENOENT:
- text = _RegistryQueryBase('System32', key, value)
- else:
- raise
- return text
-
-
-def _RegistryGetValueUsingWinReg(key, value):
- """Use the _winreg module to obtain the value of a registry key.
-
- Args:
- key: The registry key.
- value: The particular registry value to read.
- Return:
- contents of the registry key's value, or None on failure. Raises
- ImportError if _winreg is unavailable.
- """
- import _winreg
- try:
- root, subkey = key.split('\\', 1)
- assert root == 'HKLM' # Only need HKLM for now.
- with _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, subkey) as hkey:
- return _winreg.QueryValueEx(hkey, value)[0]
- except WindowsError:
- return None
-
-
-def _RegistryGetValue(key, value):
- """Use _winreg or reg.exe to obtain the value of a registry key.
-
- Using _winreg is preferable because it solves an issue on some corporate
- environments where access to reg.exe is locked down. However, we still need
- to fallback to reg.exe for the case where the _winreg module is not available
- (for example in cygwin python).
-
- Args:
- key: The registry key.
- value: The particular registry value to read.
- Return:
- contents of the registry key's value, or None on failure.
- """
- try:
- return _RegistryGetValueUsingWinReg(key, value)
- except ImportError:
- pass
-
- # Fallback to reg.exe if we fail to import _winreg.
- text = _RegistryQuery(key, value)
- if not text:
- return None
- # Extract value.
- match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text)
- if not match:
- return None
- return match.group(1)
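-
-# Illustrative behavior (annotation, not part of the original file; the key
-# shown is illustrative):
-#   _RegistryGetValue(r'HKLM\Software\Microsoft\VisualStudio\9.0', 'InstallDir')
-# returns the VS2008 IDE install directory on a machine that has it, and None
-# when the key or value is absent.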
-
-
-def _CreateVersion(name, path, sdk_based=False):
- """Sets up MSVS project generation.
-
- Setup is based on the GYP_MSVS_VERSION environment variable or whatever is
- autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
- passed in that doesn't match a key in |versions|, Python will raise a KeyError.
- """
- if path:
- path = os.path.normpath(path)
- versions = {
- '2015': VisualStudioVersion('2015',
- 'Visual Studio 2015',
- solution_version='12.00',
- project_version='14.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v140'),
- '2013': VisualStudioVersion('2013',
- 'Visual Studio 2013',
- solution_version='13.00',
- project_version='12.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v120'),
- '2013e': VisualStudioVersion('2013e',
- 'Visual Studio 2013',
- solution_version='13.00',
- project_version='12.0',
- flat_sln=True,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v120'),
- '2012': VisualStudioVersion('2012',
- 'Visual Studio 2012',
- solution_version='12.00',
- project_version='4.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v110'),
- '2012e': VisualStudioVersion('2012e',
- 'Visual Studio 2012',
- solution_version='12.00',
- project_version='4.0',
- flat_sln=True,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based,
- default_toolset='v110'),
- '2010': VisualStudioVersion('2010',
- 'Visual Studio 2010',
- solution_version='11.00',
- project_version='4.0',
- flat_sln=False,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based),
- '2010e': VisualStudioVersion('2010e',
- 'Visual C++ Express 2010',
- solution_version='11.00',
- project_version='4.0',
- flat_sln=True,
- uses_vcxproj=True,
- path=path,
- sdk_based=sdk_based),
- '2008': VisualStudioVersion('2008',
- 'Visual Studio 2008',
- solution_version='10.00',
- project_version='9.00',
- flat_sln=False,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- '2008e': VisualStudioVersion('2008e',
- 'Visual Studio 2008',
- solution_version='10.00',
- project_version='9.00',
- flat_sln=True,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- '2005': VisualStudioVersion('2005',
- 'Visual Studio 2005',
- solution_version='9.00',
- project_version='8.00',
- flat_sln=False,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- '2005e': VisualStudioVersion('2005e',
- 'Visual Studio 2005',
- solution_version='9.00',
- project_version='8.00',
- flat_sln=True,
- uses_vcxproj=False,
- path=path,
- sdk_based=sdk_based),
- }
- return versions[str(name)]
-
-
-def _ConvertToCygpath(path):
- """Convert to cygwin path if we are using cygwin."""
- if sys.platform == 'cygwin':
- p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE)
- path = p.communicate()[0].strip()
- return path
-
-
-def _DetectVisualStudioVersions(versions_to_check, force_express):
- """Collect the list of installed visual studio versions.
-
- Returns:
- A list of visual studio versions installed in descending order of
- usage preference.
- Detection is based on the registry and a quick check that devenv.exe
- exists. Only versions 8 through 14 (2005 through 2015) are considered.
- Possibilities are:
- 2005(e) - Visual Studio 2005 (8)
- 2008(e) - Visual Studio 2008 (9)
- 2010(e) - Visual Studio 2010 (10)
- 2012(e) - Visual Studio 2012 (11)
- 2013(e) - Visual Studio 2013 (12)
- 2015 - Visual Studio 2015 (14)
- Where (e) marks the Express edition of MSVS, and is blank otherwise.
- """
- version_to_year = {
- '8.0': '2005',
- '9.0': '2008',
- '10.0': '2010',
- '11.0': '2012',
- '12.0': '2013',
- '14.0': '2015',
- }
- versions = []
- for version in versions_to_check:
- # Old method of searching for which VS version is installed.
- # We don't use the 2010-and-later recommended way because we also want to
- # get the path to the binaries, which it doesn't offer.
- keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version,
- r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version,
- r'HKLM\Software\Microsoft\VCExpress\%s' % version,
- r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version]
- for index in range(len(keys)):
- path = _RegistryGetValue(keys[index], 'InstallDir')
- if not path:
- continue
- path = _ConvertToCygpath(path)
- # Check for full.
- full_path = os.path.join(path, 'devenv.exe')
- express_path = os.path.join(path, '*express.exe')
- if not force_express and os.path.exists(full_path):
- # Add this one.
- versions.append(_CreateVersion(version_to_year[version],
- os.path.join(path, '..', '..')))
- # Check for express.
- elif glob.glob(express_path):
- # Add this one.
- versions.append(_CreateVersion(version_to_year[version] + 'e',
- os.path.join(path, '..', '..')))
-
- # The old method above does not work when only the SDK is installed.
- keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7',
- r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7']
- for index in range(len(keys)):
- path = _RegistryGetValue(keys[index], version)
- if not path:
- continue
- path = _ConvertToCygpath(path)
- if version != '14.0': # There is no Express edition for 2015.
- versions.append(_CreateVersion(version_to_year[version] + 'e',
- os.path.join(path, '..'), sdk_based=True))
-
- return versions
-
-
-def SelectVisualStudioVersion(version='auto', allow_fallback=True):
- """Select which version of Visual Studio projects to generate.
-
- Arguments:
- version: Hook to allow caller to force a particular version (vs auto).
- allow_fallback: Whether to fall back to a default version when no
- installation can be detected; if False, a ValueError is raised instead.
- Returns:
- An object representing a visual studio project format version.
- """
- # In auto mode, check environment variable for override.
- if version == 'auto':
- version = os.environ.get('GYP_MSVS_VERSION', 'auto')
- version_map = {
- 'auto': ('14.0', '12.0', '10.0', '9.0', '8.0', '11.0'),
- '2005': ('8.0',),
- '2005e': ('8.0',),
- '2008': ('9.0',),
- '2008e': ('9.0',),
- '2010': ('10.0',),
- '2010e': ('10.0',),
- '2012': ('11.0',),
- '2012e': ('11.0',),
- '2013': ('12.0',),
- '2013e': ('12.0',),
- '2015': ('14.0',),
- }
- override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
- if override_path:
- msvs_version = os.environ.get('GYP_MSVS_VERSION')
- if not msvs_version:
- raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
- 'set to a particular version (e.g. 2010e).')
- return _CreateVersion(msvs_version, override_path, sdk_based=True)
- version = str(version)
- versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
- if not versions:
- if not allow_fallback:
- raise ValueError('Could not locate Visual Studio installation.')
- if version == 'auto':
- # Default to 2005 if we couldn't find anything
- return _CreateVersion('2005', None)
- else:
- return _CreateVersion(version, None)
- return versions[0]
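A rough sketch of the selection flow above, assuming it runs on a machine
with Visual Studio installed:

  vs = SelectVisualStudioVersion()        # honors GYP_MSVS_VERSION if set
  vs = SelectVisualStudioVersion('2013')  # force 2013; if nothing is
                                          # detected, this falls back to a
                                          # path-less '2013' version object
  SelectVisualStudioVersion('2013', allow_fallback=False)
  # raises ValueError when no installation could be located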
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
deleted file mode 100755
index 668f38b60d..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
+++ /dev/null
@@ -1,548 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import copy
-import gyp.input
-import optparse
-import os.path
-import re
-import shlex
-import sys
-import traceback
-from gyp.common import GypError
-
-# Default debug modes for GYP
-debug = {}
-
-# List of "official" debug modes, but you can use anything you like.
-DEBUG_GENERAL = 'general'
-DEBUG_VARIABLES = 'variables'
-DEBUG_INCLUDES = 'includes'
-
-
-def DebugOutput(mode, message, *args):
- if 'all' in gyp.debug or mode in gyp.debug:
- ctx = ('unknown', 0, 'unknown')
- try:
- f = traceback.extract_stack(limit=2)
- if f:
- ctx = f[0][:3]
- except:
- pass
- if args:
- message %= args
- print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]),
- ctx[1], ctx[2], message)
-
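A small sketch of how DebugOutput is driven (gyp_main below fills gyp.debug
from the -d flag); the file, line, and caller shown are hypothetical:

  gyp.debug[DEBUG_GENERAL] = 1
  DebugOutput(DEBUG_GENERAL, 'loaded %d build files', 3)
  # prints: GENERAL:__init__.py:42:SomeCaller loaded 3 build files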
-def FindBuildFiles():
- extension = '.gyp'
- files = os.listdir(os.getcwd())
- build_files = []
- for file in files:
- if file.endswith(extension):
- build_files.append(file)
- return build_files
-
-
-def Load(build_files, format, default_variables={},
- includes=[], depth='.', params=None, check=False,
- circular_check=True, duplicate_basename_check=True):
- """
- Loads one or more specified build files.
- default_variables and includes will be copied before use.
- Returns the generator for the specified format and the
- data returned by loading the specified build files.
- """
- if params is None:
- params = {}
-
- if '-' in format:
- format, params['flavor'] = format.split('-', 1)
-
- default_variables = copy.copy(default_variables)
-
- # Default variables provided by this program and its modules should be
- # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace,
- # avoiding collisions with user and automatic variables.
- default_variables['GENERATOR'] = format
- default_variables['GENERATOR_FLAVOR'] = params.get('flavor', '')
-
- # Format can be a custom python file, or by default the name of a module
- # within gyp.generator.
- if format.endswith('.py'):
- generator_name = os.path.splitext(format)[0]
- path, generator_name = os.path.split(generator_name)
-
- # Make sure the path to the custom generator is in sys.path
- # Don't worry about removing it once we are done. Keeping the path
- # to each generator that is used in sys.path is likely harmless and
- # arguably a good idea.
- path = os.path.abspath(path)
- if path not in sys.path:
- sys.path.insert(0, path)
- else:
- generator_name = 'gyp.generator.' + format
-
- # These parameters are passed in order (as opposed to by key)
- # because ActivePython cannot handle key parameters to __import__.
- generator = __import__(generator_name, globals(), locals(), generator_name)
- for (key, val) in generator.generator_default_variables.items():
- default_variables.setdefault(key, val)
-
- # Give the generator the opportunity to set additional variables based on
- # the params it will receive in the output phase.
- if getattr(generator, 'CalculateVariables', None):
- generator.CalculateVariables(default_variables, params)
-
- # Give the generator the opportunity to set generator_input_info based on
- # the params it will receive in the output phase.
- if getattr(generator, 'CalculateGeneratorInputInfo', None):
- generator.CalculateGeneratorInputInfo(params)
-
- # Fetch the generator-specific info that gets fed to input; we use getattr
- # so we can provide defaults and the generators only have to declare what
- # they need.
- generator_input_info = {
- 'non_configuration_keys':
- getattr(generator, 'generator_additional_non_configuration_keys', []),
- 'path_sections':
- getattr(generator, 'generator_additional_path_sections', []),
- 'extra_sources_for_rules':
- getattr(generator, 'generator_extra_sources_for_rules', []),
- 'generator_supports_multiple_toolsets':
- getattr(generator, 'generator_supports_multiple_toolsets', False),
- 'generator_wants_static_library_dependencies_adjusted':
- getattr(generator,
- 'generator_wants_static_library_dependencies_adjusted', True),
- 'generator_wants_sorted_dependencies':
- getattr(generator, 'generator_wants_sorted_dependencies', False),
- 'generator_filelist_paths':
- getattr(generator, 'generator_filelist_paths', None),
- }
-
- # Process the input specific to this generator.
- result = gyp.input.Load(build_files, default_variables, includes[:],
- depth, generator_input_info, check, circular_check,
- duplicate_basename_check,
- params['parallel'], params['root_targets'])
- return [generator] + result
-
-def NameValueListToDict(name_value_list):
- """
- Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
- of the pairs. If a string is simply NAME, then the value in the dictionary
- is set to True. If VALUE can be converted to an integer, it is.
- """
- result = { }
- for item in name_value_list:
- tokens = item.split('=', 1)
- if len(tokens) == 2:
- # If we can make it an int, use that, otherwise, use the string.
- try:
- token_value = int(tokens[1])
- except ValueError:
- token_value = tokens[1]
- # Set the variable to the supplied value.
- result[tokens[0]] = token_value
- else:
- # No value supplied, treat it as a boolean and set it.
- result[tokens[0]] = True
- return result
-
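A behavioral sketch of NameValueListToDict, which backs the -D and -G flag
parsing in gyp_main below:

  NameValueListToDict(['OS=linux', 'DEPTH=17', 'component'])
  # => {'OS': 'linux', 'DEPTH': 17, 'component': True}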
-def ShlexEnv(env_name):
- flags = os.environ.get(env_name, [])
- if flags:
- flags = shlex.split(flags)
- return flags
-
-def FormatOpt(opt, value):
- if opt.startswith('--'):
- return '%s=%s' % (opt, value)
- return opt + value
-
-def RegenerateAppendFlag(flag, values, predicate, env_name, options):
- """Regenerate a list of command line flags, for an option of action='append'.
-
- The |env_name|, if given, is checked in the environment and used to generate
- an initial list of options, then the options that were specified on the
- command line (given in |values|) are appended. This matches the handling of
- environment variables and command line flags where command line flags override
- the environment, while not requiring the environment to be set when the flags
- are used again.
- """
- flags = []
- if options.use_environment and env_name:
- for flag_value in ShlexEnv(env_name):
- value = FormatOpt(flag, predicate(flag_value))
- if value in flags:
- flags.remove(value)
- flags.append(value)
- if values:
- for flag_value in values:
- flags.append(FormatOpt(flag, predicate(flag_value)))
- return flags
-
-def RegenerateFlags(options):
- """Given a parsed options object, and taking the environment variables into
- account, returns a list of flags that should regenerate an equivalent options
- object (even in the absence of the environment variables).
-
- Any path options will be normalized relative to depth.
-
- The format flag is not included, as it is assumed the calling generator will
- set that as appropriate.
- """
- def FixPath(path):
- path = gyp.common.FixIfRelativePath(path, options.depth)
- if not path:
- return os.path.curdir
- return path
-
- def Noop(value):
- return value
-
- # We always want to ignore the environment when regenerating, to avoid
- # duplicate or changed flags in the environment at the time of regeneration.
- flags = ['--ignore-environment']
- for name, metadata in options._regeneration_metadata.iteritems():
- opt = metadata['opt']
- value = getattr(options, name)
- value_predicate = metadata['type'] == 'path' and FixPath or Noop
- action = metadata['action']
- env_name = metadata['env_name']
- if action == 'append':
- flags.extend(RegenerateAppendFlag(opt, value, value_predicate,
- env_name, options))
- elif action in ('store', None): # None is a synonym for 'store'.
- if value:
- flags.append(FormatOpt(opt, value_predicate(value)))
- elif options.use_environment and env_name and os.environ.get(env_name):
- flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name))))
- elif action in ('store_true', 'store_false'):
- if ((action == 'store_true' and value) or
- (action == 'store_false' and not value)):
- flags.append(opt)
- elif options.use_environment and env_name:
- print >>sys.stderr, ('Warning: environment regeneration unimplemented '
- 'for %s flag %r env_name %r' % (action, opt,
- env_name))
- else:
- print >>sys.stderr, ('Warning: regeneration unimplemented for action %r '
- 'flag %r' % (action, opt))
-
- return flags
-
-class RegeneratableOptionParser(optparse.OptionParser):
- def __init__(self):
- self.__regeneratable_options = {}
- optparse.OptionParser.__init__(self)
-
- def add_option(self, *args, **kw):
- """Add an option to the parser.
-
- This accepts the same arguments as OptionParser.add_option, plus the
- following:
- regenerate: can be set to False to prevent this option from being included
- in regeneration.
- env_name: name of environment variable that additional values for this
- option come from.
- type: adds type='path', to tell the regenerator that the values of
- this option need to be made relative to options.depth
- """
- env_name = kw.pop('env_name', None)
- if 'dest' in kw and kw.pop('regenerate', True):
- dest = kw['dest']
-
- # The path type is needed for regenerating, for optparse we can just treat
- # it as a string.
- type = kw.get('type')
- if type == 'path':
- kw['type'] = 'string'
-
- self.__regeneratable_options[dest] = {
- 'action': kw.get('action'),
- 'type': type,
- 'env_name': env_name,
- 'opt': args[0],
- }
-
- optparse.OptionParser.add_option(self, *args, **kw)
-
- def parse_args(self, *args):
- values, args = optparse.OptionParser.parse_args(self, *args)
- values._regeneration_metadata = self.__regeneratable_options
- return values, args
-
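A minimal sketch of the metadata this parser records; the single option
registered here is chosen for illustration:

  parser = RegeneratableOptionParser()
  parser.add_option('-D', dest='defines', action='append',
                    env_name='GYP_DEFINES')
  options, args = parser.parse_args(['-D', 'OS=linux'])
  # options.defines == ['OS=linux']
  # options._regeneration_metadata['defines'] ==
  #   {'action': 'append', 'type': None, 'env_name': 'GYP_DEFINES',
  #    'opt': '-D'}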
-def gyp_main(args):
- my_name = os.path.basename(sys.argv[0])
-
- parser = RegeneratableOptionParser()
- usage = 'usage: %s [options ...] [build_file ...]'
- parser.set_usage(usage.replace('%s', '%prog'))
- parser.add_option('--build', dest='configs', action='append',
- help='configuration for build after project generation')
- parser.add_option('--check', dest='check', action='store_true',
- help='check format of gyp files')
- parser.add_option('--config-dir', dest='config_dir', action='store',
- env_name='GYP_CONFIG_DIR', default=None,
- help='The location for configuration files like '
- 'include.gypi.')
- parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE',
- action='append', default=[], help='turn on a debugging '
- 'mode for debugging GYP. Supported modes are "variables", '
- '"includes" and "general" or "all" for all of them.')
- parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL',
- env_name='GYP_DEFINES',
- help='sets variable VAR to value VAL')
- parser.add_option('--depth', dest='depth', metavar='PATH', type='path',
- help='set DEPTH gyp variable to a relative path to PATH')
- parser.add_option('-f', '--format', dest='formats', action='append',
- env_name='GYP_GENERATORS', regenerate=False,
- help='output formats to generate')
- parser.add_option('-G', dest='generator_flags', action='append', default=[],
- metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS',
- help='sets generator flag FLAG to VAL')
- parser.add_option('--generator-output', dest='generator_output',
- action='store', default=None, metavar='DIR', type='path',
- env_name='GYP_GENERATOR_OUTPUT',
- help='puts generated build files under DIR')
- parser.add_option('--ignore-environment', dest='use_environment',
- action='store_false', default=True, regenerate=False,
- help='do not read options from environment variables')
- parser.add_option('-I', '--include', dest='includes', action='append',
- metavar='INCLUDE', type='path',
- help='files to include in all loaded .gyp files')
- # --no-circular-check disables the check for circular relationships between
- # .gyp files. These relationships should not exist, but they've only been
- # observed to be harmful with the Xcode generator. Chromium's .gyp files
- # currently have some circular relationships on non-Mac platforms, so this
- # option allows the strict behavior to be used on Macs and the lenient
- # behavior to be used elsewhere.
- # TODO(mark): Remove this option when http://crbug.com/35878 is fixed.
- parser.add_option('--no-circular-check', dest='circular_check',
- action='store_false', default=True, regenerate=False,
- help="don't check for circular relationships between files")
- # --no-duplicate-basename-check disables the check for duplicate basenames
- # in a static_library/shared_library project. Visual C++ 2008 generator
- # doesn't support this configuration. Libtool on Mac also generates warnings
- # when duplicate basenames are passed into Make generator on Mac.
- # TODO(yukawa): Remove this option when these legacy generators are
- # deprecated.
- parser.add_option('--no-duplicate-basename-check',
- dest='duplicate_basename_check', action='store_false',
- default=True, regenerate=False,
- help="don't check for duplicate basenames")
- parser.add_option('--no-parallel', action='store_true', default=False,
- help='Disable multiprocessing')
- parser.add_option('-S', '--suffix', dest='suffix', default='',
- help='suffix to add to generated files')
- parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
- default=None, metavar='DIR', type='path',
- help='directory to use as the root of the source tree')
- parser.add_option('-R', '--root-target', dest='root_targets',
- action='append', metavar='TARGET',
- help='include only TARGET and its deep dependencies')
-
- options, build_files_arg = parser.parse_args(args)
- build_files = build_files_arg
-
- # Set up the configuration directory (defaults to ~/.gyp)
- if not options.config_dir:
- home = None
- home_dot_gyp = None
- if options.use_environment:
- home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
- if home_dot_gyp:
- home_dot_gyp = os.path.expanduser(home_dot_gyp)
-
- if not home_dot_gyp:
- home_vars = ['HOME']
- if sys.platform in ('cygwin', 'win32'):
- home_vars.append('USERPROFILE')
- for home_var in home_vars:
- home = os.getenv(home_var)
- if home != None:
- home_dot_gyp = os.path.join(home, '.gyp')
- if not os.path.exists(home_dot_gyp):
- home_dot_gyp = None
- else:
- break
- else:
- home_dot_gyp = os.path.expanduser(options.config_dir)
-
- if home_dot_gyp and not os.path.exists(home_dot_gyp):
- home_dot_gyp = None
-
- if not options.formats:
- # If no format was given on the command line, then check the env variable.
- generate_formats = []
- if options.use_environment:
- generate_formats = os.environ.get('GYP_GENERATORS', [])
- if generate_formats:
- generate_formats = re.split(r'[\s,]', generate_formats)
- if generate_formats:
- options.formats = generate_formats
- else:
- # Nothing in the variable, default based on platform.
- if sys.platform == 'darwin':
- options.formats = ['xcode']
- elif sys.platform in ('win32', 'cygwin'):
- options.formats = ['msvs']
- else:
- options.formats = ['make']
-
- if not options.generator_output and options.use_environment:
- g_o = os.environ.get('GYP_GENERATOR_OUTPUT')
- if g_o:
- options.generator_output = g_o
-
- options.parallel = not options.no_parallel
-
- for mode in options.debug:
- gyp.debug[mode] = 1
-
- # Do an extra check to avoid work when we're not debugging.
- if DEBUG_GENERAL in gyp.debug:
- DebugOutput(DEBUG_GENERAL, 'running with these options:')
- for option, value in sorted(options.__dict__.items()):
- if option[0] == '_':
- continue
- if isinstance(value, basestring):
- DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value)
- else:
- DebugOutput(DEBUG_GENERAL, " %s: %s", option, value)
-
- if not build_files:
- build_files = FindBuildFiles()
- if not build_files:
- raise GypError((usage + '\n\n%s: error: no build_file') %
- (my_name, my_name))
-
- # TODO(mark): Chromium-specific hack!
- # For Chromium, the gyp "depth" variable should always be a relative path
- # to Chromium's top-level "src" directory. If no depth variable was set
- # on the command line, try to find a "src" directory by looking at the
- # absolute path to each build file's directory. The first "src" component
- # found will be treated as though it were the path used for --depth.
- if not options.depth:
- for build_file in build_files:
- build_file_dir = os.path.abspath(os.path.dirname(build_file))
- build_file_dir_components = build_file_dir.split(os.path.sep)
- components_len = len(build_file_dir_components)
- for index in xrange(components_len - 1, -1, -1):
- if build_file_dir_components[index] == 'src':
- options.depth = os.path.sep.join(build_file_dir_components)
- break
- del build_file_dir_components[index]
-
- # If the inner loop found something, break without advancing to another
- # build file.
- if options.depth:
- break
-
- if not options.depth:
- raise GypError('Could not automatically locate src directory. This is '
- 'a temporary Chromium feature that will be removed. Use '
- '--depth as a workaround.')
-
- # If toplevel-dir is not set, we assume that depth is the root of our source
- # tree.
- if not options.toplevel_dir:
- options.toplevel_dir = options.depth
-
- # -D on the command line sets variable defaults - D isn't just for define,
- # it's for default. Perhaps there should be a way to force (-F?) a
- # variable's value so that it can't be overridden by anything else.
- cmdline_default_variables = {}
- defines = []
- if options.use_environment:
- defines += ShlexEnv('GYP_DEFINES')
- if options.defines:
- defines += options.defines
- cmdline_default_variables = NameValueListToDict(defines)
- if DEBUG_GENERAL in gyp.debug:
- DebugOutput(DEBUG_GENERAL,
- "cmdline_default_variables: %s", cmdline_default_variables)
-
- # Set up includes.
- includes = []
-
- # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
- # .gyp file that's loaded, before anything else is included.
- if home_dot_gyp != None:
- default_include = os.path.join(home_dot_gyp, 'include.gypi')
- if os.path.exists(default_include):
- print 'Using overrides found in ' + default_include
- includes.append(default_include)
-
- # Command-line --include files come after the default include.
- if options.includes:
- includes.extend(options.includes)
-
- # Generator flags should be prefixed with the target generator since they
- # are global across all generator runs.
- gen_flags = []
- if options.use_environment:
- gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS')
- if options.generator_flags:
- gen_flags += options.generator_flags
- generator_flags = NameValueListToDict(gen_flags)
- if DEBUG_GENERAL in gyp.debug.keys():
- DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
-
- # Generate all requested formats (use a set in case we got one format request
- # twice)
- for format in set(options.formats):
- params = {'options': options,
- 'build_files': build_files,
- 'generator_flags': generator_flags,
- 'cwd': os.getcwd(),
- 'build_files_arg': build_files_arg,
- 'gyp_binary': sys.argv[0],
- 'home_dot_gyp': home_dot_gyp,
- 'parallel': options.parallel,
- 'root_targets': options.root_targets,
- 'target_arch': cmdline_default_variables.get('target_arch', '')}
-
- # Start with the default variables from the command line.
- [generator, flat_list, targets, data] = Load(
- build_files, format, cmdline_default_variables, includes, options.depth,
- params, options.check, options.circular_check,
- options.duplicate_basename_check)
-
- # TODO(mark): Pass |data| for now because the generator needs a list of
- # build files that came in. In the future, maybe it should just accept
- # a list, and not the whole data dict.
- # NOTE: flat_list is the flattened dependency graph specifying the order
- # that targets may be built. Build systems that operate serially or that
- # need to have dependencies defined before dependents reference them should
- # generate targets in the order specified in flat_list.
- generator.GenerateOutput(flat_list, targets, data, params)
-
- if options.configs:
- valid_configs = targets[flat_list[0]]['configurations'].keys()
- for conf in options.configs:
- if conf not in valid_configs:
- raise GypError('Invalid config specified via --build: %s' % conf)
- generator.PerformBuild(data, options.configs, params)
-
- # Done
- return 0
-
-
-def main(args):
- try:
- return gyp_main(args)
- except GypError, e:
- sys.stderr.write("gyp: %s\n" % e)
- return 1
-
-# NOTE: setuptools generated console_scripts calls function with no arguments
-def script_main():
- return main(sys.argv[1:])
-
-if __name__ == '__main__':
- sys.exit(script_main())
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common.py
deleted file mode 100644
index 256e3f3a6b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ /dev/null
@@ -1,608 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from __future__ import with_statement
-
-import collections
-import errno
-import filecmp
-import os.path
-import re
-import tempfile
-import sys
-
-
-# A minimal memoizing decorator. It'll blow up if the args aren't immutable,
-# among other "problems".
-class memoize(object):
- def __init__(self, func):
- self.func = func
- self.cache = {}
- def __call__(self, *args):
- try:
- return self.cache[args]
- except KeyError:
- result = self.func(*args)
- self.cache[args] = result
- return result
-
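A quick sketch of the decorator in action; Fib is a hypothetical example:

  @memoize
  def Fib(n):
    return n if n < 2 else Fib(n - 1) + Fib(n - 2)

  Fib(100)  # each distinct n is computed once, then served from the cache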
-
-class GypError(Exception):
- """Error class representing an error, which is to be presented
- to the user. The main entry point will catch and display this.
- """
- pass
-
-
-def ExceptionAppend(e, msg):
- """Append a message to the given exception's message."""
- if not e.args:
- e.args = (msg,)
- elif len(e.args) == 1:
- e.args = (str(e.args[0]) + ' ' + msg,)
- else:
- e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
-
-
-def FindQualifiedTargets(target, qualified_list):
- """
- Given a list of qualified targets, return the qualified targets for the
- specified |target|.
- """
- return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
-
-
-def ParseQualifiedTarget(target):
- # Splits a qualified target into a build file, target name and toolset.
-
- # NOTE: rsplit is used to disambiguate the Windows drive letter separator.
- target_split = target.rsplit(':', 1)
- if len(target_split) == 2:
- [build_file, target] = target_split
- else:
- build_file = None
-
- target_split = target.rsplit('#', 1)
- if len(target_split) == 2:
- [target, toolset] = target_split
- else:
- toolset = None
-
- return [build_file, target, toolset]
-
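A behavioral sketch, using the qualified-target syntax documented at
QualifiedTarget below:

  ParseQualifiedTarget('chrome/chrome.gyp:browser#host')
  # => ['chrome/chrome.gyp', 'browser', 'host']
  ParseQualifiedTarget('browser')
  # => [None, 'browser', None]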
-
-def ResolveTarget(build_file, target, toolset):
- # This function resolves a target into a canonical form:
- # - a fully defined build file, either absolute or relative to the current
- # directory
- # - a target name
- # - a toolset
- #
- # build_file is the file relative to which 'target' is defined.
- # target is the qualified target.
- # toolset is the default toolset for that target.
- [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target)
-
- if parsed_build_file:
- if build_file:
- # If a relative path, parsed_build_file is relative to the directory
- # containing build_file. If build_file is not in the current directory,
- # parsed_build_file is not a usable path as-is. Resolve it by
- # interpreting it as relative to build_file. If parsed_build_file is
- # absolute, it is usable as a path regardless of the current directory,
- # and os.path.join will return it as-is.
- build_file = os.path.normpath(os.path.join(os.path.dirname(build_file),
- parsed_build_file))
- # Further (to handle cases like ../cwd), make it relative to cwd)
- if not os.path.isabs(build_file):
- build_file = RelativePath(build_file, '.')
- else:
- build_file = parsed_build_file
-
- if parsed_toolset:
- toolset = parsed_toolset
-
- return [build_file, target, toolset]
-
-
-def BuildFile(fully_qualified_target):
- # Extracts the build file from the fully qualified target.
- return ParseQualifiedTarget(fully_qualified_target)[0]
-
-
-def GetEnvironFallback(var_list, default):
- """Look up a key in the environment, with fallback to secondary keys
- and finally falling back to a default value."""
- for var in var_list:
- if var in os.environ:
- return os.environ[var]
- return default
-
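A sketch with hypothetical variable names:

  cxx = GetEnvironFallback(['CXX_target', 'CXX'], 'g++')
  # uses $CXX_target if set, else $CXX, else the literal default 'g++'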
-
-def QualifiedTarget(build_file, target, toolset):
- # "Qualified" means the file that a target was defined in and the target
- # name, separated by a colon, suffixed by a # and the toolset name:
- # /path/to/file.gyp:target_name#toolset
- fully_qualified = build_file + ':' + target
- if toolset:
- fully_qualified = fully_qualified + '#' + toolset
- return fully_qualified
-
-
-@memoize
-def RelativePath(path, relative_to, follow_path_symlink=True):
- # Assuming both |path| and |relative_to| are relative to the current
- # directory, returns a relative path that identifies path relative to
- # relative_to.
- # If |follow_path_symlink| is true (default) and |path| is a symlink, then
- # this method returns a path to the real file represented by |path|. If it is
- # false, this method returns a path to the symlink. If |path| is not a
- # symlink, this option has no effect.
-
- # Convert to normalized (and therefore absolute paths).
- if follow_path_symlink:
- path = os.path.realpath(path)
- else:
- path = os.path.abspath(path)
- relative_to = os.path.realpath(relative_to)
-
- # On Windows, we can't create a relative path to a different drive, so just
- # use the absolute path.
- if sys.platform == 'win32':
- if (os.path.splitdrive(path)[0].lower() !=
- os.path.splitdrive(relative_to)[0].lower()):
- return path
-
- # Split the paths into components.
- path_split = path.split(os.path.sep)
- relative_to_split = relative_to.split(os.path.sep)
-
- # Determine how much of the prefix the two paths share.
- prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
-
- # Put enough ".." components to back up out of relative_to to the common
- # prefix, and then append the part of path_split after the common prefix.
- relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \
- path_split[prefix_len:]
-
- if len(relative_split) == 0:
- # The paths were the same.
- return ''
-
- # Turn it back into a string and we're done.
- return os.path.join(*relative_split)
-
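A behavioral sketch (POSIX-style paths, no symlinks involved):

  RelativePath('out/Debug/obj', 'out')  # => 'Debug/obj'
  RelativePath('out', 'out/Debug')      # => '..'
  RelativePath('out', 'out')            # => ''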
-
-@memoize
-def InvertRelativePath(path, toplevel_dir=None):
- """Given a path like foo/bar that is relative to toplevel_dir, return
- the inverse relative path back to the toplevel_dir.
-
- E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
- should always produce the empty string, unless the path contains symlinks.
- """
- if not path:
- return path
- toplevel_dir = '.' if toplevel_dir is None else toplevel_dir
- return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path))
-
-
-def FixIfRelativePath(path, relative_to):
- # Like RelativePath but returns |path| unchanged if it is absolute.
- if os.path.isabs(path):
- return path
- return RelativePath(path, relative_to)
-
-
-def UnrelativePath(path, relative_to):
- # Assuming that |relative_to| is relative to the current directory, and |path|
- # is a path relative to the dirname of |relative_to|, returns a path that
- # identifies |path| relative to the current directory.
- rel_dir = os.path.dirname(relative_to)
- return os.path.normpath(os.path.join(rel_dir, path))
-
-
-# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at
-# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02
-# and the documentation for various shells.
-
-# _quote is a pattern that should match any argument that needs to be quoted
-# with double-quotes by EncodePOSIXShellArgument. It matches the following
-# characters appearing anywhere in an argument:
-# \t, \n, space parameter separators
-# # comments
-# $ expansions (quoted to always expand within one argument)
-# % called out by IEEE 1003.1 XCU.2.2
-# & job control
-# ' quoting
-# (, ) subshell execution
-# *, ?, [ pathname expansion
-# ; command delimiter
-# <, >, | redirection
-# = assignment
-# {, } brace expansion (bash)
-# ~ tilde expansion
-# It also matches the empty string, because "" (or '') is the only way to
-# represent an empty string literal argument to a POSIX shell.
-#
-# This does not match the characters in _escape, because those need to be
-# backslash-escaped regardless of whether they appear in a double-quoted
-# string.
-_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$')
-
-# _escape is a pattern that should match any character that needs to be
-# escaped with a backslash, whether or not the argument matched the _quote
-# pattern. _escape is used with re.sub to backslash anything in _escape's
-# first match group, hence the (parentheses) in the regular expression.
-#
-# _escape matches the following characters appearing anywhere in an argument:
-# " to prevent POSIX shells from interpreting this character for quoting
-# \ to prevent POSIX shells from interpreting this character for escaping
-# ` to prevent POSIX shells from interpreting this character for command
-# substitution
-# Missing from this list is $, because the desired behavior of
-# EncodePOSIXShellArgument is to permit parameter (variable) expansion.
-#
-# Also missing from this list is !, which bash will interpret as the history
-# expansion character when history is enabled. bash does not enable history
-# by default in non-interactive shells, so this is not thought to be a problem.
-# ! was omitted from this list because bash interprets "\!" as a literal string
-# including the backslash character (avoiding history expansion but retaining
-# the backslash), which would not be correct for argument encoding. Handling
-# this case properly would also be problematic because bash allows the history
-# character to be changed with the histchars shell variable. Fortunately,
-# as history is not enabled in non-interactive shells and
-# EncodePOSIXShellArgument is only expected to encode for non-interactive
-# shells, there is no room for error here by ignoring !.
-_escape = re.compile(r'(["\\`])')
-
-def EncodePOSIXShellArgument(argument):
- """Encodes |argument| suitably for consumption by POSIX shells.
-
- argument may be quoted and escaped as necessary to ensure that POSIX shells
- treat the returned value as a literal representing the argument passed to
- this function. Parameter (variable) expansions beginning with $ are allowed
- to remain intact without escaping the $, to allow the argument to contain
- references to variables to be expanded by the shell.
- """
-
- if not isinstance(argument, str):
- argument = str(argument)
-
- if _quote.search(argument):
- quote = '"'
- else:
- quote = ''
-
- encoded = quote + re.sub(_escape, r'\\\1', argument) + quote
-
- return encoded
-
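A sketch of the quoting rules; EncodePOSIXShellList is defined just below:

  EncodePOSIXShellArgument('plain')         # => 'plain'
  EncodePOSIXShellArgument('two words')     # => '"two words"'
  EncodePOSIXShellArgument('$HOME/"x"')     # => '"$HOME/\"x\""'
  EncodePOSIXShellList(['cp', 'a b', 'c'])  # => 'cp "a b" c'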
-
-def EncodePOSIXShellList(list):
- """Encodes |list| suitably for consumption by POSIX shells.
-
- Returns EncodePOSIXShellArgument for each item in list, and joins them
- together using the space character as an argument separator.
- """
-
- encoded_arguments = []
- for argument in list:
- encoded_arguments.append(EncodePOSIXShellArgument(argument))
- return ' '.join(encoded_arguments)
-
-
-def DeepDependencyTargets(target_dicts, roots):
- """Returns the recursive list of target dependencies."""
- dependencies = set()
- pending = set(roots)
- while pending:
- # Pluck out one.
- r = pending.pop()
- # Skip if visited already.
- if r in dependencies:
- continue
- # Add it.
- dependencies.add(r)
- # Add its children.
- spec = target_dicts[r]
- pending.update(set(spec.get('dependencies', [])))
- pending.update(set(spec.get('dependencies_original', [])))
- return list(dependencies - set(roots))
-
-
-def BuildFileTargets(target_list, build_file):
- """From a target_list, returns the subset from the specified build_file.
- """
- return [p for p in target_list if BuildFile(p) == build_file]
-
-
-def AllTargets(target_list, target_dicts, build_file):
- """Returns all targets (direct and dependencies) for the specified build_file.
- """
- bftargets = BuildFileTargets(target_list, build_file)
- deptargets = DeepDependencyTargets(target_dicts, bftargets)
- return bftargets + deptargets
-
-
-def WriteOnDiff(filename):
- """Write to a file only if the new contents differ.
-
- Arguments:
- filename: name of the file to potentially write to.
- Returns:
- A file like object which will write to temporary file and only overwrite
- the target if it differs (on close).
- """
-
- class Writer(object):
- """Wrapper around file which only covers the target if it differs."""
- def __init__(self):
- # Pick temporary file.
- tmp_fd, self.tmp_path = tempfile.mkstemp(
- suffix='.tmp',
- prefix=os.path.split(filename)[1] + '.gyp.',
- dir=os.path.split(filename)[0])
- try:
- self.tmp_file = os.fdopen(tmp_fd, 'wb')
- except Exception:
- # Don't leave turds behind.
- os.unlink(self.tmp_path)
- raise
-
- def __getattr__(self, attrname):
- # Delegate everything else to self.tmp_file
- return getattr(self.tmp_file, attrname)
-
- def close(self):
- try:
- # Close tmp file.
- self.tmp_file.close()
- # Determine if different.
- same = False
- try:
- same = filecmp.cmp(self.tmp_path, filename, False)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
- if same:
- # The new file is identical to the old one, just get rid of the new
- # one.
- os.unlink(self.tmp_path)
- else:
- # The new file is different from the old one, or there is no old one.
- # Rename the new file to the permanent name.
- #
- # tempfile.mkstemp uses an overly restrictive mode, resulting in a
- # file that can only be read by the owner, regardless of the umask.
- # There's no reason to not respect the umask here, which means that
- # an extra hoop is required to fetch it and reset the new file's mode.
- #
- # No way to get the umask without setting a new one? Set a safe one
- # and then set it back to the old value.
- umask = os.umask(077)
- os.umask(umask)
- os.chmod(self.tmp_path, 0666 & ~umask)
- if sys.platform == 'win32' and os.path.exists(filename):
- # NOTE: on windows (but not cygwin) rename will not replace an
- # existing file, so it must be preceded with a remove. Sadly there
- # is no way to make the switch atomic.
- os.remove(filename)
- os.rename(self.tmp_path, filename)
- except Exception:
- # Don't leave turds behind.
- os.unlink(self.tmp_path)
- raise
-
- return Writer()
-
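A usage sketch; the output filename is hypothetical:

  out = WriteOnDiff('out/Makefile')
  out.write('all:\n')
  out.close()  # renames over out/Makefile only if the content changed,
               # so an unchanged file keeps its timestamp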
-
-def EnsureDirExists(path):
- """Make sure the directory for |path| exists."""
- try:
- os.makedirs(os.path.dirname(path))
- except OSError:
- pass
-
-
-def GetFlavor(params):
- """Returns |params.flavor| if it's set, the system's default flavor else."""
- flavors = {
- 'cygwin': 'win',
- 'win32': 'win',
- 'darwin': 'mac',
- }
-
- if 'flavor' in params:
- return params['flavor']
- if sys.platform in flavors:
- return flavors[sys.platform]
- if sys.platform.startswith('sunos'):
- return 'solaris'
- if sys.platform.startswith('freebsd'):
- return 'freebsd'
- if sys.platform.startswith('openbsd'):
- return 'openbsd'
- if sys.platform.startswith('netbsd'):
- return 'netbsd'
- if sys.platform.startswith('aix'):
- return 'aix'
-
- return 'linux'
-
-
-def CopyTool(flavor, out_path):
- """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
- to |out_path|."""
- # aix and solaris just need flock emulation. mac and win use more complicated
- # support scripts.
- prefix = {
- 'aix': 'flock',
- 'solaris': 'flock',
- 'mac': 'mac',
- 'win': 'win'
- }.get(flavor, None)
- if not prefix:
- return
-
- # Slurp input file.
- source_path = os.path.join(
- os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
- with open(source_path) as source_file:
- source = source_file.readlines()
-
- # Add header and write it out.
- tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
- with open(tool_path, 'w') as tool_file:
- tool_file.write(
- ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
-
- # Make file executable.
- os.chmod(tool_path, 0755)
-
-
-# From Alex Martelli,
-# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560
-# ASPN: Python Cookbook: Remove duplicates from a sequence
-# First comment, dated 2001/10/13.
-# (Also in the printed Python Cookbook.)
-
-def uniquer(seq, idfun=None):
- if idfun is None:
- idfun = lambda x: x
- seen = {}
- result = []
- for item in seq:
- marker = idfun(item)
- if marker in seen: continue
- seen[marker] = 1
- result.append(item)
- return result
-
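A behavioral sketch of uniquer:

  uniquer([1, 2, 1, 3, 2])                   # => [1, 2, 3]
  uniquer(['a', 'A', 'b'], idfun=str.lower)  # => ['a', 'b']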
-
-# Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(collections.MutableSet):
- def __init__(self, iterable=None):
- self.end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.map = {} # key --> [key, prev, next]
- if iterable is not None:
- self |= iterable
-
- def __len__(self):
- return len(self.map)
-
- def __contains__(self, key):
- return key in self.map
-
- def add(self, key):
- if key not in self.map:
- end = self.end
- curr = end[1]
- curr[2] = end[1] = self.map[key] = [key, curr, end]
-
- def discard(self, key):
- if key in self.map:
- key, prev_item, next_item = self.map.pop(key)
- prev_item[2] = next_item
- next_item[1] = prev_item
-
- def __iter__(self):
- end = self.end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
- def __reversed__(self):
- end = self.end
- curr = end[1]
- while curr is not end:
- yield curr[0]
- curr = curr[1]
-
- # The second argument is an addition that causes a pylint warning.
- def pop(self, last=True): # pylint: disable=W0221
- if not self:
- raise KeyError('set is empty')
- key = self.end[1][0] if last else self.end[2][0]
- self.discard(key)
- return key
-
- def __repr__(self):
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, list(self))
-
- def __eq__(self, other):
- if isinstance(other, OrderedSet):
- return len(self) == len(other) and list(self) == list(other)
- return set(self) == set(other)
-
- # Extensions to the recipe.
- def update(self, iterable):
- for i in iterable:
- if i not in self:
- self.add(i)
-
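A usage sketch of OrderedSet:

  s = OrderedSet(['b', 'a', 'b', 'c'])
  list(s)        # => ['b', 'a', 'c']  (insertion order, duplicates dropped)
  s.discard('a')
  s.add('d')
  list(s)        # => ['b', 'c', 'd']
  s.pop()        # => 'd'  (pops from the end by default)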
-
-class CycleError(Exception):
- """An exception raised when an unexpected cycle is detected."""
- def __init__(self, nodes):
- self.nodes = nodes
- def __str__(self):
- return 'CycleError: cycle involving: ' + str(self.nodes)
-
-
-def TopologicallySorted(graph, get_edges):
- r"""Topologically sort based on a user provided edge definition.
-
- Args:
- graph: A list of node names.
- get_edges: A function mapping from node name to a hashable collection
- of node names which this node has outgoing edges to.
- Returns:
- A list containing all of the nodes in graph, in topological order.
- It is assumed that calling get_edges once for each node and caching is
- cheaper than repeatedly calling get_edges.
- Raises:
- CycleError in the event of a cycle.
- Example:
- graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
- def GetEdges(node):
- return re.findall(r'\$\(([^)]+)\)', graph[node])
- print TopologicallySorted(graph.keys(), GetEdges)
- ==>
- ['a', 'c', 'b']
- """
- get_edges = memoize(get_edges)
- visited = set()
- visiting = set()
- ordered_nodes = []
- def Visit(node):
- if node in visiting:
- raise CycleError(visiting)
- if node in visited:
- return
- visited.add(node)
- visiting.add(node)
- for neighbor in get_edges(node):
- Visit(neighbor)
- visiting.remove(node)
- ordered_nodes.insert(0, node)
- for node in sorted(graph):
- Visit(node)
- return ordered_nodes
-
-def CrossCompileRequested():
- # TODO: figure out how to not build extra host objects in the
- # non-cross-compile case when this is enabled, and enable unconditionally.
- return (os.environ.get('GYP_CROSSCOMPILE') or
- os.environ.get('AR_host') or
- os.environ.get('CC_host') or
- os.environ.get('CXX_host') or
- os.environ.get('AR_target') or
- os.environ.get('CC_target') or
- os.environ.get('CXX_target'))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
deleted file mode 100755
index ad6f9a1438..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the common.py file."""
-
-import gyp.common
-import unittest
-import sys
-
-
-class TestTopologicallySorted(unittest.TestCase):
- def test_Valid(self):
- """Test that sorting works on a valid graph with one possible order."""
- graph = {
- 'a': ['b', 'c'],
- 'b': [],
- 'c': ['d'],
- 'd': ['b'],
- }
- def GetEdge(node):
- return tuple(graph[node])
- self.assertEqual(
- gyp.common.TopologicallySorted(graph.keys(), GetEdge),
- ['a', 'c', 'd', 'b'])
-
- def test_Cycle(self):
- """Test that an exception is thrown on a cyclic graph."""
- graph = {
- 'a': ['b'],
- 'b': ['c'],
- 'c': ['d'],
- 'd': ['a'],
- }
- def GetEdge(node):
- return tuple(graph[node])
- self.assertRaises(
- gyp.common.CycleError, gyp.common.TopologicallySorted,
- graph.keys(), GetEdge)
-
-
-class TestGetFlavor(unittest.TestCase):
- """Test that gyp.common.GetFlavor works as intended"""
- original_platform = ''
-
- def setUp(self):
- self.original_platform = sys.platform
-
- def tearDown(self):
- sys.platform = self.original_platform
-
- def assertFlavor(self, expected, argument, param):
- sys.platform = argument
- self.assertEqual(expected, gyp.common.GetFlavor(param))
-
- def test_platform_default(self):
- self.assertFlavor('freebsd', 'freebsd9' , {})
- self.assertFlavor('freebsd', 'freebsd10', {})
- self.assertFlavor('openbsd', 'openbsd5' , {})
- self.assertFlavor('solaris', 'sunos5' , {})
- self.assertFlavor('solaris', 'sunos' , {})
- self.assertFlavor('linux' , 'linux2' , {})
- self.assertFlavor('linux' , 'linux3' , {})
-
- def test_param(self):
- self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'})
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
deleted file mode 100644
index b71e9d4e8a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import re
-import os
-
-
-def XmlToString(content, encoding='utf-8', pretty=False):
- """ Writes the XML content to disk, touching the file only if it has changed.
-
- Visual Studio files have a lot of pre-defined structures. This function makes
- it easy to represent these structures as Python data structures, instead of
- having to create a lot of function calls.
-
- Each XML element of the content is represented as a list composed of:
- 1. The name of the element, a string,
- 2. The attributes of the element, a dictionary (optional), and
- 3+. The content of the element, if any. Strings are simple text nodes and
- lists are child elements.
-
- Example 1:
- <test/>
- becomes
- ['test']
-
- Example 2:
- <myelement a='value1' b='value2'>
- <childtype>This is</childtype>
- <childtype>it!</childtype>
- </myelement>
-
- becomes
- ['myelement', {'a':'value1', 'b':'value2'},
- ['childtype', 'This is'],
- ['childtype', 'it!'],
- ]
-
- Args:
- content: The structured content to be converted.
- encoding: The encoding to report on the first XML line.
- pretty: True if we want pretty printing with indents and new lines.
-
- Returns:
- The XML content as a string.
- """
- # We create a huge list of all the elements of the file.
- xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
- if pretty:
- xml_parts.append('\n')
- _ConstructContentList(xml_parts, content, pretty)
-
- # Convert it to a string
- return ''.join(xml_parts)
-
-
-def _ConstructContentList(xml_parts, specification, pretty, level=0):
- """ Appends the XML parts corresponding to the specification.
-
- Args:
- xml_parts: A list of XML parts to be appended to.
- specification: The specification of the element. See EasyXml docs.
- pretty: True if we want pretty printing with indents and new lines.
- level: Indentation level.
- """
- # The first item in a specification is the name of the element.
- if pretty:
- indentation = ' ' * level
- new_line = '\n'
- else:
- indentation = ''
- new_line = ''
- name = specification[0]
- if not isinstance(name, str):
- raise Exception('The first item of an EasyXml specification should be '
- 'a string. Specification was ' + str(specification))
- xml_parts.append(indentation + '<' + name)
-
- # Optionally in second position is a dictionary of the attributes.
- rest = specification[1:]
- if rest and isinstance(rest[0], dict):
- for at, val in sorted(rest[0].iteritems()):
- xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
- rest = rest[1:]
- if rest:
- xml_parts.append('>')
- all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
- multi_line = not all_strings
- if multi_line and new_line:
- xml_parts.append(new_line)
- for child_spec in rest:
- # If it's a string, append a text node.
- # Otherwise recurse over that child definition
- if isinstance(child_spec, str):
- xml_parts.append(_XmlEscape(child_spec))
- else:
- _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
- if multi_line and indentation:
- xml_parts.append(indentation)
- xml_parts.append('</%s>%s' % (name, new_line))
- else:
- xml_parts.append('/>%s' % new_line)
-
-
-def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
- win32=False):
- """ Writes the XML content to disk, touching the file only if it has changed.
-
- Args:
- content: The structured content to be written.
- path: Location of the file.
- encoding: The encoding to report on the first line of the XML file.
- pretty: True if we want pretty printing with indents and new lines.
- win32: True to force Windows-style line endings (\r\n) in the output.
- """
- xml_string = XmlToString(content, encoding, pretty)
- if win32 and os.linesep != '\r\n':
- xml_string = xml_string.replace('\n', '\r\n')
-
- try:
- xml_string = xml_string.encode(encoding)
- except Exception:
- xml_string = unicode(xml_string, 'latin-1').encode(encoding)
-
- # Get the old content
- try:
- f = open(path, 'r')
- existing = f.read()
- f.close()
- except:
- existing = None
-
- # It has changed, write it
- if existing != xml_string:
- f = open(path, 'w')
- f.write(xml_string)
- f.close()
-
-
-_xml_escape_map = {
- '"': '&quot;',
- "'": '&apos;',
- '<': '&lt;',
- '>': '&gt;',
- '&': '&amp;',
- '\n': '&#xA;',
- '\r': '&#xD;',
-}
-
-
-_xml_escape_re = re.compile(
- "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
-
-
-def _XmlEscape(value, attr=False):
- """ Escape a string for inclusion in XML."""
- def replace(match):
- m = match.string[match.start() : match.end()]
- # don't replace single quotes in attrs
- if attr and m == "'":
- return m
- return _xml_escape_map[m]
- return _xml_escape_re.sub(replace, value)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
deleted file mode 100755
index df64354982..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the easy_xml.py file. """
-
-import gyp.easy_xml as easy_xml
-import unittest
-import StringIO
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
- def setUp(self):
- self.stderr = StringIO.StringIO()
-
- def test_EasyXml_simple(self):
- self.assertEqual(
- easy_xml.XmlToString(['test']),
- '<?xml version="1.0" encoding="utf-8"?><test/>')
-
- self.assertEqual(
- easy_xml.XmlToString(['test'], encoding='Windows-1252'),
- '<?xml version="1.0" encoding="Windows-1252"?><test/>')
-
- def test_EasyXml_simple_with_attributes(self):
- self.assertEqual(
- easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
- '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
-
- def test_EasyXml_escaping(self):
- original = '<test>\'"\r&\nfoo'
- converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo'
- converted_apos = converted.replace("'", '&apos;')
- self.assertEqual(
- easy_xml.XmlToString(['test3', {'a': original}, original]),
- '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
- (converted, converted_apos))
-
- def test_EasyXml_pretty(self):
- self.assertEqual(
- easy_xml.XmlToString(
- ['test3',
- ['GrandParent',
- ['Parent1',
- ['Child']
- ],
- ['Parent2']
- ]
- ],
- pretty=True),
- '<?xml version="1.0" encoding="utf-8"?>\n'
- '<test3>\n'
- ' <GrandParent>\n'
- ' <Parent1>\n'
- ' <Child/>\n'
- ' </Parent1>\n'
- ' <Parent2/>\n'
- ' </GrandParent>\n'
- '</test3>\n')
-
-
- def test_EasyXml_complex(self):
- # We want to create:
- target = (
- '<?xml version="1.0" encoding="utf-8"?>'
- '<Project>'
- '<PropertyGroup Label="Globals">'
- '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
- '<Keyword>Win32Proj</Keyword>'
- '<RootNamespace>automated_ui_tests</RootNamespace>'
- '</PropertyGroup>'
- '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
- '<PropertyGroup '
- 'Condition="\'$(Configuration)|$(Platform)\'=='
- '\'Debug|Win32\'" Label="Configuration">'
- '<ConfigurationType>Application</ConfigurationType>'
- '<CharacterSet>Unicode</CharacterSet>'
- '</PropertyGroup>'
- '</Project>')
-
- xml = easy_xml.XmlToString(
- ['Project',
- ['PropertyGroup', {'Label': 'Globals'},
- ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
- ['Keyword', 'Win32Proj'],
- ['RootNamespace', 'automated_ui_tests']
- ],
- ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
- ['PropertyGroup',
- {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
- 'Label': 'Configuration'},
- ['ConfigurationType', 'Application'],
- ['CharacterSet', 'Unicode']
- ]
- ])
- self.assertEqual(xml, target)
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
deleted file mode 100755
index b38d8660f7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""These functions are executed via gyp-flock-tool when using the Makefile
-generator. Used on systems that don't have a built-in flock."""
-
-import fcntl
-import os
-import struct
-import subprocess
-import sys
-
-
-def main(args):
- executor = FlockTool()
- executor.Dispatch(args)
-
-
-class FlockTool(object):
- """This class emulates the 'flock' command."""
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- # Note that the stock python on SunOS has a bug
- # where fcntl.flock(fd, LOCK_EX) always fails
- # with EBADF, that's why we use this F_SETLK
- # hack instead.
- fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
- if sys.platform.startswith('aix'):
- # Python on AIX is compiled with LARGEFILE support, which changes the
- # struct size.
- op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- else:
- op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
- fcntl.fcntl(fd, fcntl.F_SETLK, op)
- return subprocess.call(cmd_list)
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
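The generated makefiles invoke this as gyp-flock-tool flock <lockfile> <command...>. A hedged, self-contained sketch of the same F_SETLK technique (Python 3, POSIX only; the lock path and helper name are hypothetical, and the struct layout is the non-AIX packing used above):

    import fcntl, os, struct, subprocess, sys

    def locked_call(lockfile, cmd):
        # Take an exclusive write lock via F_SETLK, then run the command.
        fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
        op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
        fcntl.fcntl(fd, fcntl.F_SETLK, op)
        return subprocess.call(cmd)

    if __name__ == '__main__':
        sys.exit(locked_call('/tmp/example.lock', ['true']))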
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py
+++ /dev/null
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
deleted file mode 100644
index 921c1a6b71..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ /dev/null
@@ -1,741 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
-the generator flag config_path) the path of a json file that dictates the files
-and targets to search for. The following keys are supported:
-files: list of paths (relative) of the files to search for.
-test_targets: unqualified target names to search for. Any target in this list
-that depends upon a file in |files| is output regardless of the type of target
-or chain of dependencies.
-additional_compile_targets: Unqualified targets to search for in addition to
-test_targets. Targets in the combined list that depend upon a file in |files|
-are not necessarily output. For example, if the target is of type none then the
-target is not output (but one of the descendants of the target will be).
-
-The following is output:
-error: only supplied if there is an error.
-compile_targets: minimal set of targets that directly or indirectly (for
- targets of type none) depend on the files in |files| and is one of the
- supplied targets or a target that one of the supplied targets depends on.
- The expectation is this set of targets is passed into a build step. This list
- always contains the output of test_targets as well.
-test_targets: set of targets from the supplied |test_targets| that either
- directly or indirectly depend upon a file in |files|. This list is useful
- if additional processing needs to be done for certain targets after the
- build, such as running tests.
-status: outputs one of three values: none of the supplied files were found,
- one of the include files changed so that it should be assumed everything
- changed (in this case test_targets and compile_targets are not output) or at
- least one file was found.
-invalid_targets: list of supplied targets that were not found.
-
-Example:
-Consider a graph like the following:
-  A     D
- / \
-B   C
-A depends upon both B and C, A is of type none and B and C are executables.
-D is an executable, has no dependencies and nothing depends on it.
-If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
-files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
-the following is output:
-|compile_targets| = ["B"] B must built as it depends upon the changed file b.cc
-and the supplied target A depends upon it. A is not output as a build_target
-as it is of type none with no rules and actions.
-|test_targets| = ["B"] B directly depends upon the change file b.cc.
-
-Even though the file d.cc, which D depends upon, has changed D is not output
-as it was not supplied by way of |additional_compile_targets| or |test_targets|.
-
-If the generator flag analyzer_output_path is specified, output is written
-there. Otherwise output is written to stdout.
-
-In Gyp the "all" target is shorthand for the root targets in the files passed
-to gyp. For example, if file "a.gyp" contains targets "a1" and
-"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
-on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
-Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
-directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
-then the "all" target includes "b1" and "b2".
-"""
-
-import gyp.common
-import gyp.ninja_syntax as ninja_syntax
-import json
-import os
-import posixpath
-import sys
-
-debug = False
-
-found_dependency_string = 'Found dependency'
-no_dependency_string = 'No dependencies'
-# Status when it should be assumed that everything has changed.
-all_changed_string = 'Found dependency (all)'
-
-# MatchStatus is used to indicate if and how a target depends upon the supplied
-# sources.
-# The target's sources contain one of the supplied paths.
-MATCH_STATUS_MATCHES = 1
-# The target has a dependency on another target that contains one of the
-# supplied paths.
-MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
-# The target's sources weren't in the supplied paths and none of the target's
-# dependencies depend upon a target that matched.
-MATCH_STATUS_DOESNT_MATCH = 3
-# The target doesn't contain the source, but the dependent targets have not yet
-# been visited to determine a more specific status.
-MATCH_STATUS_TBD = 4
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {
-}
-for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
- 'LIB_DIR', 'SHARED_LIB_DIR']:
- generator_default_variables[dirname] = '!!!'
-
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
- 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
- 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
- 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
- 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
- 'CONFIGURATION_NAME']:
- generator_default_variables[unused] = ''
-
-
-def _ToGypPath(path):
- """Converts a path to the format used by gyp."""
- if os.sep == '\\' and os.altsep == '/':
- return path.replace('\\', '/')
- return path
-
-
-def _ResolveParent(path, base_path_components):
- """Resolves |path|, which starts with at least one '../'. Returns an empty
- string if the path shouldn't be considered. See _AddSources() for a
- description of |base_path_components|."""
- depth = 0
- while path.startswith('../'):
- depth += 1
- path = path[3:]
- # Relative includes may go outside the source tree. For example, an action may
- # have inputs in /usr/include, which are not in the source tree.
- if depth > len(base_path_components):
- return ''
- if depth == len(base_path_components):
- return path
- return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
- '/' + path
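As a worked example (paths are illustrative), with base_path_components == ['foo', 'bar']:

    # _ResolveParent('../x.cc',       ['foo', 'bar'])  returns 'foo/x.cc'
    # _ResolveParent('../../x.cc',    ['foo', 'bar'])  returns 'x.cc'
    # _ResolveParent('../../../x.cc', ['foo', 'bar'])  returns ''  (outside tree)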
-
-
-def _AddSources(sources, base_path, base_path_components, result):
- """Extracts valid sources from |sources| and adds them to |result|. Each
- source file is relative to |base_path|, but may contain '..'. To make
- resolving '..' easier |base_path_components| contains each of the
- directories in |base_path|. Additionally each source may contain variables.
- Such sources are ignored as it is assumed dependencies on them are expressed
- and tracked by some other means."""
- # NOTE: gyp paths are always posix style.
- for source in sources:
- if not len(source) or source.startswith('!!!') or source.startswith('$'):
- continue
- # variable expansion may lead to //.
- org_source = source
- source = source[0] + source[1:].replace('//', '/')
- if source.startswith('../'):
- source = _ResolveParent(source, base_path_components)
- if len(source):
- result.append(source)
- continue
- result.append(base_path + source)
- if debug:
- print 'AddSource', org_source, result[len(result) - 1]
-
-
-def _ExtractSourcesFromAction(action, base_path, base_path_components,
- results):
- if 'inputs' in action:
- _AddSources(action['inputs'], base_path, base_path_components, results)
-
-
-def _ToLocalPath(toplevel_dir, path):
- """Converts |path| to a path relative to |toplevel_dir|."""
- if path == toplevel_dir:
- return ''
- if path.startswith(toplevel_dir + '/'):
- return path[len(toplevel_dir) + len('/'):]
- return path
-
-
-def _ExtractSources(target, target_dict, toplevel_dir):
- # |target| is either absolute or relative and in the format of the OS. Gyp
- # source paths are always posix. Convert |target| to a posix path relative to
- # |toplevel_dir|. This is done to make it easy to build source paths.
- base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
- base_path_components = base_path.split('/')
-
- # Add a trailing '/' so that _AddSources() can easily build paths.
- if len(base_path):
- base_path += '/'
-
- if debug:
- print 'ExtractSources', target, base_path
-
- results = []
- if 'sources' in target_dict:
- _AddSources(target_dict['sources'], base_path, base_path_components,
- results)
- # Include the inputs from any actions. Any changes to these affect the
- # resulting output.
- if 'actions' in target_dict:
- for action in target_dict['actions']:
- _ExtractSourcesFromAction(action, base_path, base_path_components,
- results)
- if 'rules' in target_dict:
- for rule in target_dict['rules']:
- _ExtractSourcesFromAction(rule, base_path, base_path_components, results)
-
- return results
-
-
-class Target(object):
- """Holds information about a particular target:
- deps: set of Targets this Target depends upon. This is not recursive, only the
- direct dependencies.
- match_status: one of the MatchStatus values.
- back_deps: set of Targets that have a dependency on this Target.
- visited: used during iteration to indicate whether we've visited this target.
- This is used for two iterations, once in building the set of Targets and
- again in _GetBuildTargets().
- name: fully qualified name of the target.
- requires_build: True if the target type is such that it needs to be built.
- See _DoesTargetTypeRequireBuild for details.
- added_to_compile_targets: used when determining if the target was added to the
- set of targets that need to be built.
- in_roots: true if this target is a descendant of one of the root nodes.
- is_executable: true if the type of target is executable.
- is_static_library: true if the type of target is static_library.
- is_or_has_linked_ancestor: true if the target does a link (e.g. executable), or
- if there is a target in back_deps that does a link."""
- def __init__(self, name):
- self.deps = set()
- self.match_status = MATCH_STATUS_TBD
- self.back_deps = set()
- self.name = name
- # TODO(sky): I don't like hanging this off Target. This state is specific
- # to certain functions and should be isolated there.
- self.visited = False
- self.requires_build = False
- self.added_to_compile_targets = False
- self.in_roots = False
- self.is_executable = False
- self.is_static_library = False
- self.is_or_has_linked_ancestor = False
-
-
-class Config(object):
- """Details what we're looking for
- files: set of files to search for
- targets: see file description for details."""
- def __init__(self):
- self.files = []
- self.targets = set()
- self.additional_compile_target_names = set()
- self.test_target_names = set()
-
- def Init(self, params):
- """Initializes Config. This is a separate method as it raises an exception
- if there is a parse error."""
- generator_flags = params.get('generator_flags', {})
- config_path = generator_flags.get('config_path', None)
- if not config_path:
- return
- try:
- f = open(config_path, 'r')
- config = json.load(f)
- f.close()
- except IOError:
- raise Exception('Unable to open file ' + config_path)
- except ValueError as e:
- raise Exception('Unable to parse config file ' + config_path + ': ' + str(e))
- if not isinstance(config, dict):
- raise Exception('config_path must be a JSON file containing a dictionary')
- self.files = config.get('files', [])
- self.additional_compile_target_names = set(
- config.get('additional_compile_targets', []))
- self.test_target_names = set(config.get('test_targets', []))
-
-
-def _WasBuildFileModified(build_file, data, files, toplevel_dir):
- """Returns true if the build file |build_file| is either in |files| or
- one of the files included by |build_file| is in |files|. |toplevel_dir| is
- the root of the source tree."""
- if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
- if debug:
- print 'gyp file modified', build_file
- return True
-
- # First element of included_files is the file itself.
- if len(data[build_file]['included_files']) <= 1:
- return False
-
- for include_file in data[build_file]['included_files'][1:]:
- # |included_files| are relative to the directory of the |build_file|.
- rel_include_file = \
- _ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
- if _ToLocalPath(toplevel_dir, rel_include_file) in files:
- if debug:
- print 'included gyp file modified, gyp_file=', build_file, \
- 'included file=', rel_include_file
- return True
- return False
-
-
-def _GetOrCreateTargetByName(targets, target_name):
- """Creates or returns the Target at targets[target_name]. If there is no
- Target for |target_name| one is created. Returns a tuple of whether a new
- Target was created and the Target."""
- if target_name in targets:
- return False, targets[target_name]
- target = Target(target_name)
- targets[target_name] = target
- return True, target
-
-
-def _DoesTargetTypeRequireBuild(target_dict):
- """Returns true if the target type is such that it needs to be built."""
- # If a 'none' target has rules or actions we assume it requires a build.
- return bool(target_dict['type'] != 'none' or
- target_dict.get('actions') or target_dict.get('rules'))
-
-
-def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
- build_files):
- """Returns a tuple of the following:
- . A dictionary mapping from fully qualified name to Target.
- . A list of the targets that have a source file in |files|.
- . Targets that constitute the 'all' target. See description at top of file
- for details on the 'all' target.
- This sets the |match_status| of the targets that contain any of the source
- files in |files| to MATCH_STATUS_MATCHES.
- |toplevel_dir| is the root of the source tree."""
- # Maps from target name to Target.
- name_to_target = {}
-
- # Targets that matched.
- matching_targets = []
-
- # Queue of targets to visit.
- targets_to_visit = target_list[:]
-
- # Maps from build file to a boolean indicating whether the build file is in
- # |files|.
- build_file_in_files = {}
-
- # Root targets across all files.
- roots = set()
-
- # Set of Targets in |build_files|.
- build_file_targets = set()
-
- while len(targets_to_visit) > 0:
- target_name = targets_to_visit.pop()
- created_target, target = _GetOrCreateTargetByName(name_to_target,
- target_name)
- if created_target:
- roots.add(target)
- elif target.visited:
- continue
-
- target.visited = True
- target.requires_build = _DoesTargetTypeRequireBuild(
- target_dicts[target_name])
- target_type = target_dicts[target_name]['type']
- target.is_executable = target_type == 'executable'
- target.is_static_library = target_type == 'static_library'
- target.is_or_has_linked_ancestor = (target_type == 'executable' or
- target_type == 'shared_library')
-
- build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
- if not build_file in build_file_in_files:
- build_file_in_files[build_file] = \
- _WasBuildFileModified(build_file, data, files, toplevel_dir)
-
- if build_file in build_files:
- build_file_targets.add(target)
-
- # If a build file (or any of its included files) is modified we assume all
- # targets in the file are modified.
- if build_file_in_files[build_file]:
- print 'matching target from modified build file', target_name
- target.match_status = MATCH_STATUS_MATCHES
- matching_targets.append(target)
- else:
- sources = _ExtractSources(target_name, target_dicts[target_name],
- toplevel_dir)
- for source in sources:
- if _ToGypPath(os.path.normpath(source)) in files:
- print 'target', target_name, 'matches', source
- target.match_status = MATCH_STATUS_MATCHES
- matching_targets.append(target)
- break
-
- # Add dependencies to visit as well as updating back pointers for deps.
- for dep in target_dicts[target_name].get('dependencies', []):
- targets_to_visit.append(dep)
-
- created_dep_target, dep_target = _GetOrCreateTargetByName(name_to_target,
- dep)
- if not created_dep_target:
- roots.discard(dep_target)
-
- target.deps.add(dep_target)
- dep_target.back_deps.add(target)
-
- return name_to_target, matching_targets, roots & build_file_targets
-
-
-def _GetUnqualifiedToTargetMapping(all_targets, to_find):
- """Returns a tuple of the following:
- . mapping (dictionary) from unqualified name to Target for all the
- Targets in |to_find|.
- . any target names not found. If this is empty all targets were found."""
- result = {}
- if not to_find:
- return {}, []
- to_find = set(to_find)
- for target_name in all_targets.keys():
- extracted = gyp.common.ParseQualifiedTarget(target_name)
- if len(extracted) > 1 and extracted[1] in to_find:
- to_find.remove(extracted[1])
- result[extracted[1]] = all_targets[target_name]
- if not to_find:
- return result, []
- return result, [x for x in to_find]
-
-
-def _DoesTargetDependOnMatchingTargets(target):
- """Returns true if |target| or any of its dependencies is one of the
- targets containing the files supplied as input to analyzer. This updates
- |match_status| of the Targets as it recurses.
- target: the Target to look for."""
- if target.match_status == MATCH_STATUS_DOESNT_MATCH:
- return False
- if target.match_status == MATCH_STATUS_MATCHES or \
- target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
- return True
- for dep in target.deps:
- if _DoesTargetDependOnMatchingTargets(dep):
- target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
- print '\t', target.name, 'matches by dep', dep.name
- return True
- target.match_status = MATCH_STATUS_DOESNT_MATCH
- return False
-
-
-def _GetTargetsDependingOnMatchingTargets(possible_targets):
- """Returns the list of Targets in |possible_targets| that depend (either
- directly or indirectly) on at least one of the targets containing the files
- supplied as input to analyzer.
- possible_targets: targets to search from."""
- found = []
- print 'Targets that matched by dependency:'
- for target in possible_targets:
- if _DoesTargetDependOnMatchingTargets(target):
- found.append(target)
- return found
-
-
-def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
- """Recurses through all targets that depend on |target|, adding all targets
- that need to be built (and are in |roots|) to |result|.
- roots: set of root targets.
- add_if_no_ancestor: If true and there are no ancestors of |target| then add
- |target| to |result|. |target| must still be in |roots|.
- result: targets that need to be built are added here."""
- if target.visited:
- return
-
- target.visited = True
- target.in_roots = target in roots
-
- for back_dep_target in target.back_deps:
- _AddCompileTargets(back_dep_target, roots, False, result)
- target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
- target.in_roots |= back_dep_target.in_roots
- target.is_or_has_linked_ancestor |= (
- back_dep_target.is_or_has_linked_ancestor)
-
- # Always add 'executable' targets. Even though they may be built by other
- # targets that depend upon them it makes detection of what is going to be
- # built easier.
- # And always add static_libraries that have no dependencies on them from
- # linkables. This is necessary as the other dependencies on them may be
- # static libraries themselves, which are not compile time dependencies.
- if target.in_roots and \
- (target.is_executable or
- (not target.added_to_compile_targets and
- (add_if_no_ancestor or target.requires_build)) or
- (target.is_static_library and add_if_no_ancestor and
- not target.is_or_has_linked_ancestor)):
- print '\t\tadding to compile targets', target.name, 'executable', \
- target.is_executable, 'added_to_compile_targets', \
- target.added_to_compile_targets, 'add_if_no_ancestor', \
- add_if_no_ancestor, 'requires_build', target.requires_build, \
- 'is_static_library', target.is_static_library, \
- 'is_or_has_linked_ancestor', target.is_or_has_linked_ancestor
- result.add(target)
- target.added_to_compile_targets = True
-
-
-def _GetCompileTargets(matching_targets, supplied_targets):
- """Returns the set of Targets that require a build.
- matching_targets: targets that changed and need to be built.
- supplied_targets: set of targets supplied to analyzer to search from."""
- result = set()
- for target in matching_targets:
- print 'finding compile targets for match', target.name
- _AddCompileTargets(target, supplied_targets, True, result)
- return result
-
-
-def _WriteOutput(params, **values):
- """Writes the output, either to stdout or a file is specified."""
- if 'error' in values:
- print 'Error:', values['error']
- if 'status' in values:
- print values['status']
- if 'targets' in values:
- values['targets'].sort()
- print 'Supplied targets that depend on changed files:'
- for target in values['targets']:
- print '\t', target
- if 'invalid_targets' in values:
- values['invalid_targets'].sort()
- print 'The following targets were not found:'
- for target in values['invalid_targets']:
- print '\t', target
- if 'build_targets' in values:
- values['build_targets'].sort()
- print 'Targets that require a build:'
- for target in values['build_targets']:
- print '\t', target
- if 'compile_targets' in values:
- values['compile_targets'].sort()
- print 'Targets that need to be built:'
- for target in values['compile_targets']:
- print '\t', target
- if 'test_targets' in values:
- values['test_targets'].sort()
- print 'Test targets:'
- for target in values['test_targets']:
- print '\t', target
-
- output_path = params.get('generator_flags', {}).get(
- 'analyzer_output_path', None)
- if not output_path:
- print json.dumps(values)
- return
- try:
- f = open(output_path, 'w')
- f.write(json.dumps(values) + '\n')
- f.close()
- except IOError as e:
- print 'Error writing to output file', output_path, str(e)
-
-
-def _WasGypIncludeFileModified(params, files):
- """Returns true if one of the files in |files| is in the set of included
- files."""
- if params['options'].includes:
- for include in params['options'].includes:
- if _ToGypPath(os.path.normpath(include)) in files:
- print 'Include file modified, assuming all changed', include
- return True
- return False
-
-
-def _NamesNotIn(names, mapping):
- """Returns a list of the values in |names| that are not in |mapping|."""
- return [name for name in names if name not in mapping]
-
-
-def _LookupTargets(names, mapping):
- """Returns a list of the mapping[name] for each value in |names| that is in
- |mapping|."""
- return [mapping[name] for name in names if name in mapping]
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'mac':
- default_variables.setdefault('OS', 'mac')
- elif flavor == 'win':
- default_variables.setdefault('OS', 'win')
- # Copy additional generator configuration data from VS, which is shared
- # by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
- else:
- operating_system = flavor
- if flavor == 'android':
- operating_system = 'linux' # Keep this legacy behavior for now.
- default_variables.setdefault('OS', operating_system)
-
-
-class TargetCalculator(object):
- """Calculates the matching test_targets and matching compile_targets."""
- def __init__(self, files, additional_compile_target_names, test_target_names,
- data, target_list, target_dicts, toplevel_dir, build_files):
- self._additional_compile_target_names = set(additional_compile_target_names)
- self._test_target_names = set(test_target_names)
- self._name_to_target, self._changed_targets, self._root_targets = (
- _GenerateTargets(data, target_list, target_dicts, toplevel_dir,
- frozenset(files), build_files))
- self._unqualified_mapping, self.invalid_targets = (
- _GetUnqualifiedToTargetMapping(self._name_to_target,
- self._supplied_target_names_no_all()))
-
- def _supplied_target_names(self):
- return self._additional_compile_target_names | self._test_target_names
-
- def _supplied_target_names_no_all(self):
- """Returns the supplied test targets without 'all'."""
- result = self._supplied_target_names()
- result.discard('all')
- return result
-
- def is_build_impacted(self):
- """Returns true if the supplied files impact the build at all."""
- return self._changed_targets
-
- def find_matching_test_target_names(self):
- """Returns the set of output test targets."""
- assert self.is_build_impacted()
- # Find the test targets first. 'all' is special cased to mean all the
- # root targets. To deal with 'all', the supplied |test_targets| are expanded
- # to include the root targets during lookup. If any of the root targets
- # match, we remove them and replace them with 'all'.
- test_target_names_no_all = set(self._test_target_names)
- test_target_names_no_all.discard('all')
- test_targets_no_all = _LookupTargets(test_target_names_no_all,
- self._unqualified_mapping)
- test_target_names_contains_all = 'all' in self._test_target_names
- if test_target_names_contains_all:
- test_targets = [x for x in (set(test_targets_no_all) |
- set(self._root_targets))]
- else:
- test_targets = [x for x in test_targets_no_all]
- print 'supplied test_targets'
- for target_name in self._test_target_names:
- print '\t', target_name
- print 'found test_targets'
- for target in test_targets:
- print '\t', target.name
- print 'searching for matching test targets'
- matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
- matching_test_targets_contains_all = (test_target_names_contains_all and
- set(matching_test_targets) &
- set(self._root_targets))
- if matching_test_targets_contains_all:
- # Remove any of the targets for 'all' that were not explicitly supplied;
- # 'all' is subsequently added to the matching names below.
- matching_test_targets = [x for x in (set(matching_test_targets) &
- set(test_targets_no_all))]
- print 'matched test_targets'
- for target in matching_test_targets:
- print '\t', target.name
- matching_target_names = [gyp.common.ParseQualifiedTarget(target.name)[1]
- for target in matching_test_targets]
- if matching_test_targets_contains_all:
- matching_target_names.append('all')
- print '\tall'
- return matching_target_names
-
- def find_matching_compile_target_names(self):
- """Returns the set of output compile targets."""
- assert self.is_build_impacted()
- # Compile targets are found by searching up from changed targets.
- # Reset the visited status for _GetBuildTargets.
- for target in self._name_to_target.itervalues():
- target.visited = False
-
- supplied_targets = _LookupTargets(self._supplied_target_names_no_all(),
- self._unqualified_mapping)
- if 'all' in self._supplied_target_names():
- supplied_targets = [x for x in (set(supplied_targets) |
- set(self._root_targets))]
- print 'Supplied test_targets & compile_targets'
- for target in supplied_targets:
- print '\t', target.name
- print 'Finding compile targets'
- compile_targets = _GetCompileTargets(self._changed_targets,
- supplied_targets)
- return [gyp.common.ParseQualifiedTarget(target.name)[1]
- for target in compile_targets]
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Called by gyp as the final stage. Outputs results."""
- config = Config()
- try:
- config.Init(params)
-
- if not config.files:
- raise Exception('Must specify files to analyze via config_path generator '
- 'flag')
-
- toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
- if debug:
- print 'toplevel_dir', toplevel_dir
-
- if _WasGypIncludeFileModified(params, config.files):
- result_dict = { 'status': all_changed_string,
- 'test_targets': list(config.test_target_names),
- 'compile_targets': list(
- config.additional_compile_target_names |
- config.test_target_names) }
- _WriteOutput(params, **result_dict)
- return
-
- calculator = TargetCalculator(config.files,
- config.additional_compile_target_names,
- config.test_target_names, data,
- target_list, target_dicts, toplevel_dir,
- params['build_files'])
- if not calculator.is_build_impacted():
- result_dict = { 'status': no_dependency_string,
- 'test_targets': [],
- 'compile_targets': [] }
- if calculator.invalid_targets:
- result_dict['invalid_targets'] = calculator.invalid_targets
- _WriteOutput(params, **result_dict)
- return
-
- test_target_names = calculator.find_matching_test_target_names()
- compile_target_names = calculator.find_matching_compile_target_names()
- found_at_least_one_target = compile_target_names or test_target_names
- result_dict = { 'test_targets': test_target_names,
- 'status': found_dependency_string if
- found_at_least_one_target else no_dependency_string,
- 'compile_targets': list(
- set(compile_target_names) |
- set(test_target_names)) }
- if calculator.invalid_targets:
- result_dict['invalid_targets'] = calculator.invalid_targets
- _WriteOutput(params, **result_dict)
-
- except Exception as e:
- _WriteOutput(params, error=str(e))
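Driving the generator end to end then looks roughly like the following (a sketch assuming gyp's -f/-G flag conventions; the a.gyp file and output path are hypothetical):

    import subprocess

    subprocess.check_call([
        'gyp', '-f', 'analyzer',
        '-Gconfig_path=analyzer_config.json',     # written as in the earlier sketch
        '-Ganalyzer_output_path=analyzer_out.json',
        'a.gyp',
    ])
    # analyzer_out.json then carries the status/test_targets/compile_targets
    # keys described in the module docstring.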
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
deleted file mode 100644
index 5b26cc785a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ /dev/null
@@ -1,1095 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Notes:
-#
-# This generates makefiles suitable for inclusion into the Android build system
-# via an Android.mk file. It is based on make.py, the standard makefile
-# generator.
-#
-# The code below generates a separate .mk file for each target, but
-# all are sourced by the top-level GypAndroid.mk. This means that all
-# variables in .mk-files clobber one another, and furthermore that any
-# variables set potentially clash with other Android build system variables.
-# Try to avoid setting global variables where possible.
-
-import gyp
-import gyp.common
-import gyp.generator.make as make # Reuse global functions from make backend.
-import os
-import re
-import subprocess
-
-generator_default_variables = {
- 'OS': 'android',
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_SUFFIX': '.so',
- 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)',
- 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)',
- 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)',
- 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)',
- 'LIB_DIR': '$(obj).$(TOOLSET)',
- 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
- 'RULE_INPUT_PATH': '$(RULE_SOURCES)',
- 'RULE_INPUT_EXT': '$(suffix $<)',
- 'RULE_INPUT_NAME': '$(notdir $<)',
- 'CONFIGURATION_NAME': '$(GYP_CONFIGURATION)',
-}
-
-# Make supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-
-# Generator-specific gyp specs.
-generator_additional_non_configuration_keys = [
- # Boolean to declare that this target does not want its name mangled.
- 'android_unmangled_name',
- # Map of android build system variables to set.
- 'aosp_build_settings',
-]
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-
-
-ALL_MODULES_FOOTER = """\
-# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from
-# all the included sub-makefiles. This is just here to clarify.
-gyp_all_modules:
-"""
-
-header = """\
-# This file is generated by gyp; do not edit.
-
-"""
-
-# Map gyp target types to Android module classes.
-MODULE_CLASSES = {
- 'static_library': 'STATIC_LIBRARIES',
- 'shared_library': 'SHARED_LIBRARIES',
- 'executable': 'EXECUTABLES',
-}
-
-
-def IsCPPExtension(ext):
- return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx'
-
-
-def Sourceify(path):
- """Convert a path to its source directory form. The Android backend does not
- support options.generator_output, so this function is a noop."""
- return path
-
-
-# Map from qualified target to path to output.
-# For Android, the target of these maps is a tuple ('static', 'modulename'),
-# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string,
-# since we link by module.
-target_outputs = {}
-# Map from qualified target to any linkable output. A subset
-# of target_outputs. E.g. when mybinary depends on liba, we want to
-# include liba in the linker line; when otherbinary depends on
-# mybinary, we just want to build mybinary first.
-target_link_deps = {}
-
-
-class AndroidMkWriter(object):
- """AndroidMkWriter packages up the writing of one target-specific Android.mk.
-
- Its only real entry point is Write(); it is mostly used for namespacing.
- """
-
- def __init__(self, android_top_dir):
- self.android_top_dir = android_top_dir
-
- def Write(self, qualified_target, relative_target, base_path, output_filename,
- spec, configs, part_of_all, write_alias_target, sdk_version):
- """The main entry point: writes a .mk file for a single target.
-
- Arguments:
- qualified_target: target we're generating
- relative_target: qualified target name relative to the root
- base_path: path relative to source root we're building in, used to resolve
- target-relative paths
- output_filename: output .mk file name to write
- spec, configs: gyp info
- part_of_all: flag indicating this target is part of 'all'
- write_alias_target: flag indicating whether to create short aliases for
- this target
- sdk_version: what to emit for LOCAL_SDK_VERSION in output
- """
- gyp.common.EnsureDirExists(output_filename)
-
- self.fp = open(output_filename, 'w')
-
- self.fp.write(header)
-
- self.qualified_target = qualified_target
- self.relative_target = relative_target
- self.path = base_path
- self.target = spec['target_name']
- self.type = spec['type']
- self.toolset = spec['toolset']
-
- deps, link_deps = self.ComputeDeps(spec)
-
- # Some of the generation below can add extra output, sources, or
- # link dependencies. All of the out params of the functions that
- # follow use names like extra_foo.
- extra_outputs = []
- extra_sources = []
-
- self.android_class = MODULE_CLASSES.get(self.type, 'GYP')
- self.android_module = self.ComputeAndroidModule(spec)
- (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec)
- self.output = self.output_binary = self.ComputeOutput(spec)
-
- # Standard header.
- self.WriteLn('include $(CLEAR_VARS)\n')
-
- # Module class and name.
- self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class)
- self.WriteLn('LOCAL_MODULE := ' + self.android_module)
- # Only emit LOCAL_MODULE_STEM if it's different from LOCAL_MODULE.
- # The library module classes fail if the stem is set. ComputeOutputParts
- # makes sure that stem == modulename in these cases.
- if self.android_stem != self.android_module:
- self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem)
- self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix)
- if self.toolset == 'host':
- self.WriteLn('LOCAL_IS_HOST_MODULE := true')
- self.WriteLn('LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)')
- elif sdk_version > 0:
- self.WriteLn('LOCAL_MODULE_TARGET_ARCH := '
- '$(TARGET_$(GYP_VAR_PREFIX)ARCH)')
- self.WriteLn('LOCAL_SDK_VERSION := %s' % sdk_version)
-
- # Grab output directories; needed for Actions and Rules.
- if self.toolset == 'host':
- self.WriteLn('gyp_intermediate_dir := '
- '$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))')
- else:
- self.WriteLn('gyp_intermediate_dir := '
- '$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))')
- self.WriteLn('gyp_shared_intermediate_dir := '
- '$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))')
- self.WriteLn()
-
- # List files this target depends on so that actions/rules/copies/sources
- # can depend on the list.
- # TODO: doesn't pull in things through transitive link deps; needed?
- target_dependencies = [x[1] for x in deps if x[0] == 'path']
- self.WriteLn('# Make sure our deps are built first.')
- self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES',
- local_pathify=True)
-
- # Actions must come first, since they can generate more OBJs for use below.
- if 'actions' in spec:
- self.WriteActions(spec['actions'], extra_sources, extra_outputs)
-
- # Rules must be early like actions.
- if 'rules' in spec:
- self.WriteRules(spec['rules'], extra_sources, extra_outputs)
-
- if 'copies' in spec:
- self.WriteCopies(spec['copies'], extra_outputs)
-
- # GYP generated outputs.
- self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True)
-
- # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend
- # on both our dependency targets and our generated files.
- self.WriteLn('# Make sure our deps and generated files are built first.')
- self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) '
- '$(GYP_GENERATED_OUTPUTS)')
- self.WriteLn()
-
- # Sources.
- if spec.get('sources', []) or extra_sources:
- self.WriteSources(spec, configs, extra_sources)
-
- self.WriteTarget(spec, configs, deps, link_deps, part_of_all,
- write_alias_target)
-
- # Update global list of target outputs, used in dependency tracking.
- target_outputs[qualified_target] = ('path', self.output_binary)
-
- # Update global list of link dependencies.
- if self.type == 'static_library':
- target_link_deps[qualified_target] = ('static', self.android_module)
- elif self.type == 'shared_library':
- target_link_deps[qualified_target] = ('shared', self.android_module)
-
- self.fp.close()
- return self.android_module
-
-
- def WriteActions(self, actions, extra_sources, extra_outputs):
- """Write Makefile code for any 'actions' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- actions (used to make other pieces dependent on these
- actions)
- """
- for action in actions:
- name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
- action['action_name']))
- self.WriteLn('### Rules for action "%s":' % action['action_name'])
- inputs = action['inputs']
- outputs = action['outputs']
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set()
- for out in outputs:
- if not out.startswith('$'):
- print ('WARNING: Action for target "%s" writes output to local path '
- '"%s".' % (self.target, out))
- dir = os.path.split(out)[0]
- if dir:
- dirs.add(dir)
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources += outputs
-
- # Prepare the actual command.
- command = gyp.common.EncodePOSIXShellList(action['action'])
- if 'message' in action:
- quiet_cmd = 'Gyp action: %s ($@)' % action['message']
- else:
- quiet_cmd = 'Gyp action: %s ($@)' % name
- if len(dirs) > 0:
- command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
- cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
- command = cd_action + command
-
- # The makefile rules are all relative to the top dir, but the gyp actions
- # are defined relative to their containing dir. This replaces the gyp_*
- # variables for the action rule with an absolute version so that the
- # output goes in the right place.
- # Only write the gyp_* rules for the "primary" output (:1);
- # it's superfluous for the "extra outputs", and this avoids accidentally
- # writing duplicate dummy rules for those outputs.
- main_output = make.QuoteSpaces(self.LocalPathify(outputs[0]))
- self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
- self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
- self.WriteLn('%s: gyp_intermediate_dir := '
- '$(abspath $(gyp_intermediate_dir))' % main_output)
- self.WriteLn('%s: gyp_shared_intermediate_dir := '
- '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
-
- # Android's envsetup.sh adds a number of directories to the path including
- # the built host binary directory. This causes actions/rules invoked by
- # gyp to sometimes use these instead of system versions, e.g. bison.
- # The built host binaries may not be suitable, and can cause errors.
- # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable
- # set by envsetup.
- self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))'
- % main_output)
-
- # Don't allow spaces in input/output filenames, but make an exception for
- # filenames which start with '$(' since it's okay for there to be spaces
- # inside of make function/macro invocations.
- for input in inputs:
- if not input.startswith('$(') and ' ' in input:
- raise gyp.common.GypError(
- 'Action input filename "%s" in target %s contains a space' %
- (input, self.target))
- for output in outputs:
- if not output.startswith('$(') and ' ' in output:
- raise gyp.common.GypError(
- 'Action output filename "%s" in target %s contains a space' %
- (output, self.target))
-
- self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
- (main_output, ' '.join(map(self.LocalPathify, inputs))))
- self.WriteLn('\t@echo "%s"' % quiet_cmd)
- self.WriteLn('\t$(hide)%s\n' % command)
- for output in outputs[1:]:
- # Make each output depend on the main output, with an empty command
- # to force make to notice that the mtime has changed.
- self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output))
-
- extra_outputs += outputs
- self.WriteLn()
-
- self.WriteLn()
-
-
- def WriteRules(self, rules, extra_sources, extra_outputs):
- """Write Makefile code for any 'rules' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- rules (used to make other pieces dependent on these rules)
- """
- if len(rules) == 0:
- return
-
- for rule in rules:
- if len(rule.get('rule_sources', [])) == 0:
- continue
- name = make.StringToMakefileVariable('%s_%s' % (self.relative_target,
- rule['rule_name']))
- self.WriteLn('\n### Generated for rule "%s":' % name)
- self.WriteLn('# "%s":' % rule)
-
- inputs = rule.get('inputs')
- for rule_source in rule.get('rule_sources', []):
- (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
- (rule_source_root, rule_source_ext) = \
- os.path.splitext(rule_source_basename)
-
- outputs = [self.ExpandInputRoot(out, rule_source_root,
- rule_source_dirname)
- for out in rule['outputs']]
-
- dirs = set()
- for out in outputs:
- if not out.startswith('$'):
- print ('WARNING: Rule for target %s writes output to local path %s'
- % (self.target, out))
- dir = os.path.dirname(out)
- if dir:
- dirs.add(dir)
- extra_outputs += outputs
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources.extend(outputs)
-
- components = []
- for component in rule['action']:
- component = self.ExpandInputRoot(component, rule_source_root,
- rule_source_dirname)
- if '$(RULE_SOURCES)' in component:
- component = component.replace('$(RULE_SOURCES)',
- rule_source)
- components.append(component)
-
- command = gyp.common.EncodePOSIXShellList(components)
- cd_action = 'cd $(gyp_local_path)/%s; ' % self.path
- command = cd_action + command
- if dirs:
- command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
- # We set up a rule to build the first output, and then set up
- # a rule for each additional output to depend on the first.
- outputs = map(self.LocalPathify, outputs)
- main_output = outputs[0]
- self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output)
- self.WriteLn('%s: gyp_var_prefix := $(GYP_VAR_PREFIX)' % main_output)
- self.WriteLn('%s: gyp_intermediate_dir := '
- '$(abspath $(gyp_intermediate_dir))' % main_output)
- self.WriteLn('%s: gyp_shared_intermediate_dir := '
- '$(abspath $(gyp_shared_intermediate_dir))' % main_output)
-
- # See explanation in WriteActions.
- self.WriteLn('%s: export PATH := '
- '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output)
-
- main_output_deps = self.LocalPathify(rule_source)
- if inputs:
- main_output_deps += ' '
- main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs])
-
- self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' %
- (main_output, main_output_deps))
- self.WriteLn('\t%s\n' % command)
- for output in outputs[1:]:
- # Make each output depend on the main output, with an empty command
- # to force make to notice that the mtime has changed.
- self.WriteLn('%s: %s ;' % (output, main_output))
- self.WriteLn()
-
- self.WriteLn()
-
-
- def WriteCopies(self, copies, extra_outputs):
- """Write Makefile code for any 'copies' from the gyp input.
-
- extra_outputs: a list that will be filled in with any outputs of this action
- (used to make other pieces dependent on this action)
- """
- self.WriteLn('### Generated for copy rule.')
-
- variable = make.StringToMakefileVariable(self.relative_target + '_copies')
- outputs = []
- for copy in copies:
- for path in copy['files']:
- # The Android build system does not allow generation of files into the
- # source tree. The destination should start with a variable, which will
- # typically be $(gyp_intermediate_dir) or
- # $(gyp_shared_intermediate_dir). Note that we can't use an assertion
- # because some of the gyp tests depend on this.
- if not copy['destination'].startswith('$'):
- print ('WARNING: Copy rule for target %s writes output to '
- 'local path %s' % (self.target, copy['destination']))
-
- # LocalPathify() calls normpath, stripping trailing slashes.
- path = Sourceify(self.LocalPathify(path))
- filename = os.path.split(path)[1]
- output = Sourceify(self.LocalPathify(os.path.join(copy['destination'],
- filename)))
-
- self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' %
- (output, path))
- self.WriteLn('\t@echo Copying: $@')
- self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
- self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
- self.WriteLn()
- outputs.append(output)
- self.WriteLn('%s = %s' % (variable,
- ' '.join(map(make.QuoteSpaces, outputs))))
- extra_outputs.append('$(%s)' % variable)
- self.WriteLn()
-
-
- def WriteSourceFlags(self, spec, configs):
- """Write out the flags and include paths used to compile source files for
- the current target.
-
- Args:
- spec, configs: input from gyp.
- """
- for configname, config in sorted(configs.iteritems()):
- extracted_includes = []
-
- self.WriteLn('\n# Flags passed to both C and C++ files.')
- cflags, includes_from_cflags = self.ExtractIncludesFromCFlags(
- config.get('cflags', []) + config.get('cflags_c', []))
- extracted_includes.extend(includes_from_cflags)
- self.WriteList(cflags, 'MY_CFLAGS_%s' % configname)
-
- self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname,
- prefix='-D', quoter=make.EscapeCppDefine)
-
- self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS')
- includes = list(config.get('include_dirs', []))
- includes.extend(extracted_includes)
- includes = map(Sourceify, map(self.LocalPathify, includes))
- includes = self.NormalizeIncludePaths(includes)
- self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname)
-
- self.WriteLn('\n# Flags passed to only C++ (and not C) files.')
- self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname)
-
- self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '
- '$(MY_DEFS_$(GYP_CONFIGURATION))')
- # Undefine ANDROID for host modules
- # TODO: the source code should not use the ANDROID macro to tell whether
- # it is a host or target module.
- if self.toolset == 'host':
- self.WriteLn('# Undefine ANDROID for host modules')
- self.WriteLn('LOCAL_CFLAGS += -UANDROID')
- self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '
- '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))')
- self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))')
- # Android uses separate flags for assembly file invocations, but gyp expects
- # the same CFLAGS to be applied:
- self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
-
-
- def WriteSources(self, spec, configs, extra_sources):
- """Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
- We need to handle shared_intermediate directory source files as
- a special case by copying them to the intermediate directory and
- treating them as generated sources. Otherwise the Android build
- rules won't pick them up.
-
- Args:
- spec, configs: input from gyp.
- extra_sources: Sources generated from Actions or Rules.
- """
- sources = filter(make.Compilable, spec.get('sources', []))
- generated_not_sources = [x for x in extra_sources if not make.Compilable(x)]
- extra_sources = filter(make.Compilable, extra_sources)
-
- # Determine and output the C++ extension used by these sources.
- # We simply find the first C++ file and use that extension.
- all_sources = sources + extra_sources
- local_cpp_extension = '.cpp'
- for source in all_sources:
- (root, ext) = os.path.splitext(source)
- if IsCPPExtension(ext):
- local_cpp_extension = ext
- break
- if local_cpp_extension != '.cpp':
- self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension)
-
- # We need to move any non-generated sources that are coming from the
- # shared intermediate directory out of LOCAL_SRC_FILES and put them
- # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files
- # that don't match our local_cpp_extension, since Android will only
- # generate Makefile rules for a single LOCAL_CPP_EXTENSION.
- local_files = []
- for source in sources:
- (root, ext) = os.path.splitext(source)
- if '$(gyp_shared_intermediate_dir)' in source:
- extra_sources.append(source)
- elif '$(gyp_intermediate_dir)' in source:
- extra_sources.append(source)
- elif IsCPPExtension(ext) and ext != local_cpp_extension:
- extra_sources.append(source)
- else:
- local_files.append(os.path.normpath(os.path.join(self.path, source)))
-
- # For any generated source, if it is coming from the shared intermediate
- # directory then we add a Make rule to copy them to the local intermediate
- # directory first. This is because the Android LOCAL_GENERATED_SOURCES
- # must be in the local module intermediate directory for the compile rules
- # to work properly. If the file has the wrong C++ extension, then we add
- # a rule to copy that to intermediates and use the new version.
- final_generated_sources = []
- # If a source file gets copied, we still need to add the original source
- # directory as a header search path, since GCC searches for headers in the
- # directory that contains the source file by default.
- origin_src_dirs = []
- for source in extra_sources:
- local_file = source
- if not '$(gyp_intermediate_dir)/' in local_file:
- basename = os.path.basename(local_file)
- local_file = '$(gyp_intermediate_dir)/' + basename
- (root, ext) = os.path.splitext(local_file)
- if IsCPPExtension(ext) and ext != local_cpp_extension:
- local_file = root + local_cpp_extension
- if local_file != source:
- self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source)))
- self.WriteLn('\tmkdir -p $(@D); cp $< $@')
- origin_src_dirs.append(os.path.dirname(source))
- final_generated_sources.append(local_file)
-
- # We add back in all of the non-compilable stuff to make sure that the
- # make rules have dependencies on them.
- final_generated_sources.extend(generated_not_sources)
- self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES')
-
- origin_src_dirs = gyp.common.uniquer(origin_src_dirs)
- origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs))
- self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS')
-
- self.WriteList(local_files, 'LOCAL_SRC_FILES')
-
- # Write out the flags used to compile the source; this must be done last
- # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path.
- self.WriteSourceFlags(spec, configs)
-
-
- def ComputeAndroidModule(self, spec):
- """Return the Android module name used for a gyp spec.
-
- We use the complete qualified target name to avoid collisions between
- duplicate targets in different directories. We also add a suffix to
- distinguish gyp-generated module names.
- """
-
- if int(spec.get('android_unmangled_name', 0)):
- assert self.type != 'shared_library' or self.target.startswith('lib')
- return self.target
-
- if self.type == 'shared_library':
- # For reasons of convention, the Android build system requires that all
- # shared library modules are named 'libfoo' when generating -l flags.
- prefix = 'lib_'
- else:
- prefix = ''
-
- if spec['toolset'] == 'host':
- suffix = '_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp'
- else:
- suffix = '_gyp'
-
- if self.path:
- middle = make.StringToMakefileVariable('%s_%s' % (self.path, self.target))
- else:
- middle = make.StringToMakefileVariable(self.target)
-
- return ''.join([prefix, middle, suffix])
-
-
- def ComputeOutputParts(self, spec):
- """Return the 'output basename' of a gyp spec, split into filename + ext.
-
- Android libraries must be named the same thing as their module name,
- otherwise the linker can't find them, so product_name and so on must be
- ignored if we are building a library, and the "lib" prepending is
- not done for Android.
- """
- assert self.type != 'loadable_module' # TODO: not supported?
-
- target = spec['target_name']
- target_prefix = ''
- target_ext = ''
- if self.type == 'static_library':
- target = self.ComputeAndroidModule(spec)
- target_ext = '.a'
- elif self.type == 'shared_library':
- target = self.ComputeAndroidModule(spec)
- target_ext = '.so'
- elif self.type == 'none':
- target_ext = '.stamp'
- elif self.type != 'executable':
- print ("ERROR: What output file should be generated?",
- "type", self.type, "target", target)
-
- if self.type != 'static_library' and self.type != 'shared_library':
- target_prefix = spec.get('product_prefix', target_prefix)
- target = spec.get('product_name', target)
- product_ext = spec.get('product_extension')
- if product_ext:
- target_ext = '.' + product_ext
-
- target_stem = target_prefix + target
- return (target_stem, target_ext)
-
-
- def ComputeOutputBasename(self, spec):
- """Return the 'output basename' of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
- return ''.join(self.ComputeOutputParts(spec))
-
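- # Illustrative example (editor's sketch): for the shared_library sketched
- # above, the parts from ComputeOutputParts() recombine as
- #
- #   >>> ''.join(('lib_foo_bar_baz_gyp', '.so'))
- #   'lib_foo_bar_baz_gyp.so'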
-
- def ComputeOutput(self, spec):
- """Return the 'output' (full output path) of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
- if self.type == 'executable':
- # We install host executables into shared_intermediate_dir so they can be
- # run by gyp rules that refer to PRODUCT_DIR.
- path = '$(gyp_shared_intermediate_dir)'
- elif self.type == 'shared_library':
- if self.toolset == 'host':
- path = '$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)'
- else:
- path = '$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)'
- else:
- # Other targets just get built into their intermediate dir.
- if self.toolset == 'host':
- path = ('$(call intermediates-dir-for,%s,%s,true,,'
- '$(GYP_HOST_VAR_PREFIX))' % (self.android_class,
- self.android_module))
- else:
- path = ('$(call intermediates-dir-for,%s,%s,,,$(GYP_VAR_PREFIX))'
- % (self.android_class, self.android_module))
-
- assert spec.get('product_dir') is None # TODO: not supported?
- return os.path.join(path, self.ComputeOutputBasename(spec))
-
- def NormalizeIncludePaths(self, include_paths):
- """ Normalize include_paths.
- Convert absolute paths to relative to the Android top directory.
-
- Args:
- include_paths: A list of unprocessed include paths.
- Returns:
- A list of normalized include paths.
- """
- normalized = []
- for path in include_paths:
- if path[0] == '/':
- path = gyp.common.RelativePath(path, self.android_top_dir)
- normalized.append(path)
- return normalized
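-
- # Illustrative example (editor's sketch, for a hypothetical writer whose
- # android_top_dir is '/src/android'):
- #
- #   >>> writer.NormalizeIncludePaths(['/src/android/external/foo', 'include'])
- #   ['external/foo', 'include']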
-
- def ExtractIncludesFromCFlags(self, cflags):
- """Extract includes "-I..." out from cflags
-
- Args:
- cflags: A list of compiler flags, which may be mixed with "-I.."
- Returns:
- A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed.
- """
- clean_cflags = []
- include_paths = []
- for flag in cflags:
- if flag.startswith('-I'):
- include_paths.append(flag[2:])
- else:
- clean_cflags.append(flag)
-
- return (clean_cflags, include_paths)
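-
- # Illustrative example (editor's sketch, for a hypothetical writer):
- #
- #   >>> writer.ExtractIncludesFromCFlags(['-O2', '-Iinclude', '-I/usr/include'])
- #   (['-O2'], ['include', '/usr/include'])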
-
- def FilterLibraries(self, libraries):
- """Filter the 'libraries' key to separate things that shouldn't be ldflags.
-
- Library entries that look like filenames should be converted to android
- module names instead of being passed to the linker as flags.
-
- Args:
- libraries: the value of spec.get('libraries')
- Returns:
- A tuple (static_lib_modules, dynamic_lib_modules, ldflags)
- """
- static_lib_modules = []
- dynamic_lib_modules = []
- ldflags = []
- for libs in libraries:
- # Libs can have multiple words.
- for lib in libs.split():
- # Skip the system libraries, which the Android build system adds by
- # default.
- if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or
- lib.endswith('libgcc.a')):
- continue
- match = re.search(r'([^/]+)\.a$', lib)
- if match:
- static_lib_modules.append(match.group(1))
- continue
- match = re.search(r'([^/]+)\.so$', lib)
- if match:
- dynamic_lib_modules.append(match.group(1))
- continue
- if lib.startswith('-l'):
- ldflags.append(lib)
- return (static_lib_modules, dynamic_lib_modules, ldflags)
-
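- # Illustrative example (editor's sketch, for a hypothetical writer): system
- # libraries are dropped, filenames become module names, and only the
- # remaining '-l' flags survive as ldflags.
- #
- #   >>> writer.FilterLibraries(['-lc -lm', 'out/libfoo.a', 'libbar.so', '-ldl'])
- #   (['libfoo'], ['libbar'], ['-ldl'])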
-
- def ComputeDeps(self, spec):
- """Compute the dependencies of a gyp spec.
-
- Returns a tuple (deps, link_deps), where each is a list of
- filenames that will need to be put in front of make for either
- building (deps) or linking (link_deps).
- """
- deps = []
- link_deps = []
- if 'dependencies' in spec:
- deps.extend([target_outputs[dep] for dep in spec['dependencies']
- if target_outputs[dep]])
- for dep in spec['dependencies']:
- if dep in target_link_deps:
- link_deps.append(target_link_deps[dep])
- deps.extend(link_deps)
- return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
-
-
- def WriteTargetFlags(self, spec, configs, link_deps):
- """Write Makefile code to specify the link flags and library dependencies.
-
- spec, configs: input from gyp.
- link_deps: link dependency list; see ComputeDeps()
- """
- # Libraries (i.e. -lfoo)
- # These must be included even for static libraries as some of them provide
- # implicit include paths through the build system.
- libraries = gyp.common.uniquer(spec.get('libraries', []))
- static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries)
-
- if self.type != 'static_library':
- for configname, config in sorted(configs.iteritems()):
- ldflags = list(config.get('ldflags', []))
- self.WriteLn('')
- self.WriteList(ldflags, 'LOCAL_LDFLAGS_%s' % configname)
- self.WriteList(ldflags_libs, 'LOCAL_GYP_LIBS')
- self.WriteLn('LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) '
- '$(LOCAL_GYP_LIBS)')
-
- # Link dependencies (i.e. other gyp targets this target depends on)
- # These need not be included for static libraries as within the gyp build
- # we do not use the implicit include path mechanism.
- if self.type != 'static_library':
- static_link_deps = [x[1] for x in link_deps if x[0] == 'static']
- shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared']
- else:
- static_link_deps = []
- shared_link_deps = []
-
- # Only write the lists if they are non-empty.
- if static_libs or static_link_deps:
- self.WriteLn('')
- self.WriteList(static_libs + static_link_deps,
- 'LOCAL_STATIC_LIBRARIES')
- self.WriteLn('# Enable grouping to fix circular references')
- self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true')
- if dynamic_libs or shared_link_deps:
- self.WriteLn('')
- self.WriteList(dynamic_libs + shared_link_deps,
- 'LOCAL_SHARED_LIBRARIES')
-
-
- def WriteTarget(self, spec, configs, deps, link_deps, part_of_all,
- write_alias_target):
- """Write Makefile code to produce the final target of the gyp spec.
-
- spec, configs: input from gyp.
- deps, link_deps: dependency lists; see ComputeDeps()
- part_of_all: flag indicating this target is part of 'all'
- write_alias_target: flag indicating whether to create short aliases for this
- target
- """
- self.WriteLn('### Rules for final target.')
-
- if self.type != 'none':
- self.WriteTargetFlags(spec, configs, link_deps)
-
- settings = spec.get('aosp_build_settings', {})
- if settings:
- self.WriteLn('### Set directly by aosp_build_settings.')
- for k, v in settings.iteritems():
- if isinstance(v, list):
- self.WriteList(v, k)
- else:
- self.WriteLn('%s := %s' % (k, make.QuoteIfNecessary(v)))
- self.WriteLn('')
-
- # Add to the set of targets which represent the gyp 'all' target. We use the
- # name 'gyp_all_modules' because the Android build system doesn't allow the
- # use of the Make target 'all' and because 'all_modules' is the equivalent
- # of the Make target 'all' on Android.
- if part_of_all and write_alias_target:
- self.WriteLn('# Add target alias to "gyp_all_modules" target.')
- self.WriteLn('.PHONY: gyp_all_modules')
- self.WriteLn('gyp_all_modules: %s' % self.android_module)
- self.WriteLn('')
-
- # Add an alias from the gyp target name to the Android module name. This
- # simplifies manual builds of the target, and is required by the test
- # framework.
- if self.target != self.android_module and write_alias_target:
- self.WriteLn('# Alias gyp target name.')
- self.WriteLn('.PHONY: %s' % self.target)
- self.WriteLn('%s: %s' % (self.target, self.android_module))
- self.WriteLn('')
-
- # Add the command to trigger build of the target type depending
- # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY
- # NOTE: This has to come last!
- modifier = ''
- if self.toolset == 'host':
- modifier = 'HOST_'
- if self.type == 'static_library':
- self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier)
- elif self.type == 'shared_library':
- self.WriteLn('LOCAL_PRELINK_MODULE := false')
- self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier)
- elif self.type == 'executable':
- self.WriteLn('LOCAL_CXX_STL := libc++_static')
- # Executables are for build and test purposes only, so they're installed
- # to a directory that doesn't get included in the system image.
- self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)')
- self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier)
- else:
- self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp')
- self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true')
- if self.toolset == 'target':
- self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)')
- else:
- self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)')
- self.WriteLn()
- self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk')
- self.WriteLn()
- self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)')
- self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"')
- self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
- self.WriteLn('\t$(hide) touch $@')
- self.WriteLn()
- self.WriteLn('LOCAL_2ND_ARCH_VAR_PREFIX :=')
-
-
- def WriteList(self, value_list, variable=None, prefix='',
- quoter=make.QuoteIfNecessary, local_pathify=False):
- """Write a variable definition that is a list of values.
-
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo = blaha blahb
- but in a pretty-printed style.
- """
- values = ''
- if value_list:
- value_list = [quoter(prefix + l) for l in value_list]
- if local_pathify:
- value_list = [self.LocalPathify(l) for l in value_list]
- values = ' \\\n\t' + ' \\\n\t'.join(value_list)
- self.fp.write('%s :=%s\n\n' % (variable, values))
-
-
- def WriteLn(self, text=''):
- self.fp.write(text + '\n')
-
-
- def LocalPathify(self, path):
- """Convert a subdirectory-relative path into a normalized path which starts
- with the make variable $(LOCAL_PATH) (i.e. the top of the project tree).
- Absolute paths, or paths that contain variables, are just normalized."""
- if '$(' in path or os.path.isabs(path):
- # path is not a file in the project tree in this case, but calling
- # normpath is still important for trimming trailing slashes.
- return os.path.normpath(path)
- local_path = os.path.join('$(LOCAL_PATH)', self.path, path)
- local_path = os.path.normpath(local_path)
- # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH)
- # - i.e. that the resulting path is still inside the project tree. The
- # path may legitimately have ended up containing just $(LOCAL_PATH), though,
- # so we don't look for a slash.
- assert local_path.startswith('$(LOCAL_PATH)'), (
- 'Path %s attempts to escape from gyp path %s!' % (path, self.path))
- return local_path
-
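- # Illustrative example (editor's sketch, for a hypothetical writer whose
- # self.path is 'foo/bar'):
- #
- #   >>> writer.LocalPathify('baz/qux.c')
- #   '$(LOCAL_PATH)/foo/bar/baz/qux.c'
- #   >>> writer.LocalPathify('../shared/x.c')
- #   '$(LOCAL_PATH)/foo/shared/x.c'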
-
- def ExpandInputRoot(self, template, expansion, dirname):
- if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
- return template
- path = template % {
- 'INPUT_ROOT': expansion,
- 'INPUT_DIRNAME': dirname,
- }
- return os.path.normpath(path)
-
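-# Illustrative example (editor's sketch, for a hypothetical writer): both
-# placeholders are substituted before the result is normalized.
-#
-#   >>> writer.ExpandInputRoot('%(INPUT_ROOT)s.gen/%(INPUT_DIRNAME)s',
-#   ...                        'foo', 'a/b')
-#   'foo.gen/a/b'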
-
-def PerformBuild(data, configurations, params):
- # The android backend only supports the default configuration.
- options = params['options']
- makefile = os.path.abspath(os.path.join(options.toplevel_dir,
- 'GypAndroid.mk'))
- env = dict(os.environ)
- env['ONE_SHOT_MAKEFILE'] = makefile
- arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules']
- print 'Building: %s' % arguments
- subprocess.check_call(arguments, env=env)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- options = params['options']
- generator_flags = params.get('generator_flags', {})
- builddir_name = generator_flags.get('output_dir', 'out')
- limit_to_target_all = generator_flags.get('limit_to_target_all', False)
- write_alias_targets = generator_flags.get('write_alias_targets', True)
- sdk_version = generator_flags.get('aosp_sdk_version', 0)
- android_top_dir = os.environ.get('ANDROID_BUILD_TOP')
- assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.'
-
- def CalculateMakefilePath(build_file, base_name):
- """Determine where to write a Makefile for a given gyp file."""
- # Paths in gyp files are relative to the .gyp file, but we want
- # paths relative to the source root for the master makefile. Grab
- # the path of the .gyp file as the base to relativize against.
- # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.depth)
- # We write the file in the base_path directory.
- output_file = os.path.join(options.depth, base_path, base_name)
- assert not options.generator_output, (
- 'The Android backend does not support options.generator_output.')
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.toplevel_dir)
- return base_path, output_file
-
- # TODO: search for the first non-'Default' target. This can go
- # away when we add verification that all targets have the
- # necessary configurations.
- default_configuration = None
- toolsets = set([target_dicts[target]['toolset'] for target in target_list])
- for target in target_list:
- spec = target_dicts[target]
- if spec['default_configuration'] != 'Default':
- default_configuration = spec['default_configuration']
- break
- if not default_configuration:
- default_configuration = 'Default'
-
- srcdir = '.'
- makefile_name = 'GypAndroid' + options.suffix + '.mk'
- makefile_path = os.path.join(options.toplevel_dir, makefile_name)
- assert not options.generator_output, (
- 'The Android backend does not support options.generator_output.')
- gyp.common.EnsureDirExists(makefile_path)
- root_makefile = open(makefile_path, 'w')
-
- root_makefile.write(header)
-
- # We set LOCAL_PATH just once, here, to the top of the project tree. This
- # allows all the other paths we use to be relative to the Android.mk file,
- # as the Android build system expects.
- root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n')
-
- # Find the list of targets that derive from the gyp file(s) being built.
- needed_targets = set()
- for build_file in params['build_files']:
- for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
- needed_targets.add(target)
-
- build_files = set()
- include_list = set()
- android_modules = {}
- for qualified_target in target_list:
- build_file, target, toolset = gyp.common.ParseQualifiedTarget(
- qualified_target)
- relative_build_file = gyp.common.RelativePath(build_file,
- options.toplevel_dir)
- build_files.add(relative_build_file)
- included_files = data[build_file]['included_files']
- for included_file in included_files:
- # The included_files entries are relative to the dir of the build file
- # that included them, so we have to undo that and then make them relative
- # to the root dir.
- relative_include_file = gyp.common.RelativePath(
- gyp.common.UnrelativePath(included_file, build_file),
- options.toplevel_dir)
- abs_include_file = os.path.abspath(relative_include_file)
- # If the include file is from the ~/.gyp dir, we should use an absolute path
- # so that relocating the src dir doesn't break the path.
- if (params['home_dot_gyp'] and
- abs_include_file.startswith(params['home_dot_gyp'])):
- build_files.add(abs_include_file)
- else:
- build_files.add(relative_include_file)
-
- base_path, output_file = CalculateMakefilePath(build_file,
- target + '.' + toolset + options.suffix + '.mk')
-
- spec = target_dicts[qualified_target]
- configs = spec['configurations']
-
- part_of_all = qualified_target in needed_targets
- if limit_to_target_all and not part_of_all:
- continue
-
- relative_target = gyp.common.QualifiedTarget(relative_build_file, target,
- toolset)
- writer = AndroidMkWriter(android_top_dir)
- android_module = writer.Write(qualified_target, relative_target, base_path,
- output_file, spec, configs,
- part_of_all=part_of_all,
- write_alias_target=write_alias_targets,
- sdk_version=sdk_version)
- if android_module in android_modules:
- print ('ERROR: Android module names must be unique. The following '
- 'targets both generate Android module name %s.\n %s\n %s' %
- (android_module, android_modules[android_module],
- qualified_target))
- return
- android_modules[android_module] = qualified_target
-
- # Our root_makefile lives at the source root. Compute the relative path
- # from there to the output_file for including.
- mkfile_rel_path = gyp.common.RelativePath(output_file,
- os.path.dirname(makefile_path))
- include_list.add(mkfile_rel_path)
-
- root_makefile.write('GYP_CONFIGURATION ?= %s\n' % default_configuration)
- root_makefile.write('GYP_VAR_PREFIX ?=\n')
- root_makefile.write('GYP_HOST_VAR_PREFIX ?=\n')
- root_makefile.write('GYP_HOST_MULTILIB ?= first\n')
-
- # Write out the sorted list of includes.
- root_makefile.write('\n')
- for include_file in sorted(include_list):
- root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n')
- root_makefile.write('\n')
-
- if write_alias_targets:
- root_makefile.write(ALL_MODULES_FOOTER)
-
- root_makefile.close()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
deleted file mode 100644
index 17f5e6396c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ /dev/null
@@ -1,1221 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""cmake output module
-
-This module is under development and should be considered experimental.
-
-This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
-created for each configuration.
-
-This module's original purpose was to support editing in IDEs like KDevelop
-which use CMake for project management. It is also possible to use CMake to
-generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
-will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
-but build using CMake. As a result QtCreator editor is unaware of compiler
-defines. The generated CMakeLists.txt can also be used to build on Linux. There
-is currently no support for building on platforms other than Linux.
-
-The generated CMakeLists.txt should properly compile all projects. However,
-there is a mismatch between gyp and cmake with regard to linking. All attempts
-are made to work around this, but CMake sometimes sees -Wl,--start-group as a
-library and incorrectly repeats it. As a result the output of this generator
-should not be relied on for building.
-
-When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
-not be able to find the header file directories described in the generated
-CMakeLists.txt file.
-"""
-
-import multiprocessing
-import os
-import signal
-import string
-import subprocess
-import gyp.common
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_PREFIX': 'lib',
- 'SHARED_LIB_SUFFIX': '.so',
- 'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
- 'LIB_DIR': '${obj}.${TOOLSET}',
- 'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
- 'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
- 'PRODUCT_DIR': '${builddir}',
- 'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
- 'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
- 'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
- 'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
- 'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
- 'CONFIGURATION_NAME': '${configuration}',
-}
-
-FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
-
-generator_supports_multiple_toolsets = True
-generator_wants_static_library_dependencies_adjusted = True
-
-COMPILABLE_EXTENSIONS = {
- '.c': 'cc',
- '.cc': 'cxx',
- '.cpp': 'cxx',
- '.cxx': 'cxx',
- '.s': 's', # cc
- '.S': 's', # cc
-}
-
-
-def RemovePrefix(a, prefix):
- """Returns 'a' without 'prefix' if it starts with 'prefix'."""
- return a[len(prefix):] if a.startswith(prefix) else a
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
-
-def Compilable(filename):
- """Return true if the file is compilable (should be in OBJS)."""
- return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
-
-
-def Linkable(filename):
- """Return true if the file is linkable (should be on the link line)."""
- return filename.endswith('.o')
-
-
-def NormjoinPathForceCMakeSource(base_path, rel_path):
- """Resolves rel_path against base_path and returns the result.
-
- If rel_path is an absolute path it is returned unchanged.
- Otherwise it is resolved against base_path and normalized.
- If the result is a relative path, it is forced to be relative to the
- CMakeLists.txt.
- """
- if os.path.isabs(rel_path):
- return rel_path
- if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
- return rel_path
- # TODO: do we need to check base_path for absolute variables as well?
- return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
- os.path.normpath(os.path.join(base_path, rel_path)))
-
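-# Illustrative check (editor's sketch, POSIX paths assumed): relative inputs
-# are rebased onto the directory of the generated CMakeLists.txt, while
-# absolute paths and generator variables pass through unchanged.
-assert NormjoinPathForceCMakeSource('..', 'a/b') == '${CMAKE_CURRENT_LIST_DIR}/../a/b'
-assert NormjoinPathForceCMakeSource('..', '/abs/path') == '/abs/path'
-assert NormjoinPathForceCMakeSource('..', '${obj}/gen') == '${obj}/gen'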
-
-def NormjoinPath(base_path, rel_path):
- """Resolves rel_path against base_path and returns the result.
- TODO: what is this really used for?
- If rel_path begins with '$' it is returned unchanged.
- Otherwise it is resolved against base_path if relative, then normalized.
- """
- if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
- return rel_path
- return os.path.normpath(os.path.join(base_path, rel_path))
-
-
-def CMakeStringEscape(a):
- """Escapes the string 'a' for use inside a CMake string.
-
- This means escaping
- '\' otherwise it may be seen as modifying the next character
- '"' otherwise it will end the string
- ';' otherwise the string becomes a list
-
- The following do not need to be escaped
- '#' when the lexer is in string state, this does not start a comment
-
- The following case is not yet handled:
- '$' generator variables (like ${obj}) must not be escaped,
- but a literal $ should be; what is wanted is to know which
- $ come from generator variables.
- """
- return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
-
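-# Illustrative check (editor's sketch): backslashes are doubled first, then
-# semicolons and double quotes each gain one escaping backslash.
-assert CMakeStringEscape('a;b "c" \\d') == 'a\\;b \\"c\\" \\\\d'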
-
-def SetFileProperty(output, source_name, property_name, values, sep):
- """Given a set of source file, sets the given property on them."""
- output.write('set_source_files_properties(')
- output.write(source_name)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetFilesProperty(output, variable, property_name, values, sep):
- """Given a set of source files, sets the given property on them."""
- output.write('set_source_files_properties(')
- WriteVariable(output, variable)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetTargetProperty(output, target_name, property_name, values, sep=''):
- """Given a target, sets the given property."""
- output.write('set_target_properties(')
- output.write(target_name)
- output.write(' PROPERTIES ')
- output.write(property_name)
- output.write(' "')
- for value in values:
- output.write(CMakeStringEscape(value))
- output.write(sep)
- output.write('")\n')
-
-
-def SetVariable(output, variable_name, value):
- """Sets a CMake variable."""
- output.write('set(')
- output.write(variable_name)
- output.write(' "')
- output.write(CMakeStringEscape(value))
- output.write('")\n')
-
-
-def SetVariableList(output, variable_name, values):
- """Sets a CMake variable to a list."""
- if not values:
- return SetVariable(output, variable_name, "")
- if len(values) == 1:
- return SetVariable(output, variable_name, values[0])
- output.write('list(APPEND ')
- output.write(variable_name)
- output.write('\n "')
- output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
- output.write('")\n')
-
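-# Illustrative usage (editor's sketch, using a hypothetical buffer): an empty
-# list degrades to set(), a single value to a plain set(), and longer lists
-# to list(APPEND).
-import StringIO
-_example_buf = StringIO.StringIO()
-SetVariableList(_example_buf, 'srcs', ['a', 'b'])
-# _example_buf.getvalue() now reads (indentation approximate):
-#   list(APPEND srcs
-#     "a"
-#     "b")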
-
-def UnsetVariable(output, variable_name):
- """Unsets a CMake variable."""
- output.write('unset(')
- output.write(variable_name)
- output.write(')\n')
-
-
-def WriteVariable(output, variable_name, prepend=None):
- if prepend:
- output.write(prepend)
- output.write('${')
- output.write(variable_name)
- output.write('}')
-
-
-class CMakeTargetType(object):
- def __init__(self, command, modifier, property_modifier):
- self.command = command
- self.modifier = modifier
- self.property_modifier = property_modifier
-
-
-cmake_target_type_from_gyp_target_type = {
- 'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
- 'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
- 'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
- 'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
- 'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
-}
-
-
-def StringToCMakeTargetName(a):
- """Converts the given string 'a' to a valid CMake target name.
-
- All invalid characters are replaced by '_'.
- Invalid for cmake: ' ', '/', '(', ')', '"'
- Invalid for make: ':'
- Invalid for unknown reasons but cause failures: '.'
- """
- return a.translate(string.maketrans(' /():."', '_______'))
-
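-# Illustrative check (editor's sketch): every character listed above as
-# invalid for CMake or make is flattened to '_'.
-assert StringToCMakeTargetName('foo/bar.gyp:baz (1)') == 'foo_bar_gyp_baz__1_'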
-
-def WriteActions(target_name, actions, extra_sources, extra_deps,
- path_to_gyp, output):
- """Write CMake for the 'actions' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
- actions: the Gyp 'actions' dict for this target.
- extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
- extra_deps: [<cmake_taget>] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- for action in actions:
- action_name = StringToCMakeTargetName(action['action_name'])
- action_target_name = '%s__%s' % (target_name, action_name)
-
- inputs = action['inputs']
- inputs_name = action_target_name + '__input'
- SetVariableList(output, inputs_name,
- [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
-
- outputs = action['outputs']
- cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
- for out in outputs]
- outputs_name = action_target_name + '__output'
- SetVariableList(output, outputs_name, cmake_outputs)
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources.extend(zip(cmake_outputs, outputs))
-
- # add_custom_command
- output.write('add_custom_command(OUTPUT ')
- WriteVariable(output, outputs_name)
- output.write('\n')
-
- if len(dirs) > 0:
- for directory in dirs:
- output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
- output.write(directory)
- output.write('\n')
-
- output.write(' COMMAND ')
- output.write(gyp.common.EncodePOSIXShellList(action['action']))
- output.write('\n')
-
- output.write(' DEPENDS ')
- WriteVariable(output, inputs_name)
- output.write('\n')
-
- output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
- output.write(path_to_gyp)
- output.write('\n')
-
- output.write(' COMMENT ')
- if 'message' in action:
- output.write(action['message'])
- else:
- output.write(action_target_name)
- output.write('\n')
-
- output.write(' VERBATIM\n')
- output.write(')\n')
-
- # add_custom_target
- output.write('add_custom_target(')
- output.write(action_target_name)
- output.write('\n DEPENDS ')
- WriteVariable(output, outputs_name)
- output.write('\n SOURCES ')
- WriteVariable(output, inputs_name)
- output.write('\n)\n')
-
- extra_deps.append(action_target_name)
-
-
-def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
- if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
- if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
- return rel_path
- return NormjoinPathForceCMakeSource(base_path, rel_path)
-
-
-def WriteRules(target_name, rules, extra_sources, extra_deps,
- path_to_gyp, output):
- """Write CMake for the 'rules' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
- rules: the Gyp 'rules' dict for this target.
- extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
- extra_deps: [<cmake_target>] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- for rule in rules:
- rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
-
- inputs = rule.get('inputs', [])
- inputs_name = rule_name + '__input'
- SetVariableList(output, inputs_name,
- [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
- outputs = rule['outputs']
- var_outputs = []
-
- for count, rule_source in enumerate(rule.get('rule_sources', [])):
- action_name = rule_name + '_' + str(count)
-
- rule_source_dirname, rule_source_basename = os.path.split(rule_source)
- rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
-
- SetVariable(output, 'RULE_INPUT_PATH', rule_source)
- SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
- SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
- SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
- SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
-
- # Create variables for the outputs, as the 'local' variables will be unset.
- these_outputs = []
- for output_index, out in enumerate(outputs):
- output_name = action_name + '_' + str(output_index)
- SetVariable(output, output_name,
- NormjoinRulePathForceCMakeSource(path_to_gyp, out,
- rule_source))
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources.append(('${' + output_name + '}', out))
- these_outputs.append('${' + output_name + '}')
- var_outputs.append('${' + output_name + '}')
-
- # add_custom_command
- output.write('add_custom_command(OUTPUT\n')
- for out in these_outputs:
- output.write(' ')
- output.write(out)
- output.write('\n')
-
- for directory in dirs:
- output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
- output.write(directory)
- output.write('\n')
-
- output.write(' COMMAND ')
- output.write(gyp.common.EncodePOSIXShellList(rule['action']))
- output.write('\n')
-
- output.write(' DEPENDS ')
- WriteVariable(output, inputs_name)
- output.write(' ')
- output.write(NormjoinPath(path_to_gyp, rule_source))
- output.write('\n')
-
- # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
- # The cwd is the current build directory.
- output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
- output.write(path_to_gyp)
- output.write('\n')
-
- output.write(' COMMENT ')
- if 'message' in rule:
- output.write(rule['message'])
- else:
- output.write(action_name)
- output.write('\n')
-
- output.write(' VERBATIM\n')
- output.write(')\n')
-
- UnsetVariable(output, 'RULE_INPUT_PATH')
- UnsetVariable(output, 'RULE_INPUT_DIRNAME')
- UnsetVariable(output, 'RULE_INPUT_NAME')
- UnsetVariable(output, 'RULE_INPUT_ROOT')
- UnsetVariable(output, 'RULE_INPUT_EXT')
-
- # add_custom_target
- output.write('add_custom_target(')
- output.write(rule_name)
- output.write(' DEPENDS\n')
- for out in var_outputs:
- output.write(' ')
- output.write(out)
- output.write('\n')
- output.write('SOURCES ')
- WriteVariable(output, inputs_name)
- output.write('\n')
- for rule_source in rule.get('rule_sources', []):
- output.write(' ')
- output.write(NormjoinPath(path_to_gyp, rule_source))
- output.write('\n')
- output.write(')\n')
-
- extra_deps.append(rule_name)
-
-
-def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
- """Write CMake for the 'copies' in the target.
-
- Args:
- target_name: the name of the CMake target being generated.
- copies: the Gyp 'copies' dict for this target.
- extra_deps: [<cmake_target>] to append with generated targets.
- path_to_gyp: relative path from CMakeLists.txt being generated to
- the Gyp file in which the target being generated is defined.
- """
- copy_name = target_name + '__copies'
-
- # CMake gets upset by custom commands whose OUTPUT clause lists no files.
- have_copies = any(copy['files'] for copy in copies)
- if not have_copies:
- output.write('add_custom_target(')
- output.write(copy_name)
- output.write(')\n')
- extra_deps.append(copy_name)
- return
-
- class Copy(object):
- def __init__(self, ext, command):
- self.cmake_inputs = []
- self.cmake_outputs = []
- self.gyp_inputs = []
- self.gyp_outputs = []
- self.ext = ext
- self.inputs_name = None
- self.outputs_name = None
- self.command = command
-
- file_copy = Copy('', 'copy')
- dir_copy = Copy('_dirs', 'copy_directory')
-
- for copy in copies:
- files = copy['files']
- destination = copy['destination']
- for src in files:
- path = os.path.normpath(src)
- basename = os.path.split(path)[1]
- dst = os.path.join(destination, basename)
-
- copy = file_copy if os.path.basename(src) else dir_copy
-
- copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
- copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
- copy.gyp_inputs.append(src)
- copy.gyp_outputs.append(dst)
-
- for copy in (file_copy, dir_copy):
- if copy.cmake_inputs:
- copy.inputs_name = copy_name + '__input' + copy.ext
- SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
-
- copy.outputs_name = copy_name + '__output' + copy.ext
- SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
-
- # add_custom_command
- output.write('add_custom_command(\n')
-
- output.write('OUTPUT')
- for copy in (file_copy, dir_copy):
- if copy.outputs_name:
- WriteVariable(output, copy.outputs_name, ' ')
- output.write('\n')
-
- for copy in (file_copy, dir_copy):
- for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
- # 'cmake -E copy src dst' will create the 'dst' directory if needed.
- output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
- output.write(src)
- output.write(' ')
- output.write(dst)
- output.write("\n")
-
- output.write('DEPENDS')
- for copy in (file_copy, dir_copy):
- if copy.inputs_name:
- WriteVariable(output, copy.inputs_name, ' ')
- output.write('\n')
-
- output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
- output.write(path_to_gyp)
- output.write('\n')
-
- output.write('COMMENT Copying for ')
- output.write(target_name)
- output.write('\n')
-
- output.write('VERBATIM\n')
- output.write(')\n')
-
- # add_custom_target
- output.write('add_custom_target(')
- output.write(copy_name)
- output.write('\n DEPENDS')
- for copy in (file_copy, dir_copy):
- if copy.outputs_name:
- WriteVariable(output, copy.outputs_name, ' ')
- output.write('\n SOURCES')
- if file_copy.inputs_name:
- WriteVariable(output, file_copy.inputs_name, ' ')
- output.write('\n)\n')
-
- extra_deps.append(copy_name)
-
-
-def CreateCMakeTargetBaseName(qualified_target):
- """This is the name we would like the target to have."""
- _, gyp_target_name, gyp_target_toolset = (
- gyp.common.ParseQualifiedTarget(qualified_target))
- cmake_target_base_name = gyp_target_name
- if gyp_target_toolset and gyp_target_toolset != 'target':
- cmake_target_base_name += '_' + gyp_target_toolset
- return StringToCMakeTargetName(cmake_target_base_name)
-
-
-def CreateCMakeTargetFullName(qualified_target):
- """An unambiguous name for the target."""
- gyp_file, gyp_target_name, gyp_target_toolset = (
- gyp.common.ParseQualifiedTarget(qualified_target))
- cmake_target_full_name = gyp_file + ':' + gyp_target_name
- if gyp_target_toolset and gyp_target_toolset != 'target':
- cmake_target_full_name += '_' + gyp_target_toolset
- return StringToCMakeTargetName(cmake_target_full_name)
-
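-# Illustrative check (editor's sketch, assuming gyp's
-# 'path/file.gyp:name#toolset' qualified-target form): the base name carries
-# only a non-default toolset, while the full name also folds in the gyp file.
-assert CreateCMakeTargetBaseName('foo/bar.gyp:baz#host') == 'baz_host'
-assert CreateCMakeTargetFullName('foo/bar.gyp:baz#host') == 'foo_bar_gyp_baz_host'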
-
-class CMakeNamer(object):
- """Converts Gyp target names into CMake target names.
-
- CMake requires that target names be globally unique. One way to ensure
- this is to fully qualify the names of the targets. Unfortunately, this
- ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
- of just "chrome". If this generator were only interested in building, it
- would be possible to fully qualify all target names, then create
- unqualified target names which depend on all qualified targets which
- should have had that name. This is more or less what the 'make' generator
- does with aliases. However, one goal of this generator is to create CMake
- files for use with IDEs, and fully qualified names are not as
- user-friendly.
-
- Since target name collision is rare, we do the above only when required.
-
- Toolset variants are always qualified from the base, as this is required for
- building. However, it also makes sense for an IDE, as it is possible for
- defines to be different.
- """
- def __init__(self, target_list):
- self.cmake_target_base_names_conflicting = set()
-
- cmake_target_base_names_seen = set()
- for qualified_target in target_list:
- cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
-
- if cmake_target_base_name not in cmake_target_base_names_seen:
- cmake_target_base_names_seen.add(cmake_target_base_name)
- else:
- self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
-
- def CreateCMakeTargetName(self, qualified_target):
- base_name = CreateCMakeTargetBaseName(qualified_target)
- if base_name in self.cmake_target_base_names_conflicting:
- return CreateCMakeTargetFullName(qualified_target)
- return base_name
-
-
-def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output):
-
- # The make generator does this always.
- # TODO: It would be nice to be able to tell CMake all dependencies.
- circular_libs = generator_flags.get('circular', True)
-
- if not generator_flags.get('standalone', False):
- output.write('\n#')
- output.write(qualified_target)
- output.write('\n')
-
- gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
- rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
- rel_gyp_dir = os.path.dirname(rel_gyp_file)
-
- # Relative path from build dir to top dir.
- build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
- # Relative path from build dir to gyp dir.
- build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
-
- path_from_cmakelists_to_gyp = build_to_gyp
-
- spec = target_dicts.get(qualified_target, {})
- config = spec.get('configurations', {}).get(config_to_use, {})
-
- target_name = spec.get('target_name', '<missing target name>')
- target_type = spec.get('type', '<missing target type>')
- target_toolset = spec.get('toolset')
-
- cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
- if cmake_target_type is None:
- print ('Target %s has unknown target type %s, skipping.' %
- (target_name, target_type))
- return
-
- SetVariable(output, 'TARGET', target_name)
- SetVariable(output, 'TOOLSET', target_toolset)
-
- cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
-
- extra_sources = []
- extra_deps = []
-
- # Actions must come first, since they can generate more OBJs for use below.
- if 'actions' in spec:
- WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
- path_from_cmakelists_to_gyp, output)
-
- # Rules must be early like actions.
- if 'rules' in spec:
- WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
- path_from_cmakelists_to_gyp, output)
-
- # Copies
- if 'copies' in spec:
- WriteCopies(cmake_target_name, spec['copies'], extra_deps,
- path_from_cmakelists_to_gyp, output)
-
- # Target and sources
- srcs = spec.get('sources', [])
-
- # Gyp separates the sheep from the goats based on file extensions.
- # A full separation is done here because of flag handling (see below).
- s_sources = []
- c_sources = []
- cxx_sources = []
- linkable_sources = []
- other_sources = []
- for src in srcs:
- _, ext = os.path.splitext(src)
- src_type = COMPILABLE_EXTENSIONS.get(ext, None)
- src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
-
- if src_type == 's':
- s_sources.append(src_norm_path)
- elif src_type == 'cc':
- c_sources.append(src_norm_path)
- elif src_type == 'cxx':
- cxx_sources.append(src_norm_path)
- elif Linkable(ext):
- linkable_sources.append(src_norm_path)
- else:
- other_sources.append(src_norm_path)
-
- for extra_source in extra_sources:
- src, real_source = extra_source
- _, ext = os.path.splitext(real_source)
- src_type = COMPILABLE_EXTENSIONS.get(ext, None)
-
- if src_type == 's':
- s_sources.append(src)
- elif src_type == 'cc':
- c_sources.append(src)
- elif src_type == 'cxx':
- cxx_sources.append(src)
- elif Linkable(ext):
- linkable_sources.append(src)
- else:
- other_sources.append(src)
-
- s_sources_name = None
- if s_sources:
- s_sources_name = cmake_target_name + '__asm_srcs'
- SetVariableList(output, s_sources_name, s_sources)
-
- c_sources_name = None
- if c_sources:
- c_sources_name = cmake_target_name + '__c_srcs'
- SetVariableList(output, c_sources_name, c_sources)
-
- cxx_sources_name = None
- if cxx_sources:
- cxx_sources_name = cmake_target_name + '__cxx_srcs'
- SetVariableList(output, cxx_sources_name, cxx_sources)
-
- linkable_sources_name = None
- if linkable_sources:
- linkable_sources_name = cmake_target_name + '__linkable_srcs'
- SetVariableList(output, linkable_sources_name, linkable_sources)
-
- other_sources_name = None
- if other_sources:
- other_sources_name = cmake_target_name + '__other_srcs'
- SetVariableList(output, other_sources_name, other_sources)
-
- # CMake gets upset when executable targets provide no sources.
- # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
- dummy_sources_name = None
- has_sources = (s_sources_name or
- c_sources_name or
- cxx_sources_name or
- linkable_sources_name or
- other_sources_name)
- if target_type == 'executable' and not has_sources:
- dummy_sources_name = cmake_target_name + '__dummy_srcs'
- SetVariable(output, dummy_sources_name,
- "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
- output.write('if(NOT EXISTS "')
- WriteVariable(output, dummy_sources_name)
- output.write('")\n')
- output.write(' file(WRITE "')
- WriteVariable(output, dummy_sources_name)
- output.write('" "")\n')
- output.write("endif()\n")
-
-
- # CMake is opposed to setting linker directories and considers the practice
- # of setting linker directories dangerous. Instead, it favors the use of
- # find_library and passing absolute paths to target_link_libraries.
- # However, CMake does provide the command link_directories, which adds
- # link directories to targets defined after it is called.
- # As a result, link_directories must come before the target definition.
- # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
- library_dirs = config.get('library_dirs')
- if library_dirs is not None:
- output.write('link_directories(')
- for library_dir in library_dirs:
- output.write(' ')
- output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
- output.write('\n')
- output.write(')\n')
-
- output.write(cmake_target_type.command)
- output.write('(')
- output.write(cmake_target_name)
-
- if cmake_target_type.modifier is not None:
- output.write(' ')
- output.write(cmake_target_type.modifier)
-
- if s_sources_name:
- WriteVariable(output, s_sources_name, ' ')
- if c_sources_name:
- WriteVariable(output, c_sources_name, ' ')
- if cxx_sources_name:
- WriteVariable(output, cxx_sources_name, ' ')
- if linkable_sources_name:
- WriteVariable(output, linkable_sources_name, ' ')
- if other_sources_name:
- WriteVariable(output, other_sources_name, ' ')
- if dummy_sources_name:
- WriteVariable(output, dummy_sources_name, ' ')
-
- output.write(')\n')
-
- # Let CMake know if the 'all' target should depend on this target.
- exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
- else 'FALSE')
- SetTargetProperty(output, cmake_target_name,
- 'EXCLUDE_FROM_ALL', exclude_from_all)
- for extra_target_name in extra_deps:
- SetTargetProperty(output, extra_target_name,
- 'EXCLUDE_FROM_ALL', exclude_from_all)
-
- # Output name and location.
- if target_type != 'none':
- # Link as 'C' if there are no other files
- if not c_sources and not cxx_sources:
- SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
-
- # Mark uncompiled sources as uncompiled.
- if other_sources_name:
- output.write('set_source_files_properties(')
- WriteVariable(output, other_sources_name, '')
- output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
-
- # Mark object sources as linkable.
- if linkable_sources_name:
- output.write('set_source_files_properties(')
- WriteVariable(output, linkable_sources_name, '')
- output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
-
- # Output directory
- target_output_directory = spec.get('product_dir')
- if target_output_directory is None:
- if target_type in ('executable', 'loadable_module'):
- target_output_directory = generator_default_variables['PRODUCT_DIR']
- elif target_type == 'shared_library':
- target_output_directory = '${builddir}/lib.${TOOLSET}'
- elif spec.get('standalone_static_library', False):
- target_output_directory = generator_default_variables['PRODUCT_DIR']
- else:
- base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
- options.toplevel_dir)
- target_output_directory = '${obj}.${TOOLSET}'
- target_output_directory = (
- os.path.join(target_output_directory, base_path))
-
- cmake_target_output_directory = NormjoinPathForceCMakeSource(
- path_from_cmakelists_to_gyp,
- target_output_directory)
- SetTargetProperty(output,
- cmake_target_name,
- cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
- cmake_target_output_directory)
-
- # Output name
- default_product_prefix = ''
- default_product_name = target_name
- default_product_ext = ''
- if target_type == 'static_library':
- static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
- default_product_name = RemovePrefix(default_product_name,
- static_library_prefix)
- default_product_prefix = static_library_prefix
- default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
-
- elif target_type in ('loadable_module', 'shared_library'):
- shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
- default_product_name = RemovePrefix(default_product_name,
- shared_library_prefix)
- default_product_prefix = shared_library_prefix
- default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
-
- elif target_type != 'executable':
- print ('ERROR: What output file should be generated? '
- 'type %s target %s' % (target_type, target_name))
-
- product_prefix = spec.get('product_prefix', default_product_prefix)
- product_name = spec.get('product_name', default_product_name)
- product_ext = spec.get('product_extension')
- if product_ext:
- product_ext = '.' + product_ext
- else:
- product_ext = default_product_ext
-
- SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
- SetTargetProperty(output, cmake_target_name,
- cmake_target_type.property_modifier + '_OUTPUT_NAME',
- product_name)
- SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
-
- # Make the output of this target referenceable as a source.
- cmake_target_output_basename = product_prefix + product_name + product_ext
- cmake_target_output = os.path.join(cmake_target_output_directory,
- cmake_target_output_basename)
- SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
-
- # Includes
- includes = config.get('include_dirs')
- if includes:
- # This (target include directories) is what requires CMake 2.8.8
- includes_name = cmake_target_name + '__include_dirs'
- SetVariableList(output, includes_name,
- [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
- for include in includes])
- output.write('set_property(TARGET ')
- output.write(cmake_target_name)
- output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
- WriteVariable(output, includes_name, '')
- output.write(')\n')
-
- # Defines
- defines = config.get('defines')
- if defines is not None:
- SetTargetProperty(output,
- cmake_target_name,
- 'COMPILE_DEFINITIONS',
- defines,
- ';')
-
- # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
- # CMake currently does not have target C and CXX flags.
- # So, instead of doing...
-
- # cflags_c = config.get('cflags_c')
- # if cflags_c is not None:
- # SetTargetProperty(output, cmake_target_name,
- # 'C_COMPILE_FLAGS', cflags_c, ' ')
-
- # cflags_cc = config.get('cflags_cc')
- # if cflags_cc is not None:
- # SetTargetProperty(output, cmake_target_name,
- # 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
-
- # Instead we must...
- cflags = config.get('cflags', [])
- cflags_c = config.get('cflags_c', [])
- cflags_cxx = config.get('cflags_cc', [])
- if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
-
- elif c_sources and not (s_sources or cxx_sources):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_c)
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
- elif cxx_sources and not (s_sources or c_sources):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_cxx)
- SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
-
- else:
- # TODO: This is broken, one cannot generally set properties on files,
- # as other targets may require different properties on the same files.
- if s_sources and cflags:
- SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
-
- if c_sources and (cflags or cflags_c):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_c)
- SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
-
- if cxx_sources and (cflags or cflags_cxx):
- flags = []
- flags.extend(cflags)
- flags.extend(cflags_cxx)
- SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
-
- # Linker flags
- ldflags = config.get('ldflags')
- if ldflags is not None:
- SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
-
- # Note on Dependencies and Libraries:
- # CMake wants to handle link order, resolving the link line up front.
- # Gyp does not retain or enforce specifying enough information to do so.
- # So do as other gyp generators and use --start-group and --end-group.
- # Give CMake as little information as possible so that it doesn't mess it up.
-
- # Dependencies
- rawDeps = spec.get('dependencies', [])
-
- static_deps = []
- shared_deps = []
- other_deps = []
- for rawDep in rawDeps:
- dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
- dep_spec = target_dicts.get(rawDep, {})
- dep_target_type = dep_spec.get('type', None)
-
- if dep_target_type == 'static_library':
- static_deps.append(dep_cmake_name)
- elif dep_target_type == 'shared_library':
- shared_deps.append(dep_cmake_name)
- else:
- other_deps.append(dep_cmake_name)
-
- # Ensure all external dependencies are complete before internal dependencies;
- # extra_deps currently only depend on their own deps, so they would otherwise
- # run too early.
- if static_deps or shared_deps or other_deps:
- for extra_dep in extra_deps:
- output.write('add_dependencies(')
- output.write(extra_dep)
- output.write('\n')
- for deps in (static_deps, shared_deps, other_deps):
- for dep in gyp.common.uniquer(deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- output.write(')\n')
-
- linkable = target_type in ('executable', 'loadable_module', 'shared_library')
- other_deps.extend(extra_deps)
- if other_deps or (not linkable and (static_deps or shared_deps)):
- output.write('add_dependencies(')
- output.write(cmake_target_name)
- output.write('\n')
- for dep in gyp.common.uniquer(other_deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- if not linkable:
- for deps in (static_deps, shared_deps):
- for lib_dep in gyp.common.uniquer(deps):
- output.write(' ')
- output.write(lib_dep)
- output.write('\n')
- output.write(')\n')
-
- # Libraries
- if linkable:
- external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
- if external_libs or static_deps or shared_deps:
- output.write('target_link_libraries(')
- output.write(cmake_target_name)
- output.write('\n')
- if static_deps:
- write_group = circular_libs and len(static_deps) > 1
- if write_group:
- output.write('-Wl,--start-group\n')
- for dep in gyp.common.uniquer(static_deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- if write_group:
- output.write('-Wl,--end-group\n')
- if shared_deps:
- for dep in gyp.common.uniquer(shared_deps):
- output.write(' ')
- output.write(dep)
- output.write('\n')
- if external_libs:
- for lib in gyp.common.uniquer(external_libs):
- output.write(' ')
- output.write(lib)
- output.write('\n')
-
- output.write(')\n')
-
- UnsetVariable(output, 'TOOLSET')
- UnsetVariable(output, 'TARGET')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data,
- params, config_to_use):
- options = params['options']
- generator_flags = params['generator_flags']
-
- # generator_dir: relative path from pwd to where make puts build files.
- # Makes migrating from make to cmake easier; cmake doesn't put anything here.
- # Each Gyp configuration creates a different CMakeLists.txt file
- # to avoid incompatibilities between Gyp and CMake configurations.
- generator_dir = os.path.relpath(options.generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.normpath(os.path.join(generator_dir,
- output_dir,
- config_to_use))
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-
- output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
- gyp.common.EnsureDirExists(output_file)
-
- output = open(output_file, 'w')
- output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
- output.write('cmake_policy(VERSION 2.8.8)\n')
-
- gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
- output.write('project(')
- output.write(project_target)
- output.write(')\n')
-
- SetVariable(output, 'configuration', config_to_use)
-
- ar = None
- cc = None
- cxx = None
-
- make_global_settings = data[gyp_file].get('make_global_settings', [])
- build_to_top = gyp.common.InvertRelativePath(build_dir,
- options.toplevel_dir)
- for key, value in make_global_settings:
- if key == 'AR':
- ar = os.path.join(build_to_top, value)
- if key == 'CC':
- cc = os.path.join(build_to_top, value)
- if key == 'CXX':
- cxx = os.path.join(build_to_top, value)
-
- ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
- cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
- cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
-
- if ar:
- SetVariable(output, 'CMAKE_AR', ar)
- if cc:
- SetVariable(output, 'CMAKE_C_COMPILER', cc)
- if cxx:
- SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
-
- # The following appears to be as-yet undocumented.
- # http://public.kitware.com/Bug/view.php?id=8392
- output.write('enable_language(ASM)\n')
- # ASM-ATT does not support .S files.
- # output.write('enable_language(ASM-ATT)\n')
-
- if cc:
- SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
-
- SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
- SetVariable(output, 'obj', '${builddir}/obj')
- output.write('\n')
-
- # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
- # CMake by default names the object resulting from foo.c to be foo.c.o.
- # Gyp traditionally names the object resulting from foo.c foo.o.
- # This should be irrelevant, but some targets extract .o files from .a
- # and depend on the name of the extracted .o files.
- output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
- output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
- output.write('\n')
-
- # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
- # resulting in 'Argument list too long' errors.
- output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
- output.write('\n')
-
- namer = CMakeNamer(target_list)
-
- # The list of targets upon which the 'all' target should depend.
- # CMake has its own implicit 'all' target; one is not created explicitly.
- all_qualified_targets = set()
- for build_file in params['build_files']:
- for qualified_target in gyp.common.AllTargets(target_list,
- target_dicts,
- os.path.normpath(build_file)):
- all_qualified_targets.add(qualified_target)
-
- for qualified_target in target_list:
- WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output)
-
- output.close()
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- generator_flags = params['generator_flags']
-
- # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
- generator_dir = os.path.relpath(options.generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
-
- for config_name in configurations:
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.normpath(os.path.join(generator_dir,
- output_dir,
- config_name))
- arguments = ['cmake', '-G', 'Ninja']
- print 'Generating [%s]: %s' % (config_name, arguments)
- subprocess.check_call(arguments, cwd=build_dir)
-
- arguments = ['ninja', '-C', build_dir]
- print 'Building [%s]: %s' % (config_name, arguments)
- subprocess.check_call(arguments)
-
-
-def CallGenerateOutputForConfig(arglist):
- # Ignore the interrupt signal so that the parent process catches it and
- # kills all multiprocessing children.
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- target_list, target_dicts, data, params, config_name = arglist
- GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- user_config = params.get('generator_flags', {}).get('config', None)
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data,
- params, user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- if params['parallel']:
- try:
- pool = multiprocessing.Pool(len(config_names))
- arglists = []
- for config_name in config_names:
- arglists.append((target_list, target_dicts, data,
- params, config_name))
- pool.map(CallGenerateOutputForConfig, arglists)
- except KeyboardInterrupt, e:
- pool.terminate()
- raise e
- else:
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data,
- params, config_name)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
deleted file mode 100644
index 160eafe2ef..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import os
-import gyp
-import gyp.common
-import gyp.msvs_emulation
-import json
-import sys
-
-generator_supports_multiple_toolsets = True
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_filelist_paths = {
-}
-
-generator_default_variables = {
-}
-for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
- 'LIB_DIR', 'SHARED_LIB_DIR']:
- # Some gyp steps fail if these are empty(!).
- generator_default_variables[dirname] = 'dir'
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
- 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
- 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
- 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
- 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
- 'CONFIGURATION_NAME']:
- generator_default_variables[unused] = ''
-
-
-def CalculateVariables(default_variables, params):
- generator_flags = params.get('generator_flags', {})
- for key, val in generator_flags.items():
- default_variables.setdefault(key, val)
- default_variables.setdefault('OS', gyp.common.GetFlavor(params))
-
- flavor = gyp.common.GetFlavor(params)
-  if flavor == 'win':
- # Copy additional generator configuration data from VS, which is shared
- # by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-
-
-def CalculateGeneratorInputInfo(params):
- """Calculate the generator specific info that gets fed to input (called by
- gyp)."""
- generator_flags = params.get('generator_flags', {})
- if generator_flags.get('adjust_static_libraries', False):
- global generator_wants_static_library_dependencies_adjusted
- generator_wants_static_library_dependencies_adjusted = True
-
- toplevel = params['options'].toplevel_dir
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = generator_flags.get('output_dir', 'out')
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, generator_dir, output_dir, 'gypfiles'))
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': toplevel,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-def GenerateOutput(target_list, target_dicts, data, params):
- # Map of target -> list of targets it depends on.
- edges = {}
-
- # Queue of targets to visit.
- targets_to_visit = target_list[:]
-
-  while targets_to_visit:
- target = targets_to_visit.pop()
- if target in edges:
- continue
- edges[target] = []
-
- for dep in target_dicts[target].get('dependencies', []):
- edges[target].append(dep)
- targets_to_visit.append(dep)
-
- try:
- filepath = params['generator_flags']['output_dir']
- except KeyError:
- filepath = '.'
- filename = os.path.join(filepath, 'dump.json')
- f = open(filename, 'w')
- json.dump(edges, f)
- f.close()
- print 'Wrote json to %s.' % filename
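-
-
-# Editorial sketch, not part of the original file: a minimal illustration of
-# the shape of dump.json for a hypothetical project in which target 'a'
-# depends on target 'b'.  The target names below are invented.
-def _ExampleEdges():
-  edges = {'a.gyp:a#target': ['b/b.gyp:b#target'],
-           'b/b.gyp:b#target': []}
-  assert json.dumps(edges, sort_keys=True) == (
-      '{"a.gyp:a#target": ["b/b.gyp:b#target"], "b/b.gyp:b#target": []}')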
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
deleted file mode 100644
index f4c7c12f59..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ /dev/null
@@ -1,424 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""GYP backend that generates Eclipse CDT settings files.
-
-This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
-files that can be imported into an Eclipse CDT project. The XML file contains a
-list of include paths and symbols (i.e. defines).
-
-Because a full .cproject definition is not created by this generator, it's not
-possible to properly define the include dirs and symbols for each file
-individually. Instead, one set of includes/symbols is generated for the entire
-project. This works fairly well (and is a vast improvement in general), but may
-still result in a few indexer issues here and there.
-
-This generator has no automated tests, so expect it to be broken.
-"""
-
-from xml.sax.saxutils import escape
-import os.path
-import subprocess
-import gyp
-import gyp.common
-import gyp.msvs_emulation
-import shlex
-import xml.etree.cElementTree as ET
-
-generator_wants_static_library_dependencies_adjusted = False
-
-generator_default_variables = {
-}
-
-for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
- # Some gyp steps fail if these are empty(!), so we convert them to variables
- generator_default_variables[dirname] = '$' + dirname
-
-for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
- 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
- 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
- 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
- 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
- 'CONFIGURATION_NAME']:
- generator_default_variables[unused] = ''
-
-# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
-# part of the path when dealing with generated headers. This value will be
-# replaced dynamically for each configuration.
-generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
- '$SHARED_INTERMEDIATE_DIR'
-
-
-def CalculateVariables(default_variables, params):
- generator_flags = params.get('generator_flags', {})
- for key, val in generator_flags.items():
- default_variables.setdefault(key, val)
- flavor = gyp.common.GetFlavor(params)
- default_variables.setdefault('OS', flavor)
- if flavor == 'win':
- # Copy additional generator configuration data from VS, which is shared
- # by the Eclipse generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
-
-
-def CalculateGeneratorInputInfo(params):
- """Calculate the generator specific info that gets fed to input (called by
- gyp)."""
- generator_flags = params.get('generator_flags', {})
- if generator_flags.get('adjust_static_libraries', False):
- global generator_wants_static_library_dependencies_adjusted
- generator_wants_static_library_dependencies_adjusted = True
-
-
-def GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs, config_name, params,
- compiler_path):
- """Calculate the set of include directories to be used.
-
- Returns:
- A list including all the include_dir's specified for every target followed
- by any include directories that were added as cflag compiler options.
- """
-
- gyp_includes_set = set()
- compiler_includes_list = []
-
- # Find compiler's default include dirs.
- if compiler_path:
- command = shlex.split(compiler_path)
- command.extend(['-E', '-xc++', '-v', '-'])
- proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- output = proc.communicate()[1]
- # Extract the list of include dirs from the output, which has this format:
- # ...
- # #include "..." search starts here:
- # #include <...> search starts here:
- # /usr/include/c++/4.6
- # /usr/local/include
- # End of search list.
- # ...
- in_include_list = False
- for line in output.splitlines():
- if line.startswith('#include'):
- in_include_list = True
- continue
- if line.startswith('End of search list.'):
- break
- if in_include_list:
- include_dir = line.strip()
- if include_dir not in compiler_includes_list:
- compiler_includes_list.append(include_dir)
-
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'win':
- generator_flags = params.get('generator_flags', {})
- for target_name in target_list:
- target = target_dicts[target_name]
- if config_name in target['configurations']:
- config = target['configurations'][config_name]
-
- # Look for any include dirs that were explicitly added via cflags. This
- # may be done in gyp files to force certain includes to come at the end.
- # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
- # remove this.
- if flavor == 'win':
- msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
- cflags = msvs_settings.GetCflags(config_name)
- else:
- cflags = config['cflags']
- for cflag in cflags:
- if cflag.startswith('-I'):
- include_dir = cflag[2:]
- if include_dir not in compiler_includes_list:
- compiler_includes_list.append(include_dir)
-
- # Find standard gyp include dirs.
-      if 'include_dirs' in config:
- include_dirs = config['include_dirs']
- for shared_intermediate_dir in shared_intermediate_dirs:
- for include_dir in include_dirs:
- include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
- shared_intermediate_dir)
- if not os.path.isabs(include_dir):
- base_dir = os.path.dirname(target_name)
-
- include_dir = base_dir + '/' + include_dir
- include_dir = os.path.abspath(include_dir)
-
- gyp_includes_set.add(include_dir)
-
- # Generate a list that has all the include dirs.
- all_includes_list = list(gyp_includes_set)
- all_includes_list.sort()
- for compiler_include in compiler_includes_list:
-    if compiler_include not in gyp_includes_set:
- all_includes_list.append(compiler_include)
-
- # All done.
- return all_includes_list
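-
-
-# Editorial sketch, not part of the original file: the stderr scraping above
-# is equivalent to running something like `echo | gcc -E -xc++ -v -` and
-# keeping the paths between the "search starts here:" and "End of search
-# list." markers.  The canned sample below exercises the same state machine.
-def _ExampleParseSearchList():
-  sample = ('#include <...> search starts here:\n'
-            ' /usr/include/c++/4.6\n'
-            ' /usr/local/include\n'
-            'End of search list.\n')
-  dirs = []
-  in_list = False
-  for line in sample.splitlines():
-    if line.startswith('#include'):
-      in_list = True
-      continue
-    if line.startswith('End of search list.'):
-      break
-    if in_list:
-      dirs.append(line.strip())
-  assert dirs == ['/usr/include/c++/4.6', '/usr/local/include']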
-
-
-def GetCompilerPath(target_list, data, options):
- """Determine a command that can be used to invoke the compiler.
-
- Returns:
- If this is a gyp project that has explicit make settings, try to determine
- the compiler from that. Otherwise, see if a compiler was specified via the
- CC_target environment variable.
- """
- # First, see if the compiler is configured in make's settings.
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings_dict = data[build_file].get('make_global_settings', {})
- for key, value in make_global_settings_dict:
- if key in ['CC', 'CXX']:
- return os.path.join(options.toplevel_dir, value)
-
- # Check to see if the compiler was specified as an environment variable.
- for key in ['CC_target', 'CC', 'CXX']:
- compiler = os.environ.get(key)
- if compiler:
- return compiler
-
- return 'gcc'
-
-
-def GetAllDefines(target_list, target_dicts, data, config_name, params,
- compiler_path):
- """Calculate the defines for a project.
-
- Returns:
-    A dict that includes explicit defines declared in gyp files along with all of
- the default defines that the compiler uses.
- """
-
- # Get defines declared in the gyp files.
- all_defines = {}
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'win':
- generator_flags = params.get('generator_flags', {})
- for target_name in target_list:
- target = target_dicts[target_name]
-
- if flavor == 'win':
- msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
- extra_defines = msvs_settings.GetComputedDefines(config_name)
- else:
- extra_defines = []
- if config_name in target['configurations']:
- config = target['configurations'][config_name]
- target_defines = config['defines']
- else:
- target_defines = []
- for define in target_defines + extra_defines:
- split_define = define.split('=', 1)
- if len(split_define) == 1:
- split_define.append('1')
- if split_define[0].strip() in all_defines:
- # Already defined
- continue
- all_defines[split_define[0].strip()] = split_define[1].strip()
- # Get default compiler defines (if possible).
- if flavor == 'win':
- return all_defines # Default defines already processed in the loop above.
- if compiler_path:
- command = shlex.split(compiler_path)
- command.extend(['-E', '-dM', '-'])
- cpp_proc = subprocess.Popen(args=command, cwd='.',
- stdin=subprocess.PIPE, stdout=subprocess.PIPE)
- cpp_output = cpp_proc.communicate()[0]
- cpp_lines = cpp_output.split('\n')
- for cpp_line in cpp_lines:
- if not cpp_line.strip():
- continue
- cpp_line_parts = cpp_line.split(' ', 2)
- key = cpp_line_parts[1]
- if len(cpp_line_parts) >= 3:
- val = cpp_line_parts[2]
- else:
- val = '1'
- all_defines[key] = val
-
- return all_defines
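-
-
-# Editorial sketch, not part of the original file: the `-E -dM` output parsed
-# above consists of lines of the form '#define KEY VALUE', where VALUE may be
-# absent (the code then falls back to '1').
-def _ExampleParseDumpedMacros():
-  parts = '#define __STDC__ 1'.split(' ', 2)
-  assert (parts[1], parts[2]) == ('__STDC__', '1')
-  parts = '#define NDEBUG'.split(' ', 2)
-  assert len(parts) == 2  # no value present; GetAllDefines stores '1'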
-
-
-def WriteIncludePaths(out, eclipse_langs, include_dirs):
- """Write the includes section of a CDT settings export file."""
-
- out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
- 'settingswizards.IncludePaths">\n')
- out.write(' <language name="holder for library settings"></language>\n')
- for lang in eclipse_langs:
- out.write(' <language name="%s">\n' % lang)
- for include_dir in include_dirs:
- out.write(' <includepath workspace_path="false">%s</includepath>\n' %
- include_dir)
- out.write(' </language>\n')
- out.write(' </section>\n')
-
-
-def WriteMacros(out, eclipse_langs, defines):
- """Write the macros section of a CDT settings export file."""
-
- out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
- 'settingswizards.Macros">\n')
- out.write(' <language name="holder for library settings"></language>\n')
- for lang in eclipse_langs:
- out.write(' <language name="%s">\n' % lang)
- for key in sorted(defines.iterkeys()):
- out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
- (escape(key), escape(defines[key])))
- out.write(' </language>\n')
- out.write(' </section>\n')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name):
- options = params['options']
- generator_flags = params.get('generator_flags', {})
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
- config_name)
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
- # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
- # SHARED_INTERMEDIATE_DIR. Include both possible locations.
- shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
- os.path.join(toplevel_build, 'gen')]
-
- GenerateCdtSettingsFile(target_list,
- target_dicts,
- data,
- params,
- config_name,
- os.path.join(toplevel_build,
- 'eclipse-cdt-settings.xml'),
- options,
- shared_intermediate_dirs)
- GenerateClasspathFile(target_list,
- target_dicts,
- options.toplevel_dir,
- toplevel_build,
- os.path.join(toplevel_build,
- 'eclipse-classpath.xml'))
-
-
-def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
- config_name, out_name, options,
- shared_intermediate_dirs):
- gyp.common.EnsureDirExists(out_name)
- with open(out_name, 'w') as out:
- out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
- out.write('<cdtprojectproperties>\n')
-
- eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
- 'GNU C++', 'GNU C', 'Assembly']
- compiler_path = GetCompilerPath(target_list, data, options)
- include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
- shared_intermediate_dirs,
- config_name, params, compiler_path)
- WriteIncludePaths(out, eclipse_langs, include_dirs)
- defines = GetAllDefines(target_list, target_dicts, data, config_name,
- params, compiler_path)
- WriteMacros(out, eclipse_langs, defines)
-
- out.write('</cdtprojectproperties>\n')
-
-
-def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
- toplevel_build, out_name):
- '''Generates a classpath file suitable for symbol navigation and code
- completion of Java code (such as in Android projects) by finding all
- .java and .jar files used as action inputs.'''
- gyp.common.EnsureDirExists(out_name)
- result = ET.Element('classpath')
-
- def AddElements(kind, paths):
- # First, we need to normalize the paths so they are all relative to the
- # toplevel dir.
- rel_paths = set()
- for path in paths:
- if os.path.isabs(path):
- rel_paths.add(os.path.relpath(path, toplevel_dir))
- else:
- rel_paths.add(path)
-
- for path in sorted(rel_paths):
- entry_element = ET.SubElement(result, 'classpathentry')
- entry_element.set('kind', kind)
- entry_element.set('path', path)
-
- AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
- AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
-  # Include the standard JRE container.
- AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
- # Include a dummy out folder so that Eclipse doesn't use the default /bin
- # folder in the root of the project.
- AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
-
- ET.ElementTree(result).write(out_name)
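-
-# Editorial note, not part of the original file: the emitted file looks
-# roughly like the following (paths are hypothetical):
-#   <classpath>
-#     <classpathentry kind="lib" path="third_party/foo.jar" />
-#     <classpathentry kind="src" path="java/src" />
-#     <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER" />
-#     <classpathentry kind="output" path="out/Debug/.eclipse-java-build" />
-#   </classpath>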
-
-
-def GetJavaJars(target_list, target_dicts, toplevel_dir):
- '''Generates a sequence of all .jars used as inputs.'''
- for target_name in target_list:
- target = target_dicts[target_name]
- for action in target.get('actions', []):
- for input_ in action['inputs']:
- if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
- if os.path.isabs(input_):
- yield input_
- else:
- yield os.path.join(os.path.dirname(target_name), input_)
-
-
-def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
- '''Generates a sequence of all likely java package root directories.'''
- for target_name in target_list:
- target = target_dicts[target_name]
- for action in target.get('actions', []):
- for input_ in action['inputs']:
- if (os.path.splitext(input_)[1] == '.java' and
- not input_.startswith('$')):
- dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
- input_))
-          # If there is a parent 'src' or 'java' folder, navigate up to it -
-          # these are canonical package root names in Chromium. This will
-          # break if 'src' or 'java' appears as a directory name inside the
-          # package structure. This could be further improved by inspecting
-          # the java file for the package name if this proves too fragile in
-          # practice.
- parent_search = dir_
- while os.path.basename(parent_search) not in ['src', 'java']:
- parent_search, _ = os.path.split(parent_search)
- if not parent_search or parent_search == toplevel_dir:
- # Didn't find a known root, just return the original path
- yield dir_
- break
- else:
- yield parent_search
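-
-
-# Editorial sketch, not part of the original file: for a (hypothetical) input
-# under chrome/android/java/src/org/foo, the walk above stops at the 'src'
-# component and yields chrome/android/java/src as the package root.
-def _ExamplePackageRootWalk():
-  dir_ = 'chrome/android/java/src/org/foo'
-  while os.path.basename(dir_) not in ['src', 'java']:
-    dir_, _ = os.path.split(dir_)
-  assert dir_ == 'chrome/android/java/src'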
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Generate an XML settings file that can be imported into a CDT project."""
-
- if params['options'].generator_output:
- raise NotImplementedError("--generator_output not implemented for eclipse")
-
- user_config = params.get('generator_flags', {}).get('config', None)
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
deleted file mode 100644
index 3efdb9966a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypd output module
-
-This module produces gyp input as its output. Output files are given the
-.gypd extension to avoid overwriting the .gyp files that they are generated
-from. Internal references to .gyp files (such as those found in
-"dependencies" sections) are not adjusted to point to .gypd files instead;
-unlike other paths, which are relative to the .gyp or .gypd file, such paths
-are relative to the directory from which gyp was run to create the .gypd file.
-
-This generator module is intended to be a sample and a debugging aid, hence
-the "d" for "debug" in .gypd. It is useful to inspect the results of the
-various merges, expansions, and conditional evaluations performed by gyp
-and to see a representation of what would be fed to a generator module.
-
-It's not advisable to rename .gypd files produced by this module to .gyp,
-because they will have all merges, expansions, and evaluations already
-performed and the relevant constructs not present in the output; paths to
-dependencies may be wrong; and various sections that do not belong in .gyp
-files such as "included_files" and "*_excluded" will be present.
-Output will also be stripped of comments. This is not intended to be a
-general-purpose gyp pretty-printer; for that, you probably just want to
-run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
-comments but won't do all of the other things done to this module's output.
-
-The specific formatting of the output generated by this module is subject
-to change.
-"""
-
-
-import gyp.common
-import errno
-import os
-import pprint
-
-
-# These variables should just be spit back out as variable references.
-_generator_identity_variables = [
- 'CONFIGURATION_NAME',
- 'EXECUTABLE_PREFIX',
- 'EXECUTABLE_SUFFIX',
- 'INTERMEDIATE_DIR',
- 'LIB_DIR',
- 'PRODUCT_DIR',
- 'RULE_INPUT_ROOT',
- 'RULE_INPUT_DIRNAME',
- 'RULE_INPUT_EXT',
- 'RULE_INPUT_NAME',
- 'RULE_INPUT_PATH',
- 'SHARED_INTERMEDIATE_DIR',
- 'SHARED_LIB_DIR',
- 'SHARED_LIB_PREFIX',
- 'SHARED_LIB_SUFFIX',
- 'STATIC_LIB_PREFIX',
- 'STATIC_LIB_SUFFIX',
-]
-
-# gypd doesn't define a default value for OS like many other generator
-# modules. Specify "-D OS=whatever" on the command line to provide a value.
-generator_default_variables = {
-}
-
-# gypd supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# TODO(mark): This always uses <, which isn't right. The input module should
-# notify the generator to tell it which phase it is operating in, and this
-# module should use < for the early phase and then switch to > for the late
-# phase. Bonus points for carrying @ back into the output too.
-for v in _generator_identity_variables:
- generator_default_variables[v] = '<(%s)' % v
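-
-# Editorial note, not part of the original file: after the loop above, e.g.
-# generator_default_variables['PRODUCT_DIR'] == '<(PRODUCT_DIR)', so each
-# identity variable is echoed back verbatim into the .gypd output.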
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- output_files = {}
- for qualified_target in target_list:
- [input_file, target] = \
- gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
-
- if input_file[-4:] != '.gyp':
- continue
- input_file_stem = input_file[:-4]
- output_file = input_file_stem + params['options'].suffix + '.gypd'
-
-    if output_file not in output_files:
- output_files[output_file] = input_file
-
- for output_file, input_file in output_files.iteritems():
- output = open(output_file, 'w')
- pprint.pprint(data[input_file], output)
- output.close()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
deleted file mode 100644
index bd405f43a9..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""gypsh output module
-
-gypsh is a GYP shell. It's not really a generator per se. All it does is
-fire up an interactive Python session with a few local variables set to the
-variables passed to the generator. Like gypd, it's intended as a debugging
-aid, to facilitate the exploration of .gyp structures after being processed
-by the input module.
-
-The expected usage is "gyp -f gypsh -D OS=desired_os".
-"""
-
-
-import code
-import sys
-
-
-# All of this stuff about generator variables was lovingly ripped from gypd.py.
-# That module has a much better description of what's going on and why.
-_generator_identity_variables = [
- 'EXECUTABLE_PREFIX',
- 'EXECUTABLE_SUFFIX',
- 'INTERMEDIATE_DIR',
- 'PRODUCT_DIR',
- 'RULE_INPUT_ROOT',
- 'RULE_INPUT_DIRNAME',
- 'RULE_INPUT_EXT',
- 'RULE_INPUT_NAME',
- 'RULE_INPUT_PATH',
- 'SHARED_INTERMEDIATE_DIR',
-]
-
-generator_default_variables = {
-}
-
-for v in _generator_identity_variables:
- generator_default_variables[v] = '<(%s)' % v
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- locals = {
- 'target_list': target_list,
- 'target_dicts': target_dicts,
- 'data': data,
- }
-
- # Use a banner that looks like the stock Python one and like what
- # code.interact uses by default, but tack on something to indicate what
- # locals are available, and identify gypsh.
- banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
- (sys.version, sys.platform, repr(sorted(locals.keys())))
-
- code.interact(banner, local=locals)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
deleted file mode 100644
index 64b9dd267b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ /dev/null
@@ -1,2220 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# Notes:
-#
-# This is all roughly based on the Makefile system used by the Linux
-# kernel, but is a non-recursive make -- we put the entire dependency
-# graph in front of make and let it figure it out.
-#
-# The code below generates a separate .mk file for each target, but
-# all are sourced by the top-level Makefile. This means that all
-# variables in .mk-files clobber one another. Be careful to use :=
-# where appropriate for immediate evaluation, and similarly to watch
-# that you're not relying on a variable value to last between different
-# .mk files.
-#
-# TODOs:
-#
-# Global settings and utility functions are currently stuffed in the
-# toplevel Makefile. It may make sense to generate some .mk files on
-# the side to keep the files readable.
-
-import os
-import re
-import sys
-import subprocess
-import gyp
-import gyp.common
-import gyp.xcode_emulation
-from gyp.common import GetEnvironFallback
-from gyp.common import GypError
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
- 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
- 'PRODUCT_DIR': '$(builddir)',
- 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
- 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
- 'RULE_INPUT_PATH': '$(abspath $<)',
- 'RULE_INPUT_EXT': '$(suffix $<)',
- 'RULE_INPUT_NAME': '$(notdir $<)',
- 'CONFIGURATION_NAME': '$(BUILDTYPE)',
-}
-
-# Make supports multiple toolsets
-generator_supports_multiple_toolsets = True
-
-# Request sorted dependencies in the order from dependents to dependencies.
-generator_wants_sorted_dependencies = False
-
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'mac':
- default_variables.setdefault('OS', 'mac')
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
- default_variables.setdefault('SHARED_LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
- default_variables.setdefault('LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
-
- # Copy additional generator configuration data from Xcode, which is shared
- # by the Mac Make generator.
- import gyp.generator.xcode as xcode_generator
- global generator_additional_non_configuration_keys
- generator_additional_non_configuration_keys = getattr(xcode_generator,
- 'generator_additional_non_configuration_keys', [])
- global generator_additional_path_sections
- generator_additional_path_sections = getattr(xcode_generator,
- 'generator_additional_path_sections', [])
- global generator_extra_sources_for_rules
- generator_extra_sources_for_rules = getattr(xcode_generator,
- 'generator_extra_sources_for_rules', [])
- COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
- else:
- operating_system = flavor
- if flavor == 'android':
- operating_system = 'linux' # Keep this legacy behavior for now.
- default_variables.setdefault('OS', operating_system)
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
-    default_variables.setdefault('SHARED_LIB_DIR', '$(builddir)/lib.$(TOOLSET)')
- default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
-
-
-def CalculateGeneratorInputInfo(params):
- """Calculate the generator specific info that gets fed to input (called by
- gyp)."""
- generator_flags = params.get('generator_flags', {})
- android_ndk_version = generator_flags.get('android_ndk_version', None)
- # Android NDK requires a strict link order.
- if android_ndk_version:
- global generator_wants_sorted_dependencies
- generator_wants_sorted_dependencies = True
-
- output_dir = params['options'].generator_output or \
- params['options'].toplevel_dir
- builddir_name = generator_flags.get('output_dir', 'out')
- qualified_out_dir = os.path.normpath(os.path.join(
- output_dir, builddir_name, 'gypfiles'))
-
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': params['options'].toplevel_dir,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-
-# The .d checking code below uses these functions:
-# wildcard, sort, foreach, shell, wordlist
-# wildcard can handle spaces, the rest can't.
-# Since I could find no way to make foreach work with spaces in filenames
-# correctly, the .d files have spaces replaced with another character. The .d
-# file for
-# Chromium\ Framework.framework/foo
-# is for example
-# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
-# This is the replacement character.
-SPACE_REPLACEMENT = '?'
-
-
-LINK_COMMANDS_LINUX = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
-
-# Due to circular dependencies between libraries :(, we wrap the
-# special "figure out circular dependencies" flags around the entire
-# input list during linking.
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
-
-# We support two kinds of shared objects (.so):
-# 1) shared_library, which is just bundling together many dependent libraries
-# into a link line.
-# 2) loadable_module, which is generating a module intended for dlopen().
-#
-# They differ only slightly:
-# In the former case, we want to package all dependent code into the .so.
-# In the latter case, we want to package just the API exposed by the
-# outermost module.
-# This means shared_library uses --whole-archive, while loadable_module doesn't.
-# (Note that --whole-archive is incompatible with the --start-group used in
-# normal linking.)
-
-# Other shared-object link notes:
-# - Set SONAME to the library filename so our binaries don't reference
-# the local, absolute paths used on the link command-line.
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-"""
-
-LINK_COMMANDS_MAC = """\
-quiet_cmd_alink = LIBTOOL-STATIC $@
-cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-LINK_COMMANDS_ANDROID = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
-
-# Due to circular dependencies between libraries :(, we wrap the
-# special "figure out circular dependencies" flags around the entire
-# input list during linking.
-quiet_cmd_link = LINK($(TOOLSET)) $@
-quiet_cmd_link_host = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
-cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-# Other shared-object link notes:
-# - Set SONAME to the library filename so our binaries don't reference
-# the local, absolute paths used on the link command-line.
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
-quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-LINK_COMMANDS_AIX = """\
-quiet_cmd_alink = AR($(TOOLSET)) $@
-cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
-
-quiet_cmd_alink_thin = AR($(TOOLSET)) $@
-cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
-
-quiet_cmd_link = LINK($(TOOLSET)) $@
-cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@
-cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
-
-quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-"""
-
-
-# Header of toplevel Makefile.
-# This should go into the build tree, but it's easier to keep it here for now.
-SHARED_HEADER = ("""\
-# We borrow heavily from the kernel build setup, though we are simpler since
-# we don't have Kconfig tweaking settings on us.
-
-# The implicit make rules have it looking for RCS files, among other things.
-# We instead explicitly write all the rules we care about.
-# It's even quicker (saves ~200ms) to pass -r on the command line.
-MAKEFLAGS=-r
-
-# The source directory tree.
-srcdir := %(srcdir)s
-abs_srcdir := $(abspath $(srcdir))
-
-# The name of the builddir.
-builddir_name ?= %(builddir)s
-
-# The V=1 flag on command line makes us verbosely print command lines.
-ifdef V
- quiet=
-else
- quiet=quiet_
-endif
-
-# Specify BUILDTYPE=Release on the command line for a release build.
-BUILDTYPE ?= %(default_configuration)s
-
-# Directory all our build output goes into.
-# Note that this must be two directories beneath src/ for unit tests to pass,
-# as they reach into the src/ directory for data with relative paths.
-builddir ?= $(builddir_name)/$(BUILDTYPE)
-abs_builddir := $(abspath $(builddir))
-depsdir := $(builddir)/.deps
-
-# Object output directory.
-obj := $(builddir)/obj
-abs_obj := $(abspath $(obj))
-
-# We build up a list of every single one of the targets so we can slurp in the
-# generated dependency rule Makefiles in one pass.
-all_deps :=
-
-%(make_global_settings)s
-
-CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
-CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
-LINK.target ?= %(LINK.target)s
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
-# C++ apps need to be linked with g++.
-LINK ?= $(CXX.target)
-
-# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
-# to replicate this environment fallback in make as well.
-CC.host ?= %(CC.host)s
-CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
-CXX.host ?= %(CXX.host)s
-CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
-LINK.host ?= %(LINK.host)s
-LDFLAGS.host ?=
-AR.host ?= %(AR.host)s
-
-# Define a dir function that can handle spaces.
-# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
-# "leading spaces cannot appear in the text of the first argument as written.
-# These characters can be put into the argument value by variable substitution."
-empty :=
-space := $(empty) $(empty)
-
-# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
-replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
-unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
-dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
-
-# Flags to make gcc output dependency info. Note that you need to be
-# careful here to use the flags that ccache and distcc can understand.
-# We write to a dep file on the side first and then rename at the end
-# so we can't end up with a broken dep file.
-depfile = $(depsdir)/$(call replace_spaces,$@).d
-DEPFLAGS = -MMD -MF $(depfile).raw
-
-# We have to fixup the deps output in a few ways.
-# (1) the file output should mention the proper .o file.
-# ccache or distcc lose the path to the target, so we convert a rule of
-# the form:
-# foobar.o: DEP1 DEP2
-# into
-# path/to/foobar.o: DEP1 DEP2
-# (2) we want missing files not to cause us to fail to build.
-# We want to rewrite
-# foobar.o: DEP1 DEP2 \\
-# DEP3
-# to
-# DEP1:
-# DEP2:
-# DEP3:
-# so if the files are missing, they're just considered phony rules.
-# We have to do some pretty insane escaping to get those backslashes
-# and dollar signs past make, the shell, and sed at the same time.
-# Doesn't work with spaces, but that's fine: .d files have spaces in
-# their names replaced with other characters."""
-r"""
-define fixup_dep
-# The depfile may not exist if the input file didn't have any #includes.
-touch $(depfile).raw
-# Fixup path as in (1).
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
-# Add extra rules as in (2).
-# We remove slashes and replace spaces with new lines;
-# remove blank lines;
-# delete the first line and append a colon to the remaining lines.
-sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
- grep -v '^$$' |\
- sed -e 1d -e 's|$$|:|' \
- >> $(depfile)
-rm $(depfile).raw
-endef
-"""
-"""
-# Command definitions:
-# - cmd_foo is the actual command to run;
-# - quiet_cmd_foo is the brief-output summary of the command.
-
-quiet_cmd_cc = CC($(TOOLSET)) $@
-cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
-
-quiet_cmd_cxx = CXX($(TOOLSET)) $@
-cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-%(extra_commands)s
-quiet_cmd_touch = TOUCH $@
-cmd_touch = touch $@
-
-quiet_cmd_copy = COPY $@
-# send stderr to /dev/null to ignore messages when linking directories.
-cmd_copy = rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@"
-
-%(link_commands)s
-"""
-
-r"""
-# Define an escape_quotes function to escape single quotes.
-# This allows us to handle quotes properly as long as we always use
-# single quotes and escape_quotes.
-escape_quotes = $(subst ','\'',$(1))
-# This comment is here just to include a ' to unconfuse syntax highlighting.
-# Define an escape_vars function to escape '$' variable syntax.
-# This allows us to read/write command lines with shell variables (e.g.
-# $LD_LIBRARY_PATH), without triggering make substitution.
-escape_vars = $(subst $$,$$$$,$(1))
-# Helper that expands to a shell command to echo a string exactly as it is in
-# make. This uses printf instead of echo because printf's behaviour with respect
-# to escape sequences is more portable than echo's across different shells
-# (e.g., dash, bash).
-exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
-"""
-"""
-# Helper to compare the command we're about to run against the command
-# we logged the last time we ran the command. Produces an empty
-# string (false) when the commands match.
-# Tricky point: Make has no string-equality test function.
-# The kernel uses the following, but it seems like it would produce false
-# positives when one command merely reorders the other's arguments.
-# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
-# $(filter-out $(cmd_$@), $(cmd_$(1))))
-# We instead substitute each for the empty string into the other, and
-# say they're equal if both substitutions produce the empty string.
-# .d files contain """ + SPACE_REPLACEMENT + \
- """ instead of spaces, take that into account.
-command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
- $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
-
-# Helper that is non-empty when a prerequisite changes.
-# Normally make does this implicitly, but we force rules to always run
-# so we can check their command lines.
-# $? -- new prerequisites
-# $| -- order-only dependencies
-prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
-
-# Helper that executes all postbuilds until one fails.
-define do_postbuilds
- @E=0;\\
- for p in $(POSTBUILDS); do\\
- eval $$p;\\
- E=$$?;\\
- if [ $$E -ne 0 ]; then\\
- break;\\
- fi;\\
- done;\\
- if [ $$E -ne 0 ]; then\\
- rm -rf "$@";\\
- exit $$E;\\
- fi
-endef
-
-# do_cmd: run a command via the above cmd_foo names, if necessary.
-# Should always run for a given target to handle command-line changes.
-# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
-# Third argument, if non-zero, makes it do POSTBUILDS processing.
-# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
- SPACE_REPLACEMENT + """ for
-# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
- """ characters.
-define do_cmd
-$(if $(or $(command_changed),$(prereq_changed)),
- @$(call exact_echo, $($(quiet)cmd_$(1)))
- @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
- $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
- @$(cmd_$(1))
- @echo " $(quiet_cmd_$(1)): Finished",
- @$(cmd_$(1))
- )
- @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
- @$(if $(2),$(fixup_dep))
- $(if $(and $(3), $(POSTBUILDS)),
- $(call do_postbuilds)
- )
-)
-endef
-
-# Declare the "%(default_target)s" target first so it is the default,
-# even though we don't have the deps yet.
-.PHONY: %(default_target)s
-%(default_target)s:
-
-# make looks for ways to re-generate included makefiles, but in our case, we
-# don't have a direct way. Explicitly telling make that it has nothing to do
-# for them makes it go faster.
-%%.d: ;
-
-# Use FORCE_DO_CMD to force a target to run. Should be coupled with
-# do_cmd.
-.PHONY: FORCE_DO_CMD
-FORCE_DO_CMD:
-
-""")
-
-SHARED_HEADER_MAC_COMMANDS = """
-quiet_cmd_objc = CXX($(TOOLSET)) $@
-cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-quiet_cmd_objcxx = CXX($(TOOLSET)) $@
-cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# Commands for precompiled header files.
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@
-cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
-cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
-quiet_cmd_pch_m = CXX($(TOOLSET)) $@
-cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
-quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
-cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
-
-# gyp-mac-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
-# already.
-quiet_cmd_mac_tool = MACTOOL $(4) $<
-cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
-
-quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
-cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
-
-quiet_cmd_infoplist = INFOPLIST $@
-cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-"""
-
-
-def WriteRootHeaderSuffixRules(writer):
- extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
-
- writer.write('# Suffix rules, putting all outputs into $(obj).\n')
- for ext in extensions:
- writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext)
- writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
-
- writer.write('\n# Try building from generated source, too.\n')
- for ext in extensions:
- writer.write(
- '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext)
- writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
- writer.write('\n')
- for ext in extensions:
- writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext)
- writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
- writer.write('\n')
-
-
-SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
-# Suffix rules, putting all outputs into $(obj).
-""")
-
-
-SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
-# Try building from generated source, too.
-""")
-
-
-SHARED_FOOTER = """\
-# "all" is a concatenation of the "all" targets from all the included
-# sub-makefiles. This is just here to clarify.
-all:
-
-# Add in dependency-tracking rules. $(all_deps) is the list of every single
-# target in our tree. Only consider the ones with .d (dependency) info:
-d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
-ifneq ($(d_files),)
- include $(d_files)
-endif
-"""
-
-header = """\
-# This file is generated by gyp; do not edit.
-
-"""
-
-# Maps every compilable file extension to the do_cmd that compiles it.
-COMPILABLE_EXTENSIONS = {
- '.c': 'cc',
- '.cc': 'cxx',
- '.cpp': 'cxx',
- '.cxx': 'cxx',
- '.s': 'cc',
- '.S': 'cc',
-}
-
-def Compilable(filename):
- """Return true if the file is compilable (should be in OBJS)."""
- for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS):
- if res:
- return True
- return False
-
-
-def Linkable(filename):
- """Return true if the file is linkable (should be on the link line)."""
- return filename.endswith('.o')
-
-
-def Target(filename):
- """Translate a compilable filename to its .o target."""
- return os.path.splitext(filename)[0] + '.o'
-
-
-def EscapeShellArgument(s):
- """Quotes an argument so that it will be interpreted literally by a POSIX
- shell. Taken from
- http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
- """
- return "'" + s.replace("'", "'\\''") + "'"
-
-
-def EscapeMakeVariableExpansion(s):
-  """Make has its own variable expansion syntax using $. We must escape it for
-  the string to be interpreted literally."""
- return s.replace('$', '$$')
-
-
-def EscapeCppDefine(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = EscapeShellArgument(s)
- s = EscapeMakeVariableExpansion(s)
-  # '#' characters must be escaped even when embedded in a string, or Make
-  # will treat them as the start of a comment.
- return s.replace('#', r'\#')
-
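-
-
-# Editorial sketch, not part of the original file: a few worked examples of
-# the escaping pipeline above on hypothetical defines.
-def _ExampleEscapeCppDefine():
-  assert EscapeCppDefine('FOO="a b"') == '\'FOO="a b"\''   # shell quoting
-  assert EscapeCppDefine('PI=$(3.14)') == "'PI=$$(3.14)'"  # $ doubled for make
-  assert EscapeCppDefine('BAR=#1') == "'BAR=\\#1'"         # '#' escaped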
-
-def QuoteIfNecessary(string):
- """TODO: Should this ideally be replaced with one or more of the above
- functions?"""
- if '"' in string:
- string = '"' + string.replace('"', '\\"') + '"'
- return string
-
-
-def StringToMakefileVariable(string):
- """Convert a string to a value that is acceptable as a make variable name."""
- return re.sub('[^a-zA-Z0-9_]', '_', string)
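-
-
-# Editorial sketch, not part of the original file: a qualified (hypothetical)
-# gyp target name becomes a safe make variable name.
-def _ExampleStringToMakefileVariable():
-  assert (StringToMakefileVariable('chrome/app.gyp:browser#target') ==
-          'chrome_app_gyp_browser_target')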
-
-
-srcdir_prefix = ''
-def Sourceify(path):
- """Convert a path to its source directory form."""
- if '$(' in path:
- return path
- if os.path.isabs(path):
- return path
- return srcdir_prefix + path
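-
-
-# Editorial sketch, not part of the original file: Sourceify leaves absolute
-# paths and paths containing make variables alone, and otherwise prepends
-# srcdir_prefix (set by the generator, e.g. to '$(srcdir)/', when writing
-# into a separate output directory).
-def _ExampleSourceify():
-  assert Sourceify('/abs/bar.c') == '/abs/bar.c'      # absolute: unchanged
-  assert Sourceify('$(obj)/gen.c') == '$(obj)/gen.c'  # make var: unchanged
-  assert Sourceify('foo/bar.c') == srcdir_prefix + 'foo/bar.c'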
-
-
-def QuoteSpaces(s, quote=r'\ '):
- return s.replace(' ', quote)
-
-
-# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
-def _ValidateSourcesForOSX(spec, all_sources):
-  """Makes sure that duplicate basenames are not specified in the source list.
-
- Arguments:
-    spec: The target dictionary containing the properties of the target.
-    all_sources: List of all source files for the target.
- """
- if spec.get('type', None) != 'static_library':
- return
-
- basenames = {}
- for source in all_sources:
- name, ext = os.path.splitext(source)
- is_compiled_file = ext in [
- '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
- if not is_compiled_file:
- continue
- basename = os.path.basename(name) # Don't include extension.
- basenames.setdefault(basename, []).append(source)
-
- error = ''
- for basename, files in basenames.iteritems():
- if len(files) > 1:
- error += ' %s: %s\n' % (basename, ' '.join(files))
-
- if error:
- print('static library %s has several files with the same basename:\n' %
- spec['target_name'] + error + 'libtool on OS X will generate' +
- ' warnings for them.')
- raise GypError('Duplicate basenames in sources section, see list above')
-
-
-# Map from qualified target to path to output.
-target_outputs = {}
-# Map from qualified target to any linkable output. A subset
-# of target_outputs. E.g. when mybinary depends on liba, we want to
-# include liba in the linker line; when otherbinary depends on
-# mybinary, we just want to build mybinary first.
-target_link_deps = {}
-
-
-class MakefileWriter(object):
- """MakefileWriter packages up the writing of one target-specific foobar.mk.
-
-  Its only real entry point is Write(); otherwise it exists for namespacing.
- """
-
- def __init__(self, generator_flags, flavor):
- self.generator_flags = generator_flags
- self.flavor = flavor
-
- self.suffix_rules_srcdir = {}
- self.suffix_rules_objdir1 = {}
- self.suffix_rules_objdir2 = {}
-
- # Generate suffix rules for all compilable extensions.
- for ext in COMPILABLE_EXTENSIONS.keys():
- # Suffix rules for source folder.
- self.suffix_rules_srcdir.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
- @$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-
- # Suffix rules for generated source files.
- self.suffix_rules_objdir1.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
- @$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
- self.suffix_rules_objdir2.update({ext: ("""\
-$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
- @$(call do_cmd,%s,1)
-""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
-
-
- def Write(self, qualified_target, base_path, output_filename, spec, configs,
- part_of_all):
- """The main entry point: writes a .mk file for a single target.
-
- Arguments:
- qualified_target: target we're generating
- base_path: path relative to source root we're building in, used to resolve
- target-relative paths
- output_filename: output .mk file name to write
- spec, configs: gyp info
- part_of_all: flag indicating this target is part of 'all'
- """
- gyp.common.EnsureDirExists(output_filename)
-
- self.fp = open(output_filename, 'w')
-
- self.fp.write(header)
-
- self.qualified_target = qualified_target
- self.path = base_path
- self.target = spec['target_name']
- self.type = spec['type']
- self.toolset = spec['toolset']
-
- self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
- if self.flavor == 'mac':
- self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
- else:
- self.xcode_settings = None
-
- deps, link_deps = self.ComputeDeps(spec)
-
- # Some of the generation below can add extra output, sources, or
- # link dependencies. All of the out params of the functions that
- # follow use names like extra_foo.
- extra_outputs = []
- extra_sources = []
- extra_link_deps = []
- extra_mac_bundle_resources = []
- mac_bundle_deps = []
-
- if self.is_mac_bundle:
- self.output = self.ComputeMacBundleOutput(spec)
- self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
- else:
- self.output = self.output_binary = self.ComputeOutput(spec)
-
- self.is_standalone_static_library = bool(
- spec.get('standalone_static_library', 0))
- self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
- 'shared_library')
- if (self.is_standalone_static_library or
- self.type in self._INSTALLABLE_TARGETS):
- self.alias = os.path.basename(self.output)
- install_path = self._InstallableTargetInstallPath()
- else:
- self.alias = self.output
- install_path = self.output
-
- self.WriteLn("TOOLSET := " + self.toolset)
- self.WriteLn("TARGET := " + self.target)
-
- # Actions must come first, since they can generate more OBJs for use below.
- if 'actions' in spec:
- self.WriteActions(spec['actions'], extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all)
-
- # Rules must be early like actions.
- if 'rules' in spec:
- self.WriteRules(spec['rules'], extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all)
-
- if 'copies' in spec:
- self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
-
- # Bundle resources.
- if self.is_mac_bundle:
- all_mac_bundle_resources = (
- spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
- self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
- self.WriteMacInfoPlist(mac_bundle_deps)
-
- # Sources.
- all_sources = spec.get('sources', []) + extra_sources
- if all_sources:
- if self.flavor == 'mac':
- # libtool on OS X generates warnings for duplicate basenames in the same
- # target.
- _ValidateSourcesForOSX(spec, all_sources)
- self.WriteSources(
- configs, deps, all_sources, extra_outputs,
- extra_link_deps, part_of_all,
- gyp.xcode_emulation.MacPrefixHeader(
- self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
- self.Pchify))
- sources = filter(Compilable, all_sources)
- if sources:
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
- extensions = set([os.path.splitext(s)[1] for s in sources])
- for ext in extensions:
- if ext in self.suffix_rules_srcdir:
- self.WriteLn(self.suffix_rules_srcdir[ext])
- self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
- for ext in extensions:
- if ext in self.suffix_rules_objdir1:
- self.WriteLn(self.suffix_rules_objdir1[ext])
- for ext in extensions:
- if ext in self.suffix_rules_objdir2:
- self.WriteLn(self.suffix_rules_objdir2[ext])
- self.WriteLn('# End of this set of suffix rules')
-
- # Add dependency from bundle to bundle binary.
- if self.is_mac_bundle:
- mac_bundle_deps.append(self.output_binary)
-
- self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
- mac_bundle_deps, extra_outputs, part_of_all)
-
- # Update global list of target outputs, used in dependency tracking.
- target_outputs[qualified_target] = install_path
-
- # Update global list of link dependencies.
- if self.type in ('static_library', 'shared_library'):
- target_link_deps[qualified_target] = self.output_binary
-
- # Currently all versions have the same effect, but in the future the
- # behavior could differ.
- if self.generator_flags.get('android_ndk_version', None):
- self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
-
- self.fp.close()
-
-
- def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
- """Write a "sub-project" Makefile.
-
- This is a small wrapper Makefile that calls the top-level Makefile to build
- the targets from a single gyp file (i.e. a sub-project).
-
- Arguments:
- output_filename: sub-project Makefile name to write
- makefile_path: path to the top-level Makefile
- targets: list of "all" targets for this sub-project
- build_dir: build output directory, relative to the sub-project
- """
- gyp.common.EnsureDirExists(output_filename)
- self.fp = open(output_filename, 'w')
- self.fp.write(header)
- # For consistency with other builders, put sub-project build output in the
- # sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
- self.WriteLn('export builddir_name ?= %s' %
- os.path.join(os.path.dirname(output_filename), build_dir))
- self.WriteLn('.PHONY: all')
- self.WriteLn('all:')
- if makefile_path:
- makefile_path = ' -C ' + makefile_path
- self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
- self.fp.close()
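- # A sketch of the generated sub-Makefile for a hypothetical sub-project
- # in 'foo/' with targets 'bar' and 'baz' and build_dir 'out':
- #
- #   export builddir_name ?= foo/out
- #   .PHONY: all
- #   all:
- #           $(MAKE) -C .. bar baz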
-
-
- def WriteActions(self, actions, extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all):
- """Write Makefile code for any 'actions' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- actions (used to make other pieces dependent on these
- actions)
- part_of_all: flag indicating this target is part of 'all'
- """
- env = self.GetSortedXcodeEnv()
- for action in actions:
- name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
- action['action_name']))
- self.WriteLn('### Rules for action "%s":' % action['action_name'])
- inputs = action['inputs']
- outputs = action['outputs']
-
- # Build up a list of outputs.
- # Collect the output dirs we'll need.
- dirs = set()
- for out in outputs:
- dir = os.path.split(out)[0]
- if dir:
- dirs.add(dir)
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources += outputs
- if int(action.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += outputs
-
- # Write the actual command.
- action_commands = action['action']
- if self.flavor == 'mac':
- action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
- for command in action_commands]
- command = gyp.common.EncodePOSIXShellList(action_commands)
- if 'message' in action:
- self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
- else:
- self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
- if dirs:
- command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
-
- cd_action = 'cd %s; ' % Sourceify(self.path or '.')
-
- # command and cd_action get written to a toplevel variable called
- # cmd_foo. Toplevel variables can't handle things that change per
- # makefile like $(TARGET), so hardcode the target.
- command = command.replace('$(TARGET)', self.target)
- cd_action = cd_action.replace('$(TARGET)', self.target)
-
- # Set LD_LIBRARY_PATH in case the action runs an executable from this
- # build which links to shared libs from this build.
- # Actions run on the host, so they should in theory only use host
- # libraries, but until everything is made cross-compile safe, also use
- # target libraries.
- # TODO(piman): when everything is cross-compile safe, remove lib.target
- self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
- '$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
- 'export LD_LIBRARY_PATH; '
- '%s%s'
- % (name, cd_action, command))
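- # For illustration, a hypothetical action 'mytarget_codegen' running
- # 'python gen.py' from 'src/mylib' yields (as a single line):
- #
- #   cmd_mytarget_codegen = LD_LIBRARY_PATH=$(builddir)/lib.host:
- #     $(builddir)/lib.target:$$LD_LIBRARY_PATH; export LD_LIBRARY_PATH;
- #     cd src/mylib; python gen.py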
- self.WriteLn()
- outputs = map(self.Absolutify, outputs)
- # The makefile rules are all relative to the top dir, but the gyp actions
- # are defined relative to their containing dir. This replaces the obj
- # variable for the action rule with an absolute version so that the output
- # goes in the right place.
- # Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
- # it's superfluous for the "extra outputs", and this avoids accidentally
- # writing duplicate dummy rules for those outputs.
- # Same for environment.
- self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
- self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
- self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
-
- for input in inputs:
- assert ' ' not in input, (
- "Spaces in action input filenames not supported (%s)" % input)
- for output in outputs:
- assert ' ' not in output, (
- "Spaces in action output filenames not supported (%s)" % output)
-
- # See the comment in WriteCopies about expanding env vars.
- outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
- inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
-
- self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
- part_of_all=part_of_all, command=name)
-
- # Stuff the outputs in a variable so we can refer to them later.
- outputs_variable = 'action_%s_outputs' % name
- self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
- extra_outputs.append('$(%s)' % outputs_variable)
- self.WriteLn()
-
- self.WriteLn()
-
-
- def WriteRules(self, rules, extra_sources, extra_outputs,
- extra_mac_bundle_resources, part_of_all):
- """Write Makefile code for any 'rules' from the gyp input.
-
- extra_sources: a list that will be filled in with newly generated source
- files, if any
- extra_outputs: a list that will be filled in with any outputs of these
- rules (used to make other pieces dependent on these rules)
- part_of_all: flag indicating this target is part of 'all'
- """
- env = self.GetSortedXcodeEnv()
- for rule in rules:
- name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
- rule['rule_name']))
- count = 0
- self.WriteLn('### Generated for rule %s:' % name)
-
- all_outputs = []
-
- for rule_source in rule.get('rule_sources', []):
- dirs = set()
- (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
- (rule_source_root, rule_source_ext) = \
- os.path.splitext(rule_source_basename)
-
- outputs = [self.ExpandInputRoot(out, rule_source_root,
- rule_source_dirname)
- for out in rule['outputs']]
-
- for out in outputs:
- dir = os.path.dirname(out)
- if dir:
- dirs.add(dir)
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources += outputs
- if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += outputs
- inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
- rule.get('inputs', [])))
- actions = ['$(call do_cmd,%s_%d)' % (name, count)]
-
- if name == 'resources_grit':
- # HACK: This is ugly. Grit intentionally doesn't touch the
- # timestamp of its output file when the file doesn't change,
- # which is fine in hash-based dependency systems like scons
- # and forge, but not kosher in the make world. After some
- # discussion, hacking around it here seems like the least
- # amount of pain.
- actions += ['@touch --no-create $@']
-
- # See the comment in WriteCopies about expanding env vars.
- outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
- inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
-
- outputs = map(self.Absolutify, outputs)
- all_outputs += outputs
- # Only write the 'obj' and 'builddir' rules for the "primary" output
- # (:1); it's superfluous for the "extra outputs", and this avoids
- # accidentally writing duplicate dummy rules for those outputs.
- self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
- self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
- self.WriteMakeRule(outputs, inputs, actions,
- command="%s_%d" % (name, count))
- # Spaces in rule filenames are not supported, but rule variables have
- # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
- # The spaces within the variables are valid, so remove the variables
- # before checking.
- variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
- for output in outputs:
- output = re.sub(variables_with_spaces, '', output)
- assert ' ' not in output, (
- "Spaces in rule filenames not yet supported (%s)" % output)
- self.WriteLn('all_deps += %s' % ' '.join(outputs))
-
- action = [self.ExpandInputRoot(ac, rule_source_root,
- rule_source_dirname)
- for ac in rule['action']]
- mkdirs = ''
- if dirs:
- mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
- cd_action = 'cd %s; ' % Sourceify(self.path or '.')
-
- # action, cd_action, and mkdirs get written to a toplevel variable
- # called cmd_foo. Toplevel variables can't handle things that change
- # per makefile like $(TARGET), so hardcode the target.
- if self.flavor == 'mac':
- action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
- for command in action]
- action = gyp.common.EncodePOSIXShellList(action)
- action = action.replace('$(TARGET)', self.target)
- cd_action = cd_action.replace('$(TARGET)', self.target)
- mkdirs = mkdirs.replace('$(TARGET)', self.target)
-
- # Set LD_LIBRARY_PATH in case the rule runs an executable from this
- # build which links to shared libs from this build.
- # Rules run on the host, so they should in theory only use host
- # libraries, but until everything is made cross-compile safe, also use
- # target libraries.
- # TODO(piman): when everything is cross-compile safe, remove lib.target
- self.WriteLn(
- "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
- "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
- "export LD_LIBRARY_PATH; "
- "%(cd_action)s%(mkdirs)s%(action)s" % {
- 'action': action,
- 'cd_action': cd_action,
- 'count': count,
- 'mkdirs': mkdirs,
- 'name': name,
- })
- self.WriteLn(
- 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
- 'count': count,
- 'name': name,
- })
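- # For illustration, a hypothetical rule 'mytarget_idl' applied to its
- # second rule_source writes a numbered pair roughly like:
- #
- #   cmd_mytarget_idl_1 = LD_LIBRARY_PATH=...; export LD_LIBRARY_PATH;
- #     cd src/mylib; mkdir -p $(obj)/gen; python idl.py bar.idl
- #   quiet_cmd_mytarget_idl_1 = RULE mytarget_idl_1 $@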
- self.WriteLn()
- count += 1
-
- outputs_variable = 'rule_%s_outputs' % name
- self.WriteList(all_outputs, outputs_variable)
- extra_outputs.append('$(%s)' % outputs_variable)
-
- self.WriteLn('### Finished generating for rule: %s' % name)
- self.WriteLn()
- self.WriteLn('### Finished generating for all rules')
- self.WriteLn('')
-
-
- def WriteCopies(self, copies, extra_outputs, part_of_all):
- """Write Makefile code for any 'copies' from the gyp input.
-
- extra_outputs: a list that will be filled in with any outputs of these
- copies (used to make other pieces dependent on them)
- part_of_all: flag indicating this target is part of 'all'
- """
- self.WriteLn('### Generated for copy rule.')
-
- variable = StringToMakefileVariable(self.qualified_target + '_copies')
- outputs = []
- for copy in copies:
- for path in copy['files']:
- # Absolutify() may call normpath, and will strip trailing slashes.
- path = Sourceify(self.Absolutify(path))
- filename = os.path.split(path)[1]
- output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
- filename)))
-
- # If the output path has variables in it, which happens in practice for
- # 'copies', writing the environment as target-local doesn't work,
- # because the variables are already needed for the target name.
- # Copying the environment variables into global make variables doesn't
- # work either, because then the .d files will potentially contain spaces
- # after variable expansion, and .d file handling cannot handle spaces.
- # As a workaround, manually expand variables at gyp time. Since 'copies'
- # can't run scripts, there's no need to write the env then.
- # WriteDoCmd() will escape spaces for .d files.
- env = self.GetSortedXcodeEnv()
- output = gyp.xcode_emulation.ExpandEnvVars(output, env)
- path = gyp.xcode_emulation.ExpandEnvVars(path, env)
- self.WriteDoCmd([output], [path], 'copy', part_of_all)
- outputs.append(output)
- self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
- extra_outputs.append('$(%s)' % variable)
- self.WriteLn()
-
-
- def WriteMacBundleResources(self, resources, bundle_deps):
- """Writes Makefile code for 'mac_bundle_resources'."""
- self.WriteLn('### Generated for mac_bundle_resources')
-
- for output, res in gyp.xcode_emulation.GetMacBundleResources(
- generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
- map(Sourceify, map(self.Absolutify, resources))):
- _, ext = os.path.splitext(output)
- if ext != '.xcassets':
- # Make does not support '.xcassets' emulation.
- self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
- part_of_all=True)
- bundle_deps.append(output)
-
-
- def WriteMacInfoPlist(self, bundle_deps):
- """Write Makefile code for bundle Info.plist files."""
- info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
- generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
- lambda p: Sourceify(self.Absolutify(p)))
- if not info_plist:
- return
- if defines:
- # Create an intermediate file to store preprocessed results.
- intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
- os.path.basename(info_plist))
- self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
- quoter=EscapeCppDefine)
- self.WriteMakeRule([intermediate_plist], [info_plist],
- ['$(call do_cmd,infoplist)',
- # "Convert" the plist so that any weird whitespace changes from the
- # preprocessor do not affect the XML parser in mac_tool.
- '@plutil -convert xml1 $@ $@'])
- info_plist = intermediate_plist
- # plists can contain envvars, so substitute them into the file.
- self.WriteSortedXcodeEnv(
- out, self.GetSortedXcodeEnv(additional_settings=extra_env))
- self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
- part_of_all=True)
- bundle_deps.append(out)
-
-
- def WriteSources(self, configs, deps, sources,
- extra_outputs, extra_link_deps,
- part_of_all, precompiled_header):
- """Write Makefile code for any 'sources' from the gyp input.
- These are source files necessary to build the current target.
-
- configs, deps, sources: input from gyp.
- extra_outputs: a list of extra outputs this action should be dependent on;
- used to serialize action/rules before compilation
- extra_link_deps: a list that will be filled in with any outputs of
- compilation (to be used in link lines)
- part_of_all: flag indicating this target is part of 'all'
- """
-
- # Write configuration-specific variables for CFLAGS, etc.
- for configname in sorted(configs.keys()):
- config = configs[configname]
- self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
- quoter=EscapeCppDefine)
-
- if self.flavor == 'mac':
- cflags = self.xcode_settings.GetCflags(configname)
- cflags_c = self.xcode_settings.GetCflagsC(configname)
- cflags_cc = self.xcode_settings.GetCflagsCC(configname)
- cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
- cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
- else:
- cflags = config.get('cflags')
- cflags_c = config.get('cflags_c')
- cflags_cc = config.get('cflags_cc')
-
- self.WriteLn("# Flags passed to all source files.");
- self.WriteList(cflags, 'CFLAGS_%s' % configname)
- self.WriteLn("# Flags passed to only C files.");
- self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
- self.WriteLn("# Flags passed to only C++ files.");
- self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
- if self.flavor == 'mac':
- self.WriteLn("# Flags passed to only ObjC files.");
- self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
- self.WriteLn("# Flags passed to only ObjC++ files.");
- self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
- includes = config.get('include_dirs')
- if includes:
- includes = map(Sourceify, map(self.Absolutify, includes))
- self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
-
- compilable = filter(Compilable, sources)
- objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
- self.WriteList(objs, 'OBJS')
-
- for obj in objs:
- assert ' ' not in obj, (
- "Spaces in object filenames not supported (%s)" % obj)
- self.WriteLn('# Add to the list of files we specially track '
- 'dependencies for.')
- self.WriteLn('all_deps += $(OBJS)')
- self.WriteLn()
-
- # Make sure our dependencies are built first.
- if deps:
- self.WriteMakeRule(['$(OBJS)'], deps,
- comment = 'Make sure our dependencies are built '
- 'before any of us.',
- order_only = True)
-
- # Make sure the actions and rules run first.
- # If they generate any extra headers etc., the per-.o file dep tracking
- # will catch the proper rebuilds, so order only is still ok here.
- if extra_outputs:
- self.WriteMakeRule(['$(OBJS)'], extra_outputs,
- comment = 'Make sure our actions/rules run '
- 'before any of us.',
- order_only = True)
-
- pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
- if pchdeps:
- self.WriteLn('# Dependencies from obj files to their precompiled headers')
- for source, obj, gch in pchdeps:
- self.WriteLn('%s: %s' % (obj, gch))
- self.WriteLn('# End precompiled header dependencies')
-
- if objs:
- extra_link_deps.append('$(OBJS)')
- self.WriteLn("""\
-# CFLAGS et al overrides must be target-local.
-# See "Target-specific Variable Values" in the GNU Make manual.""")
- self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
- self.WriteLn("$(OBJS): GYP_CFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('c') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_C_$(BUILDTYPE))")
- self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('cc') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_CC_$(BUILDTYPE))")
- if self.flavor == 'mac':
- self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('m') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_C_$(BUILDTYPE)) "
- "$(CFLAGS_OBJC_$(BUILDTYPE))")
- self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "%s " % precompiled_header.GetInclude('mm') +
- "$(CFLAGS_$(BUILDTYPE)) "
- "$(CFLAGS_CC_$(BUILDTYPE)) "
- "$(CFLAGS_OBJCC_$(BUILDTYPE))")
-
- self.WritePchTargets(precompiled_header.GetPchBuildCommands())
-
- # If there are any object files in our input file list, link them into our
- # output.
- extra_link_deps += filter(Linkable, sources)
-
- self.WriteLn()
-
- def WritePchTargets(self, pch_commands):
- """Writes make rules to compile prefix headers."""
- if not pch_commands:
- return
-
- for gch, lang_flag, lang, input in pch_commands:
- extra_flags = {
- 'c': '$(CFLAGS_C_$(BUILDTYPE))',
- 'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
- 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
- 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
- }[lang]
- var_name = {
- 'c': 'GYP_PCH_CFLAGS',
- 'cc': 'GYP_PCH_CXXFLAGS',
- 'm': 'GYP_PCH_OBJCFLAGS',
- 'mm': 'GYP_PCH_OBJCXXFLAGS',
- }[lang]
- self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
- "$(DEFS_$(BUILDTYPE)) "
- "$(INCS_$(BUILDTYPE)) "
- "$(CFLAGS_$(BUILDTYPE)) " +
- extra_flags)
-
- self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
- self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
- self.WriteLn('')
- assert ' ' not in gch, (
- "Spaces in gch filenames not supported (%s)" % gch)
- self.WriteLn('all_deps += %s' % gch)
- self.WriteLn('')
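- # A sketch of one iteration's output for lang='cc'; <gch>, <input>, and
- # <lang_flag> stand in for values from the pch_commands tuples:
- #
- #   <gch>: GYP_PCH_CXXFLAGS := <lang_flag> $(DEFS_$(BUILDTYPE))
- #     $(INCS_$(BUILDTYPE)) $(CFLAGS_$(BUILDTYPE)) $(CFLAGS_CC_$(BUILDTYPE))
- #   <gch>: <input> FORCE_DO_CMD
- #           @$(call do_cmd,pch_cc,1)
- #   all_deps += <gch>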
-
-
- def ComputeOutputBasename(self, spec):
- """Return the 'output basename' of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- 'libfoobar.so'
- """
- assert not self.is_mac_bundle
-
- if self.flavor == 'mac' and self.type in (
- 'static_library', 'executable', 'shared_library', 'loadable_module'):
- return self.xcode_settings.GetExecutablePath()
-
- target = spec['target_name']
- target_prefix = ''
- target_ext = ''
- if self.type == 'static_library':
- if target[:3] == 'lib':
- target = target[3:]
- target_prefix = 'lib'
- target_ext = '.a'
- elif self.type in ('loadable_module', 'shared_library'):
- if target[:3] == 'lib':
- target = target[3:]
- target_prefix = 'lib'
- target_ext = '.so'
- elif self.type == 'none':
- target = '%s.stamp' % target
- elif self.type != 'executable':
- print ("ERROR: What output file should be generated?",
- "type", self.type, "target", target)
-
- target_prefix = spec.get('product_prefix', target_prefix)
- target = spec.get('product_name', target)
- product_ext = spec.get('product_extension')
- if product_ext:
- target_ext = '.' + product_ext
-
- return target_prefix + target + target_ext
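- # For example, a hypothetical spec {'target_name': 'foo',
- # 'type': 'shared_library', 'product_name': 'bar',
- # 'product_extension': 'plugin'} yields 'libbar.plugin'.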
-
-
- def _InstallImmediately(self):
- return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
- 'static_library', 'executable', 'shared_library', 'loadable_module')
-
-
- def ComputeOutput(self, spec):
- """Return the 'output' (full output path) of a gyp spec.
-
- E.g., the loadable module 'foobar' in directory 'baz' will produce
- '$(obj)/baz/libfoobar.so'
- """
- assert not self.is_mac_bundle
-
- path = os.path.join('$(obj).' + self.toolset, self.path)
- if self.type == 'executable' or self._InstallImmediately():
- path = '$(builddir)'
- path = spec.get('product_dir', path)
- return os.path.join(path, self.ComputeOutputBasename(spec))
-
-
- def ComputeMacBundleOutput(self, spec):
- """Return the 'output' (full output path) to a bundle output directory."""
- assert self.is_mac_bundle
- path = generator_default_variables['PRODUCT_DIR']
- return os.path.join(path, self.xcode_settings.GetWrapperName())
-
-
- def ComputeMacBundleBinaryOutput(self, spec):
- """Return the 'output' (full output path) to the binary in a bundle."""
- path = generator_default_variables['PRODUCT_DIR']
- return os.path.join(path, self.xcode_settings.GetExecutablePath())
-
-
- def ComputeDeps(self, spec):
- """Compute the dependencies of a gyp spec.
-
- Returns a tuple (deps, link_deps), where each is a list of
- filenames that will need to be put in front of make for either
- building (deps) or linking (link_deps).
- """
- deps = []
- link_deps = []
- if 'dependencies' in spec:
- deps.extend([target_outputs[dep] for dep in spec['dependencies']
- if target_outputs[dep]])
- for dep in spec['dependencies']:
- if dep in target_link_deps:
- link_deps.append(target_link_deps[dep])
- deps.extend(link_deps)
- # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
- # This hack makes it work:
- # link_deps.extend(spec.get('libraries', []))
- return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
-
-
- def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
- self.WriteMakeRule([self.output_binary], extra_outputs,
- comment = 'Build our special outputs first.',
- order_only = True)
-
-
- def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
- extra_outputs, part_of_all):
- """Write Makefile code to produce the final target of the gyp spec.
-
- spec, configs: input from gyp.
- deps, link_deps: dependency lists; see ComputeDeps()
- extra_outputs: any extra outputs that our target should depend on
- part_of_all: flag indicating this target is part of 'all'
- """
-
- self.WriteLn('### Rules for final target.')
-
- if extra_outputs:
- self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
- self.WriteMakeRule(extra_outputs, deps,
- comment=('Preserve order dependency of '
- 'special output on deps.'),
- order_only = True)
-
- target_postbuilds = {}
- if self.type != 'none':
- for configname in sorted(configs.keys()):
- config = configs[configname]
- if self.flavor == 'mac':
- ldflags = self.xcode_settings.GetLdflags(configname,
- generator_default_variables['PRODUCT_DIR'],
- lambda p: Sourceify(self.Absolutify(p)))
-
- # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
- gyp_to_build = gyp.common.InvertRelativePath(self.path)
- target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
- configname,
- QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
- self.output))),
- QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
- self.output_binary))))
- if target_postbuild:
- target_postbuilds[configname] = target_postbuild
- else:
- ldflags = config.get('ldflags', [])
- # Compute an rpath for this output if needed.
- if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
- # We want to get the literal string "$ORIGIN" into the link command,
- # so we need lots of escaping.
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
- ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
- self.toolset)
- library_dirs = config.get('library_dirs', [])
- ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
- self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
- if self.flavor == 'mac':
- self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
- 'LIBTOOLFLAGS_%s' % configname)
- libraries = spec.get('libraries')
- if libraries:
- # Remove duplicate entries
- libraries = gyp.common.uniquer(libraries)
- if self.flavor == 'mac':
- libraries = self.xcode_settings.AdjustLibraries(libraries)
- self.WriteList(libraries, 'LIBS')
- self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
- QuoteSpaces(self.output_binary))
- self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
-
- if self.flavor == 'mac':
- self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
- QuoteSpaces(self.output_binary))
-
- # Postbuild actions. Like actions, but implicitly depend on the target's
- # output.
- postbuilds = []
- if self.flavor == 'mac':
- if target_postbuilds:
- postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
- postbuilds.extend(
- gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
-
- if postbuilds:
- # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
- # so we must output its definition first, since we declare variables
- # using ":=".
- self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
-
- for configname in target_postbuilds:
- self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
- (QuoteSpaces(self.output),
- configname,
- gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
-
- # Postbuilds expect to be run in the gyp file's directory, so insert an
- # implicit postbuild to cd to there.
- postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
- for i in xrange(len(postbuilds)):
- if not postbuilds[i].startswith('$'):
- postbuilds[i] = EscapeShellArgument(postbuilds[i])
- self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
- self.WriteLn('%s: POSTBUILDS := %s' % (
- QuoteSpaces(self.output), ' '.join(postbuilds)))
-
- # A bundle directory depends on its dependencies such as bundle resources
- # and bundle binary. When all dependencies have been built, the bundle
- # needs to be packaged.
- if self.is_mac_bundle:
- # If the framework doesn't contain a binary, then nothing depends
- # on the actions -- make the framework depend on them directly too.
- self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
-
- # Bundle dependencies. Note that the code below adds actions to this
- # target, so if you move these two lines, move the lines below as well.
- self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
- self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
-
- # After the framework is built, package it. Needs to happen before
- # postbuilds, since postbuilds depend on this.
- if self.type in ('shared_library', 'loadable_module'):
- self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
- self.xcode_settings.GetFrameworkVersion())
-
- # Bundle postbuilds can depend on the whole bundle, so run them after
- # the bundle is packaged, not merely after the bundle binary is done.
- if postbuilds:
- self.WriteLn('\t@$(call do_postbuilds)')
- postbuilds = [] # Don't write postbuilds for target's output.
-
- # Needed by test/mac/gyptest-rebuild.py.
- self.WriteLn('\t@true # No-op, used by tests')
-
- # Since this target depends on binary and resources which are in
- # nested subfolders, the framework directory will usually be older
- # than its dependencies. To prevent this rule from executing on
- # every build (expensive, especially with postbuilds), explicitly
- # update the time on the framework directory.
- self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
-
- if postbuilds:
- assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
- 'on the bundle, not the binary (target \'%s\')' % self.target)
- assert 'product_dir' not in spec, ('Postbuilds do not work with '
- 'custom product_dir')
-
- if self.type == 'executable':
- self.WriteLn('%s: LD_INPUTS := %s' % (
- QuoteSpaces(self.output_binary),
- ' '.join(map(QuoteSpaces, link_deps))))
- if self.toolset == 'host' and self.flavor == 'android':
- self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
- postbuilds=postbuilds)
-
- elif self.type == 'static_library':
- for link_dep in link_deps:
- assert ' ' not in link_dep, (
- "Spaces in alink input filenames not supported (%s)" % link_dep)
- if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
- self.is_standalone_static_library):
- self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'shared_library':
- self.WriteLn('%s: LD_INPUTS := %s' % (
- QuoteSpaces(self.output_binary),
- ' '.join(map(QuoteSpaces, link_deps))))
- self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'loadable_module':
- for link_dep in link_deps:
- assert ' ' not in link_dep, (
- "Spaces in module input filenames not supported (%s)" % link_dep)
- if self.toolset == 'host' and self.flavor == 'android':
- self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
- part_of_all, postbuilds=postbuilds)
- else:
- self.WriteDoCmd(
- [self.output_binary], link_deps, 'solink_module', part_of_all,
- postbuilds=postbuilds)
- elif self.type == 'none':
- # Write a stamp line.
- self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
- postbuilds=postbuilds)
- else:
- print "WARNING: no output for", self.type, target
-
- # Add an alias for each target (if there are any outputs).
- # Installable target aliases are created below.
- if ((self.output and self.output != self.target) and
- (self.type not in self._INSTALLABLE_TARGETS)):
- self.WriteMakeRule([self.target], [self.output],
- comment='Add target alias', phony = True)
- if part_of_all:
- self.WriteMakeRule(['all'], [self.target],
- comment = 'Add target alias to "all" target.',
- phony = True)
-
- # Add special-case rules for our installable targets.
- # 1) They need to install to the build dir or "product" dir.
- # 2) They get shortcuts for building (e.g. "make chrome").
- # 3) They are part of "make all".
- if (self.type in self._INSTALLABLE_TARGETS or
- self.is_standalone_static_library):
- if self.type == 'shared_library':
- file_desc = 'shared library'
- elif self.type == 'static_library':
- file_desc = 'static library'
- else:
- file_desc = 'executable'
- install_path = self._InstallableTargetInstallPath()
- installable_deps = [self.output]
- if (self.flavor == 'mac' and not 'product_dir' in spec and
- self.toolset == 'target'):
- # On mac, products are created in install_path immediately.
- assert install_path == self.output, '%s != %s' % (
- install_path, self.output)
-
- # Point the target alias to the final binary output.
- self.WriteMakeRule([self.target], [install_path],
- comment='Add target alias', phony = True)
- if install_path != self.output:
- assert not self.is_mac_bundle # See comment a few lines above.
- self.WriteDoCmd([install_path], [self.output], 'copy',
- comment = 'Copy this to the %s output path.' %
- file_desc, part_of_all=part_of_all)
- installable_deps.append(install_path)
- if self.output != self.alias and self.alias != self.target:
- self.WriteMakeRule([self.alias], installable_deps,
- comment = 'Short alias for building this %s.' %
- file_desc, phony = True)
- if part_of_all:
- self.WriteMakeRule(['all'], [install_path],
- comment = 'Add %s to "all" target.' % file_desc,
- phony = True)
-
-
- def WriteList(self, value_list, variable=None, prefix='',
- quoter=QuoteIfNecessary):
- """Write a variable definition that is a list of values.
-
- E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
- foo := blaha blahb
- but in a pretty-printed style.
- """
- values = ''
- if value_list:
- value_list = [quoter(prefix + l) for l in value_list]
- values = ' \\\n\t' + ' \\\n\t'.join(value_list)
- self.fp.write('%s :=%s\n\n' % (variable, values))
-
-
- def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
- postbuilds=False):
- """Write a Makefile rule that uses do_cmd.
-
- This makes the outputs dependent on the command line that was run, and
- also supports the V= make command line flag.
- """
- suffix = ''
- if postbuilds:
- assert ',' not in command
- suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
- self.WriteMakeRule(outputs, inputs,
- actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
- comment = comment,
- command = command,
- force = True)
- # Add our outputs to the list of targets we read depfiles from.
- # all_deps is only used for deps file reading, and for deps files we replace
- # spaces with ? because escaping doesn't work with make's $(sort) and
- # other functions.
- outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
- self.WriteLn('all_deps += %s' % ' '.join(outputs))
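- # e.g. an output 'out/My App/x.stamp' is recorded in all_deps as
- # 'out/My?App/x.stamp', with SPACE_REPLACEMENT standing in for the space.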
-
-
- def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
- order_only=False, force=False, phony=False, command=None):
- """Write a Makefile rule, with some extra tricks.
-
- outputs: a list of outputs for the rule (note: this is not directly
- supported by make; see comments below)
- inputs: a list of inputs for the rule
- actions: a list of shell commands to run for the rule
- comment: a comment to put in the Makefile above the rule (also useful
- for making this Python script's code self-documenting)
- order_only: if true, makes the dependency order-only
- force: if true, include FORCE_DO_CMD as an order-only dep
- phony: if true, the rule does not actually generate the named output; the
- output is just a name under which to run the rule
- command: (optional) command name to generate unambiguous labels
- """
- outputs = map(QuoteSpaces, outputs)
- inputs = map(QuoteSpaces, inputs)
-
- if comment:
- self.WriteLn('# ' + comment)
- if phony:
- self.WriteLn('.PHONY: ' + ' '.join(outputs))
- if actions:
- self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
- force_append = ' FORCE_DO_CMD' if force else ''
-
- if order_only:
- # Order only rule: Just write a simple rule.
- # TODO(evanm): just make order_only a list of deps instead of this hack.
- self.WriteLn('%s: | %s%s' %
- (' '.join(outputs), ' '.join(inputs), force_append))
- elif len(outputs) == 1:
- # Regular rule, one output: Just write a simple rule.
- self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
- else:
- # Regular rule, more than one output: Multiple outputs are tricky in
- # make. We will write three rules:
- # - All outputs depend on an intermediate file.
- # - Make .INTERMEDIATE depend on the intermediate.
- # - The intermediate file depends on the inputs and executes the
- # actual command.
- # - The intermediate recipe will 'touch' the intermediate file.
- #   - The multi-output rule will have a do-nothing recipe.
- intermediate = "%s.intermediate" % (command if command else self.target)
- self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
- self.WriteLn('\t@:')
- self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
- self.WriteLn('%s: %s%s' %
- (intermediate, ' '.join(inputs), force_append))
- actions.insert(0, '$(call do_cmd,touch)')
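- # Sketch of the three rules for hypothetical outputs 'a.h a.cc' built
- # from 'in.idl' by command 'mycmd' (with force=True, so FORCE_DO_CMD is
- # appended):
- #
- #   a.h a.cc: mycmd.intermediate
- #           @:
- #   .INTERMEDIATE: mycmd.intermediate
- #   mycmd.intermediate: in.idl FORCE_DO_CMD
- #           $(call do_cmd,touch)
- #           ...followed by the original actions...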
-
- if actions:
- for action in actions:
- self.WriteLn('\t%s' % action)
- self.WriteLn()
-
-
- def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
- """Write a set of LOCAL_XXX definitions for Android NDK.
-
- These variable definitions will be used by Android NDK but do nothing for
- non-Android applications.
-
- Arguments:
- module_name: Android NDK module name, which must be unique among all
- module names.
- all_sources: A list of source files (will be filtered by Compilable).
- link_deps: A list of link dependencies, which must be sorted in
- the order from dependencies to dependents.
- """
- if self.type not in ('executable', 'shared_library', 'static_library'):
- return
-
- self.WriteLn('# Variable definitions for Android applications')
- self.WriteLn('include $(CLEAR_VARS)')
- self.WriteLn('LOCAL_MODULE := ' + module_name)
- self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
- '$(DEFS_$(BUILDTYPE)) '
- # LOCAL_CFLAGS is applied to both of C and C++. There is
- # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
- # sources.
- '$(CFLAGS_C_$(BUILDTYPE)) '
- # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
- # LOCAL_C_INCLUDES does not expect it. So put it in
- # LOCAL_CFLAGS.
- '$(INCS_$(BUILDTYPE))')
- # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
- self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
- self.WriteLn('LOCAL_C_INCLUDES :=')
- self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
-
- # Detect the C++ extension.
- cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
- default_cpp_ext = '.cpp'
- for filename in all_sources:
- ext = os.path.splitext(filename)[1]
- if ext in cpp_ext:
- cpp_ext[ext] += 1
- if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
- default_cpp_ext = ext
- self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
-
- self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
- 'LOCAL_SRC_FILES')
-
- # Filter out those which do not match prefix and suffix and produce
- # the resulting list without prefix and suffix.
- def DepsToModules(deps, prefix, suffix):
- modules = []
- for filepath in deps:
- filename = os.path.basename(filepath)
- if filename.startswith(prefix) and filename.endswith(suffix):
- modules.append(filename[len(prefix):-len(suffix)])
- return modules
-
- # Retrieve the default value of 'SHARED_LIB_SUFFIX'
- params = {'flavor': 'linux'}
- default_variables = {}
- CalculateVariables(default_variables, params)
-
- self.WriteList(
- DepsToModules(link_deps,
- generator_default_variables['SHARED_LIB_PREFIX'],
- default_variables['SHARED_LIB_SUFFIX']),
- 'LOCAL_SHARED_LIBRARIES')
- self.WriteList(
- DepsToModules(link_deps,
- generator_default_variables['STATIC_LIB_PREFIX'],
- generator_default_variables['STATIC_LIB_SUFFIX']),
- 'LOCAL_STATIC_LIBRARIES')
-
- if self.type == 'executable':
- self.WriteLn('include $(BUILD_EXECUTABLE)')
- elif self.type == 'shared_library':
- self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
- elif self.type == 'static_library':
- self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
- self.WriteLn()
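- # Abridged sketch of the block written for a hypothetical shared
- # library 'foo' built from src/foo.cpp and linked against libbar.so:
- #
- #   include $(CLEAR_VARS)
- #   LOCAL_MODULE := foo
- #   LOCAL_CPP_EXTENSION := .cpp
- #   LOCAL_SRC_FILES := src/foo.cpp
- #   LOCAL_SHARED_LIBRARIES := bar
- #   include $(BUILD_SHARED_LIBRARY)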
-
-
- def WriteLn(self, text=''):
- self.fp.write(text + '\n')
-
-
- def GetSortedXcodeEnv(self, additional_settings=None):
- return gyp.xcode_emulation.GetSortedXcodeEnv(
- self.xcode_settings, "$(abs_builddir)",
- os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
- additional_settings)
-
-
- def GetSortedXcodePostbuildEnv(self):
- # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
- # TODO(thakis): It would be nice to have some general mechanism instead.
- strip_save_file = self.xcode_settings.GetPerTargetSetting(
- 'CHROMIUM_STRIP_SAVE_FILE', '')
- # Even if strip_save_file is empty, explicitly write it. Else a postbuild
- # might pick up an export from an earlier target.
- return self.GetSortedXcodeEnv(
- additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
-
-
- def WriteSortedXcodeEnv(self, target, env):
- for k, v in env:
- # For
- # foo := a\ b
- # the escaped space does the right thing. For
- # export foo := a\ b
- # it does not -- the backslash is written to the env as a literal character.
- # So don't escape spaces in |env[k]|.
- self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
-
-
- def Objectify(self, path):
- """Convert a path to its output directory form."""
- if '$(' in path:
- path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
- if '$(obj)' not in path:
- path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
- return path
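- # e.g. with toolset 'target', both 'foo/bar.o' and '$(obj)/foo/bar.o'
- # map to '$(obj).target/$(TARGET)/foo/bar.o'.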
-
-
- def Pchify(self, path, lang):
- """Convert a prefix header path to its output directory form."""
- path = self.Absolutify(path)
- if '$(' in path:
- path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
- (self.toolset, lang))
- return path
- return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
-
-
- def Absolutify(self, path):
- """Convert a subdirectory-relative path into a base-relative path.
- Skips over paths that contain variables."""
- if '$(' in path:
- # Don't call normpath in this case, as it might collapse the
- # path too aggressively if it features '..'. However it's still
- # important to strip trailing slashes.
- return path.rstrip('/')
- return os.path.normpath(os.path.join(self.path, path))
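- # e.g. with self.path 'foo/bar', 'baz.c' becomes 'foo/bar/baz.c', while
- # '$(obj)/gen/' is returned as '$(obj)/gen' without normpath.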
-
-
- def ExpandInputRoot(self, template, expansion, dirname):
- if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
- return template
- path = template % {
- 'INPUT_ROOT': expansion,
- 'INPUT_DIRNAME': dirname,
- }
- return path
-
-
- def _InstallableTargetInstallPath(self):
- """Returns the location of the final output for an installable target."""
- # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
- # rely on this. Emulate this behavior for mac.
-
- # XXX(TooTallNate): disabling this code since we don't want this behavior...
- #if (self.type == 'shared_library' and
- # (self.flavor != 'mac' or self.toolset != 'target')):
- # # Install all shared libs into a common directory (per toolset) for
- # # convenient access with LD_LIBRARY_PATH.
- # return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
- return '$(builddir)/' + self.alias
-
-
-def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
- build_files):
- """Write the target to regenerate the Makefile."""
- options = params['options']
- build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
- for filename in params['build_files_arg']]
-
- gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
- options.toplevel_dir)
- if not gyp_binary.startswith(os.sep):
- gyp_binary = os.path.join('.', gyp_binary)
-
- root_makefile.write(
- "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
- "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
- "%(makefile_name)s: %(deps)s\n"
- "\t$(call do_cmd,regen_makefile)\n\n" % {
- 'makefile_name': makefile_name,
- 'deps': ' '.join(map(Sourceify, build_files)),
- 'cmd': gyp.common.EncodePOSIXShellList(
- [gyp_binary, '-fmake'] +
- gyp.RegenerateFlags(options) +
- build_files_args)})
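- # A sketch of the generated rule, with hypothetical file names (the
- # actual command comes from gyp.RegenerateFlags):
- #
- #   quiet_cmd_regen_makefile = ACTION Regenerating $@
- #   cmd_regen_makefile = cd $(srcdir); ./gyp -fmake -Icommon.gypi all.gyp
- #   Makefile: all.gyp common.gypi
- #           $(call do_cmd,regen_makefile)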
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- for config in configurations:
- arguments = ['make']
- if options.toplevel_dir and options.toplevel_dir != '.':
- arguments += '-C', options.toplevel_dir
- arguments.append('BUILDTYPE=' + config)
- print 'Building [%s]: %s' % (config, arguments)
- subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- options = params['options']
- flavor = gyp.common.GetFlavor(params)
- generator_flags = params.get('generator_flags', {})
- builddir_name = generator_flags.get('output_dir', 'out')
- android_ndk_version = generator_flags.get('android_ndk_version', None)
- default_target = generator_flags.get('default_target', 'all')
-
- def CalculateMakefilePath(build_file, base_name):
- """Determine where to write a Makefile for a given gyp file."""
- # Paths in gyp files are relative to the .gyp file, but we want
- # paths relative to the source root for the master makefile. Grab
- # the path of the .gyp file as the base to relativize against.
- # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.depth)
- # We write the file in the base_path directory.
- output_file = os.path.join(options.depth, base_path, base_name)
- if options.generator_output:
- output_file = os.path.join(
- options.depth, options.generator_output, base_path, base_name)
- base_path = gyp.common.RelativePath(os.path.dirname(build_file),
- options.toplevel_dir)
- return base_path, output_file
-
- # TODO: search for the first non-'Default' target. This can go
- # away when we add verification that all targets have the
- # necessary configurations.
- default_configuration = None
- toolsets = set([target_dicts[target]['toolset'] for target in target_list])
- for target in target_list:
- spec = target_dicts[target]
- if spec['default_configuration'] != 'Default':
- default_configuration = spec['default_configuration']
- break
- if not default_configuration:
- default_configuration = 'Default'
-
- srcdir = '.'
- makefile_name = 'Makefile' + options.suffix
- makefile_path = os.path.join(options.toplevel_dir, makefile_name)
- if options.generator_output:
- global srcdir_prefix
- makefile_path = os.path.join(
- options.toplevel_dir, options.generator_output, makefile_name)
- srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
- srcdir_prefix = '$(srcdir)/'
-
- flock_command = 'flock'
- copy_archive_arguments = '-af'
- header_params = {
- 'default_target': default_target,
- 'builddir': builddir_name,
- 'default_configuration': default_configuration,
- 'flock': flock_command,
- 'flock_index': 1,
- 'link_commands': LINK_COMMANDS_LINUX,
- 'extra_commands': '',
- 'srcdir': srcdir,
- 'copy_archive_args': copy_archive_arguments,
- }
- if flavor == 'mac':
- flock_command = './gyp-mac-tool flock'
- header_params.update({
- 'flock': flock_command,
- 'flock_index': 2,
- 'link_commands': LINK_COMMANDS_MAC,
- 'extra_commands': SHARED_HEADER_MAC_COMMANDS,
- })
- elif flavor == 'android':
- header_params.update({
- 'link_commands': LINK_COMMANDS_ANDROID,
- })
- elif flavor == 'solaris':
- header_params.update({
- 'flock': './gyp-flock-tool flock',
- 'flock_index': 2,
- })
- elif flavor == 'freebsd':
- # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
- header_params.update({
- 'flock': 'lockf',
- })
- elif flavor == 'openbsd':
- copy_archive_arguments = '-pPRf'
- header_params.update({
- 'copy_archive_args': copy_archive_arguments,
- })
- elif flavor == 'aix':
- copy_archive_arguments = '-pPRf'
- header_params.update({
- 'copy_archive_args': copy_archive_arguments,
- 'link_commands': LINK_COMMANDS_AIX,
- 'flock': './gyp-flock-tool flock',
- 'flock_index': 2,
- })
-
- header_params.update({
- 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
- 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
- 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
- 'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
- 'CC.host': GetEnvironFallback(('CC_host', 'CC'), 'gcc'),
- 'AR.host': GetEnvironFallback(('AR_host', 'AR'), 'ar'),
- 'CXX.host': GetEnvironFallback(('CXX_host', 'CXX'), 'g++'),
- 'LINK.host': GetEnvironFallback(('LINK_host', 'LINK'), '$(CXX.host)'),
- })
-
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings_array = data[build_file].get('make_global_settings', [])
- wrappers = {}
- for key, value in make_global_settings_array:
- if key.endswith('_wrapper'):
- wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
- make_global_settings = ''
- for key, value in make_global_settings_array:
- if re.match('.*_wrapper', key):
- continue
- if value[0] != '$':
- value = '$(abspath %s)' % value
- wrapper = wrappers.get(key)
- if wrapper:
- value = '%s %s' % (wrapper, value)
- del wrappers[key]
- if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
- make_global_settings += (
- 'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
- # Let gyp-time envvars win over global settings.
- env_key = key.replace('.', '_') # CC.host -> CC_host
- if env_key in os.environ:
- value = os.environ[env_key]
- make_global_settings += ' %s = %s\n' % (key, value)
- make_global_settings += 'endif\n'
- else:
- make_global_settings += '%s ?= %s\n' % (key, value)
- # TODO(ukai): define cmd when only wrapper is specified in
- # make_global_settings.
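- # For illustration, a make_global_settings entry like
- #   [['CC', '/usr/bin/clang'], ['LINK', '/usr/bin/clang++']]
- # produces roughly:
- #   ifneq (,$(filter $(origin CC), undefined default))
- #     CC = $(abspath /usr/bin/clang)
- #   endif
- #   LINK ?= $(abspath /usr/bin/clang++)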
-
- header_params['make_global_settings'] = make_global_settings
-
- gyp.common.EnsureDirExists(makefile_path)
- root_makefile = open(makefile_path, 'w')
- root_makefile.write(SHARED_HEADER % header_params)
- # Currently all versions have the same effect, but in the future the
- # behavior could differ.
- if android_ndk_version:
- root_makefile.write(
- '# Define LOCAL_PATH for build of Android applications.\n'
- 'LOCAL_PATH := $(call my-dir)\n'
- '\n')
- for toolset in toolsets:
- root_makefile.write('TOOLSET := %s\n' % toolset)
- WriteRootHeaderSuffixRules(root_makefile)
-
- # Put build-time support tools next to the root Makefile.
- dest_path = os.path.dirname(makefile_path)
- gyp.common.CopyTool(flavor, dest_path)
-
- # Find the list of targets that derive from the gyp file(s) being built.
- needed_targets = set()
- for build_file in params['build_files']:
- for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
- needed_targets.add(target)
-
- build_files = set()
- include_list = set()
- for qualified_target in target_list:
- build_file, target, toolset = gyp.common.ParseQualifiedTarget(
- qualified_target)
-
- this_make_global_settings = data[build_file].get('make_global_settings', [])
- assert make_global_settings_array == this_make_global_settings, (
- "make_global_settings needs to be the same for all targets. %s vs. %s" %
- (this_make_global_settings, make_global_settings_array))
-
- build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
- included_files = data[build_file]['included_files']
- for included_file in included_files:
- # The included_files entries are relative to the dir of the build file
- # that included them, so we have to undo that and then make them relative
- # to the root dir.
- relative_include_file = gyp.common.RelativePath(
- gyp.common.UnrelativePath(included_file, build_file),
- options.toplevel_dir)
- abs_include_file = os.path.abspath(relative_include_file)
- # If the include file is from the ~/.gyp dir, we should use absolute path
- # so that relocating the src dir doesn't break the path.
- if (params['home_dot_gyp'] and
- abs_include_file.startswith(params['home_dot_gyp'])):
- build_files.add(abs_include_file)
- else:
- build_files.add(relative_include_file)
-
- base_path, output_file = CalculateMakefilePath(build_file,
- target + '.' + toolset + options.suffix + '.mk')
-
- spec = target_dicts[qualified_target]
- configs = spec['configurations']
-
- if flavor == 'mac':
- gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
-
- writer = MakefileWriter(generator_flags, flavor)
- writer.Write(qualified_target, base_path, output_file, spec, configs,
- part_of_all=qualified_target in needed_targets)
-
- # Our root_makefile lives at the source root. Compute the relative path
- # from there to the output_file for including.
- mkfile_rel_path = gyp.common.RelativePath(output_file,
- os.path.dirname(makefile_path))
- include_list.add(mkfile_rel_path)
-
- # Write out per-gyp (sub-project) Makefiles.
- depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
- for build_file in build_files:
- # The paths in build_files were relativized above, so undo that before
- # testing against the non-relativized items in target_list and before
- # calculating the Makefile path.
- build_file = os.path.join(depth_rel_path, build_file)
- gyp_targets = [target_dicts[target]['target_name'] for target in target_list
- if target.startswith(build_file) and
- target in needed_targets]
- # Only generate Makefiles for gyp files with targets.
- if not gyp_targets:
- continue
- base_path, output_file = CalculateMakefilePath(build_file,
- os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
- makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
- os.path.dirname(output_file))
- writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
- builddir_name)
-
-
- # Write out the sorted list of includes.
- root_makefile.write('\n')
- for include_file in sorted(include_list):
- # We wrap each .mk include in an if statement so users can tell make to
- # not load a file by setting NO_LOAD. The make code below loads the
- # .mk file only if its filename does not start with a token listed in
- # NO_LOAD.
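- # For example, with NO_LOAD=foo the guard for foo.target.mk expands to
- # $(findstring ^foo,^foo.target.mk), which is non-empty, so that
- # include is skipped.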
- root_makefile.write(
- "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
- " $(findstring $(join ^,$(prefix)),\\\n"
- " $(join ^," + include_file + ")))),)\n")
- root_makefile.write(" include " + include_file + "\n")
- root_makefile.write("endif\n")
- root_makefile.write('\n')
-
- if (not generator_flags.get('standalone')
- and generator_flags.get('auto_regeneration', True)):
- WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
-
- root_makefile.write(SHARED_FOOTER)
-
- root_makefile.close()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
deleted file mode 100644
index 6bfad0f3bd..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ /dev/null
@@ -1,3494 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import copy
-import ntpath
-import os
-import posixpath
-import re
-import subprocess
-import sys
-
-import gyp.common
-import gyp.easy_xml as easy_xml
-import gyp.generator.ninja as ninja_generator
-import gyp.MSVSNew as MSVSNew
-import gyp.MSVSProject as MSVSProject
-import gyp.MSVSSettings as MSVSSettings
-import gyp.MSVSToolFile as MSVSToolFile
-import gyp.MSVSUserFile as MSVSUserFile
-import gyp.MSVSUtil as MSVSUtil
-import gyp.MSVSVersion as MSVSVersion
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
-# TODO: Remove once bots are on 2.7, http://crbug.com/241769
-def _import_OrderedDict():
- import collections
- try:
- return collections.OrderedDict
- except AttributeError:
- import gyp.ordered_dict
- return gyp.ordered_dict.OrderedDict
-OrderedDict = _import_OrderedDict()
-
-
-# Regular expression for validating Visual Studio GUIDs. If the GUID
-# contains lowercase hex letters, MSVS will be fine. However,
-# IncrediBuild BuildConsole will parse the solution file, but then
-# silently skip building the target causing hard to track down errors.
-# Note that this only happens with the BuildConsole, and does not occur
-# if IncrediBuild is executed from inside Visual Studio. This regex
-# validates that the string looks like a GUID with all uppercase hex
-# letters.
-VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
-
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '.exe',
- 'STATIC_LIB_PREFIX': '',
- 'SHARED_LIB_PREFIX': '',
- 'STATIC_LIB_SUFFIX': '.lib',
- 'SHARED_LIB_SUFFIX': '.dll',
- 'INTERMEDIATE_DIR': '$(IntDir)',
- 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
- 'OS': 'win',
- 'PRODUCT_DIR': '$(OutDir)',
- 'LIB_DIR': '$(OutDir)lib',
- 'RULE_INPUT_ROOT': '$(InputName)',
- 'RULE_INPUT_DIRNAME': '$(InputDir)',
- 'RULE_INPUT_EXT': '$(InputExt)',
- 'RULE_INPUT_NAME': '$(InputFileName)',
- 'RULE_INPUT_PATH': '$(InputPath)',
- 'CONFIGURATION_NAME': '$(ConfigurationName)',
-}
-
-
-# The msvs specific sections that hold paths
-generator_additional_path_sections = [
- 'msvs_cygwin_dirs',
- 'msvs_props',
-]
-
-
-generator_additional_non_configuration_keys = [
- 'msvs_cygwin_dirs',
- 'msvs_cygwin_shell',
- 'msvs_large_pdb',
- 'msvs_shard',
- 'msvs_external_builder',
- 'msvs_external_builder_out_dir',
- 'msvs_external_builder_build_cmd',
- 'msvs_external_builder_clean_cmd',
- 'msvs_external_builder_clcompile_cmd',
- 'msvs_enable_winrt',
- 'msvs_requires_importlibrary',
- 'msvs_enable_winphone',
- 'msvs_application_type_revision',
- 'msvs_target_platform_version',
- 'msvs_target_platform_minversion',
-]
-
-
-# List of precompiled header related keys.
-precomp_keys = [
- 'msvs_precompiled_header',
- 'msvs_precompiled_source',
-]
-
-
-cached_username = None
-
-
-cached_domain = None
-
-
-# TODO(gspencer): Switch the os.environ calls to be
-# win32api.GetDomainName() and win32api.GetUserName() once the
-# python version in depot_tools has been updated to work on Vista
-# 64-bit.
-def _GetDomainAndUserName():
- if sys.platform not in ('win32', 'cygwin'):
- return ('DOMAIN', 'USERNAME')
- global cached_username
- global cached_domain
- if not cached_domain or not cached_username:
- domain = os.environ.get('USERDOMAIN')
- username = os.environ.get('USERNAME')
- if not domain or not username:
- call = subprocess.Popen(['net', 'config', 'Workstation'],
- stdout=subprocess.PIPE)
- config = call.communicate()[0]
- username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
- username_match = username_re.search(config)
- if username_match:
- username = username_match.group(1)
- domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
- domain_match = domain_re.search(config)
- if domain_match:
- domain = domain_match.group(1)
- cached_domain = domain
- cached_username = username
- return (cached_domain, cached_username)
-
-fixpath_prefix = None
-
-
-def _NormalizedSource(source):
- """Normalize the path.
-
- But not if that gets rid of a variable, as this may expand to something
- larger than one directory.
-
- Arguments:
- source: The path to be normalized.
-
- Returns:
- The normalized path.
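-
- For example, normalizing '$(IntDir)/../foo' would give 'foo' and lose the
- variable, so that path is returned unchanged.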
- """
- normalized = os.path.normpath(source)
- if source.count('$') == normalized.count('$'):
- source = normalized
- return source
-
-
-def _FixPath(path):
- """Convert paths to a form that will make sense in a vcproj file.
-
- Arguments:
- path: The path to convert, may contain / etc.
- Returns:
- The path with all slashes made into backslashes.
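-
- For example, 'a/b/' becomes 'a\\b' (trailing slash dropped).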
- """
- if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
- path = os.path.join(fixpath_prefix, path)
- path = path.replace('/', '\\')
- path = _NormalizedSource(path)
- if path and path[-1] == '\\':
- path = path[:-1]
- return path
-
-
-def _FixPaths(paths):
- """Fix each of the paths of the list."""
- return [_FixPath(i) for i in paths]
-
-
-def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
- list_excluded=True, msvs_version=None):
- """Converts a list split source file paths into a vcproj folder hierarchy.
-
- Arguments:
- sources: A list of split source file paths.
- prefix: A list of source file path layers meant to apply to each of sources.
- excluded: A set of excluded files.
- list_excluded: Whether excluded files are listed in an _excluded_files
- filter in the result.
- msvs_version: A MSVSVersion object.
-
- Returns:
- A hierarchy of filenames and MSVSProject.Filter objects that matches the
- layout of the source tree.
- For example:
- _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
- prefix=['joe'])
- -->
- [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
- MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
- """
- if not prefix: prefix = []
- result = []
- excluded_result = []
- folders = OrderedDict()
- # Gather files into the final result, excluded, or folders.
- for s in sources:
- if len(s) == 1:
- filename = _NormalizedSource('\\'.join(prefix + s))
- if filename in excluded:
- excluded_result.append(filename)
- else:
- result.append(filename)
- elif msvs_version and not msvs_version.UsesVcxproj():
- # For MSVS 2008 and earlier, we need to process all files before walking
- # the sub folders.
- if not folders.get(s[0]):
- folders[s[0]] = []
- folders[s[0]].append(s[1:])
- else:
- contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
- excluded=excluded,
- list_excluded=list_excluded,
- msvs_version=msvs_version)
- contents = MSVSProject.Filter(s[0], contents=contents)
- result.append(contents)
- # Add a folder for excluded files.
- if excluded_result and list_excluded:
- excluded_folder = MSVSProject.Filter('_excluded_files',
- contents=excluded_result)
- result.append(excluded_folder)
-
- if msvs_version and msvs_version.UsesVcxproj():
- return result
-
- # Populate all the folders.
- for f in folders:
- contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
- excluded=excluded,
- list_excluded=list_excluded,
- msvs_version=msvs_version)
- contents = MSVSProject.Filter(f, contents=contents)
- result.append(contents)
- return result
-
-
-def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
- if not value: return
- _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
-
-
-def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
- # TODO(bradnelson): ugly hack, fix this more generally!!!
- if 'Directories' in setting or 'Dependencies' in setting:
- if type(value) == str:
- value = value.replace('/', '\\')
- else:
- value = [i.replace('/', '\\') for i in value]
- if not tools.get(tool_name):
- tools[tool_name] = dict()
- tool = tools[tool_name]
- if tool.get(setting):
- if only_if_unset: return
- if type(tool[setting]) == list and type(value) == list:
- tool[setting] += value
- else:
- raise TypeError(
- 'Appending "%s" to a non-list setting "%s" for tool "%s" is '
- 'not allowed, previous value: %s' % (
- value, setting, tool_name, str(tool[setting])))
- else:
- tool[setting] = value
-
-
-def _ConfigPlatform(config_data):
- return config_data.get('msvs_configuration_platform', 'Win32')
-
-
-def _ConfigBaseName(config_name, platform_name):
- if config_name.endswith('_' + platform_name):
- return config_name[0:-len(platform_name) - 1]
- else:
- return config_name
-
-
-def _ConfigFullName(config_name, config_data):
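- # For example, config 'Debug_x64' with platform 'x64' yields 'Debug|x64'.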
- platform_name = _ConfigPlatform(config_data)
- return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
-
-
-def _ConfigWindowsTargetPlatformVersion(config_data):
- ver = config_data.get('msvs_windows_target_platform_version')
- if not ver or re.match(r'^\d+', ver):
- return ver
- for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
- r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
- sdkdir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
- if not sdkdir:
- continue
- version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
- # find a matching entry in sdkdir\include
- names = sorted([x for x in os.listdir(r'%s\include' % sdkdir) \
- if x.startswith(version)], reverse = True)
- return names[0]
-
-
-def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
- quote_cmd, do_setup_env):
-
- if [x for x in cmd if '$(InputDir)' in x]:
- input_dir_preamble = (
- 'set INPUTDIR=$(InputDir)\n'
- 'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
- 'set INPUTDIR=%INPUTDIR:~0,-1%\n'
- )
- else:
- input_dir_preamble = ''
-
- if cygwin_shell:
- # Find path to cygwin.
- cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
- # Prepare command.
- direct_cmd = cmd
- direct_cmd = [i.replace('$(IntDir)',
- '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
- direct_cmd = [i.replace('$(OutDir)',
- '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
- direct_cmd = [i.replace('$(InputDir)',
- '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
- if has_input_path:
- direct_cmd = [i.replace('$(InputPath)',
- '`cygpath -m "${INPUTPATH}"`')
- for i in direct_cmd]
- direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
- # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
- direct_cmd = ' '.join(direct_cmd)
- # TODO(quote): regularize quoting path names throughout the module
- cmd = ''
- if do_setup_env:
- cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
- cmd += 'set CYGWIN=nontsec&& '
- if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
- cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
- if direct_cmd.find('INTDIR') >= 0:
- cmd += 'set INTDIR=$(IntDir)&& '
- if direct_cmd.find('OUTDIR') >= 0:
- cmd += 'set OUTDIR=$(OutDir)&& '
- if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
- cmd += 'set INPUTPATH=$(InputPath) && '
- cmd += 'bash -c "%(cmd)s"'
- cmd = cmd % {'cygwin_dir': cygwin_dir,
- 'cmd': direct_cmd}
- return input_dir_preamble + cmd
- else:
- # Convert cat --> type to mimic unix.
- if cmd[0] == 'cat':
- command = ['type']
- else:
- command = [cmd[0].replace('/', '\\')]
- # Add call before command to ensure that commands can be tied together one
- # after the other without aborting in Incredibuild, since IB makes a bat
- # file out of the raw command string, and some commands (like python) are
- # actually batch files themselves.
- command.insert(0, 'call')
- # Fix the paths
- # TODO(quote): This is a really ugly heuristic, and will miss path fixing
- # for arguments like "--arg=path" or "/opt:path".
- # If the argument starts with a slash or dash, it's probably a command line
- # switch
- arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
- arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
- arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
- if quote_cmd:
- # Support a mode for using cmd directly.
- # Convert any paths to native form (first element is used directly).
- # TODO(quote): regularize quoting path names throughout the module
- arguments = ['"%s"' % i for i in arguments]
- # Collapse into a single command.
- return input_dir_preamble + ' '.join(command + arguments)
-
-
-def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
- # Currently this weird argument munging is used to duplicate the way a
- # python script would need to be run as part of the chrome tree.
- # Eventually we should add some sort of rule_default option to set this
- # per project. For now the behavior chrome needs is the default.
- mcs = rule.get('msvs_cygwin_shell')
- if mcs is None:
- mcs = int(spec.get('msvs_cygwin_shell', 1))
- elif isinstance(mcs, str):
- mcs = int(mcs)
- quote_cmd = int(rule.get('msvs_quote_cmd', 1))
- return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
- quote_cmd, do_setup_env=do_setup_env)
-
-
-def _AddActionStep(actions_dict, inputs, outputs, description, command):
- """Merge action into an existing list of actions.
-
- Care must be taken so that actions which have overlapping inputs either don't
- get assigned to the same input, or get collapsed into one.
-
- Arguments:
- actions_dict: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
- inputs: list of inputs
- outputs: list of outputs
- description: description of the action
- command: command line to execute
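-
- For example, an actions_dict entry might map 'foo.idl' to
- [{'inputs': [...], 'outputs': [...], 'description': ..., 'command': ...}],
- matching the dict literal built below.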
- """
- # Require there to be at least one input (call sites will ensure this).
- assert inputs
-
- action = {
- 'inputs': inputs,
- 'outputs': outputs,
- 'description': description,
- 'command': command,
- }
-
- # Pick where to stick this action.
- # While less than optimal in terms of build time, attach them to the first
- # input for now.
- chosen_input = inputs[0]
-
- # Add it there.
- if chosen_input not in actions_dict:
- actions_dict[chosen_input] = []
- actions_dict[chosen_input].append(action)
-
-
-def _AddCustomBuildToolForMSVS(p, spec, primary_input,
- inputs, outputs, description, cmd):
- """Add a custom build tool to execute something.
-
- Arguments:
- p: the target project
- spec: the target project dict
- primary_input: input file to attach the build tool to
- inputs: list of inputs
- outputs: list of outputs
- description: description of the action
- cmd: command line to execute
- """
- inputs = _FixPaths(inputs)
- outputs = _FixPaths(outputs)
- tool = MSVSProject.Tool(
- 'VCCustomBuildTool',
- {'Description': description,
- 'AdditionalDependencies': ';'.join(inputs),
- 'Outputs': ';'.join(outputs),
- 'CommandLine': cmd,
- })
- # Add to the properties of primary input for each config.
- for config_name, c_data in spec['configurations'].iteritems():
- p.AddFileConfig(_FixPath(primary_input),
- _ConfigFullName(config_name, c_data), tools=[tool])
-
-
-def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
- """Add actions accumulated into an actions_dict, merging as needed.
-
- Arguments:
- p: the target project
- spec: the target project dict
- actions_dict: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
- """
- for primary_input in actions_dict:
- inputs = OrderedSet()
- outputs = OrderedSet()
- descriptions = []
- commands = []
- for action in actions_dict[primary_input]:
- inputs.update(OrderedSet(action['inputs']))
- outputs.update(OrderedSet(action['outputs']))
- descriptions.append(action['description'])
- commands.append(action['command'])
- # Add the custom build step for one input file.
- description = ', and also '.join(descriptions)
- command = '\r\n'.join(commands)
- _AddCustomBuildToolForMSVS(p, spec,
- primary_input=primary_input,
- inputs=inputs,
- outputs=outputs,
- description=description,
- cmd=command)
-
-
-def _RuleExpandPath(path, input_file):
- """Given the input file to which a rule applied, string substitute a path.
-
- Arguments:
- path: a path to string expand
- input_file: the file to which the rule applied.
- Returns:
- The string substituted path.
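-
- For example, with input_file 'dir/foo.idl', the path
- '$(InputDir)/$(InputName).h' expands to 'dir/foo.h'.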
- """
- path = path.replace('$(InputName)',
- os.path.splitext(os.path.split(input_file)[1])[0])
- path = path.replace('$(InputDir)', os.path.dirname(input_file))
- path = path.replace('$(InputExt)',
- os.path.splitext(os.path.split(input_file)[1])[1])
- path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
- path = path.replace('$(InputPath)', input_file)
- return path
-
-
-def _FindRuleTriggerFiles(rule, sources):
- """Find the list of files which a particular rule applies to.
-
- Arguments:
- rule: the rule in question
- sources: the set of all known source files for this project
- Returns:
- The list of sources that trigger a particular rule.
- """
- return rule.get('rule_sources', [])
-
-
-def _RuleInputsAndOutputs(rule, trigger_file):
- """Find the inputs and outputs generated by a rule.
-
- Arguments:
- rule: the rule in question.
- trigger_file: the main trigger for this rule.
- Returns:
- The pair of (inputs, outputs) involved in this rule.
- """
- raw_inputs = _FixPaths(rule.get('inputs', []))
- raw_outputs = _FixPaths(rule.get('outputs', []))
- inputs = OrderedSet()
- outputs = OrderedSet()
- inputs.add(trigger_file)
- for i in raw_inputs:
- inputs.add(_RuleExpandPath(i, trigger_file))
- for o in raw_outputs:
- outputs.add(_RuleExpandPath(o, trigger_file))
- return (inputs, outputs)
-
-
-def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
- """Generate a native rules file.
-
- Arguments:
- p: the target project
- rules: the set of rules to include
- output_dir: the directory in which the project/gyp resides
- spec: the project dict
- options: global generator options
- """
- rules_filename = '%s%s.rules' % (spec['target_name'],
- options.suffix)
- rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
- spec['target_name'])
- # Add each rule.
- for r in rules:
- rule_name = r['rule_name']
- rule_ext = r['extension']
- inputs = _FixPaths(r.get('inputs', []))
- outputs = _FixPaths(r.get('outputs', []))
- # Skip a rule with no action and no inputs.
- if 'action' not in r and not r.get('rule_sources', []):
- continue
- cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
- do_setup_env=True)
- rules_file.AddCustomBuildRule(name=rule_name,
- description=r.get('message', rule_name),
- extensions=[rule_ext],
- additional_dependencies=inputs,
- outputs=outputs,
- cmd=cmd)
- # Write out rules file.
- rules_file.WriteIfChanged()
-
- # Add rules file to project.
- p.AddToolFile(rules_filename)
-
-
-def _Cygwinify(path):
- path = path.replace('$(OutDir)', '$(OutDirCygwin)')
- path = path.replace('$(IntDir)', '$(IntDirCygwin)')
- return path
-
-
-def _GenerateExternalRules(rules, output_dir, spec,
- sources, options, actions_to_add):
- """Generate an external makefile to do a set of rules.
-
- Arguments:
- rules: the list of rules to include
- output_dir: path containing project and gyp files
- spec: project specification data
- sources: set of sources known
- options: global generator options
- actions_to_add: The list of actions we will add to.
- """
- filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
- mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
- # Find cygwin style versions of some paths.
- mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
- mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
- # Gather stuff needed to emit all: target.
- all_inputs = OrderedSet()
- all_outputs = OrderedSet()
- all_output_dirs = OrderedSet()
- first_outputs = []
- for rule in rules:
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for tf in trigger_files:
- inputs, outputs = _RuleInputsAndOutputs(rule, tf)
- all_inputs.update(OrderedSet(inputs))
- all_outputs.update(OrderedSet(outputs))
- # Only use one target from each rule as the dependency for
- # 'all' so we don't try to build each rule multiple times.
- first_outputs.append(list(outputs)[0])
- # Get the unique output directories for this rule.
- output_dirs = [os.path.split(i)[0] for i in outputs]
- for od in output_dirs:
- all_output_dirs.add(od)
- first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
- # Write out all: target, including mkdir for each output directory.
- mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
- for od in all_output_dirs:
- if od:
- mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
- mk_file.write('\n')
- # Define how each output is generated.
- for rule in rules:
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for tf in trigger_files:
- # Get all the inputs and outputs for this rule for this trigger file.
- inputs, outputs = _RuleInputsAndOutputs(rule, tf)
- inputs = [_Cygwinify(i) for i in inputs]
- outputs = [_Cygwinify(i) for i in outputs]
- # Prepare the command line for this rule.
- cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
- cmd = ['"%s"' % i for i in cmd]
- cmd = ' '.join(cmd)
- # Add it to the makefile.
- mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
- mk_file.write('\t%s\n\n' % cmd)
- # Close up the file.
- mk_file.close()
-
- # Add makefile to list of sources.
- sources.add(filename)
- # Add a build action to call makefile.
- cmd = ['make',
- 'OutDir=$(OutDir)',
- 'IntDir=$(IntDir)',
- '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
- '-f', filename]
- cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
- # Insert makefile as 0'th input, so it gets the action attached there,
- # as this is easier to understand in the IDE.
- all_inputs = list(all_inputs)
- all_inputs.insert(0, filename)
- _AddActionStep(actions_to_add,
- inputs=_FixPaths(all_inputs),
- outputs=_FixPaths(all_outputs),
- description='Running external rules for %s' %
- spec['target_name'],
- command=cmd)
-
-
-def _EscapeEnvironmentVariableExpansion(s):
- """Escapes % characters.
-
- Escapes any % characters so that Windows-style environment variable
- expansions will leave them alone.
- See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
- to understand why we have to do this.
-
- Args:
- s: The string to be escaped.
-
- Returns:
- The escaped string.
- """
- s = s.replace('%', '%%')
- return s
-
-
-quote_replacer_regex = re.compile(r'(\\*)"')
-
-
-def _EscapeCommandLineArgumentForMSVS(s):
- """Escapes a Windows command-line argument.
-
- So that the Win32 CommandLineToArgv function will turn the escaped result back
- into the original string.
- See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
- ("Parsing C++ Command-Line Arguments") to understand why we have to do
- this.
-
- Args:
- s: the string to be escaped.
- Returns:
- the escaped string.
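-
- For example, the argument a "b" is escaped to "a \"b\"".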
- """
-
- def _Replace(match):
- # For a literal quote, CommandLineToArgv requires an odd number of
- # backslashes preceding it, and it produces half as many literal backslashes
- # (rounded down). So we need to produce 2n+1 backslashes.
- return 2 * match.group(1) + '\\"'
-
- # Escape all quotes so that they are interpreted literally.
- s = quote_replacer_regex.sub(_Replace, s)
- # Now add unescaped quotes so that any whitespace is interpreted literally.
- s = '"' + s + '"'
- return s
-
-
-delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
-
-
-def _EscapeVCProjCommandLineArgListItem(s):
- """Escapes command line arguments for MSVS.
-
- The VCProj format stores string lists in a single string using commas and
- semi-colons as separators, which must be quoted if they are to be
- interpreted literally. However, command-line arguments may already have
- quotes, and the VCProj parser is ignorant of the backslash escaping
- convention used by CommandLineToArgv, so the command-line quotes and the
- VCProj quotes may not be the same quotes. So to store a general
- command-line argument in a VCProj list, we need to parse the existing
- quoting according to VCProj's convention and quote any delimiters that are
- not already quoted by that convention. The quotes that we add will also be
- seen by CommandLineToArgv, so if backslashes precede them then we also have
- to escape those backslashes according to the CommandLineToArgv
- convention.
-
- Args:
- s: the string to be escaped.
- Returns:
- the escaped string.
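-
- For example, the unquoted item a,b becomes a","b, while "a,b" is left
- unchanged because its delimiter is already quoted.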
- """
-
- def _Replace(match):
- # For a non-literal quote, CommandLineToArgv requires an even number of
- # backslashes preceding it, and it produces half as many literal
- # backslashes. So we need to produce 2n backslashes.
- return 2 * match.group(1) + '"' + match.group(2) + '"'
-
- segments = s.split('"')
- # The unquoted segments are at the even-numbered indices.
- for i in range(0, len(segments), 2):
- segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
- # Concatenate back into a single string
- s = '"'.join(segments)
- if len(segments) % 2 == 0:
- # String ends while still quoted according to VCProj's convention. This
- # means the delimiter and the next list item that follow this one in the
- # .vcproj file will be misinterpreted as part of this item. There is nothing
- # we can do about this. Adding an extra quote would correct the problem in
- # the VCProj but cause the same problem on the final command-line. Moving
- # the item to the end of the list does works, but that's only possible if
- # there's only one such item. Let's just warn the user.
- print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
- 'quotes in ' + s)
- return s
-
-
-def _EscapeCppDefineForMSVS(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = _EscapeEnvironmentVariableExpansion(s)
- s = _EscapeCommandLineArgumentForMSVS(s)
- s = _EscapeVCProjCommandLineArgListItem(s)
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- s = s.replace('#', '\\%03o' % ord('#'))
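- # e.g. the define FOO#BAR leaves this function as "FOO\043BAR".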
- return s
-
-
-quote_replacer_regex2 = re.compile(r'(\\+)"')
-
-
-def _EscapeCommandLineArgumentForMSBuild(s):
- """Escapes a Windows command-line argument for use by MSBuild."""
-
- def _Replace(match):
- return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
-
- # Escape all quotes so that they are interpreted literally.
- s = quote_replacer_regex2.sub(_Replace, s)
- return s
-
-
-def _EscapeMSBuildSpecialCharacters(s):
- escape_dictionary = {
- '%': '%25',
- '$': '%24',
- '@': '%40',
- "'": '%27',
- ';': '%3B',
- '?': '%3F',
- '*': '%2A'
- }
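- # e.g. 'a$b;c' -> 'a%24b%3Bc'.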
- result = ''.join([escape_dictionary.get(c, c) for c in s])
- return result
-
-
-def _EscapeCppDefineForMSBuild(s):
- """Escapes a CPP define so that it will reach the compiler unaltered."""
- s = _EscapeEnvironmentVariableExpansion(s)
- s = _EscapeCommandLineArgumentForMSBuild(s)
- s = _EscapeMSBuildSpecialCharacters(s)
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- s = s.replace('#', '\\%03o' % ord('#'))
- return s
-
-
-def _GenerateRulesForMSVS(p, output_dir, options, spec,
- sources, excluded_sources,
- actions_to_add):
- """Generate all the rules for a particular project.
-
- Arguments:
- p: the project
- output_dir: directory to emit rules to
- options: global options passed to the generator
- spec: the specification for this project
- sources: the set of all known source files in this project
- excluded_sources: the set of sources excluded from normal processing
- actions_to_add: deferred list of actions to add in
- """
- rules = spec.get('rules', [])
- rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
- rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
- # Handle rules that use a native rules file.
- if rules_native:
- _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
-
- # Handle external rules (non-native rules).
- if rules_external:
- _GenerateExternalRules(rules_external, output_dir, spec,
- sources, options, actions_to_add)
- _AdjustSourcesForRules(rules, sources, excluded_sources, False)
-
-
-def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
- # Add outputs generated by each rule (if applicable).
- for rule in rules:
- # Add in the outputs from this rule.
- trigger_files = _FindRuleTriggerFiles(rule, sources)
- for trigger_file in trigger_files:
- # Remove trigger_file from excluded_sources to let the rule be triggered
- # (e.g. rule trigger ax_enums.idl is added to excluded_sources
- # because it's also in an action's inputs in the same project)
- excluded_sources.discard(_FixPath(trigger_file))
- # Done if not processing outputs as sources.
- if int(rule.get('process_outputs_as_sources', False)):
- inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
- inputs = OrderedSet(_FixPaths(inputs))
- outputs = OrderedSet(_FixPaths(outputs))
- inputs.remove(_FixPath(trigger_file))
- sources.update(inputs)
- if not is_msbuild:
- excluded_sources.update(inputs)
- sources.update(outputs)
-
-
-def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
- """Take inputs with actions attached out of the list of exclusions.
-
- Arguments:
- excluded_sources: list of source files not to be built.
- actions_to_add: dict of actions keyed on source file they're attached to.
- Returns:
- excluded_sources with files that have actions attached removed.
- """
- must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
- return [s for s in excluded_sources if s not in must_keep]
-
-
-def _GetDefaultConfiguration(spec):
- return spec['configurations'][spec['default_configuration']]
-
-
-def _GetGuidOfProject(proj_path, spec):
- """Get the guid for the project.
-
- Arguments:
- proj_path: Path of the vcproj or vcxproj file to generate.
- spec: The target dictionary containing the properties of the target.
- Returns:
- the guid.
- Raises:
- ValueError: if the specified GUID is invalid.
- """
- # Pluck out the default configuration.
- default_config = _GetDefaultConfiguration(spec)
- # Decide the guid of the project.
- guid = default_config.get('msvs_guid')
- if guid:
- if VALID_MSVS_GUID_CHARS.match(guid) is None:
- raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
- (guid, VALID_MSVS_GUID_CHARS.pattern))
- guid = '{%s}' % guid
- guid = guid or MSVSNew.MakeGuid(proj_path)
- return guid
-
-
-def _GetMsbuildToolsetOfProject(proj_path, spec, version):
- """Get the platform toolset for the project.
-
- Arguments:
- proj_path: Path of the vcproj or vcxproj file to generate.
- spec: The target dictionary containing the properties of the target.
- version: The MSVSVersion object.
- Returns:
- the platform toolset string or None.
- """
- # Pluck out the default configuration.
- default_config = _GetDefaultConfiguration(spec)
- toolset = default_config.get('msbuild_toolset')
- if not toolset and version.DefaultToolset():
- toolset = version.DefaultToolset()
- return toolset
-
-
-def _GenerateProject(project, options, version, generator_flags):
- """Generates a vcproj file.
-
- Arguments:
- project: the MSVSProject object.
- options: global generator options.
- version: the MSVSVersion object.
- generator_flags: dict of generator-specific flags.
- Returns:
- A list of source files that cannot be found on disk.
- """
- default_config = _GetDefaultConfiguration(project.spec)
-
- # Skip emitting anything if told to with msvs_existing_vcproj option.
- if default_config.get('msvs_existing_vcproj'):
- return []
-
- if version.UsesVcxproj():
- return _GenerateMSBuildProject(project, options, version, generator_flags)
- else:
- return _GenerateMSVSProject(project, options, version, generator_flags)
-
-
-# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
-def _ValidateSourcesForMSVSProject(spec, version):
- """Makes sure if duplicate basenames are not specified in the source list.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- version: The VisualStudioVersion object.
- """
- # This validation should not be applied to MSVC2010 and later.
- assert not version.UsesVcxproj()
-
- # TODO: Check if MSVC allows this for loadable_module targets.
- if spec.get('type', None) not in ('static_library', 'shared_library'):
- return
- sources = spec.get('sources', [])
- basenames = {}
- for source in sources:
- name, ext = os.path.splitext(source)
- is_compiled_file = ext in [
- '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
- if not is_compiled_file:
- continue
- basename = os.path.basename(name) # Don't include extension.
- basenames.setdefault(basename, []).append(source)
-
- error = ''
- for basename, files in basenames.iteritems():
- if len(files) > 1:
- error += ' %s: %s\n' % (basename, ' '.join(files))
-
- if error:
- print('static library %s has several files with the same basename:\n' %
- spec['target_name'] + error + 'MSVC08 cannot handle that.')
- raise GypError('Duplicate basenames in sources section, see list above')
-
-
-def _GenerateMSVSProject(project, options, version, generator_flags):
- """Generates a .vcproj file. It may create .rules and .user files too.
-
- Arguments:
- project: The project object we will generate the file for.
- options: Global options passed to the generator.
- version: The VisualStudioVersion object.
- generator_flags: dict of generator-specific flags.
- """
- spec = project.spec
- gyp.common.EnsureDirExists(project.path)
-
- platforms = _GetUniquePlatforms(spec)
- p = MSVSProject.Writer(project.path, version, spec['target_name'],
- project.guid, platforms)
-
- # Get directory project file is in.
- project_dir = os.path.split(project.path)[0]
- gyp_path = _NormalizedSource(project.build_file)
- relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
- config_type = _GetMSVSConfigurationType(spec, project.build_file)
- for config_name, config in spec['configurations'].iteritems():
- _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
-
- # MSVC08 and prior versions cannot handle duplicate basenames in the same
- # target.
- # TODO: Take excluded sources into consideration if possible.
- _ValidateSourcesForMSVSProject(spec, version)
-
- # Prepare list of sources and excluded sources.
- gyp_file = os.path.split(project.build_file)[1]
- sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
- gyp_file)
-
- # Add rules.
- actions_to_add = {}
- _GenerateRulesForMSVS(p, project_dir, options, spec,
- sources, excluded_sources,
- actions_to_add)
- list_excluded = generator_flags.get('msvs_list_excluded_files', True)
- sources, excluded_sources, excluded_idl = (
- _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
- sources, excluded_sources,
- list_excluded, version))
-
- # Add in files.
- missing_sources = _VerifySourcesExist(sources, project_dir)
- p.AddFiles(sources)
-
- _AddToolFilesToMSVS(p, spec)
- _HandlePreCompiledHeaders(p, sources, spec)
- _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
- _AddCopies(actions_to_add, spec)
- _WriteMSVSUserFile(project.path, version, spec)
-
- # NOTE: this stanza must appear after all actions have been decided.
- # Don't exclude sources with actions attached, or they won't run.
- excluded_sources = _FilterActionsFromExcluded(
- excluded_sources, actions_to_add)
- _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
- list_excluded)
- _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
-
- # Write it out.
- p.WriteIfChanged()
-
- return missing_sources
-
-
-def _GetUniquePlatforms(spec):
- """Returns the list of unique platforms for this spec, e.g ['win32', ...].
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- The list of unique platform names.
- """
- # Gather list of unique platforms.
- platforms = OrderedSet()
- for configuration in spec['configurations']:
- platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
- platforms = list(platforms)
- return platforms
-
-
-def _CreateMSVSUserFile(proj_path, version, spec):
- """Generates a .user file for the user running this Gyp program.
-
- Arguments:
- proj_path: The path of the project file being created. The .user file
- shares the same path (with an appropriate suffix).
- version: The VisualStudioVersion object.
- spec: The target dictionary containing the properties of the target.
- Returns:
- The MSVSUserFile object created.
- """
- (domain, username) = _GetDomainAndUserName()
- vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
- user_file = MSVSUserFile.Writer(vcuser_filename, version,
- spec['target_name'])
- return user_file
-
-
-def _GetMSVSConfigurationType(spec, build_file):
- """Returns the configuration type for this project.
-
- It's a number defined by Microsoft. May raise an exception.
-
- Args:
- spec: The target dictionary containing the properties of the target.
- build_file: The path of the gyp file.
- Returns:
- An integer, the configuration type.
- """
- try:
- config_type = {
- 'executable': '1', # .exe
- 'shared_library': '2', # .dll
- 'loadable_module': '2', # .dll
- 'static_library': '4', # .lib
- 'none': '10', # Utility type
- }[spec['type']]
- except KeyError:
- if spec.get('type'):
- raise GypError('Target type %s is not a valid target type for '
- 'target %s in %s.' %
- (spec['type'], spec['target_name'], build_file))
- else:
- raise GypError('Missing type field for target %s in %s.' %
- (spec['target_name'], build_file))
- return config_type
-
-
-def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
- """Adds a configuration to the MSVS project.
-
- Many settings in a vcproj file are specific to a configuration. This
- function generates the main part of the vcproj file that is
- configuration specific.
-
- Arguments:
- p: The target project being generated.
- spec: The target dictionary containing the properties of the target.
- config_type: The configuration type, a number as defined by Microsoft.
- config_name: The name of the configuration.
- config: The dictionary that defines the special processing to be done
- for this configuration.
- """
- # Get the information for this configuration
- include_dirs, midl_include_dirs, resource_include_dirs = \
- _GetIncludeDirs(config)
- libraries = _GetLibraries(spec)
- library_dirs = _GetLibraryDirs(config)
- out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
- defines = _GetDefines(config)
- defines = [_EscapeCppDefineForMSVS(d) for d in defines]
- disabled_warnings = _GetDisabledWarnings(config)
- prebuild = config.get('msvs_prebuild')
- postbuild = config.get('msvs_postbuild')
- def_file = _GetModuleDefinition(spec)
- precompiled_header = config.get('msvs_precompiled_header')
-
- # Prepare the list of tools as a dictionary.
- tools = dict()
- # Add in user specified msvs_settings.
- msvs_settings = config.get('msvs_settings', {})
- MSVSSettings.ValidateMSVSSettings(msvs_settings)
-
- # Prevent default library inheritance from the environment.
- _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
-
- for tool in msvs_settings:
- settings = config['msvs_settings'][tool]
- for setting in settings:
- _ToolAppend(tools, tool, setting, settings[setting])
- # Add the information to the appropriate tool
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'AdditionalIncludeDirectories', include_dirs)
- _ToolAppend(tools, 'VCMIDLTool',
- 'AdditionalIncludeDirectories', midl_include_dirs)
- _ToolAppend(tools, 'VCResourceCompilerTool',
- 'AdditionalIncludeDirectories', resource_include_dirs)
- # Add in libraries.
- _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
- _ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
- library_dirs)
- if out_file:
- _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
- # Add defines.
- _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
- _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
- defines)
- # Change program database directory to prevent collisions.
- _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
- '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
- # Add disabled warnings.
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'DisableSpecificWarnings', disabled_warnings)
- # Add Pre-build.
- _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
- # Add Post-build.
- _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
- # Turn on precompiled headers if appropriate.
- if precompiled_header:
- precompiled_header = os.path.split(precompiled_header)[1]
- _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'PrecompiledHeaderThrough', precompiled_header)
- _ToolAppend(tools, 'VCCLCompilerTool',
- 'ForcedIncludeFiles', precompiled_header)
- # Loadable modules don't generate import libraries;
- # tell dependent projects to not expect one.
- if spec['type'] == 'loadable_module':
- _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
- # Set the module definition file if any.
- if def_file:
- _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
-
- _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
-
-
-def _GetIncludeDirs(config):
- """Returns the list of directories to be used for #include directives.
-
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of directory paths.
- """
- # TODO(bradnelson): include_dirs should really be flexible enough not to
- # require this sort of thing.
- include_dirs = (
- config.get('include_dirs', []) +
- config.get('msvs_system_include_dirs', []))
- midl_include_dirs = (
- config.get('midl_include_dirs', []) +
- config.get('msvs_system_include_dirs', []))
- resource_include_dirs = config.get('resource_include_dirs', include_dirs)
- include_dirs = _FixPaths(include_dirs)
- midl_include_dirs = _FixPaths(midl_include_dirs)
- resource_include_dirs = _FixPaths(resource_include_dirs)
- return include_dirs, midl_include_dirs, resource_include_dirs
-
-
-def _GetLibraryDirs(config):
- """Returns the list of directories to be used for library search paths.
-
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of directory paths.
- """
-
- library_dirs = config.get('library_dirs', [])
- library_dirs = _FixPaths(library_dirs)
- return library_dirs
-
-
-def _GetLibraries(spec):
- """Returns the list of libraries for this configuration.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- The list of directory paths.
- """
- libraries = spec.get('libraries', [])
- # Strip out -l, as it is not used on Windows (but is needed so we can pass
- # in libraries that are assumed to be in the default library path).
- # Also remove duplicate entries, leaving only the last duplicate, while
- # preserving order.
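- # e.g. ['-lfoo', 'bar.lib', 'foo.lib'] -> ['bar.lib', 'foo.lib'].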
- found = OrderedSet()
- unique_libraries_list = []
- for entry in reversed(libraries):
- library = re.sub(r'^\-l', '', entry)
- if not os.path.splitext(library)[1]:
- library += '.lib'
- if library not in found:
- found.add(library)
- unique_libraries_list.append(library)
- unique_libraries_list.reverse()
- return unique_libraries_list
-
-
-def _GetOutputFilePathAndTool(spec, msbuild):
- """Returns the path and tool to use for this target.
-
- Figures out the path of the file this spec will create and the name of
- the VC tool that will create it.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- msbuild: Whether the project is being generated for MSBuild.
- Returns:
- A triple of (file path, name of the vc tool, name of the msbuild tool)
- """
- # Select a name for the output file.
- out_file = ''
- vc_tool = ''
- msbuild_tool = ''
- output_file_map = {
- 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
- 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
- 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
- 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
- }
- output_file_props = output_file_map.get(spec['type'])
- if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
- vc_tool, msbuild_tool, out_dir, suffix = output_file_props
- if spec.get('standalone_static_library', 0):
- out_dir = '$(OutDir)'
- out_dir = spec.get('product_dir', out_dir)
- product_extension = spec.get('product_extension')
- if product_extension:
- suffix = '.' + product_extension
- elif msbuild:
- suffix = '$(TargetExt)'
- prefix = spec.get('product_prefix', '')
- product_name = spec.get('product_name', '$(ProjectName)')
- out_file = ntpath.join(out_dir, prefix + product_name + suffix)
- return out_file, vc_tool, msbuild_tool
-
-
-def _GetOutputTargetExt(spec):
- """Returns the extension for this target, including the dot
-
- If product_extension is specified, set target_extension to this to avoid
- MSB8012, returns None otherwise. Ignores any target_extension settings in
- the input files.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- Returns:
- A string with the extension, or None
- """
- target_extension = spec.get('product_extension')
- if target_extension:
- return '.' + target_extension
- return None
-
-
-def _GetDefines(config):
- """Returns the list of preprocessor definitions for this configuation.
-
- Arguments:
- config: The dictionary that defines the special processing to be done
- for this configuration.
- Returns:
- The list of preprocessor definitions.
- """
- defines = []
- for d in config.get('defines', []):
- if type(d) == list:
- fd = '='.join([str(dpart) for dpart in d])
- else:
- fd = str(d)
- defines.append(fd)
- return defines
-
-
-def _GetDisabledWarnings(config):
- return [str(i) for i in config.get('msvs_disabled_warnings', [])]
-
-
-def _GetModuleDefinition(spec):
- def_file = ''
- if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
- def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
- if len(def_files) == 1:
- def_file = _FixPath(def_files[0])
- elif def_files:
- raise ValueError(
- 'Multiple module definition files in one target, target %s lists '
- 'multiple .def files: %s' % (
- spec['target_name'], ' '.join(def_files)))
- return def_file
-
-
-def _ConvertToolsToExpectedForm(tools):
- """Convert tools to a form expected by Visual Studio.
-
- Arguments:
- tools: A dictionary of settings; the tool name is the key.
- Returns:
- A list of Tool objects.
- """
- tool_list = []
- for tool, settings in tools.iteritems():
- # Collapse settings with lists.
- settings_fixed = {}
- for setting, value in settings.iteritems():
- if type(value) == list:
- if ((tool == 'VCLinkerTool' and
- setting == 'AdditionalDependencies') or
- setting == 'AdditionalOptions'):
- settings_fixed[setting] = ' '.join(value)
- else:
- settings_fixed[setting] = ';'.join(value)
- else:
- settings_fixed[setting] = value
- # Add in this tool.
- tool_list.append(MSVSProject.Tool(tool, settings_fixed))
- return tool_list
-
-
-def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
- """Add to the project file the configuration specified by config.
-
- Arguments:
- p: The target project being generated.
- spec: the target project dict.
- tools: A dictionary of settings; the tool name is the key.
- config: The dictionary that defines the special processing to be done
- for this configuration.
- config_type: The configuration type, a number as defined by Microsoft.
- config_name: The name of the configuration.
- """
- attributes = _GetMSVSAttributes(spec, config, config_type)
- # Add in this configuration.
- tool_list = _ConvertToolsToExpectedForm(tools)
- p.AddConfig(_ConfigFullName(config_name, config),
- attrs=attributes, tools=tool_list)
-
-
-def _GetMSVSAttributes(spec, config, config_type):
- # Prepare configuration attributes.
- prepared_attrs = {}
- source_attrs = config.get('msvs_configuration_attributes', {})
- for a in source_attrs:
- prepared_attrs[a] = source_attrs[a]
- # Add props files.
- vsprops_dirs = config.get('msvs_props', [])
- vsprops_dirs = _FixPaths(vsprops_dirs)
- if vsprops_dirs:
- prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
- # Set configuration type.
- prepared_attrs['ConfigurationType'] = config_type
- output_dir = prepared_attrs.get('OutputDirectory',
- '$(SolutionDir)$(ConfigurationName)')
- prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
- if 'IntermediateDirectory' not in prepared_attrs:
- intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
- prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
- else:
- intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
- intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
- prepared_attrs['IntermediateDirectory'] = intermediate
- return prepared_attrs
-
-
-def _AddNormalizedSources(sources_set, sources_array):
- sources_set.update(_NormalizedSource(s) for s in sources_array)
-
-
-def _PrepareListOfSources(spec, generator_flags, gyp_file):
- """Prepare list of sources and excluded sources.
-
- Besides the sources specified directly in the spec, adds the gyp file so
- that a change to it will cause a re-compile. Also adds appropriate sources
- for actions and copies. Assumes later stage will un-exclude files which
- have custom build steps attached.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- generator_flags: Dict of generator-specific flags.
- gyp_file: The name of the gyp file.
- Returns:
- A pair of (list of sources, list of excluded sources).
- The sources will be relative to the gyp file.
- """
- sources = OrderedSet()
- _AddNormalizedSources(sources, spec.get('sources', []))
- excluded_sources = OrderedSet()
- # Add in the gyp file.
- if not generator_flags.get('standalone'):
- sources.add(gyp_file)
-
- # Add in 'action' inputs and outputs.
- for a in spec.get('actions', []):
- inputs = a['inputs']
- inputs = [_NormalizedSource(i) for i in inputs]
- # Add all inputs to sources and excluded sources.
- inputs = OrderedSet(inputs)
- sources.update(inputs)
- if not spec.get('msvs_external_builder'):
- excluded_sources.update(inputs)
- if int(a.get('process_outputs_as_sources', False)):
- _AddNormalizedSources(sources, a.get('outputs', []))
- # Add in 'copies' inputs and outputs.
- for cpy in spec.get('copies', []):
- _AddNormalizedSources(sources, cpy.get('files', []))
- return (sources, excluded_sources)
-
-
-def _AdjustSourcesAndConvertToFilterHierarchy(
- spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
- """Adjusts the list of sources and excluded sources.
-
- Also converts the sets to lists.
-
- Arguments:
- spec: The target dictionary containing the properties of the target.
- options: Global generator options.
- gyp_dir: The directory containing the gyp file being processed.
- sources: A set of sources to be included for this project.
- excluded_sources: A set of sources to be excluded for this project.
- list_excluded: Whether excluded files should still be listed in the
- generated project.
- version: A MSVSVersion object.
- Returns:
- A trio of (list of sources, list of excluded sources,
- path of excluded IDL file)
- """
- # Exclude excluded sources coming into the generator.
- excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
- # Add excluded sources into sources for good measure.
- sources.update(excluded_sources)
- # Convert to proper windows form.
- # NOTE: sources goes from being a set to a list here.
- # NOTE: excluded_sources goes from being a set to a list here.
- sources = _FixPaths(sources)
- # Convert to proper windows form.
- excluded_sources = _FixPaths(excluded_sources)
-
- excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
-
- precompiled_related = _GetPrecompileRelatedFiles(spec)
- # Find the excluded ones, minus the precompiled header related ones.
- fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
-
- # Convert to folders and the right slashes.
- sources = [i.split('\\') for i in sources]
- sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
- list_excluded=list_excluded,
- msvs_version=version)
-
- # Prune filters with a single child to flatten ugly directory structures
- # such as ../../src/modules/module1 etc.
- if version.UsesVcxproj():
- while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
- and len(set([s.name for s in sources])) == 1:
- assert all([len(s.contents) == 1 for s in sources])
- sources = [s.contents[0] for s in sources]
- else:
- while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
- sources = sources[0].contents
-
- return sources, excluded_sources, excluded_idl
-
-
-def _IdlFilesHandledNonNatively(spec, sources):
- # If any non-native rules use 'idl' as an extension exclude idl files.
- # Gather a list here to use later.
- using_idl = False
- for rule in spec.get('rules', []):
- if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
- using_idl = True
- break
- if using_idl:
- excluded_idl = [i for i in sources if i.endswith('.idl')]
- else:
- excluded_idl = []
- return excluded_idl
-
-
-def _GetPrecompileRelatedFiles(spec):
- # Gather a list of precompiled header related sources.
- precompiled_related = []
- for _, config in spec['configurations'].iteritems():
- for k in precomp_keys:
- f = config.get(k)
- if f:
- precompiled_related.append(_FixPath(f))
- return precompiled_related
-
-
-def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
- list_excluded):
- exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
- for file_name, excluded_configs in exclusions.iteritems():
- if (not list_excluded and
- len(excluded_configs) == len(spec['configurations'])):
- # If we're not listing excluded files, then they won't appear in the
- # project, so don't try to configure them to be excluded.
- pass
- else:
- for config_name, config in excluded_configs:
- p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
- {'ExcludedFromBuild': 'true'})
-
-
-def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
- exclusions = {}
- # Exclude excluded sources from being built.
- for f in excluded_sources:
- excluded_configs = []
- for config_name, config in spec['configurations'].iteritems():
- precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
- # Don't do this for ones that are precompiled header related.
- if f not in precomped:
- excluded_configs.append((config_name, config))
- exclusions[f] = excluded_configs
- # If any non-native rules use 'idl' as an extension exclude idl files.
- # Exclude them now.
- for f in excluded_idl:
- excluded_configs = []
- for config_name, config in spec['configurations'].iteritems():
- excluded_configs.append((config_name, config))
- exclusions[f] = excluded_configs
- return exclusions
-
-
-def _AddToolFilesToMSVS(p, spec):
- # Add in tool files (rules).
- tool_files = OrderedSet()
- for _, config in spec['configurations'].iteritems():
- for f in config.get('msvs_tool_files', []):
- tool_files.add(f)
- for f in tool_files:
- p.AddToolFile(f)
-
-
-def _HandlePreCompiledHeaders(p, sources, spec):
- # Pre-compiled header source stubs need a different compiler flag
- # (generate precompiled header) and any source file not of the same
- # kind (i.e. C vs. C++) as the precompiled header source stub needs
- # to have use of precompiled headers disabled.
- extensions_excluded_from_precompile = []
- for config_name, config in spec['configurations'].iteritems():
- source = config.get('msvs_precompiled_source')
- if source:
- source = _FixPath(source)
- # UsePrecompiledHeader=1 means create the precompiled header from this stub.
- tool = MSVSProject.Tool('VCCLCompilerTool',
- {'UsePrecompiledHeader': '1'})
- p.AddFileConfig(source, _ConfigFullName(config_name, config),
- {}, tools=[tool])
- basename, extension = os.path.splitext(source)
- if extension == '.c':
- extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
- else:
- extensions_excluded_from_precompile = ['.c']
- def DisableForSourceTree(source_tree):
- for source in source_tree:
- if isinstance(source, MSVSProject.Filter):
- DisableForSourceTree(source.contents)
- else:
- basename, extension = os.path.splitext(source)
- if extension in extensions_excluded_from_precompile:
- for config_name, config in spec['configurations'].iteritems():
- tool = MSVSProject.Tool('VCCLCompilerTool',
- {'UsePrecompiledHeader': '0',
- 'ForcedIncludeFiles': '$(NOINHERIT)'})
- p.AddFileConfig(_FixPath(source),
- _ConfigFullName(config_name, config),
- {}, tools=[tool])
- # Do nothing if there was no precompiled source.
- if extensions_excluded_from_precompile:
- DisableForSourceTree(sources)
-
-
-def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
- # Add actions.
- actions = spec.get('actions', [])
- # Don't setup_env every time. When all the actions are run together in one
- # batch file in VS, the PATH will grow too long.
- # Membership in this set means that the cygwin environment has been set up,
- # and does not need to be set up again.
- have_setup_env = set()
- for a in actions:
- # Attach actions to the gyp file if nothing else is there.
- inputs = a.get('inputs') or [relative_path_of_gyp_file]
- attached_to = inputs[0]
- need_setup_env = attached_to not in have_setup_env
- cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
- do_setup_env=need_setup_env)
- have_setup_env.add(attached_to)
- # Add the action.
- _AddActionStep(actions_to_add,
- inputs=inputs,
- outputs=a.get('outputs', []),
- description=a.get('message', a['action_name']),
- command=cmd)
-
-
-def _WriteMSVSUserFile(project_path, version, spec):
- # Add run_as and test targets.
- if 'run_as' in spec:
- run_as = spec['run_as']
- action = run_as.get('action', [])
- environment = run_as.get('environment', [])
- working_directory = run_as.get('working_directory', '.')
- elif int(spec.get('test', 0)):
- action = ['$(TargetPath)', '--gtest_print_time']
- environment = []
- working_directory = '.'
- else:
- return # Nothing to add
- # Write out the user file.
- user_file = _CreateMSVSUserFile(project_path, version, spec)
- for config_name, c_data in spec['configurations'].iteritems():
- user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
- action, environment, working_directory)
- user_file.WriteIfChanged()
-
-
-def _AddCopies(actions_to_add, spec):
- copies = _GetCopies(spec)
- for inputs, outputs, cmd, description in copies:
- _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
- description=description, command=cmd)
-
-
-def _GetCopies(spec):
- copies = []
- # Add copies.
- for cpy in spec.get('copies', []):
- for src in cpy.get('files', []):
- dst = os.path.join(cpy['destination'], os.path.basename(src))
- # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
- # outputs, so do the same for our generated command line.
- if src.endswith('/'):
- src_bare = src[:-1]
- base_dir = posixpath.split(src_bare)[0]
- outer_dir = posixpath.split(src_bare)[1]
- cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
- _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
- copies.append(([src], ['dummy_copies', dst], cmd,
- 'Copying %s to %s' % (src, dst)))
- else:
- cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
- _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
- copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
- return copies
-
-
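- # A minimal sketch, not part of the original generator, exercising
- # _GetCopies on a hypothetical one-file spec: a plain file copy becomes
- # a mkdir+copy batch command, while a trailing-slash source would take
- # the xcopy branch instead.
- def _example_get_copies():
-   spec = {'copies': [{'destination': 'out', 'files': ['a.txt']}]}
-   inputs, outputs, cmd, description = _GetCopies(spec)[0]
-   assert inputs == ['a.txt']
-   assert outputs == [os.path.join('out', 'a.txt')]
-   assert 'copy /Y' in cmd
-
-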
-def _GetPathDict(root, path):
- # |path| will eventually be empty (in the recursive calls) if it was initially
- # relative; otherwise it will eventually end up as '\', 'D:\', etc.
- if not path or path.endswith(os.sep):
- return root
- parent, folder = os.path.split(path)
- parent_dict = _GetPathDict(root, parent)
- if folder not in parent_dict:
- parent_dict[folder] = dict()
- return parent_dict[folder]
-
-
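- # A minimal sketch, not part of the original generator: _GetPathDict
- # creates one nested dict level per path component and returns the
- # innermost dict, so callers can bucket projects by directory.
- def _example_get_path_dict():
-   root = {}
-   leaf = _GetPathDict(root, os.path.join('chrome', 'browser'))
-   leaf['browser.vcproj'] = 'project object'
-   assert root == {'chrome': {'browser': {'browser.vcproj': 'project object'}}}
-
-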
-def _DictsToFolders(base_path, bucket, flat):
- # Convert to folders recursively.
- children = []
- for folder, contents in bucket.iteritems():
- if type(contents) == dict:
- folder_children = _DictsToFolders(os.path.join(base_path, folder),
- contents, flat)
- if flat:
- children += folder_children
- else:
- folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
- name='(' + folder + ')',
- entries=folder_children)
- children.append(folder_children)
- else:
- children.append(contents)
- return children
-
-
-def _CollapseSingles(parent, node):
- # Recursively explore the tree of dicts looking for projects which are
- # the sole item in a folder that has the same name as the project. Bring
- # such projects up one level.
- if (type(node) == dict and
- len(node) == 1 and
- node.keys()[0] == parent + '.vcproj'):
- return node[node.keys()[0]]
- if type(node) != dict:
- return node
- for child in node:
- node[child] = _CollapseSingles(child, node[child])
- return node
-
-
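- # A minimal sketch, not part of the original generator, assuming
- # Python 2 like the rest of this file: a folder holding only the
- # project of the same name collapses to the project itself.
- def _example_collapse_singles():
-   tree = {'base': {'base.vcproj': 'base project object'}}
-   assert _CollapseSingles('', tree) == {'base': 'base project object'}
-
-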
-def _GatherSolutionFolders(sln_projects, project_objects, flat):
- root = {}
- # Convert into a tree of dicts on path.
- for p in sln_projects:
- gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
- gyp_dir = os.path.dirname(gyp_file)
- path_dict = _GetPathDict(root, gyp_dir)
- path_dict[target + '.vcproj'] = project_objects[p]
- # Walk down from the top until we hit a folder that has more than one entry.
- # In practice, this strips the top-level "src/" dir from the hierarchy in
- # the solution.
- while len(root) == 1 and type(root[root.keys()[0]]) == dict:
- root = root[root.keys()[0]]
- # Collapse singles.
- root = _CollapseSingles('', root)
- # Merge buckets until everything is a root entry.
- return _DictsToFolders('', root, flat)
-
-
-def _GetPathOfProject(qualified_target, spec, options, msvs_version):
- default_config = _GetDefaultConfiguration(spec)
- proj_filename = default_config.get('msvs_existing_vcproj')
- if not proj_filename:
- proj_filename = (spec['target_name'] + options.suffix +
- msvs_version.ProjectExtension())
-
- build_file = gyp.common.BuildFile(qualified_target)
- proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
- fix_prefix = None
- if options.generator_output:
- project_dir_path = os.path.dirname(os.path.abspath(proj_path))
- proj_path = os.path.join(options.generator_output, proj_path)
- fix_prefix = gyp.common.RelativePath(project_dir_path,
- os.path.dirname(proj_path))
- return proj_path, fix_prefix
-
-
-def _GetPlatformOverridesOfProject(spec):
- # Prepare a dict indicating which project configurations are used for which
- # solution configurations for this target.
- config_platform_overrides = {}
- for config_name, c in spec['configurations'].iteritems():
- config_fullname = _ConfigFullName(config_name, c)
- platform = c.get('msvs_target_platform', _ConfigPlatform(c))
- fixed_config_fullname = '%s|%s' % (
- _ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
- config_platform_overrides[config_fullname] = fixed_config_fullname
- return config_platform_overrides
-
-
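- # A minimal sketch, not part of the original generator: a configuration
- # that names an msvs_target_platform maps the solution-level
- # configuration onto a differently-platformed project configuration.
- def _example_platform_overrides():
-   spec = {'configurations': {'Debug': {'msvs_target_platform': 'x64'}}}
-   assert _GetPlatformOverridesOfProject(spec) == {'Debug|Win32': 'Debug|x64'}
-
-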
-def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
- """Create a MSVSProject object for the targets found in target list.
-
- Arguments:
- target_list: the list of targets to generate project objects for.
- target_dicts: the dictionary of specifications.
- options: global generator options.
- msvs_version: the MSVSVersion object.
- Returns:
- A dictionary of created projects, keyed on qualified target.
- """
- global fixpath_prefix
- # Generate each project.
- projects = {}
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- if spec['toolset'] != 'target':
- raise GypError(
- 'Multiple toolsets not supported in msvs build (target %s)' %
- qualified_target)
- proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
- options, msvs_version)
- guid = _GetGuidOfProject(proj_path, spec)
- overrides = _GetPlatformOverridesOfProject(spec)
- build_file = gyp.common.BuildFile(qualified_target)
- # Create object for this project.
- obj = MSVSNew.MSVSProject(
- proj_path,
- name=spec['target_name'],
- guid=guid,
- spec=spec,
- build_file=build_file,
- config_platform_overrides=overrides,
- fixpath_prefix=fixpath_prefix)
- # Set project toolset if any (MSBuild only).
- if msvs_version.UsesVcxproj():
- obj.set_msbuild_toolset(
- _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
- projects[qualified_target] = obj
- # Set all the dependencies, but not if we are using an external builder like
- # ninja
- for project in projects.values():
- if not project.spec.get('msvs_external_builder'):
- deps = project.spec.get('dependencies', [])
- deps = [projects[d] for d in deps]
- project.set_dependencies(deps)
- return projects
-
-
-def _InitNinjaFlavor(params, target_list, target_dicts):
- """Initialize targets for the ninja flavor.
-
- This sets up the necessary variables in the targets to generate msvs projects
- that use ninja as an external builder. The variables in the spec are only set
- if they have not been set. This allows individual specs to override the
- default values initialized here.
- Arguments:
- params: Params provided to the generator.
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- """
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- if spec.get('msvs_external_builder'):
- # The spec explicitly defined an external builder, so don't change it.
- continue
-
- path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
-
- spec['msvs_external_builder'] = 'ninja'
- if not spec.get('msvs_external_builder_out_dir'):
- gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
- gyp_dir = os.path.dirname(gyp_file)
- configuration = '$(Configuration)'
- if params.get('target_arch') == 'x64':
- configuration += '_x64'
- spec['msvs_external_builder_out_dir'] = os.path.join(
- gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
- ninja_generator.ComputeOutputDir(params),
- configuration)
- if not spec.get('msvs_external_builder_build_cmd'):
- spec['msvs_external_builder_build_cmd'] = [
- path_to_ninja,
- '-C',
- '$(OutDir)',
- '$(ProjectName)',
- ]
- if not spec.get('msvs_external_builder_clean_cmd'):
- spec['msvs_external_builder_clean_cmd'] = [
- path_to_ninja,
- '-C',
- '$(OutDir)',
- '-tclean',
- '$(ProjectName)',
- ]
-
-
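- # A minimal sketch, not part of the original generator: with no
- # overrides in the spec, the defaults above leave every project with
- #
- #   msvs_external_builder_build_cmd = ['ninja.exe', '-C', '$(OutDir)',
- #                                      '$(ProjectName)']
- #
- # so Visual Studio delegates the real build to ninja; an x64 target_arch
- # only changes the output directory to '$(Configuration)_x64'.
-
-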
-def CalculateVariables(default_variables, params):
- """Generated variables that require params to be known."""
-
- generator_flags = params.get('generator_flags', {})
-
- # Select project file format version (if unset, default to auto detecting).
- msvs_version = MSVSVersion.SelectVisualStudioVersion(
- generator_flags.get('msvs_version', 'auto'))
- # Stash msvs_version for later (so we don't have to probe the system twice).
- params['msvs_version'] = msvs_version
-
- # Set a variable so conditions can be based on msvs_version.
- default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
- # To determine processor word size on Windows, in addition to checking
- # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
- process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
- contains the actual word size of the system when running through WOW64).
- if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
- os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
- default_variables['MSVS_OS_BITS'] = 64
- else:
- default_variables['MSVS_OS_BITS'] = 32
-
- if gyp.common.GetFlavor(params) == 'ninja':
- default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
-
-
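- # A minimal sketch, not part of the original generator, restating the
- # bitness check above as a pure function over an environment dict; e.g.
- # a 32-bit process on a 64-bit OS ({'PROCESSOR_ARCHITECTURE': 'x86',
- # 'PROCESSOR_ARCHITEW6432': 'AMD64'}) still reports 64.
- def _example_msvs_os_bits(environ):
-   if (environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
-       environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
-     return 64
-   return 32
-
-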
-def PerformBuild(data, configurations, params):
- options = params['options']
- msvs_version = params['msvs_version']
- devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
-
- for build_file, build_file_dict in data.iteritems():
- (build_file_root, build_file_ext) = os.path.splitext(build_file)
- if build_file_ext != '.gyp':
- continue
- sln_path = build_file_root + options.suffix + '.sln'
- if options.generator_output:
- sln_path = os.path.join(options.generator_output, sln_path)
-
- for config in configurations:
- arguments = [devenv, sln_path, '/Build', config]
- print 'Building [%s]: %s' % (config, arguments)
- rtn = subprocess.check_call(arguments)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- """Generate .sln and .vcproj files.
-
- This is the entry point for this generator.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- data: Dictionary containing per .gyp data.
- params: Dictionary of generator parameters, including options and flags.
- """
- global fixpath_prefix
-
- options = params['options']
-
- # Get the project file format version back out of where we stashed it in
- # CalculateVariables.
- msvs_version = params['msvs_version']
-
- generator_flags = params.get('generator_flags', {})
-
- # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
- (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
-
- # Optionally use the large PDB workaround for targets marked with
- # 'msvs_large_pdb': 1.
- (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
- target_list, target_dicts, generator_default_variables)
-
- # Optionally configure each spec to use ninja as the external builder.
- if params.get('flavor') == 'ninja':
- _InitNinjaFlavor(params, target_list, target_dicts)
-
- # Prepare the set of configurations.
- configs = set()
- for qualified_target in target_list:
- spec = target_dicts[qualified_target]
- for config_name, config in spec['configurations'].iteritems():
- configs.add(_ConfigFullName(config_name, config))
- configs = list(configs)
-
- # Figure out all the projects that will be generated and their guids
- project_objects = _CreateProjectObjects(target_list, target_dicts, options,
- msvs_version)
-
- # Generate each project.
- missing_sources = []
- for project in project_objects.values():
- fixpath_prefix = project.fixpath_prefix
- missing_sources.extend(_GenerateProject(project, options, msvs_version,
- generator_flags))
- fixpath_prefix = None
-
- for build_file in data:
- # Validate build_file extension
- if not build_file.endswith('.gyp'):
- continue
- sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
- if options.generator_output:
- sln_path = os.path.join(options.generator_output, sln_path)
- # Get projects in the solution, and their dependents.
- sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
- sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
- # Create folder hierarchy.
- root_entries = _GatherSolutionFolders(
- sln_projects, project_objects, flat=msvs_version.FlatSolution())
- # Create solution.
- sln = MSVSNew.MSVSSolution(sln_path,
- entries=root_entries,
- variants=configs,
- websiteProperties=False,
- version=msvs_version)
- sln.Write()
-
- if missing_sources:
- error_message = "Missing input files:\n" + \
- '\n'.join(set(missing_sources))
- if generator_flags.get('msvs_error_on_missing_sources', False):
- raise GypError(error_message)
- else:
- print >> sys.stdout, "Warning: " + error_message
-
-
-def _GenerateMSBuildFiltersFile(filters_path, source_files,
- rule_dependencies, extension_to_rule_name):
- """Generate the filters file.
-
- This file is used by Visual Studio to organize the presentation of source
- files into folders.
-
- Arguments:
- filters_path: The path of the file to be created.
- source_files: The hierarchical structure of all the sources.
- rule_dependencies: The set of sources that are additional dependencies of
- rules.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
- """
- filter_group = []
- source_group = []
- _AppendFiltersForMSBuild('', source_files, rule_dependencies,
- extension_to_rule_name, filter_group, source_group)
- if filter_group:
- content = ['Project',
- {'ToolsVersion': '4.0',
- 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
- },
- ['ItemGroup'] + filter_group,
- ['ItemGroup'] + source_group
- ]
- easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
- elif os.path.exists(filters_path):
- # We don't need this filter anymore. Delete the old filter file.
- os.unlink(filters_path)
-
-
-def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
- extension_to_rule_name,
- filter_group, source_group):
- """Creates the list of filters and sources to be added in the filter file.
-
- Args:
- parent_filter_name: The name of the filter under which the sources are
- found.
- sources: The hierarchy of filters and sources to process.
- rule_dependencies: The set of sources that are additional dependencies of
- rules.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
- filter_group: The list to which filter entries will be appended.
- source_group: The list to which source entries will be appended.
- """
- for source in sources:
- if isinstance(source, MSVSProject.Filter):
- # We have a sub-filter. Create the name of that sub-filter.
- if not parent_filter_name:
- filter_name = source.name
- else:
- filter_name = '%s\\%s' % (parent_filter_name, source.name)
- # Add the filter to the group.
- filter_group.append(
- ['Filter', {'Include': filter_name},
- ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
- # Recurse and add its dependents.
- _AppendFiltersForMSBuild(filter_name, source.contents,
- rule_dependencies, extension_to_rule_name,
- filter_group, source_group)
- else:
- # It's a source. Create a source entry.
- _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name)
- source_entry = [element, {'Include': source}]
- # Specify the filter it is part of, if any.
- if parent_filter_name:
- source_entry.append(['Filter', parent_filter_name])
- source_group.append(source_entry)
-
-
-def _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name):
- """Returns the group and element type of the source file.
-
- Arguments:
- source: The source file name.
- rule_dependencies: The set of sources that are additional dependencies of
- rules.
- extension_to_rule_name: A dictionary mapping file extensions to rules.
-
- Returns:
- A pair of (group this file should be part of, the label of element)
- """
- _, ext = os.path.splitext(source)
- if ext in extension_to_rule_name:
- group = 'rule'
- element = extension_to_rule_name[ext]
- elif ext in ['.cc', '.cpp', '.c', '.cxx']:
- group = 'compile'
- element = 'ClCompile'
- elif ext in ['.h', '.hxx']:
- group = 'include'
- element = 'ClInclude'
- elif ext == '.rc':
- group = 'resource'
- element = 'ResourceCompile'
- elif ext == '.asm':
- group = 'masm'
- element = 'MASM'
- elif ext == '.idl':
- group = 'midl'
- element = 'Midl'
- elif source in rule_dependencies:
- group = 'rule_dependency'
- element = 'CustomBuild'
- else:
- group = 'none'
- element = 'None'
- return (group, element)
-
-
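- # A minimal sketch, not part of the original generator: built-in
- # extensions map to their MSBuild item types, and an extension claimed
- # by a rule (here a hypothetical '.proto' rule) wins the 'rule' group.
- def _example_map_file_to_msbuild_source_type():
-   assert (_MapFileToMsBuildSourceType('a.cc', set(), {}) ==
-           ('compile', 'ClCompile'))
-   assert _MapFileToMsBuildSourceType('a.idl', set(), {}) == ('midl', 'Midl')
-   assert (_MapFileToMsBuildSourceType('a.proto', set(), {'.proto': 'Protoc'})
-           == ('rule', 'Protoc'))
-
-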
-def _GenerateRulesForMSBuild(output_dir, options, spec,
- sources, excluded_sources,
- props_files_of_rules, targets_files_of_rules,
- actions_to_add, rule_dependencies,
- extension_to_rule_name):
- # MSBuild rules are implemented using three files: an XML file, a .targets
- # file and a .props file.
- # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
- # for more details.
- rules = spec.get('rules', [])
- rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
- rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
-
- msbuild_rules = []
- for rule in rules_native:
- # Skip a rule with no action and no inputs.
- if 'action' not in rule and not rule.get('rule_sources', []):
- continue
- msbuild_rule = MSBuildRule(rule, spec)
- msbuild_rules.append(msbuild_rule)
- rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
- extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
- if msbuild_rules:
- base = spec['target_name'] + options.suffix
- props_name = base + '.props'
- targets_name = base + '.targets'
- xml_name = base + '.xml'
-
- props_files_of_rules.add(props_name)
- targets_files_of_rules.add(targets_name)
-
- props_path = os.path.join(output_dir, props_name)
- targets_path = os.path.join(output_dir, targets_name)
- xml_path = os.path.join(output_dir, xml_name)
-
- _GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
- _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
- _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
-
- if rules_external:
- _GenerateExternalRules(rules_external, output_dir, spec,
- sources, options, actions_to_add)
- _AdjustSourcesForRules(rules, sources, excluded_sources, True)
-
-
-class MSBuildRule(object):
- """Used to store information used to generate an MSBuild rule.
-
- Attributes:
- rule_name: The rule name, sanitized to use in XML.
- target_name: The name of the target.
- after_targets: The name of the AfterTargets element.
- before_targets: The name of the BeforeTargets element.
- depends_on: The name of the DependsOn element.
- compute_output: The name of the ComputeOutput element.
- dirs_to_make: The name of the DirsToMake element.
- inputs: The name of the _inputs element.
- tlog: The name of the _tlog element.
- extension: The extension this rule applies to.
- description: The message displayed when this rule is invoked.
- additional_dependencies: A string listing additional dependencies.
- outputs: The outputs of this rule.
- command: The command used to run the rule.
- """
-
- def __init__(self, rule, spec):
- self.display_name = rule['rule_name']
- # Ensure that the rule name contains only letters, digits and underscores.
- self.rule_name = re.sub(r'\W', '_', self.display_name)
- # Create the various element names, following the example set by the
- # Visual Studio 2008 to 2010 conversion. I don't know if VS2010
- # is sensitive to the exact names.
- self.target_name = '_' + self.rule_name
- self.after_targets = self.rule_name + 'AfterTargets'
- self.before_targets = self.rule_name + 'BeforeTargets'
- self.depends_on = self.rule_name + 'DependsOn'
- self.compute_output = 'Compute%sOutput' % self.rule_name
- self.dirs_to_make = self.rule_name + 'DirsToMake'
- self.inputs = self.rule_name + '_inputs'
- self.tlog = self.rule_name + '_tlog'
- self.extension = rule['extension']
- if not self.extension.startswith('.'):
- self.extension = '.' + self.extension
-
- self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
- rule.get('message', self.rule_name))
- old_additional_dependencies = _FixPaths(rule.get('inputs', []))
- self.additional_dependencies = (
- ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
- for i in old_additional_dependencies]))
- old_outputs = _FixPaths(rule.get('outputs', []))
- self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
- for i in old_outputs])
- old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
- do_setup_env=True)
- self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
-
-
-def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
- """Generate the .props file."""
- content = ['Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
- for rule in msbuild_rules:
- content.extend([
- ['PropertyGroup',
- {'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
- "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
- rule.after_targets)
- },
- [rule.before_targets, 'Midl'],
- [rule.after_targets, 'CustomBuild'],
- ],
- ['PropertyGroup',
- [rule.depends_on,
- {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
- '_SelectedFiles;$(%s)' % rule.depends_on
- ],
- ],
- ['ItemDefinitionGroup',
- [rule.rule_name,
- ['CommandLineTemplate', rule.command],
- ['Outputs', rule.outputs],
- ['ExecutionDescription', rule.description],
- ['AdditionalDependencies', rule.additional_dependencies],
- ],
- ]
- ])
- easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
-
-
-def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
- """Generate the .targets file."""
- content = ['Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
- }
- ]
- item_group = [
- 'ItemGroup',
- ['PropertyPageSchema',
- {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
- ]
- ]
- for rule in msbuild_rules:
- item_group.append(
- ['AvailableItemName',
- {'Include': rule.rule_name},
- ['Targets', rule.target_name],
- ])
- content.append(item_group)
-
- for rule in msbuild_rules:
- content.append(
- ['UsingTask',
- {'TaskName': rule.rule_name,
- 'TaskFactory': 'XamlTaskFactory',
- 'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
- },
- ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
- ])
- for rule in msbuild_rules:
- rule_name = rule.rule_name
- target_outputs = '%%(%s.Outputs)' % rule_name
- target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
- '$(MSBuildProjectFile)') % (rule_name, rule_name)
- rule_inputs = '%%(%s.Identity)' % rule_name
- extension_condition = ("'%(Extension)'=='.obj' or "
- "'%(Extension)'=='.res' or "
- "'%(Extension)'=='.rsc' or "
- "'%(Extension)'=='.lib'")
- remove_section = [
- 'ItemGroup',
- {'Condition': "'@(SelectedFiles)' != ''"},
- [rule_name,
- {'Remove': '@(%s)' % rule_name,
- 'Condition': "'%(Identity)' != '@(SelectedFiles)'"
- }
- ]
- ]
- inputs_section = [
- 'ItemGroup',
- [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
- ]
- logging_section = [
- 'ItemGroup',
- [rule.tlog,
- {'Include': '%%(%s.Outputs)' % rule_name,
- 'Condition': ("'%%(%s.Outputs)' != '' and "
- "'%%(%s.ExcludedFromBuild)' != 'true'" %
- (rule_name, rule_name))
- },
- ['Source', "@(%s, '|')" % rule_name],
- ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
- ],
- ]
- message_section = [
- 'Message',
- {'Importance': 'High',
- 'Text': '%%(%s.ExecutionDescription)' % rule_name
- }
- ]
- write_tlog_section = [
- 'WriteLinesToFile',
- {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
- "'true'" % (rule.tlog, rule.tlog),
- 'File': '$(IntDir)$(ProjectName).write.1.tlog',
- 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
- rule.tlog)
- }
- ]
- read_tlog_section = [
- 'WriteLinesToFile',
- {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
- "'true'" % (rule.tlog, rule.tlog),
- 'File': '$(IntDir)$(ProjectName).read.1.tlog',
- 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
- }
- ]
- command_and_input_section = [
- rule_name,
- {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
- "'true'" % (rule_name, rule_name),
- 'EchoOff': 'true',
- 'StandardOutputImportance': 'High',
- 'StandardErrorImportance': 'High',
- 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
- 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
- 'Inputs': rule_inputs
- }
- ]
- content.extend([
- ['Target',
- {'Name': rule.target_name,
- 'BeforeTargets': '$(%s)' % rule.before_targets,
- 'AfterTargets': '$(%s)' % rule.after_targets,
- 'Condition': "'@(%s)' != ''" % rule_name,
- 'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
- rule.compute_output),
- 'Outputs': target_outputs,
- 'Inputs': target_inputs
- },
- remove_section,
- inputs_section,
- logging_section,
- message_section,
- write_tlog_section,
- read_tlog_section,
- command_and_input_section,
- ],
- ['PropertyGroup',
- ['ComputeLinkInputsTargets',
- '$(ComputeLinkInputsTargets);',
- '%s;' % rule.compute_output
- ],
- ['ComputeLibInputsTargets',
- '$(ComputeLibInputsTargets);',
- '%s;' % rule.compute_output
- ],
- ],
- ['Target',
- {'Name': rule.compute_output,
- 'Condition': "'@(%s)' != ''" % rule_name
- },
- ['ItemGroup',
- [rule.dirs_to_make,
- {'Condition': "'@(%s)' != '' and "
- "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
- 'Include': '%%(%s.Outputs)' % rule_name
- }
- ],
- ['Link',
- {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
- 'Condition': extension_condition
- }
- ],
- ['Lib',
- {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
- 'Condition': extension_condition
- }
- ],
- ['ImpLib',
- {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
- 'Condition': extension_condition
- }
- ],
- ],
- ['MakeDir',
- {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
- rule.dirs_to_make)
- }
- ]
- ],
- ])
- easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
-
-
-def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
- """Generate the .xml file."""
- content = [
- 'ProjectSchemaDefinitions',
- {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
- 'assembly=Microsoft.Build.Framework'),
- 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
- 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
- 'xmlns:transformCallback':
- 'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
- }
- ]
- for rule in msbuild_rules:
- content.extend([
- ['Rule',
- {'Name': rule.rule_name,
- 'PageTemplate': 'tool',
- 'DisplayName': rule.display_name,
- 'Order': '200'
- },
- ['Rule.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'ItemType': rule.rule_name
- }
- ]
- ],
- ['Rule.Categories',
- ['Category',
- {'Name': 'General'},
- ['Category.DisplayName',
- ['sys:String', 'General'],
- ],
- ],
- ['Category',
- {'Name': 'Command Line',
- 'Subtype': 'CommandLine'
- },
- ['Category.DisplayName',
- ['sys:String', 'Command Line'],
- ],
- ],
- ],
- ['StringListProperty',
- {'Name': 'Inputs',
- 'Category': 'Command Line',
- 'IsRequired': 'true',
- 'Switch': ' '
- },
- ['StringListProperty.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'ItemType': rule.rule_name,
- 'SourceType': 'Item'
- }
- ]
- ],
- ],
- ['StringProperty',
- {'Name': 'CommandLineTemplate',
- 'DisplayName': 'Command Line',
- 'Visible': 'False',
- 'IncludeInCommandLine': 'False'
- }
- ],
- ['DynamicEnumProperty',
- {'Name': rule.before_targets,
- 'Category': 'General',
- 'EnumProvider': 'Targets',
- 'IncludeInCommandLine': 'False'
- },
- ['DynamicEnumProperty.DisplayName',
- ['sys:String', 'Execute Before'],
- ],
- ['DynamicEnumProperty.Description',
- ['sys:String', 'Specifies the targets for the build customization'
- ' to run before.'
- ],
- ],
- ['DynamicEnumProperty.ProviderSettings',
- ['NameValuePair',
- {'Name': 'Exclude',
- 'Value': '^%s|^Compute' % rule.before_targets
- }
- ]
- ],
- ['DynamicEnumProperty.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'HasConfigurationCondition': 'true'
- }
- ]
- ],
- ],
- ['DynamicEnumProperty',
- {'Name': rule.after_targets,
- 'Category': 'General',
- 'EnumProvider': 'Targets',
- 'IncludeInCommandLine': 'False'
- },
- ['DynamicEnumProperty.DisplayName',
- ['sys:String', 'Execute After'],
- ],
- ['DynamicEnumProperty.Description',
- ['sys:String', ('Specifies the targets for the build customization'
- ' to run after.')
- ],
- ],
- ['DynamicEnumProperty.ProviderSettings',
- ['NameValuePair',
- {'Name': 'Exclude',
- 'Value': '^%s|^Compute' % rule.after_targets
- }
- ]
- ],
- ['DynamicEnumProperty.DataSource',
- ['DataSource',
- {'Persistence': 'ProjectFile',
- 'ItemType': '',
- 'HasConfigurationCondition': 'true'
- }
- ]
- ],
- ],
- ['StringListProperty',
- {'Name': 'Outputs',
- 'DisplayName': 'Outputs',
- 'Visible': 'False',
- 'IncludeInCommandLine': 'False'
- }
- ],
- ['StringProperty',
- {'Name': 'ExecutionDescription',
- 'DisplayName': 'Execution Description',
- 'Visible': 'False',
- 'IncludeInCommandLine': 'False'
- }
- ],
- ['StringListProperty',
- {'Name': 'AdditionalDependencies',
- 'DisplayName': 'Additional Dependencies',
- 'IncludeInCommandLine': 'False',
- 'Visible': 'false'
- }
- ],
- ['StringProperty',
- {'Subtype': 'AdditionalOptions',
- 'Name': 'AdditionalOptions',
- 'Category': 'Command Line'
- },
- ['StringProperty.DisplayName',
- ['sys:String', 'Additional Options'],
- ],
- ['StringProperty.Description',
- ['sys:String', 'Additional Options'],
- ],
- ],
- ],
- ['ItemType',
- {'Name': rule.rule_name,
- 'DisplayName': rule.display_name
- }
- ],
- ['FileExtension',
- {'Name': '*' + rule.extension,
- 'ContentType': rule.rule_name
- }
- ],
- ['ContentType',
- {'Name': rule.rule_name,
- 'DisplayName': '',
- 'ItemType': rule.rule_name
- }
- ]
- ])
- easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
-
-
-def _GetConfigurationAndPlatform(name, settings):
- configuration = name.rsplit('_', 1)[0]
- platform = settings.get('msvs_configuration_platform', 'Win32')
- return (configuration, platform)
-
-
-def _GetConfigurationCondition(name, settings):
- return (r"'$(Configuration)|$(Platform)'=='%s|%s'" %
- _GetConfigurationAndPlatform(name, settings))
-
-
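- # A minimal sketch, not part of the original generator: the trailing
- # '_<platform>' is split off the configuration name, and the platform
- # comes from msvs_configuration_platform (defaulting to Win32).
- def _example_configuration_condition():
-   settings = {'msvs_configuration_platform': 'x64'}
-   assert (_GetConfigurationCondition('Debug_x64', settings) ==
-           "'$(Configuration)|$(Platform)'=='Debug|x64'")
-
-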
-def _GetMSBuildProjectConfigurations(configurations):
- group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
- for (name, settings) in sorted(configurations.iteritems()):
- configuration, platform = _GetConfigurationAndPlatform(name, settings)
- designation = '%s|%s' % (configuration, platform)
- group.append(
- ['ProjectConfiguration', {'Include': designation},
- ['Configuration', configuration],
- ['Platform', platform]])
- return [group]
-
-
-def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
- namespace = os.path.splitext(gyp_file_name)[0]
- properties = [
- ['PropertyGroup', {'Label': 'Globals'},
- ['ProjectGuid', guid],
- ['Keyword', 'Win32Proj'],
- ['RootNamespace', namespace],
- ['IgnoreWarnCompileDuplicatedFilename', 'true'],
- ]
- ]
-
- if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
- os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
- properties[0].append(['PreferredToolArchitecture', 'x64'])
-
- if spec.get('msvs_enable_winrt'):
- properties[0].append(['DefaultLanguage', 'en-US'])
- properties[0].append(['AppContainerApplication', 'true'])
- if spec.get('msvs_application_type_revision'):
- app_type_revision = spec.get('msvs_application_type_revision')
- properties[0].append(['ApplicationTypeRevision', app_type_revision])
- else:
- properties[0].append(['ApplicationTypeRevision', '8.1'])
-
- if spec.get('msvs_target_platform_version'):
- target_platform_version = spec.get('msvs_target_platform_version')
- properties[0].append(['WindowsTargetPlatformVersion',
- target_platform_version])
- if spec.get('msvs_target_platform_minversion'):
- target_platform_minversion = spec.get('msvs_target_platform_minversion')
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_minversion])
- else:
- properties[0].append(['WindowsTargetPlatformMinVersion',
- target_platform_version])
- if spec.get('msvs_enable_winphone'):
- properties[0].append(['ApplicationType', 'Windows Phone'])
- else:
- properties[0].append(['ApplicationType', 'Windows Store'])
-
- platform_name = None
- msvs_windows_target_platform_version = None
- for configuration in spec['configurations'].itervalues():
- platform_name = platform_name or _ConfigPlatform(configuration)
- msvs_windows_target_platform_version = \
- msvs_windows_target_platform_version or \
- _ConfigWindowsTargetPlatformVersion(configuration)
- if platform_name and msvs_windows_target_platform_version:
- break
-
- if platform_name == 'ARM':
- properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
- if msvs_windows_target_platform_version:
- properties[0].append(['WindowsTargetPlatformVersion',
- str(msvs_windows_target_platform_version)])
-
- return properties
-
-def _GetMSBuildConfigurationDetails(spec, build_file):
- properties = {}
- for name, settings in spec['configurations'].iteritems():
- msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
- condition = _GetConfigurationCondition(name, settings)
- character_set = msbuild_attributes.get('CharacterSet')
- _AddConditionalProperty(properties, condition, 'ConfigurationType',
- msbuild_attributes['ConfigurationType'])
- if character_set:
- if 'msvs_enable_winrt' not in spec:
- _AddConditionalProperty(properties, condition, 'CharacterSet',
- character_set)
- return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
-
-
-def _GetMSBuildLocalProperties(msbuild_toolset):
- # Currently the only local property we support is PlatformToolset
- properties = {}
- if msbuild_toolset:
- properties = [
- ['PropertyGroup', {'Label': 'Locals'},
- ['PlatformToolset', msbuild_toolset],
- ]
- ]
- return properties
-
-
-def _GetMSBuildPropertySheets(configurations):
- user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
- additional_props = {}
- props_specified = False
- for name, settings in sorted(configurations.iteritems()):
- configuration = _GetConfigurationCondition(name, settings)
- if 'msbuild_props' in settings:
- additional_props[configuration] = _FixPaths(settings['msbuild_props'])
- props_specified = True
- else:
- additional_props[configuration] = ''
-
- if not props_specified:
- return [
- ['ImportGroup',
- {'Label': 'PropertySheets'},
- ['Import',
- {'Project': user_props,
- 'Condition': "exists('%s')" % user_props,
- 'Label': 'LocalAppDataPlatform'
- }
- ]
- ]
- ]
- else:
- sheets = []
- for condition, props in additional_props.iteritems():
- import_group = [
- 'ImportGroup',
- {'Label': 'PropertySheets',
- 'Condition': condition
- },
- ['Import',
- {'Project': user_props,
- 'Condition': "exists('%s')" % user_props,
- 'Label': 'LocalAppDataPlatform'
- }
- ]
- ]
- for props_file in props:
- import_group.append(['Import', {'Project': props_file}])
- sheets.append(import_group)
- return sheets
-
-def _ConvertMSVSBuildAttributes(spec, config, build_file):
- config_type = _GetMSVSConfigurationType(spec, build_file)
- msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
- msbuild_attributes = {}
- for a in msvs_attributes:
- if a in ['IntermediateDirectory', 'OutputDirectory']:
- directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
- if not directory.endswith('\\'):
- directory += '\\'
- msbuild_attributes[a] = directory
- elif a == 'CharacterSet':
- msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
- elif a == 'ConfigurationType':
- msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
- else:
- print 'Warning: Do not know how to convert MSVS attribute ' + a
- return msbuild_attributes
-
-
-def _ConvertMSVSCharacterSet(char_set):
- if char_set.isdigit():
- char_set = {
- '0': 'MultiByte',
- '1': 'Unicode',
- '2': 'MultiByte',
- }[char_set]
- return char_set
-
-
-def _ConvertMSVSConfigurationType(config_type):
- if config_type.isdigit():
- config_type = {
- '1': 'Application',
- '2': 'DynamicLibrary',
- '4': 'StaticLibrary',
- '10': 'Utility'
- }[config_type]
- return config_type
-
-
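- # A minimal sketch, not part of the original generator: numeric MSVS
- # enum values convert to their MSBuild names, and already-symbolic
- # values pass through unchanged.
- def _example_convert_msvs_enums():
-   assert _ConvertMSVSConfigurationType('2') == 'DynamicLibrary'
-   assert _ConvertMSVSConfigurationType('Application') == 'Application'
-   assert _ConvertMSVSCharacterSet('1') == 'Unicode'
-
-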
-def _GetMSBuildAttributes(spec, config, build_file):
- if 'msbuild_configuration_attributes' not in config:
- msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
-
- else:
- config_type = _GetMSVSConfigurationType(spec, build_file)
- config_type = _ConvertMSVSConfigurationType(config_type)
- msbuild_attributes = config.get('msbuild_configuration_attributes', {})
- msbuild_attributes.setdefault('ConfigurationType', config_type)
- output_dir = msbuild_attributes.get('OutputDirectory',
- '$(SolutionDir)$(Configuration)')
- msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
- if 'IntermediateDirectory' not in msbuild_attributes:
- intermediate = _FixPath('$(Configuration)') + '\\'
- msbuild_attributes['IntermediateDirectory'] = intermediate
- if 'CharacterSet' in msbuild_attributes:
- msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
- msbuild_attributes['CharacterSet'])
- if 'TargetName' not in msbuild_attributes:
- prefix = spec.get('product_prefix', '')
- product_name = spec.get('product_name', '$(ProjectName)')
- target_name = prefix + product_name
- msbuild_attributes['TargetName'] = target_name
- if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec:
- ext = spec.get('product_extension')
- msbuild_attributes['TargetExt'] = '.' + ext
-
- if spec.get('msvs_external_builder'):
- external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
- msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
-
- # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
- # (depending on the tool used) to avoid MSB8012 warning.
- msbuild_tool_map = {
- 'executable': 'Link',
- 'shared_library': 'Link',
- 'loadable_module': 'Link',
- 'static_library': 'Lib',
- }
- msbuild_tool = msbuild_tool_map.get(spec['type'])
- if msbuild_tool:
- msbuild_settings = config['finalized_msbuild_settings']
- out_file = msbuild_settings[msbuild_tool].get('OutputFile')
- if out_file:
- msbuild_attributes['TargetPath'] = _FixPath(out_file)
- target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
- if target_ext:
- msbuild_attributes['TargetExt'] = target_ext
-
- return msbuild_attributes
-
-
-def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
- # TODO(jeanluc) We could optimize out the following and do it only if
- # there are actions.
- # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
- new_paths = []
- cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
- if cygwin_dirs:
- cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
- new_paths.append(cyg_path)
- # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
- # python_dir.
- python_path = cyg_path.replace('cygwin\\bin', 'python_26')
- new_paths.append(python_path)
- if new_paths:
- new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
-
- properties = {}
- for (name, configuration) in sorted(configurations.iteritems()):
- condition = _GetConfigurationCondition(name, configuration)
- attributes = _GetMSBuildAttributes(spec, configuration, build_file)
- msbuild_settings = configuration['finalized_msbuild_settings']
- _AddConditionalProperty(properties, condition, 'IntDir',
- attributes['IntermediateDirectory'])
- _AddConditionalProperty(properties, condition, 'OutDir',
- attributes['OutputDirectory'])
- _AddConditionalProperty(properties, condition, 'TargetName',
- attributes['TargetName'])
- if 'TargetExt' in attributes:
- _AddConditionalProperty(properties, condition, 'TargetExt',
- attributes['TargetExt'])
-
- if attributes.get('TargetPath'):
- _AddConditionalProperty(properties, condition, 'TargetPath',
- attributes['TargetPath'])
- if attributes.get('TargetExt'):
- _AddConditionalProperty(properties, condition, 'TargetExt',
- attributes['TargetExt'])
-
- if new_paths:
- _AddConditionalProperty(properties, condition, 'ExecutablePath',
- new_paths)
- tool_settings = msbuild_settings.get('', {})
- for name, value in sorted(tool_settings.iteritems()):
- formatted_value = _GetValueFormattedForMSBuild('', name, value)
- _AddConditionalProperty(properties, condition, name, formatted_value)
- return _GetMSBuildPropertyGroup(spec, None, properties)
-
-
-def _AddConditionalProperty(properties, condition, name, value):
- """Adds a property / conditional value pair to a dictionary.
-
- Arguments:
- properties: The dictionary to be modified. The key is the name of the
- property. The value is itself a dictionary; its key is the value and
- the value a list of conditions for which this value is true.
- condition: The condition under which the named property has the value.
- name: The name of the property.
- value: The value of the property.
- """
- if name not in properties:
- properties[name] = {}
- values = properties[name]
- if value not in values:
- values[value] = []
- conditions = values[value]
- conditions.append(condition)
-
-
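- # A minimal sketch, not part of the original generator: the nested
- # properties dict maps property name -> value -> list of conditions
- # under which that value applies.
- def _example_add_conditional_property():
-   properties = {}
-   condition = "'$(Configuration)'=='Debug'"
-   _AddConditionalProperty(properties, condition, 'OutDir', 'Debug\\')
-   assert properties == {'OutDir': {'Debug\\': [condition]}}
-
-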
- # Regex for msvs variable references (i.e. $(FOO)).
-MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
-
-
-def _GetMSBuildPropertyGroup(spec, label, properties):
- """Returns a PropertyGroup definition for the specified properties.
-
- Arguments:
- spec: The target project dict.
- label: An optional label for the PropertyGroup.
- properties: The dictionary to be converted. The key is the name of the
- property. The value is itself a dictionary; its key is the value and
- the value a list of conditions for which this value is true.
- """
- group = ['PropertyGroup']
- if label:
- group.append({'Label': label})
- num_configurations = len(spec['configurations'])
- def GetEdges(node):
- # Use a definition of edges such that user_of_variable -> used_variable.
- # This happens to be easier in this case, since a variable's
- # definition contains all variables it references in a single string.
- edges = set()
- for value in sorted(properties[node].keys()):
- # Add to edges all $(...) references to variables.
- #
- # Variable references that refer to names not in properties are excluded.
- # These can exist, for instance, to refer to built-in definitions like
- # $(SolutionDir).
- #
- # Self-references are ignored; they are used in a few places to append
- # to the default value, e.g. PATH=$(PATH);other_path.
- edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
- if v in properties and v != node]))
- return edges
- properties_ordered = gyp.common.TopologicallySorted(
- properties.keys(), GetEdges)
- # Walk properties in the reverse of a topological sort on
- # user_of_variable -> used_variable as this ensures variables are
- # defined before they are used.
- # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
- for name in reversed(properties_ordered):
- values = properties[name]
- for value, conditions in sorted(values.iteritems()):
- if len(conditions) == num_configurations:
- # If the value is the same for all configurations,
- # just add one unconditional entry.
- group.append([name, value])
- else:
- for condition in conditions:
- group.append([name, {'Condition': condition}, value])
- return [group]
-
-
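- # A minimal sketch, not part of the original generator, assuming a
- # single hypothetical 'Debug' configuration: OutDir references
- # $(IntDir), so the reverse topological walk above emits IntDir first,
- # and values whose conditions cover every configuration are emitted
- # without a Condition attribute.
- def _example_msbuild_property_group():
-   spec = {'configurations': {'Debug': {}}}
-   condition = "'$(Configuration)'=='Debug'"
-   properties = {'IntDir': {'obj\\': [condition]},
-                 'OutDir': {'$(IntDir)bin\\': [condition]}}
-   (group,) = _GetMSBuildPropertyGroup(spec, None, properties)
-   assert group == ['PropertyGroup', ['IntDir', 'obj\\'],
-                    ['OutDir', '$(IntDir)bin\\']]
-
-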
-def _GetMSBuildToolSettingsSections(spec, configurations):
- groups = []
- for (name, configuration) in sorted(configurations.iteritems()):
- msbuild_settings = configuration['finalized_msbuild_settings']
- group = ['ItemDefinitionGroup',
- {'Condition': _GetConfigurationCondition(name, configuration)}
- ]
- for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
- # Skip the tool named '' which is a holder of global settings handled
- # by _GetMSBuildConfigurationGlobalProperties.
- if tool_name:
- if tool_settings:
- tool = [tool_name]
- for name, value in sorted(tool_settings.iteritems()):
- formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
- value)
- tool.append([name, formatted_value])
- group.append(tool)
- groups.append(group)
- return groups
-
-
-def _FinalizeMSBuildSettings(spec, configuration):
- if 'msbuild_settings' in configuration:
- converted = False
- msbuild_settings = configuration['msbuild_settings']
- MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
- else:
- converted = True
- msvs_settings = configuration.get('msvs_settings', {})
- msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
- include_dirs, midl_include_dirs, resource_include_dirs = \
- _GetIncludeDirs(configuration)
- libraries = _GetLibraries(spec)
- library_dirs = _GetLibraryDirs(configuration)
- out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
- target_ext = _GetOutputTargetExt(spec)
- defines = _GetDefines(configuration)
- if converted:
- # Visual Studio 2010 has TR1
- defines = [d for d in defines if d != '_HAS_TR1=0']
- # Warn of ignored settings
- ignored_settings = ['msvs_tool_files']
- for ignored_setting in ignored_settings:
- value = configuration.get(ignored_setting)
- if value:
- print ('Warning: The automatic conversion to MSBuild does not handle '
- '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
-
- defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
- disabled_warnings = _GetDisabledWarnings(configuration)
- prebuild = configuration.get('msvs_prebuild')
- postbuild = configuration.get('msvs_postbuild')
- def_file = _GetModuleDefinition(spec)
- precompiled_header = configuration.get('msvs_precompiled_header')
-
- # Add the information to the appropriate tool
- # TODO(jeanluc) We could optimize and generate these settings only if
- # the corresponding files are found, e.g. don't generate ResourceCompile
- # if you don't have any resources.
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'AdditionalIncludeDirectories', include_dirs)
- _ToolAppend(msbuild_settings, 'Midl',
- 'AdditionalIncludeDirectories', midl_include_dirs)
- _ToolAppend(msbuild_settings, 'ResourceCompile',
- 'AdditionalIncludeDirectories', resource_include_dirs)
- # Add in libraries; note that even for empty libraries we want this
- # set, to prevent inheriting default libraries from the environment.
- _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
- libraries)
- _ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
- library_dirs)
- if out_file:
- _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
- only_if_unset=True)
- if target_ext:
- _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
- only_if_unset=True)
- # Add defines.
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'PreprocessorDefinitions', defines)
- _ToolAppend(msbuild_settings, 'ResourceCompile',
- 'PreprocessorDefinitions', defines)
- # Add disabled warnings.
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'DisableSpecificWarnings', disabled_warnings)
- # Turn on precompiled headers if appropriate.
- if precompiled_header:
- precompiled_header = os.path.split(precompiled_header)[1]
- _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'PrecompiledHeaderFile', precompiled_header)
- _ToolAppend(msbuild_settings, 'ClCompile',
- 'ForcedIncludeFiles', [precompiled_header])
- else:
- _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
- # Turn off WinRT compilation
- _ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
- # Turn on import libraries if appropriate
- if spec.get('msvs_requires_importlibrary'):
- _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
- # Loadable modules don't generate import libraries;
- # tell dependent projects to not expect one.
- if spec['type'] == 'loadable_module':
- _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
- # Set the module definition file if any.
- if def_file:
- _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
- configuration['finalized_msbuild_settings'] = msbuild_settings
- if prebuild:
- _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
- if postbuild:
- _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
-
-
-def _GetValueFormattedForMSBuild(tool_name, name, value):
- if type(value) == list:
- # For some settings, VS2010 does not automatically extend the settings.
- # TODO(jeanluc) Is this what we want?
- if name in ['AdditionalIncludeDirectories',
- 'AdditionalLibraryDirectories',
- 'AdditionalOptions',
- 'DelayLoadDLLs',
- 'DisableSpecificWarnings',
- 'PreprocessorDefinitions']:
- value.append('%%(%s)' % name)
- # For most tools, entries in a list should be separated with ';' but some
- # settings use a space. Check for those first.
- exceptions = {
- 'ClCompile': ['AdditionalOptions'],
- 'Link': ['AdditionalOptions'],
- 'Lib': ['AdditionalOptions']}
- if tool_name in exceptions and name in exceptions[tool_name]:
- char = ' '
- else:
- char = ';'
- formatted_value = char.join(
- [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
- else:
- formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
- return formatted_value
-
-
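- # A minimal sketch, not part of the original generator: list values are
- # ';'-joined (AdditionalOptions uses spaces) and inheriting settings
- # gain a trailing %(...) reference to the defaults. Note the list is
- # mutated in place by the append above.
- def _example_value_formatted_for_msbuild():
-   defines = ['WIN32', 'NDEBUG']
-   formatted = _GetValueFormattedForMSBuild(
-       'ClCompile', 'PreprocessorDefinitions', defines)
-   assert formatted == 'WIN32;NDEBUG;%(PreprocessorDefinitions)'
-
-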
-def _VerifySourcesExist(sources, root_dir):
- """Verifies that all source files exist on disk.
-
- Checks that all regular source files, i.e. not created at run time,
- exist on disk. Missing files cause needless recompilation but produce no
- otherwise visible errors.
-
- Arguments:
- sources: A recursive list of Filter/file names.
- root_dir: The root directory for the relative path names.
- Returns:
- A list of source files that cannot be found on disk.
- """
- missing_sources = []
- for source in sources:
- if isinstance(source, MSVSProject.Filter):
- missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
- else:
- if '$' not in source:
- full_path = os.path.join(root_dir, source)
- if not os.path.exists(full_path):
- missing_sources.append(full_path)
- return missing_sources
-
-
-def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
- extension_to_rule_name, actions_spec,
- sources_handled_by_action, list_excluded):
- groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
- 'rule_dependency']
- grouped_sources = {}
- for g in groups:
- grouped_sources[g] = []
-
- _AddSources2(spec, sources, exclusions, grouped_sources,
- rule_dependencies, extension_to_rule_name,
- sources_handled_by_action, list_excluded)
- sources = []
- for g in groups:
- if grouped_sources[g]:
- sources.append(['ItemGroup'] + grouped_sources[g])
- if actions_spec:
- sources.append(['ItemGroup'] + actions_spec)
- return sources
-
-
-def _AddSources2(spec, sources, exclusions, grouped_sources,
- rule_dependencies, extension_to_rule_name,
- sources_handled_by_action,
- list_excluded):
- extensions_excluded_from_precompile = []
- for source in sources:
- if isinstance(source, MSVSProject.Filter):
- _AddSources2(spec, source.contents, exclusions, grouped_sources,
- rule_dependencies, extension_to_rule_name,
- sources_handled_by_action,
- list_excluded)
- else:
- if source not in sources_handled_by_action:
- detail = []
- excluded_configurations = exclusions.get(source, [])
- if len(excluded_configurations) == len(spec['configurations']):
- detail.append(['ExcludedFromBuild', 'true'])
- else:
- for config_name, configuration in sorted(excluded_configurations):
- condition = _GetConfigurationCondition(config_name, configuration)
- detail.append(['ExcludedFromBuild',
- {'Condition': condition},
- 'true'])
- # Add precompile if needed
- for config_name, configuration in spec['configurations'].iteritems():
- precompiled_source = configuration.get('msvs_precompiled_source', '')
- if precompiled_source != '':
- precompiled_source = _FixPath(precompiled_source)
- if not extensions_excluded_from_precompile:
- # If the precompiled header is generated by a C source, we must
- # not try to use it for C++ sources, and vice versa.
- basename, extension = os.path.splitext(precompiled_source)
- if extension == '.c':
- extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
- else:
- extensions_excluded_from_precompile = ['.c']
-
- if precompiled_source == source:
- condition = _GetConfigurationCondition(config_name, configuration)
- detail.append(['PrecompiledHeader',
- {'Condition': condition},
- 'Create'
- ])
- else:
- # Turn off precompiled header usage for source files of a
- # different type than the file that generated the
- # precompiled header.
- for extension in extensions_excluded_from_precompile:
- if source.endswith(extension):
- detail.append(['PrecompiledHeader', ''])
- detail.append(['ForcedIncludeFiles', ''])
-
- group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
- extension_to_rule_name)
- grouped_sources[group].append([element, {'Include': source}] + detail)
-
-
-def _GetMSBuildProjectReferences(project):
- references = []
- if project.dependencies:
- group = ['ItemGroup']
- for dependency in project.dependencies:
- guid = dependency.guid
- project_dir = os.path.split(project.path)[0]
- relative_path = gyp.common.RelativePath(dependency.path, project_dir)
- project_ref = ['ProjectReference',
- {'Include': relative_path},
- ['Project', guid],
- ['ReferenceOutputAssembly', 'false']
- ]
- for config in dependency.spec.get('configurations', {}).itervalues():
- if config.get('msvs_use_library_dependency_inputs', 0):
- project_ref.append(['UseLibraryDependencyInputs', 'true'])
- break
- # If it's disabled in any config, turn it off in the reference.
- if config.get('msvs_2010_disable_uldi_when_referenced', 0):
- project_ref.append(['UseLibraryDependencyInputs', 'false'])
- break
- group.append(project_ref)
- references.append(group)
- return references
-
-
-def _GenerateMSBuildProject(project, options, version, generator_flags):
- spec = project.spec
- configurations = spec['configurations']
- project_dir, project_file_name = os.path.split(project.path)
- gyp.common.EnsureDirExists(project.path)
- # Prepare list of sources and excluded sources.
- gyp_path = _NormalizedSource(project.build_file)
- relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
-
- gyp_file = os.path.split(project.build_file)[1]
- sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
- gyp_file)
- # Add rules.
- actions_to_add = {}
- props_files_of_rules = set()
- targets_files_of_rules = set()
- rule_dependencies = set()
- extension_to_rule_name = {}
- list_excluded = generator_flags.get('msvs_list_excluded_files', True)
-
- # Don't generate rules if we are using an external builder like ninja.
- if not spec.get('msvs_external_builder'):
- _GenerateRulesForMSBuild(project_dir, options, spec,
- sources, excluded_sources,
- props_files_of_rules, targets_files_of_rules,
- actions_to_add, rule_dependencies,
- extension_to_rule_name)
- else:
- rules = spec.get('rules', [])
- _AdjustSourcesForRules(rules, sources, excluded_sources, True)
-
- sources, excluded_sources, excluded_idl = (
- _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
- project_dir, sources,
- excluded_sources,
- list_excluded, version))
-
- # Don't add actions if we are using an external builder like ninja.
- if not spec.get('msvs_external_builder'):
- _AddActions(actions_to_add, spec, project.build_file)
- _AddCopies(actions_to_add, spec)
-
- # NOTE: this stanza must appear after all actions have been decided.
- # Don't exclude sources with actions attached, or they won't run.
- excluded_sources = _FilterActionsFromExcluded(
- excluded_sources, actions_to_add)
-
- exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
- actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
- spec, actions_to_add)
-
- _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
- rule_dependencies,
- extension_to_rule_name)
- missing_sources = _VerifySourcesExist(sources, project_dir)
-
- for configuration in configurations.itervalues():
- _FinalizeMSBuildSettings(spec, configuration)
-
- # Add attributes to root element
-
- import_default_section = [
- ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
- import_cpp_props_section = [
- ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
- import_cpp_targets_section = [
- ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
- import_masm_props_section = [
- ['Import',
- {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
- import_masm_targets_section = [
- ['Import',
- {'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
- macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
-
- content = [
- 'Project',
- {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
- 'ToolsVersion': version.ProjectVersion(),
- 'DefaultTargets': 'Build'
- }]
-
- content += _GetMSBuildProjectConfigurations(configurations)
- content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
- content += import_default_section
- content += _GetMSBuildConfigurationDetails(spec, project.build_file)
- if spec.get('msvs_enable_winphone'):
- content += _GetMSBuildLocalProperties('v120_wp81')
- else:
- content += _GetMSBuildLocalProperties(project.msbuild_toolset)
- content += import_cpp_props_section
- content += import_masm_props_section
- content += _GetMSBuildExtensions(props_files_of_rules)
- content += _GetMSBuildPropertySheets(configurations)
- content += macro_section
- content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
- project.build_file)
- content += _GetMSBuildToolSettingsSections(spec, configurations)
- content += _GetMSBuildSources(
- spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
- actions_spec, sources_handled_by_action, list_excluded)
- content += _GetMSBuildProjectReferences(project)
- content += import_cpp_targets_section
- content += import_masm_targets_section
- content += _GetMSBuildExtensionTargets(targets_files_of_rules)
-
- if spec.get('msvs_external_builder'):
- content += _GetMSBuildExternalBuilderTargets(spec)
-
- # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
- # has_run_as = _WriteMSVSUserFile(project.path, version, spec)
-
- easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
-
- return missing_sources
-
-
-def _GetMSBuildExternalBuilderTargets(spec):
- """Return a list of MSBuild targets for external builders.
-
- The "Build" and "Clean" targets are always generated. If the spec contains
- 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
- be generated, to support building selected C/C++ files.
-
- Arguments:
- spec: The gyp target spec.
- Returns:
- List of MSBuild 'Target' specs.
- """
- build_cmd = _BuildCommandLineForRuleRaw(
- spec, spec['msvs_external_builder_build_cmd'],
- False, False, False, False)
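- # Each generated target simply wraps the external builder's command line
- # in an Exec task.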
- build_target = ['Target', {'Name': 'Build'}]
- build_target.append(['Exec', {'Command': build_cmd}])
-
- clean_cmd = _BuildCommandLineForRuleRaw(
- spec, spec['msvs_external_builder_clean_cmd'],
- False, False, False, False)
- clean_target = ['Target', {'Name': 'Clean'}]
- clean_target.append(['Exec', {'Command': clean_cmd}])
-
- targets = [build_target, clean_target]
-
- if spec.get('msvs_external_builder_clcompile_cmd'):
- clcompile_cmd = _BuildCommandLineForRuleRaw(
- spec, spec['msvs_external_builder_clcompile_cmd'],
- False, False, False, False)
- clcompile_target = ['Target', {'Name': 'ClCompile'}]
- clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
- targets.append(clcompile_target)
-
- return targets
-
-
-def _GetMSBuildExtensions(props_files_of_rules):
- extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
- for props_file in props_files_of_rules:
- extensions.append(['Import', {'Project': props_file}])
- return [extensions]
-
-
-def _GetMSBuildExtensionTargets(targets_files_of_rules):
- targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
- for targets_file in sorted(targets_files_of_rules):
- targets_node.append(['Import', {'Project': targets_file}])
- return [targets_node]
-
-
-def _GenerateActionsForMSBuild(spec, actions_to_add):
- """Add actions accumulated into an actions_to_add, merging as needed.
-
- Arguments:
- spec: the target project dict
- actions_to_add: dictionary keyed on input name, which maps to a list of
- dicts describing the actions attached to that input file.
-
- Returns:
- A pair of (action specification, the sources handled by this action).
- """
- sources_handled_by_action = OrderedSet()
- actions_spec = []
- for primary_input, actions in actions_to_add.iteritems():
- inputs = OrderedSet()
- outputs = OrderedSet()
- descriptions = []
- commands = []
- for action in actions:
- inputs.update(OrderedSet(action['inputs']))
- outputs.update(OrderedSet(action['outputs']))
- descriptions.append(action['description'])
- cmd = action['command']
- # For most actions, add 'call' so that actions that invoke batch files
- # return and continue executing. msbuild_use_call provides a way to
- # disable this, but no adverse effects have been observed from doing it
- # for everything.
- if action.get('msbuild_use_call', True):
- cmd = 'call ' + cmd
- commands.append(cmd)
- # Add the custom build action for one input file.
- description = ', and also '.join(descriptions)
-
- # We can't join the commands simply with && because the command line will
- # get too long. See also _AddActions: cygwin's setup_env mustn't be called
- # for every invocation or the command that sets the PATH will grow too
- # long.
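- # The errorlevel check after each command makes the sequence abort on the
- # first failure, matching what joining with && would have done.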
- command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
- for c in commands])
- _AddMSBuildAction(spec,
- primary_input,
- inputs,
- outputs,
- command,
- description,
- sources_handled_by_action,
- actions_spec)
- return actions_spec, sources_handled_by_action
-
-
-def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
- sources_handled_by_action, actions_spec):
- command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
- primary_input = _FixPath(primary_input)
- inputs_array = _FixPaths(inputs)
- outputs_array = _FixPaths(outputs)
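- # The primary input becomes the CustomBuild item's Include; every other
- # input is passed via AdditionalInputs.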
- additional_inputs = ';'.join([i for i in inputs_array
- if i != primary_input])
- outputs = ';'.join(outputs_array)
- sources_handled_by_action.add(primary_input)
- action_spec = ['CustomBuild', {'Include': primary_input}]
- action_spec.extend(
- # TODO(jeanluc) 'Document' for all or just if as_sources?
- [['FileType', 'Document'],
- ['Command', command],
- ['Message', description],
- ['Outputs', outputs]
- ])
- if additional_inputs:
- action_spec.append(['AdditionalInputs', additional_inputs])
- actions_spec.append(action_spec)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
deleted file mode 100755
index c0b021df50..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the msvs.py file. """
-
-import gyp.generator.msvs as msvs
-import unittest
-import StringIO
-
-
-class TestSequenceFunctions(unittest.TestCase):
-
- def setUp(self):
- self.stderr = StringIO.StringIO()
-
- def test_GetLibraries(self):
- self.assertEqual(
- msvs._GetLibraries({}),
- [])
- self.assertEqual(
- msvs._GetLibraries({'libraries': []}),
- [])
- self.assertEqual(
- msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}),
- ['a.lib'])
- self.assertEqual(
- msvs._GetLibraries({'libraries': ['-la']}),
- ['a.lib'])
- self.assertEqual(
- msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib',
- '-lb.lib', 'd.lib', 'a.lib']}),
- ['c.lib', 'b.lib', 'd.lib', 'a.lib'])
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
deleted file mode 100644
index 841067ed34..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
+++ /dev/null
@@ -1,2410 +0,0 @@
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import collections
-import copy
-import hashlib
-import json
-import multiprocessing
-import os.path
-import re
-import signal
-import subprocess
-import sys
-import gyp
-import gyp.common
-from gyp.common import OrderedSet
-import gyp.msvs_emulation
-import gyp.MSVSUtil as MSVSUtil
-import gyp.xcode_emulation
-from cStringIO import StringIO
-
-from gyp.common import GetEnvironFallback
-import gyp.ninja_syntax as ninja_syntax
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_PREFIX': 'lib',
-
- # Gyp expects the following variables to be expandable by the build
- # system to the appropriate locations. Ninja prefers paths to be
- # known at gyp time. To resolve this, introduce special
- # variables starting with $! and $| (which begin with a $ so gyp knows it
- # should be treated specially, but is otherwise an invalid
- # ninja/shell variable) that are passed to gyp here but expanded
- # before writing out into the target .ninja files; see
- # ExpandSpecial.
- # $! is used for variables that represent a path and that can only appear at
- # the start of a string, while $| is used for variables that can appear
- # anywhere in a string.
- 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
- 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
- 'PRODUCT_DIR': '$!PRODUCT_DIR',
- 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
-
- # Special variables that may be used by gyp 'rule' targets.
- # We generate definitions for these variables on the fly when processing a
- # rule.
- 'RULE_INPUT_ROOT': '${root}',
- 'RULE_INPUT_DIRNAME': '${dirname}',
- 'RULE_INPUT_PATH': '${source}',
- 'RULE_INPUT_EXT': '${ext}',
- 'RULE_INPUT_NAME': '${name}',
-}
-
-# Placates pylint.
-generator_additional_non_configuration_keys = []
-generator_additional_path_sections = []
-generator_extra_sources_for_rules = []
-generator_filelist_paths = None
-
-generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
-
-def StripPrefix(arg, prefix):
- if arg.startswith(prefix):
- return arg[len(prefix):]
- return arg
-
-
-def QuoteShellArgument(arg, flavor):
- """Quote a string such that it will be interpreted as a single argument
- by the shell."""
- # Rather than attempting to enumerate the bad shell characters, just
- # whitelist common OK ones and quote anything else.
- if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
- return arg # No quoting necessary.
- if flavor == 'win':
- return gyp.msvs_emulation.QuoteForRspFile(arg)
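- # POSIX shells cannot escape a single quote inside single quotes: close
- # the string, emit the quote inside double quotes, and reopen it.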
- return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
-
-
-def Define(d, flavor):
- """Takes a preprocessor define and returns a -D parameter that's ninja- and
- shell-escaped."""
- if flavor == 'win':
- # cl.exe replaces literal # characters with = in preprocessor definitions for
- # some reason. Octal-encode to work around that.
- d = d.replace('#', '\\%03o' % ord('#'))
- return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
-
-
-def AddArch(output, arch):
- """Adds an arch string to an output path."""
- output, extension = os.path.splitext(output)
- return '%s.%s%s' % (output, arch, extension)
-
-
-class Target(object):
- """Target represents the paths used within a single gyp target.
-
- Conceptually, building a single target A is a series of steps:
-
- 1) actions/rules/copies generates source/resources/etc.
- 2) compiles generates .o files
- 3) link generates a binary (library/executable)
- 4) bundle merges the above into a mac bundle
-
- (Any of these steps can be optional.)
-
- From a build ordering perspective, a dependent target B could just
- depend on the last output of this series of steps.
-
- But some dependent commands sometimes need to reach inside the box.
- For example, when linking B it needs to get the path to the static
- library generated by A.
-
- This object stores those paths. To keep things simple, member
- variables only store concrete paths to single files, while methods
- compute derived values like "the last output of the target".
- """
- def __init__(self, type):
- # Gyp type ("static_library", etc.) of this target.
- self.type = type
- # File representing whether any input dependencies necessary for
- # dependent actions have completed.
- self.preaction_stamp = None
- # File representing whether any input dependencies necessary for
- # dependent compiles have completed.
- self.precompile_stamp = None
- # File representing the completion of actions/rules/copies, if any.
- self.actions_stamp = None
- # Path to the output of the link step, if any.
- self.binary = None
- # Path to the file representing the completion of building the bundle,
- # if any.
- self.bundle = None
- # On Windows, incremental linking requires linking against all the .objs
- # that compose a .lib (rather than the .lib itself). That list is stored
- # here. In this case, we also need to save the compile_deps for the target,
- # so that the target that directly depends on the .objs can also depend
- # on those.
- self.component_objs = None
- self.compile_deps = None
- # Windows only. The import .lib is the output of a build step, but
- # because dependents only link against the lib (not both the lib and the
- # dll) we keep track of the import library here.
- self.import_lib = None
-
- def Linkable(self):
- """Return true if this is a target that can be linked against."""
- return self.type in ('static_library', 'shared_library')
-
- def UsesToc(self, flavor):
- """Return true if the target should produce a restat rule based on a TOC
- file."""
- # For bundles, the .TOC should be produced for the binary, not for
- # FinalOutput(). But the naive approach would put the TOC file into the
- # bundle, so don't do this for bundles for now.
- if flavor == 'win' or self.bundle:
- return False
- return self.type in ('shared_library', 'loadable_module')
-
- def PreActionInput(self, flavor):
- """Return the path, if any, that should be used as a dependency of
- any dependent action step."""
- if self.UsesToc(flavor):
- return self.FinalOutput() + '.TOC'
- return self.FinalOutput() or self.preaction_stamp
-
- def PreCompileInput(self):
- """Return the path, if any, that should be used as a dependency of
- any dependent compile step."""
- return self.actions_stamp or self.precompile_stamp
-
- def FinalOutput(self):
- """Return the last output of the target, which depends on all prior
- steps."""
- return self.bundle or self.binary or self.actions_stamp
-
-
-# A small discourse on paths as used within the Ninja build:
-# All files we produce (both at gyp and at build time) appear in the
-# build directory (e.g. out/Debug).
-#
-# Paths within a given .gyp file are always relative to the directory
-# containing the .gyp file. Call these "gyp paths". This includes
-# sources as well as the starting directory a given gyp rule/action
-# expects to be run from. We call the path from the source root to
-# the gyp file the "base directory" within the per-.gyp-file
-# NinjaWriter code.
-#
-# All paths as written into the .ninja files are relative to the build
-# directory. Call these paths "ninja paths".
-#
-# We translate between these two notions of paths with two helper
-# functions:
-#
-# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
-# into the equivalent ninja path.
-#
-# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
-# an output file; the result can be namespaced such that it is unique
-# to the input file name as well as the output target name.
-
-class NinjaWriter(object):
- def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
- output_file, toplevel_build, output_file_name, flavor,
- toplevel_dir=None):
- """
- base_dir: path from source root to directory containing this gyp file;
- by gyp semantics, all input paths are relative to this
- build_dir: path from source root to build output
- toplevel_dir: path to the toplevel directory
- """
-
- self.hash_for_rules = hash_for_rules
- self.target_outputs = target_outputs
- self.base_dir = base_dir
- self.build_dir = build_dir
- self.ninja = ninja_syntax.Writer(output_file)
- self.toplevel_build = toplevel_build
- self.output_file_name = output_file_name
-
- self.flavor = flavor
- self.abs_build_dir = None
- if toplevel_dir is not None:
- self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
- build_dir))
- self.obj_ext = '.obj' if flavor == 'win' else '.o'
- if flavor == 'win':
- # See docstring of msvs_emulation.GenerateEnvironmentFiles().
- self.win_env = {}
- for arch in ('x86', 'x64'):
- self.win_env[arch] = 'environment.' + arch
-
- # Relative path from build output dir to base dir.
- build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
- self.build_to_base = os.path.join(build_to_top, base_dir)
- # Relative path from base dir to build dir.
- base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
- self.base_to_build = os.path.join(base_to_top, build_dir)
-
- def ExpandSpecial(self, path, product_dir=None):
- """Expand specials like $!PRODUCT_DIR in |path|.
-
- If |product_dir| is None, assumes the cwd is already the product
- dir. Otherwise, |product_dir| is the relative path to the product
- dir.
- """
-
- PRODUCT_DIR = '$!PRODUCT_DIR'
- if PRODUCT_DIR in path:
- if product_dir:
- path = path.replace(PRODUCT_DIR, product_dir)
- else:
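- # The cwd is already the product dir: strip the prefix entirely,
- # handling both path separators, and map a bare reference to '.'.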
- path = path.replace(PRODUCT_DIR + '/', '')
- path = path.replace(PRODUCT_DIR + '\\', '')
- path = path.replace(PRODUCT_DIR, '.')
-
- INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
- if INTERMEDIATE_DIR in path:
- int_dir = self.GypPathToUniqueOutput('gen')
- # GypPathToUniqueOutput generates a path relative to the product dir,
- # so insert product_dir in front if it is provided.
- path = path.replace(INTERMEDIATE_DIR,
- os.path.join(product_dir or '', int_dir))
-
- CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
- path = path.replace(CONFIGURATION_NAME, self.config_name)
-
- return path
-
- def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
- if self.flavor == 'win':
- path = self.msvs_settings.ConvertVSMacros(
- path, config=self.config_name)
- path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
- path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
- dirname)
- path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
- path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
- path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
- return path
-
- def GypPathToNinja(self, path, env=None):
- """Translate a gyp path to a ninja path, optionally expanding environment
- variable references in |path| with |env|.
-
- See the above discourse on path conversions."""
- if env:
- if self.flavor == 'mac':
- path = gyp.xcode_emulation.ExpandEnvVars(path, env)
- elif self.flavor == 'win':
- path = gyp.msvs_emulation.ExpandMacros(path, env)
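- # Paths starting with $! are already relative to the build directory
- # once expanded, so they skip the build_to_base join below.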
- if path.startswith('$!'):
- expanded = self.ExpandSpecial(path)
- if self.flavor == 'win':
- expanded = os.path.normpath(expanded)
- return expanded
- if '$|' in path:
- path = self.ExpandSpecial(path)
- assert '$' not in path, path
- return os.path.normpath(os.path.join(self.build_to_base, path))
-
- def GypPathToUniqueOutput(self, path, qualified=True):
- """Translate a gyp path to a ninja path for writing output.
-
- If qualified is True, qualify the resulting filename with the name
- of the target. This is necessary when e.g. compiling the same
- path twice for two separate output targets.
-
- See the above discourse on path conversions."""
-
- path = self.ExpandSpecial(path)
- assert not path.startswith('$'), path
-
- # Translate the path following this scheme:
- # Input: foo/bar.gyp, target targ, references baz/out.o
- # Output: obj/foo/baz/targ.out.o (if qualified)
- # obj/foo/baz/out.o (otherwise)
- # (and obj.host instead of obj for cross-compiles)
- #
- # Why this scheme and not some other one?
- # 1) for a given input, you can compute all derived outputs by matching
- # its path, even if the input is brought via a gyp file with '..'.
- # 2) simple files like libraries and stamps have a simple filename.
-
- obj = 'obj'
- if self.toolset != 'target':
- obj += '.' + self.toolset
-
- path_dir, path_basename = os.path.split(path)
- assert not os.path.isabs(path_dir), (
- "'%s' can not be absolute path (see crbug.com/462153)." % path_dir)
-
- if qualified:
- path_basename = self.name + '.' + path_basename
- return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
- path_basename))
-
- def WriteCollapsedDependencies(self, name, targets, order_only=None):
- """Given a list of targets, return a path for a single file
- representing the result of building all the targets or None.
-
- Uses a stamp file if necessary."""
-
- assert targets == filter(None, targets), targets
- if len(targets) == 0:
- assert not order_only
- return None
- if len(targets) > 1 or order_only:
- stamp = self.GypPathToUniqueOutput(name + '.stamp')
- targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
- self.ninja.newline()
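- # Either the stamp just written, or the single original target.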
- return targets[0]
-
- def _SubninjaNameForArch(self, arch):
- output_file_base = os.path.splitext(self.output_file_name)[0]
- return '%s.%s.ninja' % (output_file_base, arch)
-
- def WriteSpec(self, spec, config_name, generator_flags):
- """The main entry point for NinjaWriter: write the build rules for a spec.
-
- Returns a Target object, which represents the output paths for this spec.
- Returns None if there are no outputs (e.g. a settings-only 'none' type
- target)."""
-
- self.config_name = config_name
- self.name = spec['target_name']
- self.toolset = spec['toolset']
- config = spec['configurations'][config_name]
- self.target = Target(spec['type'])
- self.is_standalone_static_library = bool(
- spec.get('standalone_static_library', 0))
- # Track if this target contains any C++ files, to decide if gcc or g++
- # should be used for linking.
- self.uses_cpp = False
-
- self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
- self.xcode_settings = self.msvs_settings = None
- if self.flavor == 'mac':
- self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
- if self.flavor == 'win':
- self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
- generator_flags)
- arch = self.msvs_settings.GetArch(config_name)
- self.ninja.variable('arch', self.win_env[arch])
- self.ninja.variable('cc', '$cl_' + arch)
- self.ninja.variable('cxx', '$cl_' + arch)
- self.ninja.variable('cc_host', '$cl_' + arch)
- self.ninja.variable('cxx_host', '$cl_' + arch)
- self.ninja.variable('asm', '$ml_' + arch)
-
- if self.flavor == 'mac':
- self.archs = self.xcode_settings.GetActiveArchs(config_name)
- if len(self.archs) > 1:
- self.arch_subninjas = dict(
- (arch, ninja_syntax.Writer(
- OpenOutput(os.path.join(self.toplevel_build,
- self._SubninjaNameForArch(arch)),
- 'w')))
- for arch in self.archs)
-
- # Compute predepends for all rules.
- # actions_depends lists the dependencies that must be ready before this
- # target runs any of its action/rule/copy steps.
- # compile_depends lists the dependencies that must be ready before this
- # target runs any of its compile steps.
- actions_depends = []
- compile_depends = []
- # TODO(evan): it is rather confusing which things are lists and which
- # are strings. Fix these.
- if 'dependencies' in spec:
- for dep in spec['dependencies']:
- if dep in self.target_outputs:
- target = self.target_outputs[dep]
- actions_depends.append(target.PreActionInput(self.flavor))
- compile_depends.append(target.PreCompileInput())
- actions_depends = filter(None, actions_depends)
- compile_depends = filter(None, compile_depends)
- actions_depends = self.WriteCollapsedDependencies('actions_depends',
- actions_depends)
- compile_depends = self.WriteCollapsedDependencies('compile_depends',
- compile_depends)
- self.target.preaction_stamp = actions_depends
- self.target.precompile_stamp = compile_depends
-
- # Write out actions, rules, and copies. These must happen before we
- # compile any sources, so compute a list of predependencies for sources
- # while we do it.
- extra_sources = []
- mac_bundle_depends = []
- self.target.actions_stamp = self.WriteActionsRulesCopies(
- spec, extra_sources, actions_depends, mac_bundle_depends)
-
- # If we have actions/rules/copies, we depend directly on those, but
- # otherwise we depend on dependent target's actions/rules/copies etc.
- # We never need to explicitly depend on previous target's link steps,
- # because no compile ever depends on them.
- compile_depends_stamp = (self.target.actions_stamp or compile_depends)
-
- # Write out the compilation steps, if any.
- link_deps = []
- sources = extra_sources + spec.get('sources', [])
- if sources:
- if self.flavor == 'mac' and len(self.archs) > 1:
- # Write subninja file containing compile and link commands scoped to
- # a single arch if a fat binary is being built.
- for arch in self.archs:
- self.ninja.subninja(self._SubninjaNameForArch(arch))
-
- pch = None
- if self.flavor == 'win':
- gyp.msvs_emulation.VerifyMissingSources(
- sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
- pch = gyp.msvs_emulation.PrecompiledHeader(
- self.msvs_settings, config_name, self.GypPathToNinja,
- self.GypPathToUniqueOutput, self.obj_ext)
- else:
- pch = gyp.xcode_emulation.MacPrefixHeader(
- self.xcode_settings, self.GypPathToNinja,
- lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
- link_deps = self.WriteSources(
- self.ninja, config_name, config, sources, compile_depends_stamp, pch,
- spec)
- # Some actions/rules output 'sources' that are already object files.
- obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
- if obj_outputs:
- if self.flavor != 'mac' or len(self.archs) == 1:
- link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
- else:
- print "Warning: Actions/rules writing object files don't work with " \
- "multiarch targets, dropping. (target %s)" % spec['target_name']
- elif self.flavor == 'mac' and len(self.archs) > 1:
- link_deps = collections.defaultdict(list)
-
- compile_deps = self.target.actions_stamp or actions_depends
- if self.flavor == 'win' and self.target.type == 'static_library':
- self.target.component_objs = link_deps
- self.target.compile_deps = compile_deps
-
- # Write out a link step, if needed.
- output = None
- is_empty_bundle = not link_deps and not mac_bundle_depends
- if link_deps or self.target.actions_stamp or actions_depends:
- output = self.WriteTarget(spec, config_name, config, link_deps,
- compile_deps)
- if self.is_mac_bundle:
- mac_bundle_depends.append(output)
-
- # Bundle all of the above together, if needed.
- if self.is_mac_bundle:
- output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
-
- if not output:
- return None
-
- assert self.target.FinalOutput(), output
- return self.target
-
- def _WinIdlRule(self, source, prebuild, outputs):
- """Handle the implicit VS .idl rule for one source file. Fills |outputs|
- with files that are generated."""
- outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
- source, self.config_name)
- outdir = self.GypPathToNinja(outdir)
- def fix_path(path, rel=None):
- path = os.path.join(outdir, path)
- dirname, basename = os.path.split(source)
- root, ext = os.path.splitext(basename)
- path = self.ExpandRuleVariables(
- path, root, dirname, source, ext, basename)
- if rel:
- path = os.path.relpath(path, rel)
- return path
- vars = [(name, fix_path(value, outdir)) for name, value in vars]
- output = [fix_path(p) for p in output]
- vars.append(('outdir', outdir))
- vars.append(('idlflags', flags))
- input = self.GypPathToNinja(source)
- self.ninja.build(output, 'idl', input,
- variables=vars, order_only=prebuild)
- outputs.extend(output)
-
- def WriteWinIdlFiles(self, spec, prebuild):
- """Writes rules to match MSVS's implicit idl handling."""
- assert self.flavor == 'win'
- if self.msvs_settings.HasExplicitIdlRulesOrActions(spec):
- return []
- outputs = []
- for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
- self._WinIdlRule(source, prebuild, outputs)
- return outputs
-
- def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
- mac_bundle_depends):
- """Write out the Actions, Rules, and Copies steps. Return a path
- representing the outputs of these steps."""
- outputs = []
- if self.is_mac_bundle:
- mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
- else:
- mac_bundle_resources = []
- extra_mac_bundle_resources = []
-
- if 'actions' in spec:
- outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
- extra_mac_bundle_resources)
- if 'rules' in spec:
- outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
- mac_bundle_resources,
- extra_mac_bundle_resources)
- if 'copies' in spec:
- outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
-
- if 'sources' in spec and self.flavor == 'win':
- outputs += self.WriteWinIdlFiles(spec, prebuild)
-
- stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
-
- if self.is_mac_bundle:
- xcassets = self.WriteMacBundleResources(
- extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
- partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends)
- self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends)
-
- return stamp
-
- def GenerateDescription(self, verb, message, fallback):
- """Generate and return a description of a build step.
-
- |verb| is the short summary, e.g. ACTION or RULE.
- |message| is a hand-written description, or None if not available.
- |fallback| is the gyp-level name of the step, usable as a fallback.
- """
- if self.toolset != 'target':
- verb += '(%s)' % self.toolset
- if message:
- return '%s %s' % (verb, self.ExpandSpecial(message))
- else:
- return '%s %s: %s' % (verb, self.name, fallback)
-
- def WriteActions(self, actions, extra_sources, prebuild,
- extra_mac_bundle_resources):
- # Actions cd into the base directory.
- env = self.GetToolchainEnv()
- all_outputs = []
- for action in actions:
- # First write out a rule for the action.
- name = '%s_%s' % (action['action_name'], self.hash_for_rules)
- description = self.GenerateDescription('ACTION',
- action.get('message', None),
- name)
- is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
- if self.flavor == 'win' else False)
- args = action['action']
- depfile = action.get('depfile', None)
- if depfile:
- depfile = self.ExpandSpecial(depfile, self.base_to_build)
- pool = 'console' if int(action.get('ninja_use_console', 0)) else None
- rule_name, _ = self.WriteNewNinjaRule(name, args, description,
- is_cygwin, env, pool,
- depfile=depfile)
-
- inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
- if int(action.get('process_outputs_as_sources', False)):
- extra_sources += action['outputs']
- if int(action.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += action['outputs']
- outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
-
- # Then write out an edge using the rule.
- self.ninja.build(outputs, rule_name, inputs,
- order_only=prebuild)
- all_outputs += outputs
-
- self.ninja.newline()
-
- return all_outputs
-
- def WriteRules(self, rules, extra_sources, prebuild,
- mac_bundle_resources, extra_mac_bundle_resources):
- env = self.GetToolchainEnv()
- all_outputs = []
- for rule in rules:
- # Skip a rule with no action and no inputs.
- if 'action' not in rule and not rule.get('rule_sources', []):
- continue
-
- # First write out a rule for the rule action.
- name = '%s_%s' % (rule['rule_name'], self.hash_for_rules)
-
- args = rule['action']
- description = self.GenerateDescription(
- 'RULE',
- rule.get('message', None),
- ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
- is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
- if self.flavor == 'win' else False)
- pool = 'console' if int(rule.get('ninja_use_console', 0)) else None
- rule_name, args = self.WriteNewNinjaRule(
- name, args, description, is_cygwin, env, pool)
-
- # TODO: if the command references the outputs directly, we should
- # simplify it to just use $out.
-
- # Rules can potentially make use of some special variables which
- # must vary per source file.
- # Compute the list of variables we'll need to provide.
- special_locals = ('source', 'root', 'dirname', 'ext', 'name')
- needed_variables = set(['source'])
- for argument in args:
- for var in special_locals:
- if '${%s}' % var in argument:
- needed_variables.add(var)
-
- def cygwin_munge(path):
- # pylint: disable=cell-var-from-loop
- if is_cygwin:
- return path.replace('\\', '/')
- return path
-
- inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
-
- # If there are n source files matching the rule, and m additional rule
- # inputs, then adding 'inputs' to each build edge written below will
- # write m * n inputs. Collapsing reduces this to m + n.
- sources = rule.get('rule_sources', [])
- num_inputs = len(inputs)
- if prebuild:
- num_inputs += 1
- if num_inputs > 2 and len(sources) > 2:
- inputs = [self.WriteCollapsedDependencies(
- rule['rule_name'], inputs, order_only=prebuild)]
- prebuild = []
-
- # For each source file, write an edge that generates all the outputs.
- for source in sources:
- source = os.path.normpath(source)
- dirname, basename = os.path.split(source)
- root, ext = os.path.splitext(basename)
-
- # Gather the list of inputs and outputs, expanding $vars if possible.
- outputs = [self.ExpandRuleVariables(o, root, dirname,
- source, ext, basename)
- for o in rule['outputs']]
-
- if int(rule.get('process_outputs_as_sources', False)):
- extra_sources += outputs
-
- was_mac_bundle_resource = source in mac_bundle_resources
- if was_mac_bundle_resource or \
- int(rule.get('process_outputs_as_mac_bundle_resources', False)):
- extra_mac_bundle_resources += outputs
- # Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
- # items in a set and remove them all in a single pass if this becomes
- # a performance issue.
- if was_mac_bundle_resource:
- mac_bundle_resources.remove(source)
-
- extra_bindings = []
- for var in needed_variables:
- if var == 'root':
- extra_bindings.append(('root', cygwin_munge(root)))
- elif var == 'dirname':
- # '$dirname' is a parameter to the rule action, which means
- # it shouldn't be converted to a Ninja path. But we don't
- # want $!PRODUCT_DIR in there either.
- dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
- extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
- elif var == 'source':
- # '$source' is a parameter to the rule action, which means
- # it shouldn't be converted to a Ninja path. But we don't
- # want $!PRODUCT_DIR in there either.
- source_expanded = self.ExpandSpecial(source, self.base_to_build)
- extra_bindings.append(('source', cygwin_munge(source_expanded)))
- elif var == 'ext':
- extra_bindings.append(('ext', ext))
- elif var == 'name':
- extra_bindings.append(('name', cygwin_munge(basename)))
- else:
- assert var == None, repr(var)
-
- outputs = [self.GypPathToNinja(o, env) for o in outputs]
- if self.flavor == 'win':
- # WriteNewNinjaRule uses unique_name for creating an rsp file on win.
- extra_bindings.append(('unique_name',
- hashlib.md5(outputs[0]).hexdigest()))
- self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
- implicit=inputs,
- order_only=prebuild,
- variables=extra_bindings)
-
- all_outputs.extend(outputs)
-
- return all_outputs
-
- def WriteCopies(self, copies, prebuild, mac_bundle_depends):
- outputs = []
- env = self.GetToolchainEnv()
- for copy in copies:
- for path in copy['files']:
- # Normalize the path so trailing slashes don't confuse us.
- path = os.path.normpath(path)
- basename = os.path.split(path)[1]
- src = self.GypPathToNinja(path, env)
- dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
- env)
- outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
- if self.is_mac_bundle:
- # gyp has mac_bundle_resources to copy things into a bundle's
- # Resources folder, but there's no built-in way to copy files to other
- # places in the bundle. Hence, some targets use copies for this. Check
- # if this file is copied into the current bundle, and if so add it to
- # the bundle depends so that dependent targets get rebuilt if the copy
- # input changes.
- if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
- mac_bundle_depends.append(dst)
-
- return outputs
-
- def WriteMacBundleResources(self, resources, bundle_depends):
- """Writes ninja edges for 'mac_bundle_resources'."""
- xcassets = []
- for output, res in gyp.xcode_emulation.GetMacBundleResources(
- generator_default_variables['PRODUCT_DIR'],
- self.xcode_settings, map(self.GypPathToNinja, resources)):
- output = self.ExpandSpecial(output)
- if os.path.splitext(output)[-1] != '.xcassets':
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
- self.ninja.build(output, 'mac_tool', res,
- variables=[('mactool_cmd', 'copy-bundle-resource'), \
- ('binary', isBinary)])
- bundle_depends.append(output)
- else:
- xcassets.append(res)
- return xcassets
-
- def WriteMacXCassets(self, xcassets, bundle_depends):
- """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
-
- This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
- It assumes that the asset catalogs define at least one imageset and
- thus an Assets.car file will be generated in the application resources
- directory. If this is not the case, then the step will probably be rerun
- at each invocation of ninja."""
- if not xcassets:
- return
-
- extra_arguments = {}
- settings_to_arg = {
- 'XCASSETS_APP_ICON': 'app-icon',
- 'XCASSETS_LAUNCH_IMAGE': 'launch-image',
- }
- settings = self.xcode_settings.xcode_settings[self.config_name]
- for settings_key, arg_name in settings_to_arg.iteritems():
- value = settings.get(settings_key)
- if value:
- extra_arguments[arg_name] = value
-
- partial_info_plist = None
- if extra_arguments:
- partial_info_plist = self.GypPathToUniqueOutput(
- 'assetcatalog_generated_info.plist')
- extra_arguments['output-partial-info-plist'] = partial_info_plist
-
- outputs = []
- outputs.append(
- os.path.join(
- self.xcode_settings.GetBundleResourceFolder(),
- 'Assets.car'))
- if partial_info_plist:
- outputs.append(partial_info_plist)
-
- keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor)
- extra_env = self.xcode_settings.GetPerTargetSettings()
- env = self.GetSortedXcodeEnv(additional_settings=extra_env)
- env = self.ComputeExportEnvString(env)
-
- bundle_depends.extend(self.ninja.build(
- outputs, 'compile_xcassets', xcassets,
- variables=[('env', env), ('keys', keys)]))
- return partial_info_plist
-
- def WriteMacInfoPlist(self, partial_info_plist, bundle_depends):
- """Write build rules for bundle Info.plist files."""
- info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
- generator_default_variables['PRODUCT_DIR'],
- self.xcode_settings, self.GypPathToNinja)
- if not info_plist:
- return
- out = self.ExpandSpecial(out)
- if defines:
- # Create an intermediate file to store preprocessed results.
- intermediate_plist = self.GypPathToUniqueOutput(
- os.path.basename(info_plist))
- defines = ' '.join([Define(d, self.flavor) for d in defines])
- info_plist = self.ninja.build(
- intermediate_plist, 'preprocess_infoplist', info_plist,
- variables=[('defines',defines)])
-
- env = self.GetSortedXcodeEnv(additional_settings=extra_env)
- env = self.ComputeExportEnvString(env)
-
- if partial_info_plist:
- intermediate_plist = self.GypPathToUniqueOutput('merged_info.plist')
- info_plist = self.ninja.build(
- intermediate_plist, 'merge_infoplist',
- [partial_info_plist, info_plist])
-
- keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
- keys = QuoteShellArgument(json.dumps(keys), self.flavor)
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
- self.ninja.build(out, 'copy_infoplist', info_plist,
- variables=[('env', env), ('keys', keys),
- ('binary', isBinary)])
- bundle_depends.append(out)
-
- def WriteSources(self, ninja_file, config_name, config, sources, predepends,
- precompiled_header, spec):
- """Write build rules to compile all of |sources|."""
- if self.toolset == 'host':
- self.ninja.variable('ar', '$ar_host')
- self.ninja.variable('cc', '$cc_host')
- self.ninja.variable('cxx', '$cxx_host')
- self.ninja.variable('ld', '$ld_host')
- self.ninja.variable('ldxx', '$ldxx_host')
- self.ninja.variable('nm', '$nm_host')
- self.ninja.variable('readelf', '$readelf_host')
-
- if self.flavor != 'mac' or len(self.archs) == 1:
- return self.WriteSourcesForArch(
- self.ninja, config_name, config, sources, predepends,
- precompiled_header, spec)
- else:
- return dict((arch, self.WriteSourcesForArch(
- self.arch_subninjas[arch], config_name, config, sources, predepends,
- precompiled_header, spec, arch=arch))
- for arch in self.archs)
-
- def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
- predepends, precompiled_header, spec, arch=None):
- """Write build rules to compile all of |sources|."""
-
- extra_defines = []
- if self.flavor == 'mac':
- cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
- cflags_c = self.xcode_settings.GetCflagsC(config_name)
- cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
- cflags_objc = ['$cflags_c'] + \
- self.xcode_settings.GetCflagsObjC(config_name)
- cflags_objcc = ['$cflags_cc'] + \
- self.xcode_settings.GetCflagsObjCC(config_name)
- elif self.flavor == 'win':
- asmflags = self.msvs_settings.GetAsmflags(config_name)
- cflags = self.msvs_settings.GetCflags(config_name)
- cflags_c = self.msvs_settings.GetCflagsC(config_name)
- cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
- extra_defines = self.msvs_settings.GetComputedDefines(config_name)
- # See comment at cc_command for why there are two .pdb files.
- pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
- config_name, self.ExpandSpecial)
- if not pdbpath_c:
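- # No compiler PDB name was specified; derive per-language defaults next
- # to the object files.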
- obj = 'obj'
- if self.toolset != 'target':
- obj += '.' + self.toolset
- pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
- pdbpath_c = pdbpath + '.c.pdb'
- pdbpath_cc = pdbpath + '.cc.pdb'
- self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
- self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
- self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
- else:
- cflags = config.get('cflags', [])
- cflags_c = config.get('cflags_c', [])
- cflags_cc = config.get('cflags_cc', [])
-
- # Respect environment variables related to build, but target-specific
- # flags can still override them.
- if self.toolset == 'target':
- cflags_c = (os.environ.get('CPPFLAGS', '').split() +
- os.environ.get('CFLAGS', '').split() + cflags_c)
- cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
- os.environ.get('CXXFLAGS', '').split() + cflags_cc)
- elif self.toolset == 'host':
- cflags_c = (os.environ.get('CPPFLAGS_host', '').split() +
- os.environ.get('CFLAGS_host', '').split() + cflags_c)
- cflags_cc = (os.environ.get('CPPFLAGS_host', '').split() +
- os.environ.get('CXXFLAGS_host', '').split() + cflags_cc)
-
- defines = config.get('defines', []) + extra_defines
- self.WriteVariableList(ninja_file, 'defines',
- [Define(d, self.flavor) for d in defines])
- if self.flavor == 'win':
- self.WriteVariableList(ninja_file, 'asmflags',
- map(self.ExpandSpecial, asmflags))
- self.WriteVariableList(ninja_file, 'rcflags',
- [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
- for f in self.msvs_settings.GetRcflags(config_name,
- self.GypPathToNinja)])
-
- include_dirs = config.get('include_dirs', [])
-
- env = self.GetToolchainEnv()
- if self.flavor == 'win':
- include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
- config_name)
- self.WriteVariableList(ninja_file, 'includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
- for i in include_dirs])
-
- if self.flavor == 'win':
- midl_include_dirs = config.get('midl_include_dirs', [])
- midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs(
- midl_include_dirs, config_name)
- self.WriteVariableList(ninja_file, 'midl_includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
- for i in midl_include_dirs])
-
- pch_commands = precompiled_header.GetPchBuildCommands(arch)
- if self.flavor == 'mac':
- # Most targets use no precompiled headers, so only write these if needed.
- for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
- ('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
- include = precompiled_header.GetInclude(ext, arch)
- if include: ninja_file.variable(var, include)
-
- arflags = config.get('arflags', [])
-
- self.WriteVariableList(ninja_file, 'cflags',
- map(self.ExpandSpecial, cflags))
- self.WriteVariableList(ninja_file, 'cflags_c',
- map(self.ExpandSpecial, cflags_c))
- self.WriteVariableList(ninja_file, 'cflags_cc',
- map(self.ExpandSpecial, cflags_cc))
- if self.flavor == 'mac':
- self.WriteVariableList(ninja_file, 'cflags_objc',
- map(self.ExpandSpecial, cflags_objc))
- self.WriteVariableList(ninja_file, 'cflags_objcc',
- map(self.ExpandSpecial, cflags_objcc))
- self.WriteVariableList(ninja_file, 'arflags',
- map(self.ExpandSpecial, arflags))
- ninja_file.newline()
- outputs = []
- has_rc_source = False
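- # Map each source file extension to the ninja rule that compiles it;
- # unhandled extensions are skipped.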
- for source in sources:
- filename, ext = os.path.splitext(source)
- ext = ext[1:]
- obj_ext = self.obj_ext
- if ext in ('cc', 'cpp', 'cxx'):
- command = 'cxx'
- self.uses_cpp = True
- elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
- command = 'cc'
- elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
- command = 'cc_s'
- elif (self.flavor == 'win' and ext == 'asm' and
- not self.msvs_settings.HasExplicitAsmRules(spec)):
- command = 'asm'
- # Add the _asm suffix as msvs is capable of handling .cc and
- # .asm files of the same name without collision.
- obj_ext = '_asm.obj'
- elif self.flavor == 'mac' and ext == 'm':
- command = 'objc'
- elif self.flavor == 'mac' and ext == 'mm':
- command = 'objcxx'
- self.uses_cpp = True
- elif self.flavor == 'win' and ext == 'rc':
- command = 'rc'
- obj_ext = '.res'
- has_rc_source = True
- else:
- # Ignore unhandled extensions.
- continue
- input = self.GypPathToNinja(source)
- output = self.GypPathToUniqueOutput(filename + obj_ext)
- if arch is not None:
- output = AddArch(output, arch)
- implicit = precompiled_header.GetObjDependencies([input], [output], arch)
- variables = []
- if self.flavor == 'win':
- variables, output, implicit = precompiled_header.GetFlagsModifications(
- input, output, implicit, command, cflags_c, cflags_cc,
- self.ExpandSpecial)
- ninja_file.build(output, command, input,
- implicit=[gch for _, _, gch in implicit],
- order_only=predepends, variables=variables)
- outputs.append(output)
-
- if has_rc_source:
- resource_include_dirs = config.get('resource_include_dirs', include_dirs)
- self.WriteVariableList(ninja_file, 'resource_includes',
- [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
- for i in resource_include_dirs])
-
- self.WritePchTargets(ninja_file, pch_commands)
-
- ninja_file.newline()
- return outputs
-
- def WritePchTargets(self, ninja_file, pch_commands):
- """Writes ninja rules to compile prefix headers."""
- if not pch_commands:
- return
-
- for gch, lang_flag, lang, input in pch_commands:
- var_name = {
- 'c': 'cflags_pch_c',
- 'cc': 'cflags_pch_cc',
- 'm': 'cflags_pch_objc',
- 'mm': 'cflags_pch_objcc',
- }[lang]
-
- map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
- cmd = map.get(lang)
- ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
-
- def WriteLink(self, spec, config_name, config, link_deps):
- """Write out a link step. Fills out target.binary. """
- if self.flavor != 'mac' or len(self.archs) == 1:
- return self.WriteLinkForArch(
- self.ninja, spec, config_name, config, link_deps)
- else:
- output = self.ComputeOutput(spec)
- inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
- config_name, config, link_deps[arch],
- arch=arch)
- for arch in self.archs]
- extra_bindings = []
- build_output = output
- if not self.is_mac_bundle:
- self.AppendPostbuildVariable(extra_bindings, spec, output, output)
-
- # TODO(yyanagisawa): more work needed to fix:
- # https://code.google.com/p/gyp/issues/detail?id=411
- if (spec['type'] in ('shared_library', 'loadable_module') and
- not self.is_mac_bundle):
- extra_bindings.append(('lib', output))
- self.ninja.build([output, output + '.TOC'], 'solipo', inputs,
- variables=extra_bindings)
- else:
- self.ninja.build(build_output, 'lipo', inputs, variables=extra_bindings)
- return output
-
- def WriteLinkForArch(self, ninja_file, spec, config_name, config,
- link_deps, arch=None):
- """Write out a link step. Fills out target.binary. """
- command = {
- 'executable': 'link',
- 'loadable_module': 'solink_module',
- 'shared_library': 'solink',
- }[spec['type']]
- command_suffix = ''
-
- implicit_deps = set()
- solibs = set()
- order_deps = set()
-
- if 'dependencies' in spec:
- # Two kinds of dependencies:
- # - Linkable dependencies (like a .a or a .so): add them to the link line.
- # - Non-linkable dependencies (like a rule that generates a file
- # and writes a stamp file): add them to implicit_deps
- extra_link_deps = set()
- for dep in spec['dependencies']:
- target = self.target_outputs.get(dep)
- if not target:
- continue
- linkable = target.Linkable()
- if linkable:
- new_deps = []
- if (self.flavor == 'win' and
- target.component_objs and
- self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
- new_deps = target.component_objs
- if target.compile_deps:
- order_deps.add(target.compile_deps)
- elif self.flavor == 'win' and target.import_lib:
- new_deps = [target.import_lib]
- elif target.UsesToc(self.flavor):
- solibs.add(target.binary)
- implicit_deps.add(target.binary + '.TOC')
- else:
- new_deps = [target.binary]
- for new_dep in new_deps:
- if new_dep not in extra_link_deps:
- extra_link_deps.add(new_dep)
- link_deps.append(new_dep)
-
- final_output = target.FinalOutput()
- if not linkable or final_output != target.binary:
- implicit_deps.add(final_output)
-
- extra_bindings = []
- if self.uses_cpp and self.flavor != 'win':
- extra_bindings.append(('ld', '$ldxx'))
-
- output = self.ComputeOutput(spec, arch)
- if arch is None and not self.is_mac_bundle:
- self.AppendPostbuildVariable(extra_bindings, spec, output, output)
-
- is_executable = spec['type'] == 'executable'
- # The ldflags config key is not used on mac or win. On those platforms
- # linker flags are set via xcode_settings and msvs_settings, respectively.
- env_ldflags = os.environ.get('LDFLAGS', '').split()
- if self.flavor == 'mac':
- ldflags = self.xcode_settings.GetLdflags(config_name,
- self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
- self.GypPathToNinja, arch)
- ldflags = env_ldflags + ldflags
- elif self.flavor == 'win':
- manifest_base_name = self.GypPathToUniqueOutput(
- self.ComputeOutputFileName(spec))
- ldflags, intermediate_manifest, manifest_files = \
- self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
- self.ExpandSpecial, manifest_base_name,
- output, is_executable,
- self.toplevel_build)
- ldflags = env_ldflags + ldflags
- self.WriteVariableList(ninja_file, 'manifests', manifest_files)
- implicit_deps = implicit_deps.union(manifest_files)
- if intermediate_manifest:
- self.WriteVariableList(
- ninja_file, 'intermediatemanifest', [intermediate_manifest])
- command_suffix = _GetWinLinkRuleNameSuffix(
- self.msvs_settings.IsEmbedManifest(config_name))
- def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
- if def_file:
- implicit_deps.add(def_file)
- else:
- # Respect environment variables related to build, but target-specific
- # flags can still override them.
- ldflags = env_ldflags + config.get('ldflags', [])
- if is_executable and len(solibs):
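- # Embed an rpath so the executable can locate the shared libraries
- # placed under lib/ relative to its own location ($ORIGIN).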
- rpath = 'lib/'
- if self.toolset != 'target':
- rpath += self.toolset
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
- ldflags.append('-Wl,-rpath-link=%s' % rpath)
- self.WriteVariableList(ninja_file, 'ldflags',
- map(self.ExpandSpecial, ldflags))
-
- library_dirs = config.get('library_dirs', [])
- if self.flavor == 'win':
- library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
- for l in library_dirs]
- library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
- self.flavor)
- for l in library_dirs]
- else:
- library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
- self.flavor)
- for l in library_dirs]
-
- libraries = gyp.common.uniquer(map(self.ExpandSpecial,
- spec.get('libraries', [])))
- if self.flavor == 'mac':
- libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
- elif self.flavor == 'win':
- libraries = self.msvs_settings.AdjustLibraries(libraries)
-
- self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
-
- linked_binary = output
-
- if command in ('solink', 'solink_module'):
- extra_bindings.append(('soname', os.path.split(output)[1]))
- extra_bindings.append(('lib',
- gyp.common.EncodePOSIXShellArgument(output)))
- if self.flavor != 'win':
- link_file_list = output
- if self.is_mac_bundle:
- # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
- # 'Dependency Framework.framework.rsp'
- link_file_list = self.xcode_settings.GetWrapperName()
- if arch:
- link_file_list += '.' + arch
- link_file_list += '.rsp'
- # If an rspfile contains spaces, ninja surrounds the filename with
- # quotes and then passes it to open(), creating a file with quotes in
- # its name (while the name used on the command line goes through bash,
- # which strips the quotes) :-/
- link_file_list = link_file_list.replace(' ', '_')
- extra_bindings.append(
- ('link_file_list',
- gyp.common.EncodePOSIXShellArgument(link_file_list)))
- if self.flavor == 'win':
- extra_bindings.append(('binary', output))
- if ('/NOENTRY' not in ldflags and
- not self.msvs_settings.GetNoImportLibrary(config_name)):
- self.target.import_lib = output + '.lib'
- extra_bindings.append(('implibflag',
- '/IMPLIB:%s' % self.target.import_lib))
- pdbname = self.msvs_settings.GetPDBName(
- config_name, self.ExpandSpecial, output + '.pdb')
- output = [output, self.target.import_lib]
- if pdbname:
- output.append(pdbname)
- elif not self.is_mac_bundle:
- output = [output, output + '.TOC']
- else:
- command = command + '_notoc'
- elif self.flavor == 'win':
- extra_bindings.append(('binary', output))
- pdbname = self.msvs_settings.GetPDBName(
- config_name, self.ExpandSpecial, output + '.pdb')
- if pdbname:
- output = [output, pdbname]
-
- if len(solibs):
- extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
-
- ninja_file.build(output, command + command_suffix, link_deps,
- implicit=list(implicit_deps),
- order_only=list(order_deps),
- variables=extra_bindings)
- return linked_binary
-
- def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
- extra_link_deps = any(self.target_outputs.get(dep).Linkable()
- for dep in spec.get('dependencies', [])
- if dep in self.target_outputs)
- if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
- # TODO(evan): don't call this function for 'none' target types, as
- # it doesn't do anything, and we fake out a 'binary' with a stamp file.
- self.target.binary = compile_deps
- self.target.type = 'none'
- elif spec['type'] == 'static_library':
- self.target.binary = self.ComputeOutput(spec)
- if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
- self.is_standalone_static_library):
- self.ninja.build(self.target.binary, 'alink_thin', link_deps,
- order_only=compile_deps)
- else:
- variables = []
- if self.xcode_settings:
- libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
- if libtool_flags:
- variables.append(('libtool_flags', libtool_flags))
- if self.msvs_settings:
- libflags = self.msvs_settings.GetLibFlags(config_name,
- self.GypPathToNinja)
- variables.append(('libflags', libflags))
-
- if self.flavor != 'mac' or len(self.archs) == 1:
- self.AppendPostbuildVariable(variables, spec,
- self.target.binary, self.target.binary)
- self.ninja.build(self.target.binary, 'alink', link_deps,
- order_only=compile_deps, variables=variables)
- else:
- inputs = []
- for arch in self.archs:
- output = self.ComputeOutput(spec, arch)
- self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
- order_only=compile_deps,
- variables=variables)
- inputs.append(output)
- # TODO: It's not clear if libtool_flags should be passed to the alink
- # call that combines single-arch .a files into a fat .a file.
- self.AppendPostbuildVariable(variables, spec,
- self.target.binary, self.target.binary)
- self.ninja.build(self.target.binary, 'alink', inputs,
- # FIXME: test proving order_only=compile_deps isn't
- # needed.
- variables=variables)
- else:
- self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
- return self.target.binary
-
- def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
- assert self.is_mac_bundle
- package_framework = spec['type'] in ('shared_library', 'loadable_module')
- output = self.ComputeMacBundleOutput()
- if is_empty:
- output += '.stamp'
- variables = []
- self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
- is_command_start=not package_framework)
- if package_framework and not is_empty:
- variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
- self.ninja.build(output, 'package_framework', mac_bundle_depends,
- variables=variables)
- else:
- self.ninja.build(output, 'stamp', mac_bundle_depends,
- variables=variables)
- self.target.bundle = output
- return output
-
- def GetToolchainEnv(self, additional_settings=None):
- """Returns the variables toolchain would set for build steps."""
- env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
- if self.flavor == 'win':
- env = self.GetMsvsToolchainEnv(
- additional_settings=additional_settings)
- return env
-
- def GetMsvsToolchainEnv(self, additional_settings=None):
- """Returns the variables Visual Studio would set for build steps."""
- return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
- config=self.config_name)
-
- def GetSortedXcodeEnv(self, additional_settings=None):
- """Returns the variables Xcode would set for build steps."""
- assert self.abs_build_dir
- abs_build_dir = self.abs_build_dir
- return gyp.xcode_emulation.GetSortedXcodeEnv(
- self.xcode_settings, abs_build_dir,
- os.path.join(abs_build_dir, self.build_to_base), self.config_name,
- additional_settings)
-
- def GetSortedXcodePostbuildEnv(self):
- """Returns the variables Xcode would set for postbuild steps."""
- postbuild_settings = {}
- # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
- # TODO(thakis): It would be nice to have some general mechanism instead.
- strip_save_file = self.xcode_settings.GetPerTargetSetting(
- 'CHROMIUM_STRIP_SAVE_FILE')
- if strip_save_file:
- postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
- return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
-
- def AppendPostbuildVariable(self, variables, spec, output, binary,
- is_command_start=False):
- """Adds a 'postbuild' variable if there is a postbuild for |output|."""
- postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
- if postbuild:
- variables.append(('postbuilds', postbuild))
-
- def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
- """Returns a shell command that runs all the postbuilds, and removes
- |output| if any of them fails. If |is_command_start| is False, then the
- returned string will start with ' && '."""
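-    # For illustration only: with postbuilds ['strip foo'] and output 'out',
-    # is_command_start=False produces roughly
-    #   $ && (<env exports> (cd <build_to_base> && strip foo); G=$$?;
-    #   ((exit $$G) || rm -rf out) && exit $$G)
-    # i.e. all postbuilds run in one subshell, and 'out' is removed if any of
-    # them fails so that the next build reruns them.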
- if not self.xcode_settings or spec['type'] == 'none' or not output:
- return ''
- output = QuoteShellArgument(output, self.flavor)
- postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
- if output_binary is not None:
- postbuilds = self.xcode_settings.AddImplicitPostbuilds(
- self.config_name,
- os.path.normpath(os.path.join(self.base_to_build, output)),
- QuoteShellArgument(
- os.path.normpath(os.path.join(self.base_to_build, output_binary)),
- self.flavor),
- postbuilds, quiet=True)
-
- if not postbuilds:
- return ''
- # Postbuilds expect to be run in the gyp file's directory, so insert an
- # implicit postbuild to cd to there.
- postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
- ['cd', self.build_to_base]))
- env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
-    # G will be non-zero if any postbuild fails. Run all postbuilds in a
- # subshell.
- commands = env + ' (' + \
- ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
- command_string = (commands + '); G=$$?; '
- # Remove the final output if any postbuild failed.
- '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
- if is_command_start:
- return '(' + command_string + ' && '
- else:
- return '$ && (' + command_string
-
- def ComputeExportEnvString(self, env):
- """Given an environment, returns a string looking like
-    'export FOO=foo; export BAR="${FOO} bar";'
- that exports |env| to the shell."""
- export_str = []
- for k, v in env:
- export_str.append('export %s=%s;' %
- (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
- return ' '.join(export_str)
-
- def ComputeMacBundleOutput(self):
- """Return the 'output' (full output path) to a bundle output directory."""
- assert self.is_mac_bundle
- path = generator_default_variables['PRODUCT_DIR']
- return self.ExpandSpecial(
- os.path.join(path, self.xcode_settings.GetWrapperName()))
-
- def ComputeOutputFileName(self, spec, type=None):
- """Compute the filename of the final output for the current target."""
- if not type:
- type = spec['type']
-
- default_variables = copy.copy(generator_default_variables)
- CalculateVariables(default_variables, {'flavor': self.flavor})
-
- # Compute filename prefix: the product prefix, or a default for
- # the product type.
- DEFAULT_PREFIX = {
- 'loadable_module': default_variables['SHARED_LIB_PREFIX'],
- 'shared_library': default_variables['SHARED_LIB_PREFIX'],
- 'static_library': default_variables['STATIC_LIB_PREFIX'],
- 'executable': default_variables['EXECUTABLE_PREFIX'],
- }
- prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
-
- # Compute filename extension: the product extension, or a default
- # for the product type.
- DEFAULT_EXTENSION = {
- 'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
- 'shared_library': default_variables['SHARED_LIB_SUFFIX'],
- 'static_library': default_variables['STATIC_LIB_SUFFIX'],
- 'executable': default_variables['EXECUTABLE_SUFFIX'],
- }
- extension = spec.get('product_extension')
- if extension:
- extension = '.' + extension
- else:
- extension = DEFAULT_EXTENSION.get(type, '')
-
- if 'product_name' in spec:
- # If we were given an explicit name, use that.
- target = spec['product_name']
- else:
- # Otherwise, derive a name from the target name.
- target = spec['target_name']
- if prefix == 'lib':
- # Snip out an extra 'lib' from libs if appropriate.
- target = StripPrefix(target, 'lib')
-
- if type in ('static_library', 'loadable_module', 'shared_library',
- 'executable'):
- return '%s%s%s' % (prefix, target, extension)
- elif type == 'none':
- return '%s.stamp' % target
- else:
- raise Exception('Unhandled output type %s' % type)
-
- def ComputeOutput(self, spec, arch=None):
- """Compute the path for the final output of the spec."""
- type = spec['type']
-
- if self.flavor == 'win':
- override = self.msvs_settings.GetOutputName(self.config_name,
- self.ExpandSpecial)
- if override:
- return override
-
- if arch is None and self.flavor == 'mac' and type in (
- 'static_library', 'executable', 'shared_library', 'loadable_module'):
- filename = self.xcode_settings.GetExecutablePath()
- else:
- filename = self.ComputeOutputFileName(spec, type)
-
- if arch is None and 'product_dir' in spec:
- path = os.path.join(spec['product_dir'], filename)
- return self.ExpandSpecial(path)
-
- # Some products go into the output root, libraries go into shared library
- # dir, and everything else goes into the normal place.
- type_in_output_root = ['executable', 'loadable_module']
- if self.flavor == 'mac' and self.toolset == 'target':
- type_in_output_root += ['shared_library', 'static_library']
- elif self.flavor == 'win' and self.toolset == 'target':
- type_in_output_root += ['shared_library']
-
- if arch is not None:
- # Make sure partial executables don't end up in a bundle or the regular
- # output directory.
- archdir = 'arch'
- if self.toolset != 'target':
- archdir = os.path.join('arch', '%s' % self.toolset)
- return os.path.join(archdir, AddArch(filename, arch))
- elif type in type_in_output_root or self.is_standalone_static_library:
- return filename
- elif type == 'shared_library':
- libdir = 'lib'
- if self.toolset != 'target':
- libdir = os.path.join('lib', '%s' % self.toolset)
- return os.path.join(libdir, filename)
- else:
- return self.GypPathToUniqueOutput(filename, qualified=False)
-
- def WriteVariableList(self, ninja_file, var, values):
- assert not isinstance(values, str)
- if values is None:
- values = []
- ninja_file.variable(var, ' '.join(values))
-
- def WriteNewNinjaRule(self, name, args, description, is_cygwin, env, pool,
- depfile=None):
- """Write out a new ninja "rule" statement for a given command.
-
- Returns the name of the new rule, and a copy of |args| with variables
- expanded."""
-
- if self.flavor == 'win':
- args = [self.msvs_settings.ConvertVSMacros(
- arg, self.base_to_build, config=self.config_name)
- for arg in args]
- description = self.msvs_settings.ConvertVSMacros(
- description, config=self.config_name)
- elif self.flavor == 'mac':
- # |env| is an empty list on non-mac.
- args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
- description = gyp.xcode_emulation.ExpandEnvVars(description, env)
-
- # TODO: we shouldn't need to qualify names; we do it because
- # currently the ninja rule namespace is global, but it really
- # should be scoped to the subninja.
- rule_name = self.name
- if self.toolset == 'target':
- rule_name += '.' + self.toolset
- rule_name += '.' + name
- rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
-
- # Remove variable references, but not if they refer to the magic rule
- # variables. This is not quite right, as it also protects these for
- # actions, not just for rules where they are valid. Good enough.
- protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
- protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
- description = re.sub(protect + r'\$', '_', description)
-
- # gyp dictates that commands are run from the base directory.
- # cd into the directory before running, and adjust paths in
- # the arguments to point to the proper locations.
- rspfile = None
- rspfile_content = None
- args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
- if self.flavor == 'win':
- rspfile = rule_name + '.$unique_name.rsp'
- # The cygwin case handles this inside the bash sub-shell.
- run_in = '' if is_cygwin else ' ' + self.build_to_base
- if is_cygwin:
- rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
- args, self.build_to_base)
- else:
- rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
- command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
- rspfile + run_in)
- else:
- env = self.ComputeExportEnvString(env)
- command = gyp.common.EncodePOSIXShellList(args)
- command = 'cd %s; ' % self.build_to_base + env + command
-
- # GYP rules/actions express being no-ops by not touching their outputs.
- # Avoid executing downstream dependencies in this case by specifying
- # restat=1 to ninja.
- self.ninja.rule(rule_name, command, description, depfile=depfile,
- restat=True, pool=pool,
- rspfile=rspfile, rspfile_content=rspfile_content)
- self.ninja.newline()
-
- return rule_name, args
-
-
-def CalculateVariables(default_variables, params):
- """Calculate additional variables for use in the build (called by gyp)."""
- global generator_additional_non_configuration_keys
- global generator_additional_path_sections
- flavor = gyp.common.GetFlavor(params)
- if flavor == 'mac':
- default_variables.setdefault('OS', 'mac')
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
- default_variables.setdefault('SHARED_LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
- default_variables.setdefault('LIB_DIR',
- generator_default_variables['PRODUCT_DIR'])
-
- # Copy additional generator configuration data from Xcode, which is shared
- # by the Mac Ninja generator.
- import gyp.generator.xcode as xcode_generator
- generator_additional_non_configuration_keys = getattr(xcode_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(xcode_generator,
- 'generator_additional_path_sections', [])
- global generator_extra_sources_for_rules
- generator_extra_sources_for_rules = getattr(xcode_generator,
- 'generator_extra_sources_for_rules', [])
- elif flavor == 'win':
- exts = gyp.MSVSUtil.TARGET_TYPE_EXT
- default_variables.setdefault('OS', 'win')
- default_variables['EXECUTABLE_SUFFIX'] = '.' + exts['executable']
- default_variables['STATIC_LIB_PREFIX'] = ''
- default_variables['STATIC_LIB_SUFFIX'] = '.' + exts['static_library']
- default_variables['SHARED_LIB_PREFIX'] = ''
- default_variables['SHARED_LIB_SUFFIX'] = '.' + exts['shared_library']
-
- # Copy additional generator configuration data from VS, which is shared
- # by the Windows Ninja generator.
- import gyp.generator.msvs as msvs_generator
- generator_additional_non_configuration_keys = getattr(msvs_generator,
- 'generator_additional_non_configuration_keys', [])
- generator_additional_path_sections = getattr(msvs_generator,
- 'generator_additional_path_sections', [])
-
- gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
- else:
- operating_system = flavor
- if flavor == 'android':
- operating_system = 'linux' # Keep this legacy behavior for now.
- default_variables.setdefault('OS', operating_system)
- default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
- default_variables.setdefault('SHARED_LIB_DIR',
- os.path.join('$!PRODUCT_DIR', 'lib'))
- default_variables.setdefault('LIB_DIR',
- os.path.join('$!PRODUCT_DIR', 'obj'))
-
-def ComputeOutputDir(params):
- """Returns the path from the toplevel_dir to the build output directory."""
- # generator_dir: relative path from pwd to where make puts build files.
-  # This makes migrating from make to ninja easier; ninja doesn't put
-  # anything here.
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
-
- # output_dir: relative path from generator_dir to the build directory.
- output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
-
- # Relative path from source root to our output files. e.g. "out"
- return os.path.normpath(os.path.join(generator_dir, output_dir))
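-
-# For example, with no generator_output and the default output_dir this
-# returns 'out'; GenerateOutputForConfig() below then appends the
-# configuration name, giving e.g. 'out/Debug'.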
-
-
-def CalculateGeneratorInputInfo(params):
- """Called by __init__ to initialize generator values based on params."""
- # E.g. "out/gypfiles"
- toplevel = params['options'].toplevel_dir
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, ComputeOutputDir(params), 'gypfiles'))
-
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': toplevel,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-
-def OpenOutput(path, mode='w'):
- """Open |path| for writing, creating directories if necessary."""
- gyp.common.EnsureDirExists(path)
- return open(path, mode)
-
-
-def CommandWithWrapper(cmd, wrappers, prog):
- wrapper = wrappers.get(cmd, '')
- if wrapper:
- return wrapper + ' ' + prog
- return prog
-
-
-def GetDefaultConcurrentLinks():
- """Returns a best-guess for a number of concurrent links."""
- pool_size = int(os.environ.get('GYP_LINK_CONCURRENCY', 0))
- if pool_size:
- return pool_size
-
- if sys.platform in ('win32', 'cygwin'):
- import ctypes
-
- class MEMORYSTATUSEX(ctypes.Structure):
- _fields_ = [
- ("dwLength", ctypes.c_ulong),
- ("dwMemoryLoad", ctypes.c_ulong),
- ("ullTotalPhys", ctypes.c_ulonglong),
- ("ullAvailPhys", ctypes.c_ulonglong),
- ("ullTotalPageFile", ctypes.c_ulonglong),
- ("ullAvailPageFile", ctypes.c_ulonglong),
- ("ullTotalVirtual", ctypes.c_ulonglong),
- ("ullAvailVirtual", ctypes.c_ulonglong),
- ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
- ]
-
- stat = MEMORYSTATUSEX()
- stat.dwLength = ctypes.sizeof(stat)
- ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
-
- # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
- # on a 64 GB machine.
- mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB
- hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32)))
- return min(mem_limit, hard_cap)
- elif sys.platform.startswith('linux'):
- if os.path.exists("/proc/meminfo"):
- with open("/proc/meminfo") as meminfo:
- memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
- for line in meminfo:
- match = memtotal_re.match(line)
- if not match:
- continue
-          # Allow 8 GB per link on Linux because Gold is quite memory hungry.
- return max(1, int(match.group(1)) / (8 * (2 ** 20)))
- return 1
- elif sys.platform == 'darwin':
- try:
- avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
- # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
- # 4GB per ld process allows for some more bloat.
- return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
- except:
- return 1
- else:
- # TODO(scottmg): Implement this for other platforms.
- return 1
-
-
-def _GetWinLinkRuleNameSuffix(embed_manifest):
- """Returns the suffix used to select an appropriate linking rule depending on
-  whether manifest embedding is enabled."""
- return '_embed' if embed_manifest else ''
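-
-# For example, embed_manifest=True yields rule names like 'link_embed',
-# 'solink_embed' and 'solink_module_embed' in _AddWinLinkRules() below.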
-
-
-def _AddWinLinkRules(master_ninja, embed_manifest):
- """Adds link rules for Windows platform to |master_ninja|."""
- def FullLinkCommand(ldcmd, out, binary_type):
- resource_name = {
- 'exe': '1',
- 'dll': '2',
- }[binary_type]
- return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
- '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
- '$manifests' % {
- 'python': sys.executable,
- 'out': out,
- 'ldcmd': ldcmd,
- 'resname': resource_name,
- 'embed': embed_manifest }
- rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
- use_separate_mspdbsrv = (
- int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
- dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
- dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
- '$ld /nologo $implibflag /DLL /OUT:$binary '
- '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
- dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
- master_ninja.rule('solink' + rule_name_suffix,
- description=dlldesc, command=dllcmd,
- rspfile='$binary.rsp',
- rspfile_content='$libs $in_newline $ldflags',
- restat=True,
- pool='link_pool')
- master_ninja.rule('solink_module' + rule_name_suffix,
- description=dlldesc, command=dllcmd,
- rspfile='$binary.rsp',
- rspfile_content='$libs $in_newline $ldflags',
- restat=True,
- pool='link_pool')
- # Note that ldflags goes at the end so that it has the option of
- # overriding default settings earlier in the command line.
- exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
- '$ld /nologo /OUT:$binary @$binary.rsp' %
- (sys.executable, use_separate_mspdbsrv))
- exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
- master_ninja.rule('link' + rule_name_suffix,
- description='LINK%s $binary' % rule_name_suffix.upper(),
- command=exe_cmd,
- rspfile='$binary.rsp',
- rspfile_content='$in_newline $libs $ldflags',
- pool='link_pool')
-
-
-def GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name):
- options = params['options']
- flavor = gyp.common.GetFlavor(params)
- generator_flags = params.get('generator_flags', {})
-
- # build_dir: relative path from source root to our output files.
- # e.g. "out/Debug"
- build_dir = os.path.normpath(
- os.path.join(ComputeOutputDir(params), config_name))
-
- toplevel_build = os.path.join(options.toplevel_dir, build_dir)
-
- master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
- master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
-
- # Put build-time support tools in out/{config_name}.
- gyp.common.CopyTool(flavor, toplevel_build)
-
- # Grab make settings for CC/CXX.
-  # The rules are:
-  #   - The priority, from lowest to highest, is: the gcc/g++ defaults,
-  #     'make_global_settings' in gyp, then the environment variables.
-  #   - If there is no 'make_global_settings' for CC.host/CXX.host and no
-  #     'CC_host'/'CXX_host' environment variable, cc_host/cxx_host fall back
-  #     to cc/cxx.
- if flavor == 'win':
- ar = 'lib.exe'
- # cc and cxx must be set to the correct architecture by overriding with one
- # of cl_x86 or cl_x64 below.
- cc = 'UNSET'
- cxx = 'UNSET'
- ld = 'link.exe'
- ld_host = '$ld'
- else:
- ar = 'ar'
- cc = 'cc'
- cxx = 'c++'
- ld = '$cc'
- ldxx = '$cxx'
- ld_host = '$cc_host'
- ldxx_host = '$cxx_host'
-
- ar_host = 'ar'
- cc_host = None
- cxx_host = None
- cc_host_global_setting = None
- cxx_host_global_setting = None
- clang_cl = None
- nm = 'nm'
- nm_host = 'nm'
- readelf = 'readelf'
- readelf_host = 'readelf'
-
- build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
- make_global_settings = data[build_file].get('make_global_settings', [])
- build_to_root = gyp.common.InvertRelativePath(build_dir,
- options.toplevel_dir)
- wrappers = {}
- for key, value in make_global_settings:
- if key == 'AR':
- ar = os.path.join(build_to_root, value)
- if key == 'AR.host':
- ar_host = os.path.join(build_to_root, value)
- if key == 'CC':
- cc = os.path.join(build_to_root, value)
- if cc.endswith('clang-cl'):
- clang_cl = cc
- if key == 'CXX':
- cxx = os.path.join(build_to_root, value)
- if key == 'CC.host':
- cc_host = os.path.join(build_to_root, value)
- cc_host_global_setting = value
- if key == 'CXX.host':
- cxx_host = os.path.join(build_to_root, value)
- cxx_host_global_setting = value
- if key == 'LD':
- ld = os.path.join(build_to_root, value)
- if key == 'LD.host':
- ld_host = os.path.join(build_to_root, value)
- if key == 'NM':
- nm = os.path.join(build_to_root, value)
- if key == 'NM.host':
- nm_host = os.path.join(build_to_root, value)
- if key == 'READELF':
- readelf = os.path.join(build_to_root, value)
- if key == 'READELF.host':
- readelf_host = os.path.join(build_to_root, value)
- if key.endswith('_wrapper'):
- wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
-
- # Support wrappers from environment variables too.
- for key, value in os.environ.iteritems():
- if key.lower().endswith('_wrapper'):
- key_prefix = key[:-len('_wrapper')]
- key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
- wrappers[key_prefix] = os.path.join(build_to_root, value)
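-  # For example (illustrative), CC_wrapper=ccache in the environment (or a
-  # 'CC_wrapper' entry in make_global_settings) makes CommandWithWrapper()
-  # below prefix the compiler command with the path-adjusted wrapper.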
-
- if flavor == 'win':
- configs = [target_dicts[qualified_target]['configurations'][config_name]
- for qualified_target in target_list]
- shared_system_includes = None
- if not generator_flags.get('ninja_use_custom_environment_files', 0):
- shared_system_includes = \
- gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
- configs, generator_flags)
- cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
- toplevel_build, generator_flags, shared_system_includes, OpenOutput)
- for arch, path in cl_paths.iteritems():
- if clang_cl:
- # If we have selected clang-cl, use that instead.
- path = clang_cl
- command = CommandWithWrapper('CC', wrappers,
- QuoteShellArgument(path, 'win'))
- if clang_cl:
- # Use clang-cl to cross-compile for x86 or x86_64.
- command += (' -m32' if arch == 'x86' else ' -m64')
- master_ninja.variable('cl_' + arch, command)
-
- cc = GetEnvironFallback(['CC_target', 'CC'], cc)
- master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
- cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
- master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
-
- if flavor == 'win':
- master_ninja.variable('ld', ld)
- master_ninja.variable('idl', 'midl.exe')
- master_ninja.variable('ar', ar)
- master_ninja.variable('rc', 'rc.exe')
- master_ninja.variable('ml_x86', 'ml.exe')
- master_ninja.variable('ml_x64', 'ml64.exe')
- master_ninja.variable('mt', 'mt.exe')
- else:
- master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
- master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
- master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], ar))
- if flavor != 'mac':
-    # Mac does not use readelf/nm for .TOC generation, so avoid polluting
- # the master ninja with extra unused variables.
- master_ninja.variable(
- 'nm', GetEnvironFallback(['NM_target', 'NM'], nm))
- master_ninja.variable(
- 'readelf', GetEnvironFallback(['READELF_target', 'READELF'], readelf))
-
- if generator_supports_multiple_toolsets:
- if not cc_host:
- cc_host = cc
- if not cxx_host:
- cxx_host = cxx
-
- master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], ar_host))
- master_ninja.variable('nm_host', GetEnvironFallback(['NM_host'], nm_host))
- master_ninja.variable('readelf_host',
- GetEnvironFallback(['READELF_host'], readelf_host))
- cc_host = GetEnvironFallback(['CC_host'], cc_host)
- cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
-
-    # Environment variables may be referenced in 'make_global_settings', e.g.
-    # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; expand them here.
- if '$(CC)' in cc_host and cc_host_global_setting:
- cc_host = cc_host_global_setting.replace('$(CC)', cc)
- if '$(CXX)' in cxx_host and cxx_host_global_setting:
- cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
- master_ninja.variable('cc_host',
- CommandWithWrapper('CC.host', wrappers, cc_host))
- master_ninja.variable('cxx_host',
- CommandWithWrapper('CXX.host', wrappers, cxx_host))
- if flavor == 'win':
- master_ninja.variable('ld_host', ld_host)
- else:
- master_ninja.variable('ld_host', CommandWithWrapper(
- 'LINK', wrappers, ld_host))
- master_ninja.variable('ldxx_host', CommandWithWrapper(
- 'LINK', wrappers, ldxx_host))
-
- master_ninja.newline()
-
- master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
- master_ninja.newline()
-
- deps = 'msvc' if flavor == 'win' else 'gcc'
-
- if flavor != 'win':
- master_ninja.rule(
- 'cc',
- description='CC $out',
- command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
- '$cflags_pch_c -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- master_ninja.rule(
- 'cc_s',
- description='CC $out',
- command=('$cc $defines $includes $cflags $cflags_c '
- '$cflags_pch_c -c $in -o $out'))
- master_ninja.rule(
- 'cxx',
- description='CXX $out',
- command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
- '$cflags_pch_cc -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- else:
-    # TODO(scottmg) Using separate pdb names is an experiment to see if it
-    # works around http://crbug.com/142362. There seems to be a race between
-    # the creation of the .pdb by the precompiled header step for .cc files
-    # and the compilation of .c files, which occasionally errors out with
-    #   c1xx : fatal error C1033: cannot open program database
-    # This should be handled by mspdbsrv; making the rules target separate
-    # pdb files might avoid it.
- cc_command = ('ninja -t msvc -e $arch ' +
- '-- '
- '$cc /nologo /showIncludes /FC '
- '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
- cxx_command = ('ninja -t msvc -e $arch ' +
- '-- '
- '$cxx /nologo /showIncludes /FC '
- '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
- master_ninja.rule(
- 'cc',
- description='CC $out',
- command=cc_command,
- rspfile='$out.rsp',
- rspfile_content='$defines $includes $cflags $cflags_c',
- deps=deps)
- master_ninja.rule(
- 'cxx',
- description='CXX $out',
- command=cxx_command,
- rspfile='$out.rsp',
- rspfile_content='$defines $includes $cflags $cflags_cc',
- deps=deps)
- master_ninja.rule(
- 'idl',
- description='IDL $in',
- command=('%s gyp-win-tool midl-wrapper $arch $outdir '
- '$tlb $h $dlldata $iid $proxy $in '
- '$midl_includes $idlflags' % sys.executable))
- master_ninja.rule(
- 'rc',
- description='RC $in',
- # Note: $in must be last otherwise rc.exe complains.
- command=('%s gyp-win-tool rc-wrapper '
- '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
- sys.executable))
- master_ninja.rule(
- 'asm',
- description='ASM $out',
- command=('%s gyp-win-tool asm-wrapper '
- '$arch $asm $defines $includes $asmflags /c /Fo $out $in' %
- sys.executable))
-
- if flavor != 'mac' and flavor != 'win':
- master_ninja.rule(
- 'alink',
- description='AR $out',
- command='rm -f $out && $ar rcs $arflags $out $in')
- master_ninja.rule(
- 'alink_thin',
- description='AR $out',
- command='rm -f $out && $ar rcsT $arflags $out $in')
-
- # This allows targets that only need to depend on $lib's API to declare an
- # order-only dependency on $lib.TOC and avoid relinking such downstream
- # dependencies when $lib changes only in non-public ways.
-  # The resulting string leaves an uninterpolated %(suffix)s which
- # is used in the final substitution below.
- mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
- '%(solink)s && %(extract_toc)s > $lib.TOC; else '
- '%(solink)s && %(extract_toc)s > $lib.tmp && '
- 'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
- 'fi; fi'
- % { 'solink':
- '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
- 'extract_toc':
- ('{ $readelf -d $lib | grep SONAME ; '
- '$nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
-
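-    # After the %(suffix)s substitution below, the link step itself expands
-    # to roughly (illustrative):
-    #   $ld -shared $ldflags -o $lib -Wl,-soname=$soname @$link_file_list
-    # $lib.TOC only gets a new mtime when the exported interface changes, so
-    # ninja's restat can then skip relinking dependents.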
- master_ninja.rule(
- 'solink',
- description='SOLINK $lib',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
- rspfile='$link_file_list',
- rspfile_content=
- '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
- pool='link_pool')
- master_ninja.rule(
- 'solink_module',
- description='SOLINK(module) $lib',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
- rspfile='$link_file_list',
- rspfile_content='-Wl,--start-group $in -Wl,--end-group $solibs $libs',
- pool='link_pool')
- master_ninja.rule(
- 'link',
- description='LINK $out',
- command=('$ld $ldflags -o $out '
- '-Wl,--start-group $in -Wl,--end-group $solibs $libs'),
- pool='link_pool')
- elif flavor == 'win':
- master_ninja.rule(
- 'alink',
- description='LIB $out',
- command=('%s gyp-win-tool link-wrapper $arch False '
- '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
- sys.executable),
- rspfile='$out.rsp',
- rspfile_content='$in_newline $libflags')
- _AddWinLinkRules(master_ninja, embed_manifest=True)
- _AddWinLinkRules(master_ninja, embed_manifest=False)
- else:
- master_ninja.rule(
- 'objc',
- description='OBJC $out',
- command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
- '$cflags_pch_objc -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- master_ninja.rule(
- 'objcxx',
- description='OBJCXX $out',
- command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
- '$cflags_pch_objcc -c $in -o $out'),
- depfile='$out.d',
- deps=deps)
- master_ninja.rule(
- 'alink',
- description='LIBTOOL-STATIC $out, POSTBUILDS',
- command='rm -f $out && '
- './gyp-mac-tool filter-libtool libtool $libtool_flags '
- '-static -o $out $in'
- '$postbuilds')
- master_ninja.rule(
- 'lipo',
- description='LIPO $out, POSTBUILDS',
- command='rm -f $out && lipo -create $in -output $out$postbuilds')
- master_ninja.rule(
- 'solipo',
- description='SOLIPO $out, POSTBUILDS',
- command=(
- 'rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&'
- '%(extract_toc)s > $lib.TOC'
- % { 'extract_toc':
- '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
- 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}))
-
-
- # Record the public interface of $lib in $lib.TOC. See the corresponding
- # comment in the posix section above for details.
- solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
- mtime_preserving_solink_base = (
- 'if [ ! -e $lib -o ! -e $lib.TOC ] || '
- # Always force dependent targets to relink if this library
- # reexports something. Handling this correctly would require
- # recursive TOC dumping but this is rare in practice, so punt.
- 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
- '%(solink)s && %(extract_toc)s > $lib.TOC; '
- 'else '
- '%(solink)s && %(extract_toc)s > $lib.tmp && '
- 'if ! cmp -s $lib.tmp $lib.TOC; then '
- 'mv $lib.tmp $lib.TOC ; '
- 'fi; '
- 'fi'
- % { 'solink': solink_base,
- 'extract_toc':
- '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
- 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
-
-
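-    # As above, solink_suffix below makes the link step expand to roughly
-    # (illustrative):
-    #   $ld -shared $ldflags -o $lib @$link_file_list$postbuilds
-    # with the otool/nm TOC bookkeeping wrapped around it.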
- solink_suffix = '@$link_file_list$postbuilds'
- master_ninja.rule(
- 'solink',
- description='SOLINK $lib, POSTBUILDS',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': solink_suffix,
- 'type': '-shared'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
- master_ninja.rule(
- 'solink_notoc',
- description='SOLINK $lib, POSTBUILDS',
- restat=True,
- command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
-
- master_ninja.rule(
- 'solink_module',
- description='SOLINK(module) $lib, POSTBUILDS',
- restat=True,
- command=mtime_preserving_solink_base % {'suffix': solink_suffix,
- 'type': '-bundle'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
- master_ninja.rule(
- 'solink_module_notoc',
- description='SOLINK(module) $lib, POSTBUILDS',
- restat=True,
- command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
- rspfile='$link_file_list',
- rspfile_content='$in $solibs $libs',
- pool='link_pool')
-
- master_ninja.rule(
- 'link',
- description='LINK $out, POSTBUILDS',
- command=('$ld $ldflags -o $out '
- '$in $solibs $libs$postbuilds'),
- pool='link_pool')
- master_ninja.rule(
- 'preprocess_infoplist',
- description='PREPROCESS INFOPLIST $out',
- command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
- 'plutil -convert xml1 $out $out'))
- master_ninja.rule(
- 'copy_infoplist',
- description='COPY INFOPLIST $in',
- command='$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys')
- master_ninja.rule(
- 'merge_infoplist',
- description='MERGE INFOPLISTS $in',
- command='$env ./gyp-mac-tool merge-info-plist $out $in')
- master_ninja.rule(
- 'compile_xcassets',
- description='COMPILE XCASSETS $in',
- command='$env ./gyp-mac-tool compile-xcassets $keys $in')
- master_ninja.rule(
- 'mac_tool',
- description='MACTOOL $mactool_cmd $in',
- command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
- master_ninja.rule(
- 'package_framework',
- description='PACKAGE FRAMEWORK $out, POSTBUILDS',
- command='./gyp-mac-tool package-framework $out $version$postbuilds '
- '&& touch $out')
- if flavor == 'win':
- master_ninja.rule(
- 'stamp',
- description='STAMP $out',
- command='%s gyp-win-tool stamp $out' % sys.executable)
- master_ninja.rule(
- 'copy',
- description='COPY $in $out',
- command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
- else:
- master_ninja.rule(
- 'stamp',
- description='STAMP $out',
- command='${postbuilds}touch $out')
- master_ninja.rule(
- 'copy',
- description='COPY $in $out',
- command='rm -rf $out && cp -af $in $out')
- master_ninja.newline()
-
- all_targets = set()
- for build_file in params['build_files']:
- for target in gyp.common.AllTargets(target_list,
- target_dicts,
- os.path.normpath(build_file)):
- all_targets.add(target)
- all_outputs = set()
-
- # target_outputs is a map from qualified target name to a Target object.
- target_outputs = {}
- # target_short_names is a map from target short name to a list of Target
- # objects.
- target_short_names = {}
-
-  # Short names of targets that were skipped because they didn't contain
-  # anything interesting.
-  # NOTE: there may be overlap between this and non_empty_target_names.
- empty_target_names = set()
-
- # Set of non-empty short target names.
-  # NOTE: there may be overlap between this and empty_target_names.
- non_empty_target_names = set()
-
- for qualified_target in target_list:
- # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
- build_file, name, toolset = \
- gyp.common.ParseQualifiedTarget(qualified_target)
-
- this_make_global_settings = data[build_file].get('make_global_settings', [])
- assert make_global_settings == this_make_global_settings, (
- "make_global_settings needs to be the same for all targets. %s vs. %s" %
- (this_make_global_settings, make_global_settings))
-
- spec = target_dicts[qualified_target]
- if flavor == 'mac':
- gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
-
- # If build_file is a symlink, we must not follow it because there's a chance
- # it could point to a path above toplevel_dir, and we cannot correctly deal
- # with that case at the moment.
- build_file = gyp.common.RelativePath(build_file, options.toplevel_dir,
- False)
-
- qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name,
- toolset)
- hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()
-
- base_path = os.path.dirname(build_file)
- obj = 'obj'
- if toolset != 'target':
- obj += '.' + toolset
- output_file = os.path.join(obj, base_path, name + '.ninja')
-
- ninja_output = StringIO()
- writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir,
- ninja_output,
- toplevel_build, output_file,
- flavor, toplevel_dir=options.toplevel_dir)
-
- target = writer.WriteSpec(spec, config_name, generator_flags)
-
- if ninja_output.tell() > 0:
-      # Only create ninja files that actually have contents.
- with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
- ninja_file.write(ninja_output.getvalue())
- ninja_output.close()
- master_ninja.subninja(output_file)
-
- if target:
- if name != target.FinalOutput() and spec['toolset'] == 'target':
- target_short_names.setdefault(name, []).append(target)
- target_outputs[qualified_target] = target
- if qualified_target in all_targets:
- all_outputs.add(target.FinalOutput())
- non_empty_target_names.add(name)
- else:
- empty_target_names.add(name)
-
- if target_short_names:
- # Write a short name to build this target. This benefits both the
- # "build chrome" case as well as the gyp tests, which expect to be
- # able to run actions and build libraries by their short name.
- master_ninja.newline()
- master_ninja.comment('Short names for targets.')
- for short_name in target_short_names:
- master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
- target_short_names[short_name]])
-
- # Write phony targets for any empty targets that weren't written yet. As
-  # short names are not necessarily unique, only do this for short names that
- # haven't already been output for another target.
- empty_target_names = empty_target_names - non_empty_target_names
- if empty_target_names:
- master_ninja.newline()
- master_ninja.comment('Empty targets (output for completeness).')
- for name in sorted(empty_target_names):
- master_ninja.build(name, 'phony')
-
- if all_outputs:
- master_ninja.newline()
- master_ninja.build('all', 'phony', list(all_outputs))
- master_ninja.default(generator_flags.get('default_target', 'all'))
-
- master_ninja_file.close()
-
-
-def PerformBuild(data, configurations, params):
- options = params['options']
- for config in configurations:
- builddir = os.path.join(options.toplevel_dir, 'out', config)
- arguments = ['ninja', '-C', builddir]
- print 'Building [%s]: %s' % (config, arguments)
- subprocess.check_call(arguments)
-
-
-def CallGenerateOutputForConfig(arglist):
- # Ignore the interrupt signal so that the parent process catches it and
- # kills all multiprocessing children.
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- (target_list, target_dicts, data, params, config_name) = arglist
- GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- # Update target_dicts for iOS device builds.
- target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
- target_dicts)
-
- user_config = params.get('generator_flags', {}).get('config', None)
- if gyp.common.GetFlavor(params) == 'win':
- target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
- target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
- target_list, target_dicts, generator_default_variables)
-
- if user_config:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- user_config)
- else:
- config_names = target_dicts[target_list[0]]['configurations'].keys()
- if params['parallel']:
- try:
- pool = multiprocessing.Pool(len(config_names))
- arglists = []
- for config_name in config_names:
- arglists.append(
- (target_list, target_dicts, data, params, config_name))
- pool.map(CallGenerateOutputForConfig, arglists)
- except KeyboardInterrupt, e:
- pool.terminate()
- raise e
- else:
- for config_name in config_names:
- GenerateOutputForConfig(target_list, target_dicts, data, params,
- config_name)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
deleted file mode 100644
index 1767b2f45a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the ninja.py file. """
-
-import gyp.generator.ninja as ninja
-import unittest
-import StringIO
-import sys
-import TestCommon
-
-
-class TestPrefixesAndSuffixes(unittest.TestCase):
- def test_BinaryNamesWindows(self):
- # These cannot run on non-Windows as they require a VS installation to
- # correctly handle variable expansion.
- if sys.platform.startswith('win'):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
- 'build.ninja', 'win')
- spec = { 'target_name': 'wee' }
- self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
- endswith('.exe'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.dll'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.lib'))
-
- def test_BinaryNamesLinux(self):
- writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
- 'build.ninja', 'linux')
- spec = { 'target_name': 'wee' }
- self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
- 'executable'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- startswith('lib'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- startswith('lib'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
- endswith('.so'))
- self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
- endswith('.a'))
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
deleted file mode 100644
index 0e3fb9301e..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
+++ /dev/null
@@ -1,1300 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import filecmp
-import gyp.common
-import gyp.xcodeproj_file
-import gyp.xcode_ninja
-import errno
-import os
-import sys
-import posixpath
-import re
-import shutil
-import subprocess
-import tempfile
-
-
-# Project files generated by this module will use _intermediate_var as a
-# custom Xcode setting whose value is a DerivedSources-like directory that's
-# project-specific and configuration-specific. The normal choice,
-# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
-# as it is likely that multiple targets within a single project file will want
-# to access the same set of generated files. The other option,
-# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
-# it is not configuration-specific. INTERMEDIATE_DIR is defined as
-# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
-_intermediate_var = 'INTERMEDIATE_DIR'
-
-# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
-# targets that share the same BUILT_PRODUCTS_DIR.
-_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
-
-_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
-
-generator_default_variables = {
- 'EXECUTABLE_PREFIX': '',
- 'EXECUTABLE_SUFFIX': '',
- 'STATIC_LIB_PREFIX': 'lib',
- 'SHARED_LIB_PREFIX': 'lib',
- 'STATIC_LIB_SUFFIX': '.a',
- 'SHARED_LIB_SUFFIX': '.dylib',
- # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
- # It is specific to each build environment. It is only guaranteed to exist
- # and be constant within the context of a project, corresponding to a single
- # input file. Some build environments may allow their intermediate directory
- # to be shared on a wider scale, but this is not guaranteed.
- 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
- 'OS': 'mac',
- 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
- 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
- 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
- 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
- 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
- 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
- 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
- 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
- 'CONFIGURATION_NAME': '$(CONFIGURATION)',
-}
-
-# The Xcode-specific sections that hold paths.
-generator_additional_path_sections = [
- 'mac_bundle_resources',
- 'mac_framework_headers',
- 'mac_framework_private_headers',
- # 'mac_framework_dirs', input already handles _dirs endings.
-]
-
-# The Xcode-specific keys that exist on targets and aren't moved down to
-# configurations.
-generator_additional_non_configuration_keys = [
- 'ios_app_extension',
- 'ios_watch_app',
- 'ios_watchkit_extension',
- 'mac_bundle',
- 'mac_bundle_resources',
- 'mac_framework_headers',
- 'mac_framework_private_headers',
- 'mac_xctest_bundle',
- 'xcode_create_dependents_test_runner',
-]
-
-# We want to let any rules apply to files that are resources also.
-generator_extra_sources_for_rules = [
- 'mac_bundle_resources',
- 'mac_framework_headers',
- 'mac_framework_private_headers',
-]
-
-generator_filelist_paths = None
-
-# Xcode's standard set of library directories, which don't need to be duplicated
-# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
-xcode_standard_library_dirs = frozenset([
- '$(SDKROOT)/usr/lib',
- '$(SDKROOT)/usr/local/lib',
-])
-
-def CreateXCConfigurationList(configuration_names):
- xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
- if len(configuration_names) == 0:
- configuration_names = ['Default']
- for configuration_name in configuration_names:
- xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
- 'name': configuration_name})
- xccl.AppendProperty('buildConfigurations', xcbc)
- xccl.SetProperty('defaultConfigurationName', configuration_names[0])
- return xccl
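-
-# Illustrative usage: CreateXCConfigurationList(['Debug', 'Release']) returns
-# an XCConfigurationList holding a 'Debug' and a 'Release'
-# XCBuildConfiguration, with 'Debug' as the defaultConfigurationName.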
-
-
-class XcodeProject(object):
- def __init__(self, gyp_path, path, build_file_dict):
- self.gyp_path = gyp_path
- self.path = path
- self.project = gyp.xcodeproj_file.PBXProject(path=path)
- projectDirPath = gyp.common.RelativePath(
- os.path.dirname(os.path.abspath(self.gyp_path)),
- os.path.dirname(path) or '.')
- self.project.SetProperty('projectDirPath', projectDirPath)
- self.project_file = \
- gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
- self.build_file_dict = build_file_dict
-
- # TODO(mark): add destructor that cleans up self.path if created_dir is
- # True and things didn't complete successfully. Or do something even
- # better with "try"?
- self.created_dir = False
- try:
- os.makedirs(self.path)
- self.created_dir = True
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
-
- def Finalize1(self, xcode_targets, serialize_all_tests):
- # Collect a list of all of the build configuration names used by the
-    # various targets in the file. It is strongly advised that every target
-    # in a project (even across multiple project files) use the same set of
-    # configuration names.
- configurations = []
- for xct in self.project.GetProperty('targets'):
- xccl = xct.GetProperty('buildConfigurationList')
- xcbcs = xccl.GetProperty('buildConfigurations')
- for xcbc in xcbcs:
- name = xcbc.GetProperty('name')
- if name not in configurations:
- configurations.append(name)
-
- # Replace the XCConfigurationList attached to the PBXProject object with
- # a new one specifying all of the configuration names used by the various
- # targets.
- try:
- xccl = CreateXCConfigurationList(configurations)
- self.project.SetProperty('buildConfigurationList', xccl)
- except:
- sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
- raise
-
- # The need for this setting is explained above where _intermediate_var is
- # defined. The comments below about wanting to avoid project-wide build
- # settings apply here too, but this needs to be set on a project-wide basis
- # so that files relative to the _intermediate_var setting can be displayed
- # properly in the Xcode UI.
- #
- # Note that for configuration-relative files such as anything relative to
- # _intermediate_var, for the purposes of UI tree view display, Xcode will
- # only resolve the configuration name once, when the project file is
- # opened. If the active build configuration is changed, the project file
- # must be closed and reopened if it is desired for the tree view to update.
- # This is filed as Apple radar 6588391.
- xccl.SetBuildSetting(_intermediate_var,
- '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
- xccl.SetBuildSetting(_shared_intermediate_var,
- '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
-
- # Set user-specified project-wide build settings and config files. This
- # is intended to be used very sparingly. Really, almost everything should
- # go into target-specific build settings sections. The project-wide
- # settings are only intended to be used in cases where Xcode attempts to
- # resolve variable references in a project context as opposed to a target
- # context, such as when resolving sourceTree references while building up
-    # the tree view for UI display.
- # Any values set globally are applied to all configurations, then any
- # per-configuration values are applied.
- for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
- xccl.SetBuildSetting(xck, xcv)
- if 'xcode_config_file' in self.build_file_dict:
- config_ref = self.project.AddOrGetFileInRootGroup(
- self.build_file_dict['xcode_config_file'])
- xccl.SetBaseConfiguration(config_ref)
- build_file_configurations = self.build_file_dict.get('configurations', {})
- if build_file_configurations:
- for config_name in configurations:
- build_file_configuration_named = \
- build_file_configurations.get(config_name, {})
- if build_file_configuration_named:
- xcc = xccl.ConfigurationNamed(config_name)
- for xck, xcv in build_file_configuration_named.get('xcode_settings',
- {}).iteritems():
- xcc.SetBuildSetting(xck, xcv)
- if 'xcode_config_file' in build_file_configuration_named:
- config_ref = self.project.AddOrGetFileInRootGroup(
- build_file_configurations[config_name]['xcode_config_file'])
- xcc.SetBaseConfiguration(config_ref)
-
- # Sort the targets based on how they appeared in the input.
- # TODO(mark): Like a lot of other things here, this assumes internal
- # knowledge of PBXProject - in this case, of its "targets" property.
-
-    # ordinary_targets are the targets that are already in the project file.
-    # run_test_targets are the targets that run unittests and should be used
-    # for the Run All Tests target. support_targets are the action/rule
-    # targets used by GYP file targets, kept only for the assert check.
- ordinary_targets = []
- run_test_targets = []
- support_targets = []
-
-    # targets is the full list of targets in the project.
- targets = []
-
-    # Does the project define its own "all" target?
- has_custom_all = False
-
- # targets_for_all is the list of ordinary_targets that should be listed
- # in this project's "All" target. It includes each non_runtest_target
- # that does not have suppress_wildcard set.
- targets_for_all = []
-
- for target in self.build_file_dict['targets']:
- target_name = target['target_name']
- toolset = target['toolset']
- qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
- toolset)
- xcode_target = xcode_targets[qualified_target]
- # Make sure that the target being added to the sorted list is already in
- # the unsorted list.
- assert xcode_target in self.project._properties['targets']
- targets.append(xcode_target)
- ordinary_targets.append(xcode_target)
- if xcode_target.support_target:
- support_targets.append(xcode_target.support_target)
- targets.append(xcode_target.support_target)
-
- if not int(target.get('suppress_wildcard', False)):
- targets_for_all.append(xcode_target)
-
- if target_name.lower() == 'all':
-        has_custom_all = True
-
- # If this target has a 'run_as' attribute, add its target to the
- # targets, and add it to the test targets.
- if target.get('run_as'):
- # Make a target to run something. It should have one
- # dependency, the parent xcode target.
- xccl = CreateXCConfigurationList(configurations)
- run_target = gyp.xcodeproj_file.PBXAggregateTarget({
- 'name': 'Run ' + target_name,
- 'productName': xcode_target.GetProperty('productName'),
- 'buildConfigurationList': xccl,
- },
- parent=self.project)
- run_target.AddDependency(xcode_target)
-
- command = target['run_as']
- script = ''
- if command.get('working_directory'):
- script = script + 'cd "%s"\n' % \
- gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- command.get('working_directory'))
-
- if command.get('environment'):
- script = script + "\n".join(
- ['export %s="%s"' %
- (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
- for (key, val) in command.get('environment').iteritems()]) + "\n"
-
-        # Some tests end up using sockets, files on disk, etc. and can get
-        # confused if more than one test runs at a time. The generator flag
-        # 'xcode_serialize_all_test_runs' controls whether all test runs are
-        # forced to be serial; it defaults to True. To get serial runs, this
-        # little bit of Python does the same as the Linux flock utility to
-        # make sure only one test runs at a time.
- command_prefix = ''
- if serialize_all_tests:
- command_prefix = \
-"""python -c "import fcntl, subprocess, sys
-file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
-fcntl.flock(file.fileno(), fcntl.LOCK_EX)
-sys.exit(subprocess.call(sys.argv[1:]))" """
-
-        # Exec the command; if the exec fails for some reason, exit with an
-        # error. Also fix up variable references to use shell syntax instead
-        # of Xcode syntax.
- script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
- gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- gyp.common.EncodePOSIXShellList(command.get('action')))
-
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
- run_target.AppendProperty('buildPhases', ssbp)
-
- # Add the run target to the project file.
- targets.append(run_target)
- run_test_targets.append(run_target)
- xcode_target.test_runner = run_target
-
-
- # Make sure that the list of targets being replaced is the same length as
- # the one replacing it, but allow for the added test runner targets.
- assert len(self.project._properties['targets']) == \
- len(ordinary_targets) + len(support_targets)
-
- self.project._properties['targets'] = targets
-
- # Get rid of unnecessary levels of depth in groups like the Source group.
- self.project.RootGroupsTakeOverOnlyChildren(True)
-
- # Sort the groups nicely. Do this after sorting the targets, because the
- # Products group is sorted based on the order of the targets.
- self.project.SortGroups()
-
- # Create an "All" target if there's more than one target in this project
- # file and the project didn't define its own "All" target. Put a generated
- # "All" target first so that people opening up the project for the first
- # time will build everything by default.
- if len(targets_for_all) > 1 and not has_custom_all:
- xccl = CreateXCConfigurationList(configurations)
- all_target = gyp.xcodeproj_file.PBXAggregateTarget(
- {
- 'buildConfigurationList': xccl,
- 'name': 'All',
- },
- parent=self.project)
-
- for target in targets_for_all:
- all_target.AddDependency(target)
-
- # TODO(mark): This is evil because it relies on internal knowledge of
- # PBXProject._properties. It's important to get the "All" target first,
- # though.
- self.project._properties['targets'].insert(0, all_target)
-
- # The same, but for run_test_targets.
- if len(run_test_targets) > 1:
- xccl = CreateXCConfigurationList(configurations)
- run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
- {
- 'buildConfigurationList': xccl,
- 'name': 'Run All Tests',
- },
- parent=self.project)
- for run_test_target in run_test_targets:
- run_all_tests_target.AddDependency(run_test_target)
-
- # Insert after the "All" target, which must exist if there is more than
- # one run_test_target.
- self.project._properties['targets'].insert(1, run_all_tests_target)
-
- def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
- # Finalize2 needs to happen in a separate step because the process of
- # updating references to other projects depends on the ordering of targets
- # within remote project files. Finalize1 is responsible for sorting duty,
- # and once all project files are sorted, Finalize2 can come in and update
- # these references.
-
- # To support making a "test runner" target that will run all the tests
- # that are direct dependents of any given target, we look for
- # xcode_create_dependents_test_runner being set on an Aggregate target,
-    # and generate a second target that will run the test runners found under
- # the marked target.
- for bf_tgt in self.build_file_dict['targets']:
- if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
- tgt_name = bf_tgt['target_name']
- toolset = bf_tgt['toolset']
- qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
- tgt_name, toolset)
- xcode_target = xcode_targets[qualified_target]
- if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
- # Collect all the run test targets.
- all_run_tests = []
- pbxtds = xcode_target.GetProperty('dependencies')
- for pbxtd in pbxtds:
- pbxcip = pbxtd.GetProperty('targetProxy')
- dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
- if hasattr(dependency_xct, 'test_runner'):
- all_run_tests.append(dependency_xct.test_runner)
-
- # Directly depend on all the runners as they depend on the target
- # that builds them.
- if len(all_run_tests) > 0:
- run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
- 'name': 'Run %s Tests' % tgt_name,
- 'productName': tgt_name,
- },
- parent=self.project)
- for run_test_target in all_run_tests:
- run_all_target.AddDependency(run_test_target)
-
- # Insert the test runner after the related target.
- idx = self.project._properties['targets'].index(xcode_target)
- self.project._properties['targets'].insert(idx + 1, run_all_target)
-
- # Update all references to other projects, to make sure that the lists of
- # remote products are complete. Otherwise, Xcode will fill them in when
- # it opens the project file, which will result in unnecessary diffs.
- # TODO(mark): This is evil because it relies on internal knowledge of
- # PBXProject._other_pbxprojects.
- for other_pbxproject in self.project._other_pbxprojects.keys():
- self.project.AddOrGetProjectReference(other_pbxproject)
-
- self.project.SortRemoteProductReferences()
-
- # Give everything an ID.
- self.project_file.ComputeIDs()
-
- # Make sure that no two objects in the project file have the same ID. If
- # multiple objects wind up with the same ID, upon loading the file, Xcode
- # will only recognize one object (the last one in the file?) and the
- # results are unpredictable.
- self.project_file.EnsureNoIDCollisions()
-
- def Write(self):
- # Write the project file to a temporary location first. Xcode watches for
- # changes to the project file and presents a UI sheet offering to reload
- # the project when it does change. However, in some cases, especially when
- # multiple projects are open or when Xcode is busy, things don't work so
- # seamlessly. Sometimes, Xcode is able to detect that a project file has
- # changed but can't unload it because something else is referencing it.
- # To mitigate this problem, and to avoid even having Xcode present the UI
- # sheet when an open project is rewritten for inconsequential changes, the
- # project file is written to a temporary file in the xcodeproj directory
- # first. The new temporary file is then compared to the existing project
- # file, if any. If they differ, the new file replaces the old; otherwise,
- # the new project file is simply deleted. Xcode properly detects a file
- # being renamed over an open project file as a change and so it remains
- # able to present the "project file changed" sheet under this system.
- # Writing to a temporary file first also avoids the possible problem of
- # Xcode rereading an incomplete project file.
- (output_fd, new_pbxproj_path) = \
- tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
- dir=self.path)
-
- try:
- output_file = os.fdopen(output_fd, 'wb')
-
- self.project_file.Print(output_file)
- output_file.close()
-
- pbxproj_path = os.path.join(self.path, 'project.pbxproj')
-
- same = False
- try:
- same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
- if same:
- # The new file is identical to the old one, just get rid of the new
- # one.
- os.unlink(new_pbxproj_path)
- else:
- # The new file is different from the old one, or there is no old one.
- # Rename the new file to the permanent name.
- #
- # tempfile.mkstemp uses an overly restrictive mode, resulting in a
- # file that can only be read by the owner, regardless of the umask.
- # There's no reason to not respect the umask here, which means that
- # an extra hoop is required to fetch it and reset the new file's mode.
- #
- # No way to get the umask without setting a new one? Set a safe one
- # and then set it back to the old value.
- umask = os.umask(077)
- os.umask(umask)
-
- os.chmod(new_pbxproj_path, 0666 & ~umask)
- os.rename(new_pbxproj_path, pbxproj_path)
-
- except Exception:
- # Don't leave turds behind. In fact, if this code was responsible for
- # creating the xcodeproj directory, get rid of that too.
- os.unlink(new_pbxproj_path)
- if self.created_dir:
- shutil.rmtree(self.path, True)
- raise
-
-
-def AddSourceToTarget(source, type, pbxp, xct):
- # TODO(mark): Perhaps source_extensions and library_extensions can be made a
- # little bit fancier.
- source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
-
- # .o is conceptually more of a "source" than a "library," but Xcode thinks
- # of "sources" as things to compile and "libraries" (or "frameworks") as
- # things to link with. Adding an object file to an Xcode target's frameworks
- # phase works properly.
- library_extensions = ['a', 'dylib', 'framework', 'o']
-
- basename = posixpath.basename(source)
- (root, ext) = posixpath.splitext(basename)
- if ext:
- ext = ext[1:].lower()
-
- if ext in source_extensions and type != 'none':
- xct.SourcesPhase().AddFile(source)
- elif ext in library_extensions and type != 'none':
- xct.FrameworksPhase().AddFile(source)
- else:
- # Files that aren't added to a sources or frameworks build phase can still
- # go into the project file, just not as part of a build phase.
- pbxp.AddOrGetFileInRootGroup(source)
-
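-# For illustration (not in the original source; pbxp and xct stand in for
-# real project and target objects): given the extension tables above, one
-# would expect the dispatch for a buildable target to look roughly like:
-#   AddSourceToTarget('foo.cc', 'executable', pbxp, xct)    # Sources phase
-#   AddSourceToTarget('libbar.a', 'executable', pbxp, xct)  # Frameworks phase
-#   AddSourceToTarget('notes.txt', 'executable', pbxp, xct) # root group only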
-
-def AddResourceToTarget(resource, pbxp, xct):
- # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
- # where it's used.
- xct.ResourcesPhase().AddFile(resource)
-
-
-def AddHeaderToTarget(header, pbxp, xct, is_public):
- # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
- # where it's used.
- settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
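-  # For illustration (not in the original source): is_public=True selects
-  # 'Public', producing '{ATTRIBUTES = (Public, ); }'; is_public=False
-  # produces the 'Private' form.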
- xct.HeadersPhase().AddFile(header, settings)
-
-
-_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
-def ExpandXcodeVariables(string, expansions):
- """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
-
- In some rare cases, it is appropriate to expand Xcode variables when a
- project file is generated. For any substring $(VAR) in string, if VAR is a
- key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
- Any $(VAR) substring in string for which VAR is not a key in the expansions
- dict will remain in the returned string.
- """
-
- matches = _xcode_variable_re.findall(string)
-  if not matches:
-    return string
-
- matches.reverse()
- for match in matches:
- (to_replace, variable) = match
-    if variable not in expansions:
-      continue
-
- replacement = expansions[variable]
- string = re.sub(re.escape(to_replace), replacement, string)
-
- return string
-
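-# For illustration (not in the original source): per the docstring above,
-# one would expect
-#   ExpandXcodeVariables('$(INPUT_FILE_BASE).cc', {'INPUT_FILE_BASE': 'two'})
-# to return 'two.cc', while a variable absent from the expansions dict is
-# left alone: ExpandXcodeVariables('$(UNDEFINED)/x', {}) -> '$(UNDEFINED)/x'.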
-
-_xcode_define_re = re.compile(r'([\\\"\' ])')
-def EscapeXcodeDefine(s):
-  """We must escape the defines that we give to Xcode so that it knows not to
-  split on spaces and to respect backslash and quote literals.  However, we
-  must not quote the define, or Xcode will incorrectly interpret variables,
-  especially $(inherited)."""
- return re.sub(_xcode_define_re, r'\\\1', s)
-
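-# For illustration: these expectations match the unit tests in
-# xcode_test.py further down in this diff:
-#   EscapeXcodeDefine('a b"c\\')      -> 'a\\ b\\"c\\\\'
-#   EscapeXcodeDefine('$(inherited)') -> '$(inherited)', unchanged.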
-
-def PerformBuild(data, configurations, params):
- options = params['options']
-
- for build_file, build_file_dict in data.iteritems():
- (build_file_root, build_file_ext) = os.path.splitext(build_file)
- if build_file_ext != '.gyp':
- continue
- xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
- if options.generator_output:
- xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
-
- for config in configurations:
- arguments = ['xcodebuild', '-project', xcodeproj_path]
- arguments += ['-configuration', config]
- print "Building [%s]: %s" % (config, arguments)
- subprocess.check_call(arguments)
-
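-# For illustration (hypothetical file name, not in the original source): for
-# a build file 'foo.gyp' with no suffix and a configuration named 'Default',
-# PerformBuild runs roughly
-#   xcodebuild -project foo.xcodeproj -configuration Default
-# once per configuration.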
-
-def CalculateGeneratorInputInfo(params):
- toplevel = params['options'].toplevel_dir
- if params.get('flavor') == 'ninja':
- generator_dir = os.path.relpath(params['options'].generator_output or '.')
- output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
- output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, output_dir, 'gypfiles-xcode-ninja'))
- else:
- output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
- qualified_out_dir = os.path.normpath(os.path.join(
- toplevel, output_dir, 'gypfiles'))
-
- global generator_filelist_paths
- generator_filelist_paths = {
- 'toplevel': toplevel,
- 'qualified_out_dir': qualified_out_dir,
- }
-
-
-def GenerateOutput(target_list, target_dicts, data, params):
- # Optionally configure each spec to use ninja as the external builder.
- ninja_wrapper = params.get('flavor') == 'ninja'
- if ninja_wrapper:
- (target_list, target_dicts, data) = \
- gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
-
- options = params['options']
- generator_flags = params.get('generator_flags', {})
- parallel_builds = generator_flags.get('xcode_parallel_builds', True)
- serialize_all_tests = \
- generator_flags.get('xcode_serialize_all_test_runs', True)
- upgrade_check_project_version = \
- generator_flags.get('xcode_upgrade_check_project_version', None)
-
- # Format upgrade_check_project_version with leading zeros as needed.
- if upgrade_check_project_version:
- upgrade_check_project_version = str(upgrade_check_project_version)
- while len(upgrade_check_project_version) < 4:
- upgrade_check_project_version = '0' + upgrade_check_project_version
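-    # (For illustration, not in the original source: a flag value of 630
-    # becomes '0630', the four-digit zero-padded form Xcode uses for
-    # LastUpgradeCheck.)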
-
- skip_excluded_files = \
- not generator_flags.get('xcode_list_excluded_files', True)
- xcode_projects = {}
- for build_file, build_file_dict in data.iteritems():
- (build_file_root, build_file_ext) = os.path.splitext(build_file)
- if build_file_ext != '.gyp':
- continue
- xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
- if options.generator_output:
- xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
- xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
- xcode_projects[build_file] = xcp
- pbxp = xcp.project
-
- # Set project-level attributes from multiple options
-    project_attributes = {}
- if parallel_builds:
- project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
- if upgrade_check_project_version:
- project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
- project_attributes['LastTestingUpgradeCheck'] = \
- upgrade_check_project_version
- project_attributes['LastSwiftUpdateCheck'] = \
- upgrade_check_project_version
- pbxp.SetProperty('attributes', project_attributes)
-
- # Add gyp/gypi files to project
- if not generator_flags.get('standalone'):
- main_group = pbxp.GetProperty('mainGroup')
- build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
- main_group.AppendChild(build_group)
- for included_file in build_file_dict['included_files']:
- build_group.AddOrGetFileByPath(included_file, False)
-
- xcode_targets = {}
- xcode_target_to_target_dict = {}
- for qualified_target in target_list:
- [build_file, target_name, toolset] = \
- gyp.common.ParseQualifiedTarget(qualified_target)
-
- spec = target_dicts[qualified_target]
- if spec['toolset'] != 'target':
- raise Exception(
- 'Multiple toolsets not supported in xcode build (target %s)' %
- qualified_target)
- configuration_names = [spec['default_configuration']]
- for configuration_name in sorted(spec['configurations'].keys()):
- if configuration_name not in configuration_names:
- configuration_names.append(configuration_name)
- xcp = xcode_projects[build_file]
- pbxp = xcp.project
-
- # Set up the configurations for the target according to the list of names
- # supplied.
- xccl = CreateXCConfigurationList(configuration_names)
-
- # Create an XCTarget subclass object for the target. The type with
- # "+bundle" appended will be used if the target has "mac_bundle" set.
- # loadable_modules not in a mac_bundle are mapped to
- # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
- # to create a single-file mh_bundle.
- _types = {
- 'executable': 'com.apple.product-type.tool',
- 'loadable_module': 'com.googlecode.gyp.xcode.bundle',
- 'shared_library': 'com.apple.product-type.library.dynamic',
- 'static_library': 'com.apple.product-type.library.static',
- 'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
- 'executable+bundle': 'com.apple.product-type.application',
- 'loadable_module+bundle': 'com.apple.product-type.bundle',
- 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
- 'shared_library+bundle': 'com.apple.product-type.framework',
- 'executable+extension+bundle': 'com.apple.product-type.app-extension',
- 'executable+watch+extension+bundle':
- 'com.apple.product-type.watchkit-extension',
- 'executable+watch+bundle':
- 'com.apple.product-type.application.watchapp',
- 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
- }
-
- target_properties = {
- 'buildConfigurationList': xccl,
- 'name': target_name,
- }
-
- type = spec['type']
- is_xctest = int(spec.get('mac_xctest_bundle', 0))
- is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
- is_app_extension = int(spec.get('ios_app_extension', 0))
- is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
- is_watch_app = int(spec.get('ios_watch_app', 0))
- if type != 'none':
- type_bundle_key = type
- if is_xctest:
- type_bundle_key += '+xctest'
- assert type == 'loadable_module', (
- 'mac_xctest_bundle targets must have type loadable_module '
- '(target %s)' % target_name)
- elif is_app_extension:
- assert is_bundle, ('ios_app_extension flag requires mac_bundle '
- '(target %s)' % target_name)
- type_bundle_key += '+extension+bundle'
- elif is_watchkit_extension:
- assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
- '(target %s)' % target_name)
- type_bundle_key += '+watch+extension+bundle'
- elif is_watch_app:
- assert is_bundle, ('ios_watch_app flag requires mac_bundle '
- '(target %s)' % target_name)
- type_bundle_key += '+watch+bundle'
- elif is_bundle:
- type_bundle_key += '+bundle'
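-      # (For illustration, not in the original source: a 'shared_library'
-      # target with mac_bundle=1 yields type_bundle_key
-      # 'shared_library+bundle' and therefore productType
-      # 'com.apple.product-type.framework' from the table above.)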
-
- xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
- try:
- target_properties['productType'] = _types[type_bundle_key]
- except KeyError, e:
- gyp.common.ExceptionAppend(e, "-- unknown product type while "
- "writing target %s" % target_name)
- raise
- else:
- xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
- assert not is_bundle, (
- 'mac_bundle targets cannot have type none (target "%s")' %
- target_name)
- assert not is_xctest, (
- 'mac_xctest_bundle targets cannot have type none (target "%s")' %
- target_name)
-
- target_product_name = spec.get('product_name')
- if target_product_name is not None:
- target_properties['productName'] = target_product_name
-
- xct = xctarget_type(target_properties, parent=pbxp,
- force_outdir=spec.get('product_dir'),
- force_prefix=spec.get('product_prefix'),
- force_extension=spec.get('product_extension'))
- pbxp.AppendProperty('targets', xct)
- xcode_targets[qualified_target] = xct
- xcode_target_to_target_dict[xct] = spec
-
- spec_actions = spec.get('actions', [])
- spec_rules = spec.get('rules', [])
-
- # Xcode has some "issues" with checking dependencies for the "Compile
- # sources" step with any source files/headers generated by actions/rules.
- # To work around this, if a target is building anything directly (not
- # type "none"), then a second target is used to run the GYP actions/rules
- # and is made a dependency of this target. This way the work is done
- # before the dependency checks for what should be recompiled.
- support_xct = None
- # The Xcode "issues" don't affect xcode-ninja builds, since the dependency
- # logic all happens in ninja. Don't bother creating the extra targets in
- # that case.
- if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
-      support_xccl = CreateXCConfigurationList(configuration_names)
- support_target_suffix = generator_flags.get(
- 'support_target_suffix', ' Support')
- support_target_properties = {
- 'buildConfigurationList': support_xccl,
- 'name': target_name + support_target_suffix,
- }
- if target_product_name:
- support_target_properties['productName'] = \
- target_product_name + ' Support'
- support_xct = \
- gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
- parent=pbxp)
- pbxp.AppendProperty('targets', support_xct)
- xct.AddDependency(support_xct)
- # Hang the support target off the main target so it can be tested/found
- # by the generator during Finalize.
- xct.support_target = support_xct
-
- prebuild_index = 0
-
- # Add custom shell script phases for "actions" sections.
- for action in spec_actions:
- # There's no need to write anything into the script to ensure that the
- # output directories already exist, because Xcode will look at the
- # declared outputs and automatically ensure that they exist for us.
-
- # Do we have a message to print when this action runs?
- message = action.get('message')
- if message:
- message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
- else:
- message = ''
-
- # Turn the list into a string that can be passed to a shell.
- action_string = gyp.common.EncodePOSIXShellList(action['action'])
-
- # Convert Xcode-type variable references to sh-compatible environment
- # variable references.
- message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
- action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
- action_string)
-
- script = ''
- # Include the optional message
- if message_sh:
- script += message_sh + '\n'
-      # Run the action via exec so that if exec fails, the script falls
-      # through to "exit 1" and signals an error.
- script += 'exec ' + action_string_sh + '\nexit 1\n'
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'inputPaths': action['inputs'],
- 'name': 'Action "' + action['action_name'] + '"',
- 'outputPaths': action['outputs'],
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
-
- if support_xct:
- support_xct.AppendProperty('buildPhases', ssbp)
- else:
- # TODO(mark): this assumes too much knowledge of the internals of
- # xcodeproj_file; some of these smarts should move into xcodeproj_file
- # itself.
- xct._properties['buildPhases'].insert(prebuild_index, ssbp)
- prebuild_index = prebuild_index + 1
-
- # TODO(mark): Should verify that at most one of these is specified.
- if int(action.get('process_outputs_as_sources', False)):
- for output in action['outputs']:
- AddSourceToTarget(output, type, pbxp, xct)
-
- if int(action.get('process_outputs_as_mac_bundle_resources', False)):
- for output in action['outputs']:
- AddResourceToTarget(output, pbxp, xct)
-
- # tgt_mac_bundle_resources holds the list of bundle resources so
- # the rule processing can check against it.
- if is_bundle:
- tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
- else:
- tgt_mac_bundle_resources = []
-
- # Add custom shell script phases driving "make" for "rules" sections.
- #
-    # Xcode's built-in rule support is almost powerful enough to use directly,
-    # but there are a few significant deficiencies that render it unusable.
- # There are workarounds for some of its inadequacies, but in aggregate,
- # the workarounds added complexity to the generator, and some workarounds
- # actually require input files to be crafted more carefully than I'd like.
- # Consequently, until Xcode rules are made more capable, "rules" input
- # sections will be handled in Xcode output by shell script build phases
- # performed prior to the compilation phase.
- #
-    # The following problems with Xcode rules were found.  The numbers are
-    # Apple radar IDs.  I hope that these shortcomings are addressed; I really
-    # liked having the rules handled directly in Xcode during the period that
-    # I was prototyping this.
- #
- # 6588600 Xcode compiles custom script rule outputs too soon, compilation
- # fails. This occurs when rule outputs from distinct inputs are
- # interdependent. The only workaround is to put rules and their
- # inputs in a separate target from the one that compiles the rule
- # outputs. This requires input file cooperation and it means that
- # process_outputs_as_sources is unusable.
- # 6584932 Need to declare that custom rule outputs should be excluded from
- # compilation. A possible workaround is to lie to Xcode about a
- # rule's output, giving it a dummy file it doesn't know how to
- # compile. The rule action script would need to touch the dummy.
- # 6584839 I need a way to declare additional inputs to a custom rule.
- # A possible workaround is a shell script phase prior to
- # compilation that touches a rule's primary input files if any
- # would-be additional inputs are newer than the output. Modifying
- # the source tree - even just modification times - feels dirty.
- # 6564240 Xcode "custom script" build rules always dump all environment
-    #          variables.  This is a low-priority problem and is not a
- # show-stopper.
- rules_by_ext = {}
- for rule in spec_rules:
- rules_by_ext[rule['extension']] = rule
-
- # First, some definitions:
- #
- # A "rule source" is a file that was listed in a target's "sources"
- # list and will have a rule applied to it on the basis of matching the
- # rule's "extensions" attribute. Rule sources are direct inputs to
- # rules.
- #
- # Rule definitions may specify additional inputs in their "inputs"
- # attribute. These additional inputs are used for dependency tracking
- # purposes.
- #
- # A "concrete output" is a rule output with input-dependent variables
- # resolved. For example, given a rule with:
- # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
- # if the target's "sources" list contained "one.ext" and "two.ext",
- # the "concrete output" for rule input "two.ext" would be "two.cc". If
- # a rule specifies multiple outputs, each input file that the rule is
- # applied to will have the same number of concrete outputs.
- #
- # If any concrete outputs are outdated or missing relative to their
- # corresponding rule_source or to any specified additional input, the
- # rule action must be performed to generate the concrete outputs.
-
- # concrete_outputs_by_rule_source will have an item at the same index
- # as the rule['rule_sources'] that it corresponds to. Each item is a
- # list of all of the concrete outputs for the rule_source.
- concrete_outputs_by_rule_source = []
-
- # concrete_outputs_all is a flat list of all concrete outputs that this
- # rule is able to produce, given the known set of input files
- # (rule_sources) that apply to it.
- concrete_outputs_all = []
-
- # messages & actions are keyed by the same indices as rule['rule_sources']
- # and concrete_outputs_by_rule_source. They contain the message and
- # action to perform after resolving input-dependent variables. The
- # message is optional, in which case None is stored for each rule source.
- messages = []
- actions = []
-
- for rule_source in rule.get('rule_sources', []):
- rule_source_dirname, rule_source_basename = \
- posixpath.split(rule_source)
- (rule_source_root, rule_source_ext) = \
- posixpath.splitext(rule_source_basename)
-
- # These are the same variable names that Xcode uses for its own native
- # rule support. Because Xcode's rule engine is not being used, they
- # need to be expanded as they are written to the makefile.
- rule_input_dict = {
- 'INPUT_FILE_BASE': rule_source_root,
- 'INPUT_FILE_SUFFIX': rule_source_ext,
- 'INPUT_FILE_NAME': rule_source_basename,
- 'INPUT_FILE_PATH': rule_source,
- 'INPUT_FILE_DIRNAME': rule_source_dirname,
- }
-
- concrete_outputs_for_this_rule_source = []
- for output in rule.get('outputs', []):
- # Fortunately, Xcode and make both use $(VAR) format for their
- # variables, so the expansion is the only transformation necessary.
-          # Any remaining $(VAR)-type variables in the string can be given
- # directly to make, which will pick up the correct settings from
- # what Xcode puts into the environment.
- concrete_output = ExpandXcodeVariables(output, rule_input_dict)
- concrete_outputs_for_this_rule_source.append(concrete_output)
-
- # Add all concrete outputs to the project.
- pbxp.AddOrGetFileInRootGroup(concrete_output)
-
- concrete_outputs_by_rule_source.append( \
- concrete_outputs_for_this_rule_source)
- concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
-
- # TODO(mark): Should verify that at most one of these is specified.
- if int(rule.get('process_outputs_as_sources', False)):
- for output in concrete_outputs_for_this_rule_source:
- AddSourceToTarget(output, type, pbxp, xct)
-
- # If the file came from the mac_bundle_resources list or if the rule
- # is marked to process outputs as bundle resource, do so.
- was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
- if was_mac_bundle_resource or \
- int(rule.get('process_outputs_as_mac_bundle_resources', False)):
- for output in concrete_outputs_for_this_rule_source:
- AddResourceToTarget(output, pbxp, xct)
-
- # Do we have a message to print when this rule runs?
- message = rule.get('message')
- if message:
- message = gyp.common.EncodePOSIXShellArgument(message)
- message = ExpandXcodeVariables(message, rule_input_dict)
- messages.append(message)
-
- # Turn the list into a string that can be passed to a shell.
- action_string = gyp.common.EncodePOSIXShellList(rule['action'])
-
- action = ExpandXcodeVariables(action_string, rule_input_dict)
- actions.append(action)
-
- if len(concrete_outputs_all) > 0:
-        # TODO(mark): There's a possibility for collision here.  Consider
-        # target "t" rule "A_r" and target "t_A" rule "r".
-        makefile_name = '%s.make' % re.sub(
-            '[^a-zA-Z0-9_]', '_', '%s_%s' % (target_name, rule['rule_name']))
- makefile_path = os.path.join(xcode_projects[build_file].path,
- makefile_name)
- # TODO(mark): try/close? Write to a temporary file and swap it only
- # if it's got changes?
- makefile = open(makefile_path, 'wb')
-
- # make will build the first target in the makefile by default. By
- # convention, it's called "all". List all (or at least one)
- # concrete output for each rule source as a prerequisite of the "all"
- # target.
- makefile.write('all: \\\n')
- for concrete_output_index in \
- xrange(0, len(concrete_outputs_by_rule_source)):
-          # Only list the first (index [0]) concrete output of each input
-          # in the "all" target.  Otherwise, a parallel make (-j > 1) would
-          # attempt to process each input multiple times simultaneously.
-          # Were it not for this, "all" could just contain the entire list of
-          # concrete_outputs_all.
- concrete_output = \
- concrete_outputs_by_rule_source[concrete_output_index][0]
- if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
- eol = ''
- else:
- eol = ' \\'
- makefile.write(' %s%s\n' % (concrete_output, eol))
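-        # (For illustration, not in the original source: for rule sources
-        # one.ext and two.ext with outputs ['$(INPUT_FILE_BASE).cc'], the
-        # generated makefile would begin roughly:
-        #   all: \
-        #       one.cc \
-        #       two.cc
-        # listing only the first concrete output per source.)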
-
- for (rule_source, concrete_outputs, message, action) in \
- zip(rule['rule_sources'], concrete_outputs_by_rule_source,
- messages, actions):
- makefile.write('\n')
-
- # Add a rule that declares it can build each concrete output of a
- # rule source. Collect the names of the directories that are
- # required.
- concrete_output_dirs = []
- for concrete_output_index in xrange(0, len(concrete_outputs)):
- concrete_output = concrete_outputs[concrete_output_index]
- if concrete_output_index == 0:
- bol = ''
- else:
- bol = ' '
- makefile.write('%s%s \\\n' % (bol, concrete_output))
-
- concrete_output_dir = posixpath.dirname(concrete_output)
- if (concrete_output_dir and
- concrete_output_dir not in concrete_output_dirs):
- concrete_output_dirs.append(concrete_output_dir)
-
- makefile.write(' : \\\n')
-
- # The prerequisites for this rule are the rule source itself and
- # the set of additional rule inputs, if any.
- prerequisites = [rule_source]
- prerequisites.extend(rule.get('inputs', []))
- for prerequisite_index in xrange(0, len(prerequisites)):
- prerequisite = prerequisites[prerequisite_index]
- if prerequisite_index == len(prerequisites) - 1:
- eol = ''
- else:
- eol = ' \\'
- makefile.write(' %s%s\n' % (prerequisite, eol))
-
- # Make sure that output directories exist before executing the rule
- # action.
- if len(concrete_output_dirs) > 0:
- makefile.write('\t@mkdir -p "%s"\n' %
- '" "'.join(concrete_output_dirs))
-
- # The rule message and action have already had the necessary variable
- # substitutions performed.
- if message:
- # Mark it with note: so Xcode picks it up in build output.
- makefile.write('\t@echo note: %s\n' % message)
- makefile.write('\t%s\n' % action)
-
- makefile.close()
-
- # It might be nice to ensure that needed output directories exist
- # here rather than in each target in the Makefile, but that wouldn't
- # work if there ever was a concrete output that had an input-dependent
- # variable anywhere other than in the leaf position.
-
- # Don't declare any inputPaths or outputPaths. If they're present,
- # Xcode will provide a slight optimization by only running the script
- # phase if any output is missing or outdated relative to any input.
- # Unfortunately, it will also assume that all outputs are touched by
- # the script, and if the outputs serve as files in a compilation
- # phase, they will be unconditionally rebuilt. Since make might not
- # rebuild everything that could be declared here as an output, this
- # extra compilation activity is unnecessary. With inputPaths and
- # outputPaths not supplied, make will always be called, but it knows
- # enough to not do anything when everything is up-to-date.
-
- # To help speed things up, pass -j COUNT to make so it does some work
- # in parallel. Don't use ncpus because Xcode will build ncpus targets
- # in parallel and if each target happens to have a rules step, there
- # would be ncpus^2 things going. With a machine that has 2 quad-core
- # Xeons, a build can quickly run out of processes based on
- # scheduling/other tasks, and randomly failing builds are no good.
- script = \
-"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
-if [ "${JOB_COUNT}" -gt 4 ]; then
- JOB_COUNT=4
-fi
-exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
-exit 1
-""" % makefile_name
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'name': 'Rule "' + rule['rule_name'] + '"',
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
-
- if support_xct:
- support_xct.AppendProperty('buildPhases', ssbp)
- else:
- # TODO(mark): this assumes too much knowledge of the internals of
- # xcodeproj_file; some of these smarts should move into xcodeproj_file
- # itself.
- xct._properties['buildPhases'].insert(prebuild_index, ssbp)
- prebuild_index = prebuild_index + 1
-
- # Extra rule inputs also go into the project file. Concrete outputs were
- # already added when they were computed.
- groups = ['inputs', 'inputs_excluded']
- if skip_excluded_files:
- groups = [x for x in groups if not x.endswith('_excluded')]
- for group in groups:
- for item in rule.get(group, []):
- pbxp.AddOrGetFileInRootGroup(item)
-
- # Add "sources".
- for source in spec.get('sources', []):
- (source_root, source_extension) = posixpath.splitext(source)
- if source_extension[1:] not in rules_by_ext:
- # AddSourceToTarget will add the file to a root group if it's not
- # already there.
- AddSourceToTarget(source, type, pbxp, xct)
- else:
- pbxp.AddOrGetFileInRootGroup(source)
-
- # Add "mac_bundle_resources" and "mac_framework_private_headers" if
- # it's a bundle of any type.
- if is_bundle:
- for resource in tgt_mac_bundle_resources:
- (resource_root, resource_extension) = posixpath.splitext(resource)
- if resource_extension[1:] not in rules_by_ext:
- AddResourceToTarget(resource, pbxp, xct)
- else:
- pbxp.AddOrGetFileInRootGroup(resource)
-
- for header in spec.get('mac_framework_private_headers', []):
- AddHeaderToTarget(header, pbxp, xct, False)
-
- # Add "mac_framework_headers". These can be valid for both frameworks
- # and static libraries.
- if is_bundle or type == 'static_library':
- for header in spec.get('mac_framework_headers', []):
- AddHeaderToTarget(header, pbxp, xct, True)
-
- # Add "copies".
- pbxcp_dict = {}
- for copy_group in spec.get('copies', []):
- dest = copy_group['destination']
- if dest[0] not in ('/', '$'):
- # Relative paths are relative to $(SRCROOT).
- dest = '$(SRCROOT)/' + dest
-
- code_sign = int(copy_group.get('xcode_code_sign', 0))
-      settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
-
- # Coalesce multiple "copies" sections in the same target with the same
- # "destination" property into the same PBXCopyFilesBuildPhase, otherwise
- # they'll wind up with ID collisions.
- pbxcp = pbxcp_dict.get(dest, None)
- if pbxcp is None:
- pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
- 'name': 'Copy to ' + copy_group['destination']
- },
- parent=xct)
- pbxcp.SetDestination(dest)
-
- # TODO(mark): The usual comment about this knowing too much about
- # gyp.xcodeproj_file internals applies.
- xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
-
- pbxcp_dict[dest] = pbxcp
-
- for file in copy_group['files']:
- pbxcp.AddFile(file, settings)
-
- # Excluded files can also go into the project file.
- if not skip_excluded_files:
- for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
- 'mac_framework_private_headers']:
- excluded_key = key + '_excluded'
- for item in spec.get(excluded_key, []):
- pbxp.AddOrGetFileInRootGroup(item)
-
- # So can "inputs" and "outputs" sections of "actions" groups.
- groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
- if skip_excluded_files:
- groups = [x for x in groups if not x.endswith('_excluded')]
- for action in spec.get('actions', []):
- for group in groups:
- for item in action.get(group, []):
- # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
- # sources.
- if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
- pbxp.AddOrGetFileInRootGroup(item)
-
- for postbuild in spec.get('postbuilds', []):
- action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
- script = 'exec ' + action_string_sh + '\nexit 1\n'
-
- # Make the postbuild step depend on the output of ld or ar from this
- # target. Apparently putting the script step after the link step isn't
- # sufficient to ensure proper ordering in all cases. With an input
- # declared but no outputs, the script step should run every time, as
- # desired.
- ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
- 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
- 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
- 'shellScript': script,
- 'showEnvVarsInLog': 0,
- })
- xct.AppendProperty('buildPhases', ssbp)
-
- # Add dependencies before libraries, because adding a dependency may imply
- # adding a library. It's preferable to keep dependencies listed first
- # during a link phase so that they can override symbols that would
- # otherwise be provided by libraries, which will usually include system
- # libraries. On some systems, ld is finicky and even requires the
- # libraries to be ordered in such a way that unresolved symbols in
- # earlier-listed libraries may only be resolved by later-listed libraries.
- # The Mac linker doesn't work that way, but other platforms do, and so
- # their linker invocations need to be constructed in this way. There's
- # no compelling reason for Xcode's linker invocations to differ.
-
- if 'dependencies' in spec:
- for dependency in spec['dependencies']:
- xct.AddDependency(xcode_targets[dependency])
- # The support project also gets the dependencies (in case they are
- # needed for the actions/rules to work).
- if support_xct:
- support_xct.AddDependency(xcode_targets[dependency])
-
- if 'libraries' in spec:
- for library in spec['libraries']:
- xct.FrameworksPhase().AddFile(library)
- # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
- # I wish Xcode handled this automatically.
- library_dir = posixpath.dirname(library)
- if library_dir not in xcode_standard_library_dirs and (
- not xct.HasBuildSetting(_library_search_paths_var) or
- library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
- xct.AppendBuildSetting(_library_search_paths_var, library_dir)
-
- for configuration_name in configuration_names:
- configuration = spec['configurations'][configuration_name]
- xcbc = xct.ConfigurationNamed(configuration_name)
- for include_dir in configuration.get('mac_framework_dirs', []):
- xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
- for include_dir in configuration.get('include_dirs', []):
- xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
- for library_dir in configuration.get('library_dirs', []):
- if library_dir not in xcode_standard_library_dirs and (
- not xcbc.HasBuildSetting(_library_search_paths_var) or
- library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
- xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
-
- if 'defines' in configuration:
- for define in configuration['defines']:
- set_define = EscapeXcodeDefine(define)
- xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
- if 'xcode_settings' in configuration:
- for xck, xcv in configuration['xcode_settings'].iteritems():
- xcbc.SetBuildSetting(xck, xcv)
- if 'xcode_config_file' in configuration:
- config_ref = pbxp.AddOrGetFileInRootGroup(
- configuration['xcode_config_file'])
- xcbc.SetBaseConfiguration(config_ref)
-
- build_files = []
- for build_file, build_file_dict in data.iteritems():
- if build_file.endswith('.gyp'):
- build_files.append(build_file)
-
- for build_file in build_files:
- xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
-
- for build_file in build_files:
- xcode_projects[build_file].Finalize2(xcode_targets,
- xcode_target_to_target_dict)
-
- for build_file in build_files:
- xcode_projects[build_file].Write()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
deleted file mode 100644
index 260324a43f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-""" Unit tests for the xcode.py file. """
-
-import gyp.generator.xcode as xcode
-import unittest
-import sys
-
-
-class TestEscapeXcodeDefine(unittest.TestCase):
- if sys.platform == 'darwin':
- def test_InheritedRemainsUnescaped(self):
- self.assertEqual(xcode.EscapeXcodeDefine('$(inherited)'), '$(inherited)')
-
- def test_Escaping(self):
- self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\')
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input.py
deleted file mode 100644
index 7567d0a05b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input.py
+++ /dev/null
@@ -1,2897 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from compiler.ast import Const
-from compiler.ast import Dict
-from compiler.ast import Discard
-from compiler.ast import List
-from compiler.ast import Module
-from compiler.ast import Node
-from compiler.ast import Stmt
-import compiler
-import gyp.common
-import gyp.simple_copy
-import multiprocessing
-import optparse
-import os.path
-import re
-import shlex
-import signal
-import subprocess
-import sys
-import threading
-import time
-import traceback
-from gyp.common import GypError
-from gyp.common import OrderedSet
-
-
-# A list of types that are treated as linkable.
-linkable_types = [
- 'executable',
- 'shared_library',
- 'loadable_module',
- 'mac_kernel_extension',
-]
-
-# A list of sections that contain links to other targets.
-dependency_sections = ['dependencies', 'export_dependent_settings']
-
-# base_path_sections is a list of sections defined by GYP that contain
-# pathnames.  The generators can provide more keys; the two lists are merged
-# into path_sections, but you should call IsPathSection instead of using
-# either list directly.
-base_path_sections = [
- 'destination',
- 'files',
- 'include_dirs',
- 'inputs',
- 'libraries',
- 'outputs',
- 'sources',
-]
-path_sections = set()
-
-# These per-process dictionaries are used to cache build file data when loading
-# in parallel mode.
-per_process_data = {}
-per_process_aux_data = {}
-
-def IsPathSection(section):
- # If section ends in one of the '=+?!' characters, it's applied to a section
- # without the trailing characters. '/' is notably absent from this list,
- # because there's no way for a regular expression to be treated as a path.
- while section and section[-1:] in '=+?!':
- section = section[:-1]
-
- if section in path_sections:
- return True
-
-  # Sections matching the regexp '_(dir|file|path)s?$' are also
-  # considered PathSections.  Manual string matching is used rather than the
-  # regexp because it is much faster, and this can be called hundreds of
-  # thousands of times, so micro performance matters.
- if "_" in section:
- tail = section[-6:]
- if tail[-1] == 's':
- tail = tail[:-1]
- if tail[-5:] in ('_file', '_path'):
- return True
- return tail[-4:] == '_dir'
-
- return False
-
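-# For illustration (not in the original source): per the rules above,
-# IsPathSection('sources!') is true once 'sources' has been merged into
-# path_sections, IsPathSection('copied_files') is true via the '_file'
-# suffix check, and IsPathSection('defines') is false.
-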
-# base_non_configuration_keys is a list of key names that belong in the target
-# itself and should not be propagated into its configurations. It is merged
-# with a list that can come from the generator to
-# create non_configuration_keys.
-base_non_configuration_keys = [
- # Sections that must exist inside targets and not configurations.
- 'actions',
- 'configurations',
- 'copies',
- 'default_configuration',
- 'dependencies',
- 'dependencies_original',
- 'libraries',
- 'postbuilds',
- 'product_dir',
- 'product_extension',
- 'product_name',
- 'product_prefix',
- 'rules',
- 'run_as',
- 'sources',
- 'standalone_static_library',
- 'suppress_wildcard',
- 'target_name',
- 'toolset',
- 'toolsets',
- 'type',
-
- # Sections that can be found inside targets or configurations, but that
- # should not be propagated from targets into their configurations.
- 'variables',
-]
-non_configuration_keys = []
-
-# Keys that do not belong inside a configuration dictionary.
-invalid_configuration_keys = [
- 'actions',
- 'all_dependent_settings',
- 'configurations',
- 'dependencies',
- 'direct_dependent_settings',
- 'libraries',
- 'link_settings',
- 'sources',
- 'standalone_static_library',
- 'target_name',
- 'type',
-]
-
-# Controls whether or not the generator supports multiple toolsets.
-multiple_toolsets = False
-
-# Paths for converting filelist paths to output paths: {
-#   toplevel,
-#   qualified_out_dir,
-# }
-generator_filelist_paths = None
-
-def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
- """Return a list of all build files included into build_file_path.
-
- The returned list will contain build_file_path as well as all other files
- that it included, either directly or indirectly. Note that the list may
- contain files that were included into a conditional section that evaluated
- to false and was not merged into build_file_path's dict.
-
- aux_data is a dict containing a key for each build file or included build
- file. Those keys provide access to dicts whose "included" keys contain
- lists of all other files included by the build file.
-
- included should be left at its default None value by external callers. It
- is used for recursion.
-
- The returned list will not contain any duplicate entries. Each build file
- in the list will be relative to the current directory.
- """
-
-  if included is None:
-    included = []
-
- if build_file_path in included:
- return included
-
- included.append(build_file_path)
-
- for included_build_file in aux_data[build_file_path].get('included', []):
- GetIncludedBuildFiles(included_build_file, aux_data, included)
-
- return included
-
-
-def CheckedEval(file_contents):
- """Return the eval of a gyp file.
-
- The gyp file is restricted to dictionaries and lists only, and
- repeated keys are not allowed.
-
- Note that this is slower than eval() is.
- """
-
- ast = compiler.parse(file_contents)
- assert isinstance(ast, Module)
- c1 = ast.getChildren()
- assert c1[0] is None
- assert isinstance(c1[1], Stmt)
- c2 = c1[1].getChildren()
- assert isinstance(c2[0], Discard)
- c3 = c2[0].getChildren()
- assert len(c3) == 1
- return CheckNode(c3[0], [])
-
-
-def CheckNode(node, keypath):
- if isinstance(node, Dict):
- c = node.getChildren()
- dict = {}
- for n in range(0, len(c), 2):
- assert isinstance(c[n], Const)
- key = c[n].getChildren()[0]
- if key in dict:
- raise GypError("Key '" + key + "' repeated at level " +
- repr(len(keypath) + 1) + " with key path '" +
- '.'.join(keypath) + "'")
- kp = list(keypath) # Make a copy of the list for descending this node.
- kp.append(key)
- dict[key] = CheckNode(c[n + 1], kp)
- return dict
- elif isinstance(node, List):
- c = node.getChildren()
- children = []
- for index, child in enumerate(c):
- kp = list(keypath) # Copy list.
- kp.append(repr(index))
- children.append(CheckNode(child, kp))
- return children
- elif isinstance(node, Const):
- return node.getChildren()[0]
- else:
- raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
- "': " + repr(node))
-
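-# For illustration (not in the original source): CheckedEval accepts literal
-# dicts, lists and constants and rejects anything else:
-#   CheckedEval("{'targets': [{'target_name': 'a'}]}") returns that dict;
-#   CheckedEval("{'a': 1, 'a': 2}") raises GypError for the repeated key;
-#   CheckedEval("1 + 1") raises TypeError, since addition is not a literal.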
-
-def LoadOneBuildFile(build_file_path, data, aux_data, includes,
- is_target, check):
- if build_file_path in data:
- return data[build_file_path]
-
- if os.path.exists(build_file_path):
-    # Open the build file for read ('r') with universal-newlines mode ('U'),
-    # to make sure platform-specific newlines ('\r\n' or '\r') are converted
-    # to '\n', which would otherwise make eval() fail.
- build_file_contents = open(build_file_path, 'rU').read()
- else:
- raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
-
- build_file_data = None
- try:
- if check:
- build_file_data = CheckedEval(build_file_contents)
- else:
- build_file_data = eval(build_file_contents, {'__builtins__': None},
- None)
- except SyntaxError, e:
- e.filename = build_file_path
- raise
- except Exception, e:
- gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
- raise
-
- if type(build_file_data) is not dict:
- raise GypError("%s does not evaluate to a dictionary." % build_file_path)
-
- data[build_file_path] = build_file_data
- aux_data[build_file_path] = {}
-
- # Scan for includes and merge them in.
- if ('skip_includes' not in build_file_data or
- not build_file_data['skip_includes']):
- try:
- if is_target:
- LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, includes, check)
- else:
- LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
- aux_data, None, check)
- except Exception, e:
- gyp.common.ExceptionAppend(e,
- 'while reading includes of ' + build_file_path)
- raise
-
- return build_file_data
-
-
-def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
- includes, check):
- includes_list = []
-  if includes is not None:
-    includes_list.extend(includes)
- if 'includes' in subdict:
- for include in subdict['includes']:
- # "include" is specified relative to subdict_path, so compute the real
- # path to include by appending the provided "include" to the directory
- # in which subdict_path resides.
- relative_include = \
- os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
- includes_list.append(relative_include)
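-      # (For illustration, hypothetical paths: subdict_path
-      # 'chrome/chrome.gyp' with include '../build/common.gypi' resolves to
-      # 'build/common.gypi'.)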
-    # Unhook the includes list; it's no longer needed.
-    del subdict['includes']
-
- # Merge in the included files.
- for include in includes_list:
-    if 'included' not in aux_data[subdict_path]:
- aux_data[subdict_path]['included'] = []
- aux_data[subdict_path]['included'].append(include)
-
- gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
-
- MergeDicts(subdict,
- LoadOneBuildFile(include, data, aux_data, None, False, check),
- subdict_path, include)
-
- # Recurse into subdictionaries.
- for k, v in subdict.iteritems():
- if type(v) is dict:
- LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
- None, check)
- elif type(v) is list:
- LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
- check)
-
-
-# This recurses into lists so that it can look for dicts.
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
- for item in sublist:
- if type(item) is dict:
- LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
- None, check)
- elif type(item) is list:
- LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
-
-# Processes toolsets in all the targets. This recurses into condition entries
-# since they can contain toolsets as well.
-def ProcessToolsetsInDict(data):
- if 'targets' in data:
- target_list = data['targets']
- new_target_list = []
- for target in target_list:
- # If this target already has an explicit 'toolset', and no 'toolsets'
- # list, don't modify it further.
- if 'toolset' in target and 'toolsets' not in target:
- new_target_list.append(target)
- continue
- if multiple_toolsets:
- toolsets = target.get('toolsets', ['target'])
- else:
- toolsets = ['target']
- # Make sure this 'toolsets' definition is only processed once.
- if 'toolsets' in target:
- del target['toolsets']
- if len(toolsets) > 0:
- # Optimization: only do copies if more than one toolset is specified.
- for build in toolsets[1:]:
- new_target = gyp.simple_copy.deepcopy(target)
- new_target['toolset'] = build
- new_target_list.append(new_target)
- target['toolset'] = toolsets[0]
- new_target_list.append(target)
- data['targets'] = new_target_list
- if 'conditions' in data:
- for condition in data['conditions']:
- if type(condition) is list:
- for condition_dict in condition[1:]:
- if type(condition_dict) is dict:
- ProcessToolsetsInDict(condition_dict)
-
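-# For illustration (not in the original source): with multiple_toolsets
-# enabled, a target such as
-#   {'target_name': 'foo', 'toolsets': ['target', 'host'], ...}
-# is expanded into two entries, one with 'toolset': 'target' and one with
-# 'toolset': 'host', and the 'toolsets' key is removed.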
-
-# TODO(mark): I don't love this name. It just means that it's going to load
-# a build file that contains targets and is expected to provide a targets dict
-# that contains the targets...
-def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
- depth, check, load_dependencies):
- # If depth is set, predefine the DEPTH variable to be a relative path from
- # this build file's directory to the directory identified by depth.
- if depth:
- # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
- # temporary measure. This should really be addressed by keeping all paths
- # in POSIX until actual project generation.
- d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
- if d == '':
- variables['DEPTH'] = '.'
- else:
- variables['DEPTH'] = d.replace('\\', '/')
-
- # The 'target_build_files' key is only set when loading target build files in
- # the non-parallel code path, where LoadTargetBuildFile is called
- # recursively. In the parallel code path, we don't need to check whether the
- # |build_file_path| has already been loaded, because the 'scheduled' set in
- # ParallelState guarantees that we never load the same |build_file_path|
- # twice.
- if 'target_build_files' in data:
- if build_file_path in data['target_build_files']:
- # Already loaded.
- return False
- data['target_build_files'].add(build_file_path)
-
- gyp.DebugOutput(gyp.DEBUG_INCLUDES,
- "Loading Target Build File '%s'", build_file_path)
-
- build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
- includes, True, check)
-
- # Store DEPTH for later use in generators.
- build_file_data['_DEPTH'] = depth
-
- # Set up the included_files key indicating which .gyp files contributed to
- # this target dict.
- if 'included_files' in build_file_data:
- raise GypError(build_file_path + ' must not contain included_files key')
-
- included = GetIncludedBuildFiles(build_file_path, aux_data)
- build_file_data['included_files'] = []
- for included_file in included:
- # included_file is relative to the current directory, but it needs to
- # be made relative to build_file_path's directory.
- included_relative = \
- gyp.common.RelativePath(included_file,
- os.path.dirname(build_file_path))
- build_file_data['included_files'].append(included_relative)
-
- # Do a first round of toolsets expansion so that conditions can be defined
- # per toolset.
- ProcessToolsetsInDict(build_file_data)
-
- # Apply "pre"/"early" variable expansions and condition evaluations.
- ProcessVariablesAndConditionsInDict(
- build_file_data, PHASE_EARLY, variables, build_file_path)
-
- # Since some toolsets might have been defined conditionally, perform
- # a second round of toolsets expansion now.
- ProcessToolsetsInDict(build_file_data)
-
- # Look at each project's target_defaults dict, and merge settings into
- # targets.
- if 'target_defaults' in build_file_data:
- if 'targets' not in build_file_data:
- raise GypError("Unable to find targets in build file %s" %
- build_file_path)
-
- index = 0
- while index < len(build_file_data['targets']):
- # This procedure needs to give the impression that target_defaults is
- # used as defaults, and the individual targets inherit from that.
- # The individual targets need to be merged into the defaults. Make
- # a deep copy of the defaults for each target, merge the target dict
- # as found in the input file into that copy, and then hook up the
- # copy with the target-specific data merged into it as the replacement
- # target dict.
- old_target_dict = build_file_data['targets'][index]
- new_target_dict = gyp.simple_copy.deepcopy(
- build_file_data['target_defaults'])
- MergeDicts(new_target_dict, old_target_dict,
- build_file_path, build_file_path)
- build_file_data['targets'][index] = new_target_dict
- index += 1
-
- # No longer needed.
- del build_file_data['target_defaults']
-
- # Look for dependencies. This means that dependency resolution occurs
- # after "pre" conditionals and variable expansion, but before "post" -
- # in other words, you can't put a "dependencies" section inside a "post"
- # conditional within a target.
-
- dependencies = []
- if 'targets' in build_file_data:
- for target_dict in build_file_data['targets']:
- if 'dependencies' not in target_dict:
- continue
- for dependency in target_dict['dependencies']:
- dependencies.append(
- gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
-
- if load_dependencies:
- for dependency in dependencies:
- try:
- LoadTargetBuildFile(dependency, data, aux_data, variables,
- includes, depth, check, load_dependencies)
- except Exception, e:
- gyp.common.ExceptionAppend(
- e, 'while loading dependencies of %s' % build_file_path)
- raise
- else:
- return (build_file_path, dependencies)
-
-def CallLoadTargetBuildFile(global_flags,
- build_file_path, variables,
- includes, depth, check,
- generator_input_info):
- """Wrapper around LoadTargetBuildFile for parallel processing.
-
- This wrapper is used when LoadTargetBuildFile is executed in
- a worker process.
- """
-
- try:
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
- # Apply globals so that the worker process behaves the same.
- for key, value in global_flags.iteritems():
- globals()[key] = value
-
- SetGeneratorGlobals(generator_input_info)
- result = LoadTargetBuildFile(build_file_path, per_process_data,
- per_process_aux_data, variables,
- includes, depth, check, False)
- if not result:
- return result
-
- (build_file_path, dependencies) = result
-
- # We can safely pop the build_file_data from per_process_data because it
- # will never be referenced by this process again, so we don't need to keep
- # it in the cache.
- build_file_data = per_process_data.pop(build_file_path)
-
- # This gets serialized and sent back to the main process via a pipe.
- # It's handled in LoadTargetBuildFileCallback.
- return (build_file_path,
- build_file_data,
- dependencies)
- except GypError, e:
- sys.stderr.write("gyp: %s\n" % e)
- return None
- except Exception, e:
- print >>sys.stderr, 'Exception:', e
- print >>sys.stderr, traceback.format_exc()
- return None
-
-
-class ParallelProcessingError(Exception):
- pass
-
-
-class ParallelState(object):
- """Class to keep track of state when processing input files in parallel.
-
- If build files are loaded in parallel, use this to keep track of
- state during farming out and processing parallel jobs. It's stored
- in a global so that the callback function can have access to it.
- """
-
- def __init__(self):
- # The multiprocessing pool.
- self.pool = None
- # The condition variable used to protect this object and notify
- # the main loop when there might be more data to process.
- self.condition = None
- # The "data" dict that was passed to LoadTargetBuildFileParallel
- self.data = None
- # The number of parallel calls outstanding; decremented when a response
- # is received.
- self.pending = 0
- # The set of all build files that have been scheduled, so we don't
- # schedule the same one twice.
- self.scheduled = set()
- # A list of dependency build file paths that haven't been scheduled yet.
- self.dependencies = []
- # Flag to indicate if there was an error in a child process.
- self.error = False
-
- def LoadTargetBuildFileCallback(self, result):
- """Handle the results of running LoadTargetBuildFile in another process.
- """
- self.condition.acquire()
- if not result:
- self.error = True
- self.condition.notify()
- self.condition.release()
- return
- (build_file_path0, build_file_data0, dependencies0) = result
- self.data[build_file_path0] = build_file_data0
- self.data['target_build_files'].add(build_file_path0)
- for new_dependency in dependencies0:
- if new_dependency not in self.scheduled:
- self.scheduled.add(new_dependency)
- self.dependencies.append(new_dependency)
- self.pending -= 1
- self.condition.notify()
- self.condition.release()
-
-
-def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
- check, generator_input_info):
- parallel_state = ParallelState()
- parallel_state.condition = threading.Condition()
- # Make copies of the build_files argument that we can modify while working.
- parallel_state.dependencies = list(build_files)
- parallel_state.scheduled = set(build_files)
- parallel_state.pending = 0
- parallel_state.data = data
-
- try:
- parallel_state.condition.acquire()
- while parallel_state.dependencies or parallel_state.pending:
- if parallel_state.error:
- break
- if not parallel_state.dependencies:
- parallel_state.condition.wait()
- continue
-
- dependency = parallel_state.dependencies.pop()
-
- parallel_state.pending += 1
- global_flags = {
- 'path_sections': globals()['path_sections'],
- 'non_configuration_keys': globals()['non_configuration_keys'],
- 'multiple_toolsets': globals()['multiple_toolsets']}
-
- if not parallel_state.pool:
- parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
- parallel_state.pool.apply_async(
- CallLoadTargetBuildFile,
- args = (global_flags, dependency,
- variables, includes, depth, check, generator_input_info),
- callback = parallel_state.LoadTargetBuildFileCallback)
- except KeyboardInterrupt, e:
- parallel_state.pool.terminate()
- raise e
-
- parallel_state.condition.release()
-
- parallel_state.pool.close()
- parallel_state.pool.join()
- parallel_state.pool = None
-
- if parallel_state.error:
- sys.exit(1)
-
-# Look for the bracket that matches the first bracket seen in a
-# string, and return the start and end as a tuple. For example, if
-# the input is something like "<(foo <(bar)) blah", then it would
-# return (1, 13), indicating the entire string except for the leading
-# "<" and trailing " blah".
-LBRACKETS = set('{[(')
-BRACKETS = {'}': '{', ']': '[', ')': '('}
-def FindEnclosingBracketGroup(input_str):
- stack = []
- start = -1
- for index, char in enumerate(input_str):
- if char in LBRACKETS:
- stack.append(char)
- if start == -1:
- start = index
- elif char in BRACKETS:
- if not stack:
- return (-1, -1)
- if stack.pop() != BRACKETS[char]:
- return (-1, -1)
- if not stack:
- return (start, index + 1)
- return (-1, -1)
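-
-# A doctest-style sketch of the above (editor's illustration, not part
-# of the original file):
-#   >>> FindEnclosingBracketGroup('<(foo <(bar)) blah')
-#   (1, 13)
-#   >>> FindEnclosingBracketGroup('no brackets here')
-#   (-1, -1)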
-
-
-def IsStrCanonicalInt(string):
- """Returns True if |string| is in its canonical integer form.
-
- The canonical form is such that str(int(string)) == string.
- """
- if type(string) is str:
- # This function is called a lot so for maximum performance, avoid
- # involving regexps which would otherwise make the code much
- # shorter. Regexps would need twice the time of this function.
- if string:
- if string == "0":
- return True
- if string[0] == "-":
- string = string[1:]
- if not string:
- return False
- if '1' <= string[0] <= '9':
- return string.isdigit()
-
- return False
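-
-# Hedged illustration: IsStrCanonicalInt accepts only strings that
-# round-trip through int(), e.g.
-#   >>> [IsStrCanonicalInt(s) for s in ('10', '-5', '010', '-0', '1.0')]
-#   [True, True, False, False, False]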
-
-
-# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
-# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
-# In the last case, the inner "<()" is captured in match['content'].
-early_variable_re = re.compile(
- r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
- r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
- r'\((?P<is_array>\s*\[?)'
- r'(?P<content>.*?)(\]?)\))')
-
-# This matches the same as early_variable_re, but with '>' instead of '<'.
-late_variable_re = re.compile(
- r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
- r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
- r'\((?P<is_array>\s*\[?)'
- r'(?P<content>.*?)(\]?)\))')
-
-# This matches the same as early_variable_re, but with '^' instead of '<'.
-latelate_variable_re = re.compile(
- r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
- r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
- r'\((?P<is_array>\s*\[?)'
- r'(?P<content>.*?)(\]?)\))')
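-
-# For example (editor's sketch), matching early_variable_re against
-# '<!pymod_do_main(gen args)' yields type='<!',
-# command_string='pymod_do_main', is_array='', and content='gen args'.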
-
-# Global cache of results from running commands so they don't have to be run
-# more than once.
-cached_command_results = {}
-
-
-def FixupPlatformCommand(cmd):
- if sys.platform == 'win32':
- if type(cmd) is list:
- cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
- else:
- cmd = re.sub('^cat ', 'type ', cmd)
- return cmd
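-
-# e.g. (illustrative): on win32, FixupPlatformCommand('cat foo.txt')
-# returns 'type foo.txt'; on other platforms commands pass through
-# unchanged.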
-
-
-PHASE_EARLY = 0
-PHASE_LATE = 1
-PHASE_LATELATE = 2
-
-
-def ExpandVariables(input, phase, variables, build_file):
- # Look for the pattern that gets expanded into variables
- if phase == PHASE_EARLY:
- variable_re = early_variable_re
- expansion_symbol = '<'
- elif phase == PHASE_LATE:
- variable_re = late_variable_re
- expansion_symbol = '>'
- elif phase == PHASE_LATELATE:
- variable_re = latelate_variable_re
- expansion_symbol = '^'
- else:
- assert False
-
- input_str = str(input)
- if IsStrCanonicalInt(input_str):
- return int(input_str)
-
- # Do a quick scan to determine if an expensive regex search is warranted.
- if expansion_symbol not in input_str:
- return input_str
-
- # Get the entire list of matches as a list of MatchObject instances.
- # (using findall here would return strings instead of MatchObjects).
- matches = list(variable_re.finditer(input_str))
- if not matches:
- return input_str
-
- output = input_str
- # Reverse the list of matches so that replacements are done right-to-left.
- # That ensures that earlier replacements won't mess up the string in a
- # way that causes later calls to find the earlier substituted text instead
- # of what's intended for replacement.
- matches.reverse()
- for match_group in matches:
- match = match_group.groupdict()
- gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
- # match['replace'] is the substring to look for, match['type']
- # is the character code for the replacement type (< > <! >! <| >| <@
- # >@ <!@ >!@), match['is_array'] contains a '[' for command
- # arrays, and match['content'] is the name of the variable (< >)
- # or command to run (<! >!). match['command_string'] is an optional
- # command string. Currently, only 'pymod_do_main' is supported.
-
- # run_command is true if a ! variant is used.
- run_command = '!' in match['type']
- command_string = match['command_string']
-
- # file_list is true if a | variant is used.
- file_list = '|' in match['type']
-
- # Capture these now so we can adjust them later.
- replace_start = match_group.start('replace')
- replace_end = match_group.end('replace')
-
- # Find the ending paren, and re-evaluate the contained string.
- (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
-
- # Adjust the replacement range to match the entire command
- # found by FindEnclosingBracketGroup (since the variable_re
- # probably doesn't match the entire command if it contained
- # nested variables).
- replace_end = replace_start + c_end
-
- # Find the "real" replacement, matching the appropriate closing
- # paren, and adjust the replacement start and end.
- replacement = input_str[replace_start:replace_end]
-
- # Figure out what the contents of the variable parens are.
- contents_start = replace_start + c_start + 1
- contents_end = replace_end - 1
- contents = input_str[contents_start:contents_end]
-
- # Do filter substitution now for <|().
- # Admittedly, this is different from the evaluation order in other
- # contexts. However, since filtration has no chance to run on <|(),
- # this seems like the only obvious way to give them access to filters.
- if file_list:
- processed_variables = gyp.simple_copy.deepcopy(variables)
- ProcessListFiltersInDict(contents, processed_variables)
- # Recurse to expand variables in the contents
- contents = ExpandVariables(contents, phase,
- processed_variables, build_file)
- else:
- # Recurse to expand variables in the contents
- contents = ExpandVariables(contents, phase, variables, build_file)
-
- # Strip off leading/trailing whitespace so that variable matches are
- # simpler below (and because they are rarely needed).
- contents = contents.strip()
-
- # expand_to_list is true if an @ variant is used. In that case,
- # the expansion should result in a list. Note that the caller
- # must be expecting a list in return, though not all callers do,
- # because not all are working in list context. Also, for list
- # expansions, there can be no other text besides the variable
- # expansion in the input string.
- expand_to_list = '@' in match['type'] and input_str == replacement
-
- if run_command or file_list:
- # Find the build file's directory, so commands can be run or file lists
- # generated relative to it.
- build_file_dir = os.path.dirname(build_file)
- if build_file_dir == '' and not file_list:
- # If build_file is just a leaf filename indicating a file in the
- # current directory, build_file_dir might be an empty string. Set
- # it to None to signal to subprocess.Popen that it should run the
- # command in the current directory.
- build_file_dir = None
-
- # Support <|(listfile.txt ...) which generates a file
- # containing items from a gyp list, generated at gyp time.
- # This works around actions/rules which have more inputs than will
- # fit on the command line.
- if file_list:
- if type(contents) is list:
- contents_list = contents
- else:
- contents_list = contents.split(' ')
- replacement = contents_list[0]
- if os.path.isabs(replacement):
- raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
-
- if not generator_filelist_paths:
- path = os.path.join(build_file_dir, replacement)
- else:
- if os.path.isabs(build_file_dir):
- toplevel = generator_filelist_paths['toplevel']
- rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
- else:
- rel_build_file_dir = build_file_dir
- qualified_out_dir = generator_filelist_paths['qualified_out_dir']
- path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
- gyp.common.EnsureDirExists(path)
-
- replacement = gyp.common.RelativePath(path, build_file_dir)
- f = gyp.common.WriteOnDiff(path)
- for i in contents_list[1:]:
- f.write('%s\n' % i)
- f.close()
-
- elif run_command:
- use_shell = True
- if match['is_array']:
- contents = eval(contents)
- use_shell = False
-
- # Check for a cached value to avoid executing commands, or generating
- # file lists more than once. The cache key contains the command to be
- # run as well as the directory to run it from, to account for commands
- # that depend on their current directory.
- # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
- # someone could author a set of GYP files where each time the command
- # is invoked it produces different output by design. When the need
- # arises, the syntax should be extended to support not caching a
- # command's output, so that it is run every time.
- cache_key = (str(contents), build_file_dir)
- cached_value = cached_command_results.get(cache_key, None)
- if cached_value is None:
- gyp.DebugOutput(gyp.DEBUG_VARIABLES,
- "Executing command '%s' in directory '%s'",
- contents, build_file_dir)
-
- replacement = ''
-
- if command_string == 'pymod_do_main':
- # <!pymod_do_main(modulename param eters) loads |modulename| as a
- # python module and then calls that module's DoMain() function,
- # passing ["param", "eters"] as a single list argument. For modules
- # that don't load quickly, this can be faster than
- # <!(python modulename param eters). Do this in |build_file_dir|.
- oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
- if build_file_dir: # build_file_dir may be None (see above).
- os.chdir(build_file_dir)
- try:
-
- parsed_contents = shlex.split(contents)
- try:
- py_module = __import__(parsed_contents[0])
- except ImportError as e:
- raise GypError("Error importing pymod_do_main"
- "module (%s): %s" % (parsed_contents[0], e))
- replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
- finally:
- os.chdir(oldwd)
- assert replacement is not None
- elif command_string:
- raise GypError("Unknown command string '%s' in '%s'." %
- (command_string, contents))
- else:
- # Fix up command with platform specific workarounds.
- contents = FixupPlatformCommand(contents)
- try:
- p = subprocess.Popen(contents, shell=use_shell,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE,
- cwd=build_file_dir)
- except Exception, e:
- raise GypError("%s while executing command '%s' in %s" %
- (e, contents, build_file))
-
- p_stdout, p_stderr = p.communicate('')
-
- if p.wait() != 0 or p_stderr:
- sys.stderr.write(p_stderr)
- # Simulate check_call behavior, since check_call only exists
- # in python 2.5 and later.
- raise GypError("Call to '%s' returned exit status %d while in %s." %
- (contents, p.returncode, build_file))
- replacement = p_stdout.rstrip()
-
- cached_command_results[cache_key] = replacement
- else:
- gyp.DebugOutput(gyp.DEBUG_VARIABLES,
- "Had cache value for command '%s' in directory '%s'",
- contents, build_file_dir)
- replacement = cached_value
-
- else:
- if contents not in variables:
- if contents[-1] in ['!', '/']:
- # In order to allow cross-compiles (nacl) to happen more naturally,
- # we will allow references to >(sources/) etc. to resolve to
- # an empty list if undefined. This allows actions to:
- # 'action!': [
- # '>@(_sources!)',
- # ],
- # 'action/': [
- # '>@(_sources/)',
- # ],
- replacement = []
- else:
- raise GypError('Undefined variable ' + contents +
- ' in ' + build_file)
- else:
- replacement = variables[contents]
-
- if type(replacement) is list:
- for item in replacement:
- if contents[-1] != '/' and type(item) not in (str, int):
- raise GypError('Variable ' + contents +
- ' must expand to a string or list of strings; ' +
- 'list contains a ' +
- item.__class__.__name__)
- # Run through the list and handle variable expansions in it. Since
- # the list is guaranteed not to contain dicts, this won't do anything
- # with conditions sections.
- ProcessVariablesAndConditionsInList(replacement, phase, variables,
- build_file)
- elif type(replacement) not in (str, int):
- raise GypError('Variable ' + contents +
- ' must expand to a string or list of strings; ' +
- 'found a ' + replacement.__class__.__name__)
-
- if expand_to_list:
- # Expanding in list context. It's guaranteed that there's only one
- # replacement to do in |input_str| and that it's this replacement. See
- # above.
- if type(replacement) is list:
- # If it's already a list, make a copy.
- output = replacement[:]
- else:
- # Split it the same way sh would split arguments.
- output = shlex.split(str(replacement))
- else:
- # Expanding in string context.
- encoded_replacement = ''
- if type(replacement) is list:
- # When expanding a list into string context, turn the list items
- # into a string in a way that will work with a subprocess call.
- #
- # TODO(mark): This isn't completely correct. This should
- # call a generator-provided function that observes the
- # proper list-to-argument quoting rules on a specific
- # platform instead of just calling the POSIX encoding
- # routine.
- encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
- else:
- encoded_replacement = replacement
-
- output = output[:replace_start] + str(encoded_replacement) + \
- output[replace_end:]
- # Prepare for the next match iteration.
- input_str = output
-
- if output == input:
- gyp.DebugOutput(gyp.DEBUG_VARIABLES,
- "Found only identity matches on %r, avoiding infinite "
- "recursion.",
- output)
- else:
- # Look for more matches now that we've replaced some, to deal with
- # expanding local variables (variables defined in the same
- # variables block as this one).
- gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
- if type(output) is list:
- if output and type(output[0]) is list:
- # Leave output alone if it's a list of lists.
- # We don't want such lists to be stringified.
- pass
- else:
- new_output = []
- for item in output:
- new_output.append(
- ExpandVariables(item, phase, variables, build_file))
- output = new_output
- else:
- output = ExpandVariables(output, phase, variables, build_file)
-
- # Convert all strings that are canonically-represented integers into integers.
- if type(output) is list:
- for index in xrange(0, len(output)):
- if IsStrCanonicalInt(output[index]):
- output[index] = int(output[index])
- elif IsStrCanonicalInt(output):
- output = int(output)
-
- return output
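-
-# Minimal usage sketch (editor's illustration; the variable name is
-# hypothetical):
-#   >>> ExpandVariables('<(depth)/out', PHASE_EARLY, {'depth': '..'}, 'x.gyp')
-#   '../out'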
-
-# The same condition is often evaluated over and over again so it
-# makes sense to cache as much as possible between evaluations.
-cached_conditions_asts = {}
-
-def EvalCondition(condition, conditions_key, phase, variables, build_file):
- """Returns the dict that should be used or None if the result was
- that nothing should be used."""
- if type(condition) is not list:
- raise GypError(conditions_key + ' must be a list')
- if len(condition) < 2:
- # It's possible that condition[0] won't work in which case this
- # attempt will raise its own IndexError. That's probably fine.
- raise GypError(conditions_key + ' ' + condition[0] +
- ' must be at least length 2, not ' + str(len(condition)))
-
- i = 0
- result = None
- while i < len(condition):
- cond_expr = condition[i]
- true_dict = condition[i + 1]
- if type(true_dict) is not dict:
- raise GypError('{} {} must be followed by a dictionary, not {}'.format(
- conditions_key, cond_expr, type(true_dict)))
- if len(condition) > i + 2 and type(condition[i + 2]) is dict:
- false_dict = condition[i + 2]
- i = i + 3
- if i != len(condition):
- raise GypError('{} {} has {} unexpected trailing items'.format(
- conditions_key, cond_expr, len(condition) - i))
- else:
- false_dict = None
- i = i + 2
- if result is None:
- result = EvalSingleCondition(
- cond_expr, true_dict, false_dict, phase, variables, build_file)
-
- return result
-
-
-def EvalSingleCondition(
- cond_expr, true_dict, false_dict, phase, variables, build_file):
- """Returns true_dict if cond_expr evaluates to true, and false_dict
- otherwise."""
- # Do expansions on the condition itself. Since the condition can naturally
- # contain variable references without needing to resort to GYP expansion
- # syntax, this is of dubious value for variables, but someone might want to
- # use a command expansion directly inside a condition.
- cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
- build_file)
- if type(cond_expr_expanded) not in (str, int):
- raise ValueError(
- 'Variable expansion in this context permits str and int ' + \
- 'only, found ' + cond_expr_expanded.__class__.__name__)
-
- try:
- if cond_expr_expanded in cached_conditions_asts:
- ast_code = cached_conditions_asts[cond_expr_expanded]
- else:
- ast_code = compile(cond_expr_expanded, '<string>', 'eval')
- cached_conditions_asts[cond_expr_expanded] = ast_code
- if eval(ast_code, {'__builtins__': None}, variables):
- return true_dict
- return false_dict
- except SyntaxError, e:
- syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
- 'at character %d.' %
- (str(e.args[0]), e.text, build_file, e.offset),
- e.filename, e.lineno, e.offset, e.text)
- raise syntax_error
- except NameError, e:
- gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
- (cond_expr_expanded, build_file))
- raise GypError(e)
-
-
-def ProcessConditionsInDict(the_dict, phase, variables, build_file):
- # Process a 'conditions' or 'target_conditions' section in the_dict,
- # depending on phase.
- # early -> conditions
- # late -> target_conditions
- # latelate -> no conditions
- #
- # Each item in a conditions list consists of cond_expr, a string expression
- # evaluated as the condition, and true_dict, a dict that will be merged into
- # the_dict if cond_expr evaluates to true. Optionally, a third item,
- # false_dict, may be present. false_dict is merged into the_dict if
- # cond_expr evaluates to false.
- #
- # Any dict merged into the_dict will be recursively processed for nested
- # conditionals and other expansions, also according to phase, immediately
- # prior to being merged.
-
- if phase == PHASE_EARLY:
- conditions_key = 'conditions'
- elif phase == PHASE_LATE:
- conditions_key = 'target_conditions'
- elif phase == PHASE_LATELATE:
- return
- else:
- assert False
-
- if conditions_key not in the_dict:
- return
-
- conditions_list = the_dict[conditions_key]
- # Unhook the conditions list, it's no longer needed.
- del the_dict[conditions_key]
-
- for condition in conditions_list:
- merge_dict = EvalCondition(condition, conditions_key, phase, variables,
- build_file)
-
- if merge_dict is not None:
- # Expand variables and nested conditionals in the merge_dict before
- # merging it.
- ProcessVariablesAndConditionsInDict(merge_dict, phase,
- variables, build_file)
-
- MergeDicts(the_dict, merge_dict, build_file, build_file)
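-
-# For example (editor's sketch), a section such as
-#   'conditions': [['OS=="mac"', {'defines': ['MAC']},
-#                                {'defines': ['NOT_MAC']}]],
-# merges the first dict into the_dict when OS expands to "mac", and the
-# optional second dict otherwise.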
-
-
-def LoadAutomaticVariablesFromDict(variables, the_dict):
- # Any keys with plain string values in the_dict become automatic variables.
- # The variable name is the key name with a "_" character prepended.
- for key, value in the_dict.iteritems():
- if type(value) in (str, int, list):
- variables['_' + key] = value
-
-
-def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
- # Any key in the_dict's "variables" dict, if it has one, becomes a
- # variable. The variable name is the key name in the "variables" dict.
- # Variables that end with the % character are set only if they are unset in
- # the variables dict. the_dict_key is the name of the key that accesses
- # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
- # (it could be a list or it could be parentless because it is a root dict),
- # the_dict_key will be None.
- for key, value in the_dict.get('variables', {}).iteritems():
- if type(value) not in (str, int, list):
- continue
-
- if key.endswith('%'):
- variable_name = key[:-1]
- if variable_name in variables:
- # If the variable is already set, don't set it.
- continue
- if the_dict_key == 'variables' and variable_name in the_dict:
- # If the variable is set without a % in the_dict, and the_dict is a
- # variables dict (making |variables| a variables sub-dict of a
- # variables dict), use the_dict's definition.
- value = the_dict[variable_name]
- else:
- variable_name = key
-
- variables[variable_name] = value
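-
-# Sketch (illustrative): if variables already contains {'use_goma': 0},
-# a build file's 'variables': {'use_goma%': 1} leaves use_goma at 0;
-# without the preexisting value, use_goma would be set to 1.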
-
-
-def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
- build_file, the_dict_key=None):
- """Handle all variable and command expansion and conditional evaluation.
-
- This function is the public entry point for all variable expansions and
- conditional evaluations. The variables_in dictionary will not be modified
- by this function.
- """
-
- # Make a copy of the variables_in dict that can be modified during the
- # loading of automatics and the loading of the variables dict.
- variables = variables_in.copy()
- LoadAutomaticVariablesFromDict(variables, the_dict)
-
- if 'variables' in the_dict:
- # Make sure all the local variables are added to the variables
- # list before we process them so that you can reference one
- # variable from another. They will be fully expanded by recursion
- # in ExpandVariables.
- for key, value in the_dict['variables'].iteritems():
- variables[key] = value
-
- # Handle the associated variables dict first, so that any variable
- # references within can be resolved prior to using them as variables.
- # Pass a copy of the variables dict to avoid having it be tainted.
- # Otherwise, it would have extra automatics added for everything that
- # should just be an ordinary variable in this scope.
- ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
- variables, build_file, 'variables')
-
- LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
- for key, value in the_dict.iteritems():
- # Skip "variables", which was already processed if present.
- if key != 'variables' and type(value) is str:
- expanded = ExpandVariables(value, phase, variables, build_file)
- if type(expanded) not in (str, int):
- raise ValueError(
- 'Variable expansion in this context permits str and int ' + \
- 'only, found ' + expanded.__class__.__name__ + ' for ' + key)
- the_dict[key] = expanded
-
- # Variable expansion may have resulted in changes to automatics. Reload.
- # TODO(mark): Optimization: only reload if no changes were made.
- variables = variables_in.copy()
- LoadAutomaticVariablesFromDict(variables, the_dict)
- LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
- # Process conditions in this dict. This is done after variable expansion
- # so that conditions may take advantage of expanded variables. For example,
- # if the_dict contains:
- # {'type': '<(library_type)',
- # 'conditions': [['_type=="static_library"', { ... }]]},
- # _type, as used in the condition, will only be set to the value of
- # library_type if variable expansion is performed before condition
- # processing. However, condition processing should occur prior to recursion
- # so that variables (both automatic and "variables" dict type) may be
- # adjusted by conditions sections, merged into the_dict, and have the
- # intended impact on contained dicts.
- #
- # This arrangement means that a "conditions" section containing a "variables"
- # section will only have those variables effective in subdicts, not in
- # the_dict. The workaround is to put a "conditions" section within a
- # "variables" section. For example:
- # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
- # 'defines': ['<(define)'],
- # 'my_subdict': {'defines': ['<(define)']}},
- # will not result in "IS_MAC" being appended to the "defines" list in the
- # current scope but would result in it being appended to the "defines" list
- # within "my_subdict". By comparison:
- # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
- # 'defines': ['<(define)'],
- # 'my_subdict': {'defines': ['<(define)']}},
- # will append "IS_MAC" to both "defines" lists.
-
- # Evaluate conditions sections, allowing variable expansions within them
- # as well as nested conditionals. This will process a 'conditions' or
- # 'target_conditions' section, perform appropriate merging and recursive
- # conditional and variable processing, and then remove the conditions section
- # from the_dict if it is present.
- ProcessConditionsInDict(the_dict, phase, variables, build_file)
-
- # Conditional processing may have resulted in changes to automatics or the
- # variables dict. Reload.
- variables = variables_in.copy()
- LoadAutomaticVariablesFromDict(variables, the_dict)
- LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
-
- # Recurse into child dicts, or process child lists which may result in
- # further recursion into descendant dicts.
- for key, value in the_dict.iteritems():
- # Skip "variables" and string values, which were already processed if
- # present.
- if key == 'variables' or type(value) is str:
- continue
- if type(value) is dict:
- # Pass a copy of the variables dict so that subdicts can't influence
- # parents.
- ProcessVariablesAndConditionsInDict(value, phase, variables,
- build_file, key)
- elif type(value) is list:
- # The list itself can't influence the variables dict, and
- # ProcessVariablesAndConditionsInList will make copies of the variables
- # dict if it needs to pass it to something that can influence it. No
- # copy is necessary here.
- ProcessVariablesAndConditionsInList(value, phase, variables,
- build_file)
- elif type(value) is not int:
- raise TypeError('Unknown type ' + value.__class__.__name__ + \
- ' for ' + key)
-
-
-def ProcessVariablesAndConditionsInList(the_list, phase, variables,
- build_file):
- # Iterate using an index so that new values can be assigned into the_list.
- index = 0
- while index < len(the_list):
- item = the_list[index]
- if type(item) is dict:
- # Make a copy of the variables dict so that it won't influence anything
- # outside of its own scope.
- ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
- elif type(item) is list:
- ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
- elif type(item) is str:
- expanded = ExpandVariables(item, phase, variables, build_file)
- if type(expanded) in (str, int):
- the_list[index] = expanded
- elif type(expanded) is list:
- the_list[index:index+1] = expanded
- index += len(expanded)
-
- # index now identifies the next item to examine. Continue right now
- # without falling into the index increment below.
- continue
- else:
- raise ValueError(
- 'Variable expansion in this context permits strings and ' + \
- 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
- str(index))
- elif type(item) is not int:
- raise TypeError('Unknown type ' + item.__class__.__name__ + \
- ' at index ' + str(index))
- index = index + 1
-
-
-def BuildTargetsDict(data):
- """Builds a dict mapping fully-qualified target names to their target dicts.
-
- |data| is a dict of loaded build files keyed by pathname relative to the
- current directory. Values in |data| are build file contents. For each
- |data| value with a "targets" key, the value of the "targets" key is taken
- as a list containing target dicts. Each target's fully-qualified name is
- constructed from the pathname of the build file (|data| key) and its
- "target_name" property. These fully-qualified names are used as the keys
- in the returned dict. These keys provide access to the target dicts,
- the dicts in the "targets" lists.
- """
-
- targets = {}
- for build_file in data['target_build_files']:
- for target in data[build_file].get('targets', []):
- target_name = gyp.common.QualifiedTarget(build_file,
- target['target_name'],
- target['toolset'])
- if target_name in targets:
- raise GypError('Duplicate target definitions for ' + target_name)
- targets[target_name] = target
-
- return targets
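-
-# e.g. (editor's illustration): a target 'foo' with toolset 'target'
-# defined in 'a/b.gyp' is keyed as 'a/b.gyp:foo#target'.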
-
-
-def QualifyDependencies(targets):
- """Make dependency links fully-qualified relative to the current directory.
-
- |targets| is a dict mapping fully-qualified target names to their target
- dicts. For each target in this dict, keys known to contain dependency
- links are examined, and any dependencies referenced will be rewritten
- so that they are fully-qualified and relative to the current directory.
- All rewritten dependencies are suitable for use as keys to |targets| or a
- similar dict.
- """
-
- all_dependency_sections = [dep + op
- for dep in dependency_sections
- for op in ('', '!', '/')]
-
- for target, target_dict in targets.iteritems():
- target_build_file = gyp.common.BuildFile(target)
- toolset = target_dict['toolset']
- for dependency_key in all_dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- for index in xrange(0, len(dependencies)):
- dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
- target_build_file, dependencies[index], toolset)
- if not multiple_toolsets:
- # Ignore any toolset specification in the dependency.
- dep_toolset = toolset
- dependency = gyp.common.QualifiedTarget(dep_file,
- dep_target,
- dep_toolset)
- dependencies[index] = dependency
-
- # Make sure anything appearing in a list other than "dependencies" also
- # appears in the "dependencies" list.
- if dependency_key != 'dependencies' and \
- dependency not in target_dict['dependencies']:
- raise GypError('Found ' + dependency + ' in ' + dependency_key +
- ' of ' + target + ', but not in dependencies')
-
-
-def ExpandWildcardDependencies(targets, data):
- """Expands dependencies specified as build_file:*.
-
- For each target in |targets|, examines sections containing links to other
- targets. If any such section contains a link of the form build_file:*, it
- is taken as a wildcard link, and is expanded to list each target in
- build_file. The |data| dict provides access to build file dicts.
-
- Any target that does not wish to be included by wildcard can provide an
- optional "suppress_wildcard" key in its target dict. When present and
- true, a wildcard dependency link will not include such targets.
-
- All dependency names, including the keys to |targets| and the values in each
- dependency list, must be qualified when this function is called.
- """
-
- for target, target_dict in targets.iteritems():
- toolset = target_dict['toolset']
- target_build_file = gyp.common.BuildFile(target)
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
-
- # Loop this way instead of "for dependency in" or "for index in xrange"
- # because the dependencies list will be modified within the loop body.
- index = 0
- while index < len(dependencies):
- (dependency_build_file, dependency_target, dependency_toolset) = \
- gyp.common.ParseQualifiedTarget(dependencies[index])
- if dependency_target != '*' and dependency_toolset != '*':
- # Not a wildcard. Keep it moving.
- index = index + 1
- continue
-
- if dependency_build_file == target_build_file:
- # It's an error for a target to depend on all other targets in
- # the same file, because a target cannot depend on itself.
- raise GypError('Found wildcard in ' + dependency_key + ' of ' +
- target + ' referring to same build file')
-
- # Take the wildcard out and adjust the index so that the next
- # dependency in the list will be processed the next time through the
- # loop.
- del dependencies[index]
- index = index - 1
-
- # Loop through the targets in the other build file, adding them to
- # this target's list of dependencies in place of the removed
- # wildcard.
- dependency_target_dicts = data[dependency_build_file]['targets']
- for dependency_target_dict in dependency_target_dicts:
- if int(dependency_target_dict.get('suppress_wildcard', False)):
- continue
- dependency_target_name = dependency_target_dict['target_name']
- if (dependency_target != '*' and
- dependency_target != dependency_target_name):
- continue
- dependency_target_toolset = dependency_target_dict['toolset']
- if (dependency_toolset != '*' and
- dependency_toolset != dependency_target_toolset):
- continue
- dependency = gyp.common.QualifiedTarget(dependency_build_file,
- dependency_target_name,
- dependency_target_toolset)
- index = index + 1
- dependencies.insert(index, dependency)
-
- index = index + 1
-
-
-def Unify(l):
- """Removes duplicate elements from l, keeping the first element."""
- seen = {}
- return [seen.setdefault(e, e) for e in l if e not in seen]
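-
-# Illustration:
-#   >>> Unify([1, 2, 1, 3, 2])
-#   [1, 2, 3]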
-
-
-def RemoveDuplicateDependencies(targets):
- """Makes sure every dependency appears only once in all targets's dependency
- lists."""
- for target_name, target_dict in targets.iteritems():
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- if dependencies:
- target_dict[dependency_key] = Unify(dependencies)
-
-
-def Filter(l, item):
- """Removes item from l."""
- res = {}
- return [res.setdefault(e, e) for e in l if e != item]
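-
-# Illustration:
-#   >>> Filter(['a', 'b', 'a', 'c'], 'a')
-#   ['b', 'c']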
-
-
-def RemoveSelfDependencies(targets):
- """Remove self dependencies from targets that have the prune_self_dependency
- variable set."""
- for target_name, target_dict in targets.iteritems():
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- if dependencies:
- for t in dependencies:
- if t == target_name:
- if targets[t].get('variables', {}).get('prune_self_dependency', 0):
- target_dict[dependency_key] = Filter(dependencies, target_name)
-
-
-def RemoveLinkDependenciesFromNoneTargets(targets):
- """Remove dependencies having the 'link_dependency' attribute from the 'none'
- targets."""
- for target_name, target_dict in targets.iteritems():
- for dependency_key in dependency_sections:
- dependencies = target_dict.get(dependency_key, [])
- if dependencies:
- for t in dependencies:
- if target_dict.get('type', None) == 'none':
- if targets[t].get('variables', {}).get('link_dependency', 0):
- target_dict[dependency_key] = \
- Filter(target_dict[dependency_key], t)
-
-
-class DependencyGraphNode(object):
- """
-
- Attributes:
- ref: A reference to an object that this DependencyGraphNode represents.
- dependencies: List of DependencyGraphNodes on which this one depends.
- dependents: List of DependencyGraphNodes that depend on this one.
- """
-
- class CircularException(GypError):
- pass
-
- def __init__(self, ref):
- self.ref = ref
- self.dependencies = []
- self.dependents = []
-
- def __repr__(self):
- return '<DependencyGraphNode: %r>' % self.ref
-
- def FlattenToList(self):
- # flat_list is the sorted list of dependencies - actually, the list items
- # are the "ref" attributes of DependencyGraphNodes. Every target will
- # appear in flat_list after all of its dependencies, and before all of its
- # dependents.
- flat_list = OrderedSet()
-
- # in_degree_zeros is the list of DependencyGraphNodes that have no
- # dependencies not in flat_list. Initially, it is a copy of the children
- # of this node, because when the graph was built, nodes with no
- # dependencies were made implicit dependents of the root node.
- in_degree_zeros = set(self.dependents[:])
-
- while in_degree_zeros:
- # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
- # can be appended to flat_list. Take these nodes out of in_degree_zeros
- # as work progresses, so that the next node to process from the list can
- # always be accessed at a consistent position.
- node = in_degree_zeros.pop()
- flat_list.add(node.ref)
-
- # Look at dependents of the node just added to flat_list. Some of them
- # may now belong in in_degree_zeros.
- for node_dependent in node.dependents:
- is_in_degree_zero = True
- # TODO: We want to check through the
- # node_dependent.dependencies list but if it's long and we
- # always start at the beginning, then we get O(n^2) behaviour.
- for node_dependent_dependency in node_dependent.dependencies:
- if node_dependent_dependency.ref not in flat_list:
- # The dependent has one or more dependencies not in flat_list. There
- # will be more chances to add it to flat_list when examining
- # it again as a dependent of those other dependencies, provided
- # that there are no cycles.
- is_in_degree_zero = False
- break
-
- if is_in_degree_zero:
- # All of the dependent's dependencies are already in flat_list. Add
- # it to in_degree_zeros where it will be processed in a future
- # iteration of the outer loop.
- in_degree_zeros.add(node_dependent)
-
- return list(flat_list)
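-
- # Sketch (editor's illustration): for a graph where A depends on B and
- # B depends on C, FlattenToList() on the root node returns
- # ['C', 'B', 'A']: every ref appears after all of its dependencies.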
-
- def FindCycles(self):
- """
- Returns a list of cycles in the graph, where each cycle is its own list.
- """
- results = []
- visited = set()
-
- def Visit(node, path):
- for child in node.dependents:
- if child in path:
- results.append([child] + path[:path.index(child) + 1])
- elif child not in visited:
- visited.add(child)
- Visit(child, [child] + path)
-
- visited.add(self)
- Visit(self, [self])
-
- return results
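-
- # Illustration (editor's note): for two nodes A and B that depend on
- # each other, FindCycles() on the root returns a single cycle list of
- # the form [A, B, A], beginning and ending with the repeated node.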
-
- def DirectDependencies(self, dependencies=None):
- """Returns a list of just direct dependencies."""
- if dependencies is None:
- dependencies = []
-
- for dependency in self.dependencies:
- # Check for None, corresponding to the root node.
- if dependency.ref is not None and dependency.ref not in dependencies:
- dependencies.append(dependency.ref)
-
- return dependencies
-
- def _AddImportedDependencies(self, targets, dependencies=None):
- """Given a list of direct dependencies, adds indirect dependencies that
- other dependencies have declared to export their settings.
-
- This method does not operate on self. Rather, it operates on the list
- of dependencies in the |dependencies| argument. For each dependency in
- that list, if any declares that it exports the settings of one of its
- own dependencies, those dependencies whose settings are "passed through"
- are added to the list. As new items are added to the list, they too will
- be processed, so it is possible to import settings through multiple levels
- of dependencies.
-
- This method is not terribly useful on its own; it depends on being
- "primed" with a list of direct dependencies such as one provided by
- DirectDependencies. DirectAndImportedDependencies is intended to be the
- public entry point.
- """
-
- if dependencies is None:
- dependencies = []
-
- index = 0
- while index < len(dependencies):
- dependency = dependencies[index]
- dependency_dict = targets[dependency]
- # Add any dependencies whose settings should be imported to the list
- # if not already present. Newly-added items will be checked for
- # their own imports when the list iteration reaches them.
- # Rather than simply appending new items, insert them after the
- # dependency that exported them. This is done to more closely match
- # the depth-first method used by DeepDependencies.
- add_index = 1
- for imported_dependency in \
- dependency_dict.get('export_dependent_settings', []):
- if imported_dependency not in dependencies:
- dependencies.insert(index + add_index, imported_dependency)
- add_index = add_index + 1
- index = index + 1
-
- return dependencies
-
- def DirectAndImportedDependencies(self, targets, dependencies=None):
- """Returns a list of a target's direct dependencies and all indirect
- dependencies whose settings a dependency has advertised should be
- exported through it (via 'export_dependent_settings').
- """
-
- dependencies = self.DirectDependencies(dependencies)
- return self._AddImportedDependencies(targets, dependencies)
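-
- # Illustration (editor's note): if A lists B in 'dependencies' and B
- # lists C in 'export_dependent_settings', then A's
- # DirectAndImportedDependencies(targets) contains both B and C.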
-
- def DeepDependencies(self, dependencies=None):
- """Returns an OrderedSet of all of a target's dependencies, recursively."""
- if dependencies is None:
- # Using a list to get ordered output and a set to do fast "is it
- # already added" checks.
- dependencies = OrderedSet()
-
- for dependency in self.dependencies:
- # Check for None, corresponding to the root node.
- if dependency.ref is None:
- continue
- if dependency.ref not in dependencies:
- dependency.DeepDependencies(dependencies)
- dependencies.add(dependency.ref)
-
- return dependencies
-
- def _LinkDependenciesInternal(self, targets, include_shared_libraries,
- dependencies=None, initial=True):
- """Returns an OrderedSet of dependency targets that are linked
- into this target.
-
- This function has a split personality, depending on the setting of
- |initial|. Outside callers should always leave |initial| at its default
- setting.
-
- When adding a target to the list of dependencies, this function will
- recurse into itself with |initial| set to False, to collect dependencies
- that are linked into the linkable target for which the list is being built.
-
- If |include_shared_libraries| is False, the resulting dependencies will not
- include shared_library targets that are linked into this target.
- """
- if dependencies is None:
- # Using a list to get ordered output and a set to do fast "is it
- # already added" checks.
- dependencies = OrderedSet()
-
- # Check for None, corresponding to the root node.
- if self.ref is None:
- return dependencies
-
- # It's kind of sucky that |targets| has to be passed into this function,
- # but that's presently the easiest way to access the target dicts so that
- # this function can find target types.
-
- if 'target_name' not in targets[self.ref]:
- raise GypError("Missing 'target_name' field in target.")
-
- if 'type' not in targets[self.ref]:
- raise GypError("Missing 'type' field in target %s" %
- targets[self.ref]['target_name'])
-
- target_type = targets[self.ref]['type']
-
- is_linkable = target_type in linkable_types
-
- if initial and not is_linkable:
- # If this is the first target being examined and it's not linkable,
- # return an empty list of link dependencies, because the link
- # dependencies are intended to apply to the target itself (initial is
- # True) and this target won't be linked.
- return dependencies
-
- # Don't traverse 'none' targets if explicitly excluded.
- if (target_type == 'none' and
- not targets[self.ref].get('dependencies_traverse', True)):
- dependencies.add(self.ref)
- return dependencies
-
- # Executables, mac kernel extensions and loadable modules are already fully
- # and finally linked. Nothing else can be a link dependency of them, there
- # can only be dependencies in the sense that a dependent target might run
- # an executable or load the loadable_module.
- if not initial and target_type in ('executable', 'loadable_module',
- 'mac_kernel_extension'):
- return dependencies
-
- # Shared libraries are already fully linked. They should only be included
- # in |dependencies| when adjusting static library dependencies (in order to
- # link against the shared_library's import lib), but should not be included
- # in |dependencies| when propagating link_settings.
- # The |include_shared_libraries| flag controls which of these two cases we
- # are handling.
- if (not initial and target_type == 'shared_library' and
- not include_shared_libraries):
- return dependencies
-
- # The target is linkable, add it to the list of link dependencies.
- if self.ref not in dependencies:
- dependencies.add(self.ref)
- if initial or not is_linkable:
- # If this is a subsequent target and it's linkable, don't look any
- # further for linkable dependencies, as they'll already be linked into
- # this linkable target. Always look at dependencies of the initial
- # target, and always look at dependencies of non-linkables.
- for dependency in self.dependencies:
- dependency._LinkDependenciesInternal(targets,
- include_shared_libraries,
- dependencies, False)
-
- return dependencies
-
- def DependenciesForLinkSettings(self, targets):
- """
- Returns a list of dependency targets whose link_settings should be merged
- into this target.
- """
-
- # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
- # link_settings are propagated. So for now, we will allow it, unless the
- # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
- # False. Once chrome is fixed, we can remove this flag.
- include_shared_libraries = \
- targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
- return self._LinkDependenciesInternal(targets, include_shared_libraries)
-
- def DependenciesToLinkAgainst(self, targets):
- """
- Returns a list of dependency targets that are linked into this target.
- """
- return self._LinkDependenciesInternal(targets, True)
-
-
-def BuildDependencyList(targets):
- # Create a DependencyGraphNode for each target. Put it into a dict for easy
- # access.
- dependency_nodes = {}
- for target, spec in targets.iteritems():
- if target not in dependency_nodes:
- dependency_nodes[target] = DependencyGraphNode(target)
-
- # Set up the dependency links. Targets that have no dependencies are treated
- # as dependent on root_node.
- root_node = DependencyGraphNode(None)
- for target, spec in targets.iteritems():
- target_node = dependency_nodes[target]
- target_build_file = gyp.common.BuildFile(target)
- dependencies = spec.get('dependencies')
- if not dependencies:
- target_node.dependencies = [root_node]
- root_node.dependents.append(target_node)
- else:
- for dependency in dependencies:
- dependency_node = dependency_nodes.get(dependency)
- if not dependency_node:
- raise GypError("Dependency '%s' not found while "
- "trying to load target %s" % (dependency, target))
- target_node.dependencies.append(dependency_node)
- dependency_node.dependents.append(target_node)
-
- flat_list = root_node.FlattenToList()
-
- # If there's anything left unvisited, there must be a circular dependency
- # (cycle).
- if len(flat_list) != len(targets):
- if not root_node.dependents:
- # If all targets have dependencies, add the first target as a dependent
- # of root_node so that the cycle can be discovered from root_node.
- target = targets.keys()[0]
- target_node = dependency_nodes[target]
- target_node.dependencies.append(root_node)
- root_node.dependents.append(target_node)
-
- cycles = []
- for cycle in root_node.FindCycles():
- paths = [node.ref for node in cycle]
- cycles.append('Cycle: %s' % ' -> '.join(paths))
- raise DependencyGraphNode.CircularException(
- 'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
-
- return [dependency_nodes, flat_list]
-
-
-def VerifyNoGYPFileCircularDependencies(targets):
- # Create a DependencyGraphNode for each gyp file containing a target. Put
- # it into a dict for easy access.
- dependency_nodes = {}
- for target in targets.iterkeys():
- build_file = gyp.common.BuildFile(target)
- if build_file not in dependency_nodes:
- dependency_nodes[build_file] = DependencyGraphNode(build_file)
-
- # Set up the dependency links.
- for target, spec in targets.iteritems():
- build_file = gyp.common.BuildFile(target)
- build_file_node = dependency_nodes[build_file]
- target_dependencies = spec.get('dependencies', [])
- for dependency in target_dependencies:
- try:
- dependency_build_file = gyp.common.BuildFile(dependency)
- except GypError, e:
- gyp.common.ExceptionAppend(
- e, 'while computing dependencies of .gyp file %s' % build_file)
- raise
-
- if dependency_build_file == build_file:
- # A .gyp file is allowed to refer back to itself.
- continue
- dependency_node = dependency_nodes.get(dependency_build_file)
- if not dependency_node:
- raise GypError("Dependancy '%s' not found" % dependency_build_file)
- if dependency_node not in build_file_node.dependencies:
- build_file_node.dependencies.append(dependency_node)
- dependency_node.dependents.append(build_file_node)
-
- # Files that have no dependencies are treated as dependent on root_node.
- root_node = DependencyGraphNode(None)
- for build_file_node in dependency_nodes.itervalues():
- if len(build_file_node.dependencies) == 0:
- build_file_node.dependencies.append(root_node)
- root_node.dependents.append(build_file_node)
-
- flat_list = root_node.FlattenToList()
-
- # If there's anything left unvisited, there must be a circular dependency
- # (cycle).
- if len(flat_list) != len(dependency_nodes):
- if not root_node.dependents:
- # If all files have dependencies, add the first file as a dependent
- # of root_node so that the cycle can be discovered from root_node.
- file_node = dependency_nodes.values()[0]
- file_node.dependencies.append(root_node)
- root_node.dependents.append(file_node)
- cycles = []
- for cycle in root_node.FindCycles():
- paths = [node.ref for node in cycle]
- cycles.append('Cycle: %s' % ' -> '.join(paths))
- raise DependencyGraphNode.CircularException(
- 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
-
-
-def DoDependentSettings(key, flat_list, targets, dependency_nodes):
- # key should be one of all_dependent_settings, direct_dependent_settings,
- # or link_settings.
-
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
-
- if key == 'all_dependent_settings':
- dependencies = dependency_nodes[target].DeepDependencies()
- elif key == 'direct_dependent_settings':
- dependencies = \
- dependency_nodes[target].DirectAndImportedDependencies(targets)
- elif key == 'link_settings':
- dependencies = \
- dependency_nodes[target].DependenciesForLinkSettings(targets)
- else:
- raise GypError("DoDependentSettings doesn't know how to determine "
- 'dependencies for ' + key)
-
- for dependency in dependencies:
- dependency_dict = targets[dependency]
- if key not in dependency_dict:
- continue
- dependency_build_file = gyp.common.BuildFile(dependency)
- MergeDicts(target_dict, dependency_dict[key],
- build_file, dependency_build_file)
-
-
-def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
- sort_dependencies):
- # Recompute target "dependencies" properties. For each static library
- # target, remove "dependencies" entries referring to other static libraries,
- # unless the dependency has the "hard_dependency" attribute set. For each
- # linkable target, add a "dependencies" entry referring to all of the
- # target's computed list of link dependencies (including static libraries)
- # if no such entry is already present.
- for target in flat_list:
- target_dict = targets[target]
- target_type = target_dict['type']
-
- if target_type == 'static_library':
- if 'dependencies' not in target_dict:
- continue
-
- target_dict['dependencies_original'] = target_dict.get(
- 'dependencies', [])[:]
-
- # A static library should not depend on another static library unless
- # the dependency relationship is "hard," which should only be done when
- # a dependent relies on some side effect other than just the build
- # product, like a rule or action output. Further, if a target has a
- # non-hard dependency, but that dependency exports a hard dependency,
- # the non-hard dependency can safely be removed, but the exported hard
- # dependency must be added to the target to keep the same dependency
- # ordering.
- dependencies = \
- dependency_nodes[target].DirectAndImportedDependencies(targets)
- index = 0
- while index < len(dependencies):
- dependency = dependencies[index]
- dependency_dict = targets[dependency]
-
- # Remove every non-hard static library dependency and remove every
- # non-static library dependency that isn't a direct dependency.
- if (dependency_dict['type'] == 'static_library' and \
- not dependency_dict.get('hard_dependency', False)) or \
- (dependency_dict['type'] != 'static_library' and \
- not dependency in target_dict['dependencies']):
- # Take the dependency out of the list, and don't increment index
- # because the next dependency to analyze will shift into the index
- # formerly occupied by the one being removed.
- del dependencies[index]
- else:
- index = index + 1
-
- # Update the dependencies. If the dependencies list is empty, it's not
- # needed, so unhook it.
- if len(dependencies) > 0:
- target_dict['dependencies'] = dependencies
- else:
- del target_dict['dependencies']
-
- elif target_type in linkable_types:
- # Get a list of dependency targets that should be linked into this
- # target. Add them to the dependencies list if they're not already
- # present.
-
- link_dependencies = \
- dependency_nodes[target].DependenciesToLinkAgainst(targets)
- for dependency in link_dependencies:
- if dependency == target:
- continue
- if 'dependencies' not in target_dict:
- target_dict['dependencies'] = []
- if dependency not in target_dict['dependencies']:
- target_dict['dependencies'].append(dependency)
- # Sort the dependencies list in the order from dependents to dependencies.
- # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
- # Note: flat_list is already sorted in the order from dependencies to
- # dependents.
- if sort_dependencies and 'dependencies' in target_dict:
- target_dict['dependencies'] = [dep for dep in reversed(flat_list)
- if dep in target_dict['dependencies']]
-
-
-# Initialize this here to speed up MakePathRelative.
-exception_re = re.compile(r'''["']?[-/$<>^]''')
-
-
-def MakePathRelative(to_file, fro_file, item):
- # If item is a relative path, it's relative to the build file dict that it's
- # coming from. Fix it up to make it relative to the build file dict that
- # it's going into.
- # Exception: any |item| that begins with these special characters is
- # returned without modification.
- # / Used when a path is already absolute (shortcut optimization;
- # such paths would be returned as absolute anyway)
- # $ Used for build environment variables
- # - Used for some build environment flags (such as -lapr-1 in a
- # "libraries" section)
- # < Used for our own variable and command expansions (see ExpandVariables)
- # > Used for our own variable and command expansions (see ExpandVariables)
- # ^ Used for our own variable and command expansions (see ExpandVariables)
- #
- # "/' Used when a value is quoted. If these are present, then we
- # check the second character instead.
- #
- if to_file == fro_file or exception_re.match(item):
- return item
- else:
- # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
- # temporary measure. This should really be addressed by keeping all paths
- # in POSIX until actual project generation.
- ret = os.path.normpath(os.path.join(
- gyp.common.RelativePath(os.path.dirname(fro_file),
- os.path.dirname(to_file)),
- item)).replace('\\', '/')
- if item[-1] == '/':
- ret += '/'
- return ret
-
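-# Example (illustrative, hypothetical paths): moving 'icon.png' from a dict
-# that came from 'src/sub/sub.gyp' into a dict destined for 'src/main.gyp'
-# yields 'sub/icon.png', while items such as '/abs/path', '$(SDKROOT)/x' and
-# '-lapr-1' match exception_re and are returned unmodified.
-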
-def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
-  # The Python documentation recommends that objects which do not support
-  # hashing set __hash__ to None. Python library objects follow this rule.
- is_hashable = lambda val: val.__hash__
-
- # If x is hashable, returns whether x is in s. Else returns whether x is in l.
- def is_in_set_or_list(x, s, l):
- if is_hashable(x):
- return x in s
- return x in l
-
- prepend_index = 0
-
- # Make membership testing of hashables in |to| (in particular, strings)
- # faster.
- hashable_to_set = set(x for x in to if is_hashable(x))
- for item in fro:
- singleton = False
- if type(item) in (str, int):
- # The cheap and easy case.
- if is_paths:
- to_item = MakePathRelative(to_file, fro_file, item)
- else:
- to_item = item
-
- if not (type(item) is str and item.startswith('-')):
- # Any string that doesn't begin with a "-" is a singleton - it can
- # only appear once in a list, to be enforced by the list merge append
- # or prepend.
- singleton = True
- elif type(item) is dict:
- # Make a copy of the dictionary, continuing to look for paths to fix.
- # The other intelligent aspects of merge processing won't apply because
- # item is being merged into an empty dict.
- to_item = {}
- MergeDicts(to_item, item, to_file, fro_file)
- elif type(item) is list:
- # Recurse, making a copy of the list. If the list contains any
- # descendant dicts, path fixing will occur. Note that here, custom
- # values for is_paths and append are dropped; those are only to be
- # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
- # matter anyway because the new |to_item| list is empty.
- to_item = []
- MergeLists(to_item, item, to_file, fro_file)
- else:
- raise TypeError(
- 'Attempt to merge list item of unsupported type ' + \
- item.__class__.__name__)
-
- if append:
- # If appending a singleton that's already in the list, don't append.
- # This ensures that the earliest occurrence of the item will stay put.
- if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
- to.append(to_item)
- if is_hashable(to_item):
- hashable_to_set.add(to_item)
- else:
- # If prepending a singleton that's already in the list, remove the
- # existing instance and proceed with the prepend. This ensures that the
- # item appears at the earliest possible position in the list.
- while singleton and to_item in to:
- to.remove(to_item)
-
- # Don't just insert everything at index 0. That would prepend the new
- # items to the list in reverse order, which would be an unwelcome
- # surprise.
- to.insert(prepend_index, to_item)
- if is_hashable(to_item):
- hashable_to_set.add(to_item)
- prepend_index = prepend_index + 1
-
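-# Example (illustrative, hypothetical file names): merging
-# fro = ['b.cc', '-lm', 'a.cc'] into to = ['a.cc'] with append=True leaves
-# to == ['a.cc', 'b.cc', '-lm']: 'a.cc' is a singleton already present, so it
-# stays put, while '-lm' begins with '-' and is never treated as a singleton.
-# With append=False the same merge instead produces ['b.cc', '-lm', 'a.cc'],
-# moving 'a.cc' to its new, earlier position.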
-
-def MergeDicts(to, fro, to_file, fro_file):
- # I wanted to name the parameter "from" but it's a Python keyword...
- for k, v in fro.iteritems():
- # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
- # copy semantics. Something else may want to merge from the |fro| dict
- # later, and having the same dict ref pointed to twice in the tree isn't
- # what anyone wants considering that the dicts may subsequently be
- # modified.
- if k in to:
- bad_merge = False
- if type(v) in (str, int):
- if type(to[k]) not in (str, int):
- bad_merge = True
- elif type(v) is not type(to[k]):
- bad_merge = True
-
- if bad_merge:
- raise TypeError(
- 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
- ' into incompatible type ' + to[k].__class__.__name__ + \
- ' for key ' + k)
- if type(v) in (str, int):
- # Overwrite the existing value, if any. Cheap and easy.
- is_path = IsPathSection(k)
- if is_path:
- to[k] = MakePathRelative(to_file, fro_file, v)
- else:
- to[k] = v
- elif type(v) is dict:
- # Recurse, guaranteeing copies will be made of objects that require it.
- if not k in to:
- to[k] = {}
- MergeDicts(to[k], v, to_file, fro_file)
- elif type(v) is list:
- # Lists in dicts can be merged with different policies, depending on
- # how the key in the "from" dict (k, the from-key) is written.
- #
-      # Suffix on the from-key:   Action applied to the to-list:
-      #   =                       replace
-      #   +                       prepend
-      #   ?                       set, only if the to-list does not yet exist
-      #   (none)                  append
- #
- # This logic is list-specific, but since it relies on the associated
- # dict key, it's checked in this dict-oriented function.
- ext = k[-1]
- append = True
- if ext == '=':
- list_base = k[:-1]
- lists_incompatible = [list_base, list_base + '?']
- to[list_base] = []
- elif ext == '+':
- list_base = k[:-1]
- lists_incompatible = [list_base + '=', list_base + '?']
- append = False
- elif ext == '?':
- list_base = k[:-1]
- lists_incompatible = [list_base, list_base + '=', list_base + '+']
- else:
- list_base = k
- lists_incompatible = [list_base + '=', list_base + '?']
-
- # Some combinations of merge policies appearing together are meaningless.
- # It's stupid to replace and append simultaneously, for example. Append
- # and prepend are the only policies that can coexist.
- for list_incompatible in lists_incompatible:
- if list_incompatible in fro:
- raise GypError('Incompatible list policies ' + k + ' and ' +
- list_incompatible)
-
- if list_base in to:
- if ext == '?':
- # If the key ends in "?", the list will only be merged if it doesn't
- # already exist.
- continue
- elif type(to[list_base]) is not list:
- # This may not have been checked above if merging in a list with an
- # extension character.
- raise TypeError(
- 'Attempt to merge dict value of type ' + v.__class__.__name__ + \
- ' into incompatible type ' + to[list_base].__class__.__name__ + \
-            ' for key ' + list_base + ' (' + k + ')')
- else:
- to[list_base] = []
-
- # Call MergeLists, which will make copies of objects that require it.
-      # MergeLists can recurse back into MergeDicts, although only to make
-      # copies of dicts (with paths fixed); there is no subsequent dict
-      # "merging" once inside a list, because lists are always replaced,
-      # appended to, or prepended to.
- is_paths = IsPathSection(list_base)
- MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
- else:
- raise TypeError(
- 'Attempt to merge dict value of unsupported type ' + \
- v.__class__.__name__ + ' for key ' + k)
-
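-# Example (illustrative): with to = {'defines': ['A']}, merging
-# fro = {'defines': ['B']} appends, leaving ['A', 'B'];
-# fro = {'defines=': ['B']} replaces, leaving ['B'];
-# fro = {'defines+': ['B']} prepends, leaving ['B', 'A']; and
-# fro = {'defines?': ['B']} is a no-op because 'defines' already exists.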
-
-def MergeConfigWithInheritance(new_configuration_dict, build_file,
- target_dict, configuration, visited):
-  # Skip if previously visited.
- if configuration in visited:
- return
-
- # Look at this configuration.
- configuration_dict = target_dict['configurations'][configuration]
-
- # Merge in parents.
- for parent in configuration_dict.get('inherit_from', []):
- MergeConfigWithInheritance(new_configuration_dict, build_file,
- target_dict, parent, visited + [configuration])
-
- # Merge it into the new config.
- MergeDicts(new_configuration_dict, configuration_dict,
- build_file, build_file)
-
- # Drop abstract.
- if 'abstract' in new_configuration_dict:
- del new_configuration_dict['abstract']
-
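-# Example (illustrative): given configurations {'Common': {'abstract': 1,
-# 'defines': ['X']}, 'Debug': {'inherit_from': ['Common'],
-# 'defines': ['DBG']}}, building 'Debug' merges 'Common' first and then
-# 'Debug' itself, producing 'defines' == ['X', 'DBG'] with 'abstract' dropped.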
-
-def SetUpConfigurations(target, target_dict):
- # key_suffixes is a list of key suffixes that might appear on key names.
- # These suffixes are handled in conditional evaluations (for =, +, and ?)
- # and rules/exclude processing (for ! and /). Keys with these suffixes
- # should be treated the same as keys without.
- key_suffixes = ['=', '+', '?', '!', '/']
-
- build_file = gyp.common.BuildFile(target)
-
- # Provide a single configuration by default if none exists.
- # TODO(mark): Signal an error if default_configurations exists but
- # configurations does not.
- if not 'configurations' in target_dict:
- target_dict['configurations'] = {'Default': {}}
- if not 'default_configuration' in target_dict:
- concrete = [i for (i, config) in target_dict['configurations'].iteritems()
- if not config.get('abstract')]
- target_dict['default_configuration'] = sorted(concrete)[0]
-
- merged_configurations = {}
- configs = target_dict['configurations']
- for (configuration, old_configuration_dict) in configs.iteritems():
- # Skip abstract configurations (saves work only).
- if old_configuration_dict.get('abstract'):
- continue
- # Configurations inherit (most) settings from the enclosing target scope.
- # Get the inheritance relationship right by making a copy of the target
- # dict.
- new_configuration_dict = {}
- for (key, target_val) in target_dict.iteritems():
- key_ext = key[-1:]
- if key_ext in key_suffixes:
- key_base = key[:-1]
- else:
- key_base = key
- if not key_base in non_configuration_keys:
- new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
-
- # Merge in configuration (with all its parents first).
- MergeConfigWithInheritance(new_configuration_dict, build_file,
- target_dict, configuration, [])
-
- merged_configurations[configuration] = new_configuration_dict
-
- # Put the new configurations back into the target dict as a configuration.
- for configuration in merged_configurations.keys():
- target_dict['configurations'][configuration] = (
- merged_configurations[configuration])
-
- # Now drop all the abstract ones.
- for configuration in target_dict['configurations'].keys():
- old_configuration_dict = target_dict['configurations'][configuration]
- if old_configuration_dict.get('abstract'):
- del target_dict['configurations'][configuration]
-
- # Now that all of the target's configurations have been built, go through
- # the target dict's keys and remove everything that's been moved into a
- # "configurations" section.
- delete_keys = []
- for key in target_dict:
- key_ext = key[-1:]
- if key_ext in key_suffixes:
- key_base = key[:-1]
- else:
- key_base = key
- if not key_base in non_configuration_keys:
- delete_keys.append(key)
- for key in delete_keys:
- del target_dict[key]
-
- # Check the configurations to see if they contain invalid keys.
- for configuration in target_dict['configurations'].keys():
- configuration_dict = target_dict['configurations'][configuration]
- for key in configuration_dict.keys():
- if key in invalid_configuration_keys:
- raise GypError('%s not allowed in the %s configuration, found in '
- 'target %s' % (key, configuration, target))
-
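-# Example (illustrative): a target with {'defines': ['D'], 'configurations':
-# {'Debug': {'defines': ['DBG']}}} ends up with a 'Debug' configuration whose
-# 'defines' list is ['D', 'DBG']: target-scope keys are first copied into each
-# concrete configuration, then the configuration's own values are merged on
-# top.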
-
-
-def ProcessListFiltersInDict(name, the_dict):
- """Process regular expression and exclusion-based filters on lists.
-
- An exclusion list is in a dict key named with a trailing "!", like
- "sources!". Every item in such a list is removed from the associated
- main list, which in this example, would be "sources". Removed items are
- placed into a "sources_excluded" list in the dict.
-
- Regular expression (regex) filters are contained in dict keys named with a
- trailing "/", such as "sources/" to operate on the "sources" list. Regex
- filters in a dict take the form:
- 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
- ['include', '_mac\\.cc$'] ],
- The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
- _win.cc. The second filter then includes all files ending in _mac.cc that
- are now or were once in the "sources" list. Items matching an "exclude"
- filter are subject to the same processing as would occur if they were listed
- by name in an exclusion list (ending in "!"). Items matching an "include"
- filter are brought back into the main list if previously excluded by an
- exclusion list or exclusion regex filter. Subsequent matching "exclude"
- patterns can still cause items to be excluded after matching an "include".
- """
-
- # Look through the dictionary for any lists whose keys end in "!" or "/".
- # These are lists that will be treated as exclude lists and regular
- # expression-based exclude/include lists. Collect the lists that are
- # needed first, looking for the lists that they operate on, and assemble
-  # them into |lists|. This is done in a separate loop up front, because
- # the _included and _excluded keys need to be added to the_dict, and that
- # can't be done while iterating through it.
-
- lists = []
- del_lists = []
- for key, value in the_dict.iteritems():
- operation = key[-1]
- if operation != '!' and operation != '/':
- continue
-
- if type(value) is not list:
- raise ValueError(name + ' key ' + key + ' must be list, not ' + \
- value.__class__.__name__)
-
- list_key = key[:-1]
- if list_key not in the_dict:
- # This happens when there's a list like "sources!" but no corresponding
- # "sources" list. Since there's nothing for it to operate on, queue up
- # the "sources!" list for deletion now.
- del_lists.append(key)
- continue
-
- if type(the_dict[list_key]) is not list:
- value = the_dict[list_key]
- raise ValueError(name + ' key ' + list_key + \
- ' must be list, not ' + \
- value.__class__.__name__ + ' when applying ' + \
- {'!': 'exclusion', '/': 'regex'}[operation])
-
- if not list_key in lists:
- lists.append(list_key)
-
- # Delete the lists that are known to be unneeded at this point.
- for del_list in del_lists:
- del the_dict[del_list]
-
- for list_key in lists:
- the_list = the_dict[list_key]
-
- # Initialize the list_actions list, which is parallel to the_list. Each
- # item in list_actions identifies whether the corresponding item in
- # the_list should be excluded, unconditionally preserved (included), or
- # whether no exclusion or inclusion has been applied. Items for which
- # no exclusion or inclusion has been applied (yet) have value -1, items
- # excluded have value 0, and items included have value 1. Includes and
- # excludes override previous actions. All items in list_actions are
- # initialized to -1 because no excludes or includes have been processed
- # yet.
- list_actions = list((-1,) * len(the_list))
-
- exclude_key = list_key + '!'
- if exclude_key in the_dict:
- for exclude_item in the_dict[exclude_key]:
- for index in xrange(0, len(the_list)):
- if exclude_item == the_list[index]:
- # This item matches the exclude_item, so set its action to 0
- # (exclude).
- list_actions[index] = 0
-
- # The "whatever!" list is no longer needed, dump it.
- del the_dict[exclude_key]
-
- regex_key = list_key + '/'
- if regex_key in the_dict:
- for regex_item in the_dict[regex_key]:
- [action, pattern] = regex_item
- pattern_re = re.compile(pattern)
-
-        if action == 'exclude':
-          # Items matching this regex will have their action set to 0
-          # (exclude).
-          action_value = 0
-        elif action == 'include':
-          # Items matching this regex will have their action set to 1
-          # (include).
-          action_value = 1
- else:
- # This is an action that doesn't make any sense.
- raise ValueError('Unrecognized action ' + action + ' in ' + name + \
- ' key ' + regex_key)
-
- for index in xrange(0, len(the_list)):
- list_item = the_list[index]
- if list_actions[index] == action_value:
- # Even if the regex matches, nothing will change so continue (regex
- # searches are expensive).
- continue
- if pattern_re.search(list_item):
- # Regular expression match.
- list_actions[index] = action_value
-
- # The "whatever/" list is no longer needed, dump it.
- del the_dict[regex_key]
-
- # Add excluded items to the excluded list.
- #
- # Note that exclude_key ("sources!") is different from excluded_key
- # ("sources_excluded"). The exclude_key list is input and it was already
- # processed and deleted; the excluded_key list is output and it's about
- # to be created.
- excluded_key = list_key + '_excluded'
- if excluded_key in the_dict:
- raise GypError(name + ' key ' + excluded_key +
-                     ' must not be present prior'
- ' to applying exclusion/regex filters for ' + list_key)
-
- excluded_list = []
-
- # Go backwards through the list_actions list so that as items are deleted,
- # the indices of items that haven't been seen yet don't shift. That means
- # that things need to be prepended to excluded_list to maintain them in the
- # same order that they existed in the_list.
- for index in xrange(len(list_actions) - 1, -1, -1):
- if list_actions[index] == 0:
- # Dump anything with action 0 (exclude). Keep anything with action 1
- # (include) or -1 (no include or exclude seen for the item).
- excluded_list.insert(0, the_list[index])
- del the_list[index]
-
- # If anything was excluded, put the excluded list into the_dict at
- # excluded_key.
- if len(excluded_list) > 0:
- the_dict[excluded_key] = excluded_list
-
- # Now recurse into subdicts and lists that may contain dicts.
- for key, value in the_dict.iteritems():
- if type(value) is dict:
- ProcessListFiltersInDict(key, value)
- elif type(value) is list:
- ProcessListFiltersInList(key, value)
-
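-# Example (illustrative): {'sources': ['a_linux.cc', 'a_mac.cc', 'b.cc'],
-#                          'sources!': ['b.cc'],
-#                          'sources/': [['exclude', '_linux\\.cc$']]}
-# becomes {'sources': ['a_mac.cc'],
-#          'sources_excluded': ['a_linux.cc', 'b.cc']}.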
-
-def ProcessListFiltersInList(name, the_list):
- for item in the_list:
- if type(item) is dict:
- ProcessListFiltersInDict(name, item)
- elif type(item) is list:
- ProcessListFiltersInList(name, item)
-
-
-def ValidateTargetType(target, target_dict):
- """Ensures the 'type' field on the target is one of the known types.
-
- Arguments:
- target: string, name of target.
- target_dict: dict, target spec.
-
- Raises an exception on error.
- """
- VALID_TARGET_TYPES = ('executable', 'loadable_module',
- 'static_library', 'shared_library',
- 'mac_kernel_extension', 'none')
- target_type = target_dict.get('type', None)
- if target_type not in VALID_TARGET_TYPES:
- raise GypError("Target %s has an invalid target type '%s'. "
- "Must be one of %s." %
- (target, target_type, '/'.join(VALID_TARGET_TYPES)))
- if (target_dict.get('standalone_static_library', 0) and
- not target_type == 'static_library'):
- raise GypError('Target %s has type %s but standalone_static_library flag is'
- ' only valid for static_library type.' % (target,
- target_type))
-
-
-def ValidateSourcesInTarget(target, target_dict, build_file,
- duplicate_basename_check):
- if not duplicate_basename_check:
- return
- if target_dict.get('type', None) != 'static_library':
- return
- sources = target_dict.get('sources', [])
- basenames = {}
- for source in sources:
- name, ext = os.path.splitext(source)
- is_compiled_file = ext in [
- '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
- if not is_compiled_file:
- continue
- basename = os.path.basename(name) # Don't include extension.
- basenames.setdefault(basename, []).append(source)
-
- error = ''
- for basename, files in basenames.iteritems():
- if len(files) > 1:
- error += ' %s: %s\n' % (basename, ' '.join(files))
-
- if error:
- print('static library %s has several files with the same basename:\n' %
- target + error + 'libtool on Mac cannot handle that. Use '
- '--no-duplicate-basename-check to disable this validation.')
- raise GypError('Duplicate basenames in sources section, see list above')
-
-
-def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
- """Ensures that the rules sections in target_dict are valid and consistent,
- and determines which sources they apply to.
-
- Arguments:
- target: string, name of target.
- target_dict: dict, target spec containing "rules" and "sources" lists.
- extra_sources_for_rules: a list of keys to scan for rule matches in
- addition to 'sources'.
- """
-
- # Dicts to map between values found in rules' 'rule_name' and 'extension'
- # keys and the rule dicts themselves.
- rule_names = {}
- rule_extensions = {}
-
- rules = target_dict.get('rules', [])
- for rule in rules:
- # Make sure that there's no conflict among rule names and extensions.
- rule_name = rule['rule_name']
- if rule_name in rule_names:
- raise GypError('rule %s exists in duplicate, target %s' %
- (rule_name, target))
- rule_names[rule_name] = rule
-
- rule_extension = rule['extension']
- if rule_extension.startswith('.'):
- rule_extension = rule_extension[1:]
- if rule_extension in rule_extensions:
- raise GypError(('extension %s associated with multiple rules, ' +
- 'target %s rules %s and %s') %
- (rule_extension, target,
- rule_extensions[rule_extension]['rule_name'],
- rule_name))
- rule_extensions[rule_extension] = rule
-
- # Make sure rule_sources isn't already there. It's going to be
- # created below if needed.
- if 'rule_sources' in rule:
- raise GypError(
- 'rule_sources must not exist in input, target %s rule %s' %
- (target, rule_name))
-
- rule_sources = []
- source_keys = ['sources']
- source_keys.extend(extra_sources_for_rules)
- for source_key in source_keys:
- for source in target_dict.get(source_key, []):
- (source_root, source_extension) = os.path.splitext(source)
- if source_extension.startswith('.'):
- source_extension = source_extension[1:]
- if source_extension == rule_extension:
- rule_sources.append(source)
-
- if len(rule_sources) > 0:
- rule['rule_sources'] = rule_sources
-
-
-def ValidateRunAsInTarget(target, target_dict, build_file):
- target_name = target_dict.get('target_name')
- run_as = target_dict.get('run_as')
- if not run_as:
- return
- if type(run_as) is not dict:
- raise GypError("The 'run_as' in target %s from file %s should be a "
- "dictionary." %
- (target_name, build_file))
- action = run_as.get('action')
- if not action:
- raise GypError("The 'run_as' in target %s from file %s must have an "
- "'action' section." %
- (target_name, build_file))
- if type(action) is not list:
- raise GypError("The 'action' for 'run_as' in target %s from file %s "
- "must be a list." %
- (target_name, build_file))
- working_directory = run_as.get('working_directory')
- if working_directory and type(working_directory) is not str:
- raise GypError("The 'working_directory' for 'run_as' in target %s "
- "in file %s should be a string." %
- (target_name, build_file))
- environment = run_as.get('environment')
- if environment and type(environment) is not dict:
- raise GypError("The 'environment' for 'run_as' in target %s "
- "in file %s should be a dictionary." %
- (target_name, build_file))
-
-
-def ValidateActionsInTarget(target, target_dict, build_file):
- '''Validates the inputs to the actions in a target.'''
- target_name = target_dict.get('target_name')
- actions = target_dict.get('actions', [])
- for action in actions:
- action_name = action.get('action_name')
- if not action_name:
- raise GypError("Anonymous action in target %s. "
- "An action must have an 'action_name' field." %
- target_name)
- inputs = action.get('inputs', None)
- if inputs is None:
- raise GypError('Action in target %s has no inputs.' % target_name)
- action_command = action.get('action')
- if action_command and not action_command[0]:
- raise GypError("Empty action as command in target %s." % target_name)
-
-
-def TurnIntIntoStrInDict(the_dict):
- """Given dict the_dict, recursively converts all integers into strings.
- """
-  # Use items instead of iteritems: the dict is modified during iteration
-  # (integer keys are deleted and re-inserted as strings), so a snapshot of
-  # the key/value pairs is needed, and there's no need to revisit the
-  # reinserted keys anyway.
- for k, v in the_dict.items():
- if type(v) is int:
- v = str(v)
- the_dict[k] = v
- elif type(v) is dict:
- TurnIntIntoStrInDict(v)
- elif type(v) is list:
- TurnIntIntoStrInList(v)
-
- if type(k) is int:
- del the_dict[k]
- the_dict[str(k)] = v
-
-
-def TurnIntIntoStrInList(the_list):
- """Given list the_list, recursively converts all integers into strings.
- """
- for index in xrange(0, len(the_list)):
- item = the_list[index]
- if type(item) is int:
- the_list[index] = str(item)
- elif type(item) is dict:
- TurnIntIntoStrInDict(item)
- elif type(item) is list:
- TurnIntIntoStrInList(item)
-
-
-def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
- data):
- """Return only the targets that are deep dependencies of |root_targets|."""
- qualified_root_targets = []
- for target in root_targets:
- target = target.strip()
- qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
- if not qualified_targets:
- raise GypError("Could not find target %s" % target)
- qualified_root_targets.extend(qualified_targets)
-
- wanted_targets = {}
- for target in qualified_root_targets:
- wanted_targets[target] = targets[target]
- for dependency in dependency_nodes[target].DeepDependencies():
- wanted_targets[dependency] = targets[dependency]
-
- wanted_flat_list = [t for t in flat_list if t in wanted_targets]
-
- # Prune unwanted targets from each build_file's data dict.
- for build_file in data['target_build_files']:
- if not 'targets' in data[build_file]:
- continue
- new_targets = []
- for target in data[build_file]['targets']:
- qualified_name = gyp.common.QualifiedTarget(build_file,
- target['target_name'],
- target['toolset'])
- if qualified_name in wanted_targets:
- new_targets.append(target)
- data[build_file]['targets'] = new_targets
-
- return wanted_targets, wanted_flat_list
-
-
-def VerifyNoCollidingTargets(targets):
- """Verify that no two targets in the same directory share the same name.
-
- Arguments:
- targets: A list of targets in the form 'path/to/file.gyp:target_name'.
- """
- # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
- used = {}
- for target in targets:
- # Separate out 'path/to/file.gyp, 'target_name' from
- # 'path/to/file.gyp:target_name'.
- path, name = target.rsplit(':', 1)
- # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
- subdir, gyp = os.path.split(path)
- # Use '.' for the current directory '', so that the error messages make
- # more sense.
- if not subdir:
- subdir = '.'
- # Prepare a key like 'path/to:target_name'.
- key = subdir + ':' + name
- if key in used:
- # Complain if this target is already used.
- raise GypError('Duplicate target name "%s" in directory "%s" used both '
- 'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
- used[key] = gyp
-
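-# Example (illustrative): 'foo/a.gyp:bar' and 'foo/b.gyp:bar' collide because
-# both reduce to the key 'foo:bar', even though they come from different .gyp
-# files.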
-
-def SetGeneratorGlobals(generator_input_info):
- # Set up path_sections and non_configuration_keys with the default data plus
- # the generator-specific data.
- global path_sections
- path_sections = set(base_path_sections)
- path_sections.update(generator_input_info['path_sections'])
-
- global non_configuration_keys
- non_configuration_keys = base_non_configuration_keys[:]
- non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
-
- global multiple_toolsets
- multiple_toolsets = generator_input_info[
- 'generator_supports_multiple_toolsets']
-
- global generator_filelist_paths
- generator_filelist_paths = generator_input_info['generator_filelist_paths']
-
-
-def Load(build_files, variables, includes, depth, generator_input_info, check,
- circular_check, duplicate_basename_check, parallel, root_targets):
- SetGeneratorGlobals(generator_input_info)
- # A generator can have other lists (in addition to sources) be processed
- # for rules.
- extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
-
- # Load build files. This loads every target-containing build file into
- # the |data| dictionary such that the keys to |data| are build file names,
- # and the values are the entire build file contents after "early" or "pre"
- # processing has been done and includes have been resolved.
- # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
- # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
- # track of the keys corresponding to "target" files.
- data = {'target_build_files': set()}
- # Normalize paths everywhere. This is important because paths will be
- # used as keys to the data dict and for references between input files.
- build_files = set(map(os.path.normpath, build_files))
- if parallel:
- LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
- check, generator_input_info)
- else:
- aux_data = {}
- for build_file in build_files:
- try:
- LoadTargetBuildFile(build_file, data, aux_data,
- variables, includes, depth, check, True)
- except Exception, e:
- gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
- raise
-
- # Build a dict to access each target's subdict by qualified name.
- targets = BuildTargetsDict(data)
-
- # Fully qualify all dependency links.
- QualifyDependencies(targets)
-
- # Remove self-dependencies from targets that have 'prune_self_dependencies'
- # set to 1.
- RemoveSelfDependencies(targets)
-
- # Expand dependencies specified as build_file:*.
- ExpandWildcardDependencies(targets, data)
-
- # Remove all dependencies marked as 'link_dependency' from the targets of
- # type 'none'.
- RemoveLinkDependenciesFromNoneTargets(targets)
-
- # Apply exclude (!) and regex (/) list filters only for dependency_sections.
- for target_name, target_dict in targets.iteritems():
- tmp_dict = {}
- for key_base in dependency_sections:
- for op in ('', '!', '/'):
- key = key_base + op
- if key in target_dict:
- tmp_dict[key] = target_dict[key]
- del target_dict[key]
- ProcessListFiltersInDict(target_name, tmp_dict)
- # Write the results back to |target_dict|.
- for key in tmp_dict:
- target_dict[key] = tmp_dict[key]
-
- # Make sure every dependency appears at most once.
- RemoveDuplicateDependencies(targets)
-
- if circular_check:
- # Make sure that any targets in a.gyp don't contain dependencies in other
- # .gyp files that further depend on a.gyp.
- VerifyNoGYPFileCircularDependencies(targets)
-
- [dependency_nodes, flat_list] = BuildDependencyList(targets)
-
- if root_targets:
- # Remove, from |targets| and |flat_list|, the targets that are not deep
- # dependencies of the targets specified in |root_targets|.
- targets, flat_list = PruneUnwantedTargets(
- targets, flat_list, dependency_nodes, root_targets, data)
-
- # Check that no two targets in the same directory have the same name.
- VerifyNoCollidingTargets(flat_list)
-
- # Handle dependent settings of various types.
- for settings_type in ['all_dependent_settings',
- 'direct_dependent_settings',
- 'link_settings']:
- DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
-
- # Take out the dependent settings now that they've been published to all
- # of the targets that require them.
- for target in flat_list:
- if settings_type in targets[target]:
- del targets[target][settings_type]
-
- # Make sure static libraries don't declare dependencies on other static
- # libraries, but that linkables depend on all unlinked static libraries
- # that they need so that their link steps will be correct.
- gii = generator_input_info
- if gii['generator_wants_static_library_dependencies_adjusted']:
- AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
- gii['generator_wants_sorted_dependencies'])
-
- # Apply "post"/"late"/"target" variable expansions and condition evaluations.
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
- ProcessVariablesAndConditionsInDict(
- target_dict, PHASE_LATE, variables, build_file)
-
- # Move everything that can go into a "configurations" section into one.
- for target in flat_list:
- target_dict = targets[target]
- SetUpConfigurations(target, target_dict)
-
- # Apply exclude (!) and regex (/) list filters.
- for target in flat_list:
- target_dict = targets[target]
- ProcessListFiltersInDict(target, target_dict)
-
- # Apply "latelate" variable expansions and condition evaluations.
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
- ProcessVariablesAndConditionsInDict(
- target_dict, PHASE_LATELATE, variables, build_file)
-
- # Make sure that the rules make sense, and build up rule_sources lists as
- # needed. Not all generators will need to use the rule_sources lists, but
- # some may, and it seems best to build the list in a common spot.
- # Also validate actions and run_as elements in targets.
- for target in flat_list:
- target_dict = targets[target]
- build_file = gyp.common.BuildFile(target)
- ValidateTargetType(target, target_dict)
- ValidateSourcesInTarget(target, target_dict, build_file,
- duplicate_basename_check)
- ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
- ValidateRunAsInTarget(target, target_dict, build_file)
- ValidateActionsInTarget(target, target_dict, build_file)
-
- # Generators might not expect ints. Turn them into strs.
- TurnIntIntoStrInDict(data)
-
- # TODO(mark): Return |data| for now because the generator needs a list of
- # build files that came in. In the future, maybe it should just accept
- # a list, and not the whole data dict.
- return [flat_list, targets, data]
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
deleted file mode 100755
index 4234fbb830..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2013 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Unit tests for the input.py file."""
-
-import gyp.input
-import unittest
-import sys
-
-
-class TestFindCycles(unittest.TestCase):
- def setUp(self):
- self.nodes = {}
- for x in ('a', 'b', 'c', 'd', 'e'):
- self.nodes[x] = gyp.input.DependencyGraphNode(x)
-
- def _create_dependency(self, dependent, dependency):
- dependent.dependencies.append(dependency)
- dependency.dependents.append(dependent)
-
- def test_no_cycle_empty_graph(self):
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
-
- def test_no_cycle_line(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['c'])
- self._create_dependency(self.nodes['c'], self.nodes['d'])
-
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
-
- def test_no_cycle_dag(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['a'], self.nodes['c'])
- self._create_dependency(self.nodes['b'], self.nodes['c'])
-
- for label, node in self.nodes.iteritems():
- self.assertEquals([], node.FindCycles())
-
- def test_cycle_self_reference(self):
- self._create_dependency(self.nodes['a'], self.nodes['a'])
-
- self.assertEquals([[self.nodes['a'], self.nodes['a']]],
- self.nodes['a'].FindCycles())
-
- def test_cycle_two_nodes(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['a'])
-
- self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
- self.nodes['a'].FindCycles())
- self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
- self.nodes['b'].FindCycles())
-
- def test_two_cycles(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['a'])
-
- self._create_dependency(self.nodes['b'], self.nodes['c'])
- self._create_dependency(self.nodes['c'], self.nodes['b'])
-
- cycles = self.nodes['a'].FindCycles()
- self.assertTrue(
- [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
- self.assertTrue(
- [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
- self.assertEquals(2, len(cycles))
-
- def test_big_cycle(self):
- self._create_dependency(self.nodes['a'], self.nodes['b'])
- self._create_dependency(self.nodes['b'], self.nodes['c'])
- self._create_dependency(self.nodes['c'], self.nodes['d'])
- self._create_dependency(self.nodes['d'], self.nodes['e'])
- self._create_dependency(self.nodes['e'], self.nodes['a'])
-
- self.assertEquals([[self.nodes['a'],
- self.nodes['b'],
- self.nodes['c'],
- self.nodes['d'],
- self.nodes['e'],
- self.nodes['a']]],
- self.nodes['a'].FindCycles())
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
deleted file mode 100755
index eeeaceb0c7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
+++ /dev/null
@@ -1,610 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions to perform Xcode-style build steps.
-
-These functions are executed via gyp-mac-tool when using the Makefile generator.
-"""
-
-import fcntl
-import fnmatch
-import glob
-import json
-import os
-import plistlib
-import re
-import shutil
-import string
-import subprocess
-import sys
-import tempfile
-
-
-def main(args):
- executor = MacTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class MacTool(object):
- """This class performs all the Mac tooling steps. The methods can either be
- executed directly, or dispatched from an argument list."""
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like copy-info-plist to CopyInfoPlist"""
- return name_string.title().replace('-', '')
-
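-  # Example (illustrative): Dispatch(['copy-bundle-resource', src, dest,
-  # 'False']) resolves to self.ExecCopyBundleResource(src, dest, 'False').
-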
- def ExecCopyBundleResource(self, source, dest, convert_to_binary):
- """Copies a resource file to the bundle/Resources directory, performing any
- necessary compilation on each resource."""
- extension = os.path.splitext(source)[1].lower()
- if os.path.isdir(source):
- # Copy tree.
- # TODO(thakis): This copies file attributes like mtime, while the
- # single-file branch below doesn't. This should probably be changed to
- # be consistent with the single-file branch.
- if os.path.exists(dest):
- shutil.rmtree(dest)
- shutil.copytree(source, dest)
- elif extension == '.xib':
- return self._CopyXIBFile(source, dest)
- elif extension == '.storyboard':
- return self._CopyXIBFile(source, dest)
- elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
- else:
- shutil.copy(source, dest)
-
- def _CopyXIBFile(self, source, dest):
- """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-
- # ibtool sometimes crashes with relative paths. See crbug.com/314728.
- base = os.path.dirname(os.path.realpath(__file__))
-    if not os.path.isabs(source):
-      source = os.path.join(base, source)
-    if not os.path.isabs(dest):
-      dest = os.path.join(base, dest)
-
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
- ibtool_section_re = re.compile(r'/\*.*\*/')
- ibtool_re = re.compile(r'.*note:.*is clipping its content')
- ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
- current_section_header = None
- for line in ibtoolout.stdout:
- if ibtool_section_re.match(line):
- current_section_header = line
- elif not ibtool_re.match(line):
- if current_section_header:
- sys.stdout.write(current_section_header)
- current_section_header = None
- sys.stdout.write(line)
- return ibtoolout.returncode
-
- def _ConvertToBinary(self, dest):
- subprocess.check_call([
- 'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
-
- def _CopyStringsFile(self, source, dest, convert_to_binary):
- """Copies a .strings file using iconv to reconvert the input into UTF-16."""
- input_code = self._DetectInputEncoding(source) or "UTF-8"
-
- # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
- # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
- # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
- # semicolon in dictionary.
- # on invalid files. Do the same kind of validation.
- import CoreFoundation
- s = open(source, 'rb').read()
- d = CoreFoundation.CFDataCreate(None, s, len(s))
- _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
- if error:
- return
-
- fp = open(dest, 'wb')
- fp.write(s.decode(input_code).encode('UTF-16'))
- fp.close()
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _DetectInputEncoding(self, file_name):
- """Reads the first few bytes from file_name and tries to guess the text
- encoding. Returns None as a guess if it can't detect it."""
- fp = open(file_name, 'rb')
- try:
- header = fp.read(3)
-    except Exception:
- fp.close()
- return None
- fp.close()
- if header.startswith("\xFE\xFF"):
- return "UTF-16"
- elif header.startswith("\xFF\xFE"):
- return "UTF-16"
- elif header.startswith("\xEF\xBB\xBF"):
- return "UTF-8"
- else:
- return None
-
- def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
- """Copies the |source| Info.plist to the destination directory |dest|."""
- # Read the source Info.plist into memory.
- fd = open(source, 'r')
- lines = fd.read()
- fd.close()
-
- # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
- plist = plistlib.readPlistFromString(lines)
- if keys:
- plist = dict(plist.items() + json.loads(keys[0]).items())
- lines = plistlib.writePlistToString(plist)
-
- # Go through all the environment variables and replace them as variables in
- # the file.
- IDENT_RE = re.compile(r'[/\s]')
- for key in os.environ:
- if key.startswith('_'):
- continue
- evar = '${%s}' % key
- evalue = os.environ[key]
- lines = string.replace(lines, evar, evalue)
-
-    # Xcode supports various suffixes on environment variables, which are
- # all undocumented. :rfc1034identifier is used in the standard project
- # template these days, and :identifier was used earlier. They are used to
- # convert non-url characters into things that look like valid urls --
- # except that the replacement character for :identifier, '_' isn't valid
- # in a URL either -- oops, hence :rfc1034identifier was born.
- evar = '${%s:identifier}' % key
- evalue = IDENT_RE.sub('_', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- evar = '${%s:rfc1034identifier}' % key
- evalue = IDENT_RE.sub('-', os.environ[key])
- lines = string.replace(lines, evar, evalue)
-
- # Remove any keys with values that haven't been replaced.
- lines = lines.split('\n')
- for i in range(len(lines)):
- if lines[i].strip().startswith("<string>${"):
- lines[i] = None
- lines[i - 1] = None
- lines = '\n'.join(filter(lambda x: x is not None, lines))
-
- # Write out the file with variables replaced.
- fd = open(dest, 'w')
- fd.write(lines)
- fd.close()
-
-    # Write out the PkgInfo file now that the Info.plist file has been
-    # "compiled".
- self._WritePkgInfo(dest)
-
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
- def _WritePkgInfo(self, info_plist):
- """This writes the PkgInfo file from the data stored in Info.plist."""
- plist = plistlib.readPlist(info_plist)
- if not plist:
- return
-
- # Only create PkgInfo for executable types.
- package_type = plist['CFBundlePackageType']
- if package_type != 'APPL':
- return
-
- # The format of PkgInfo is eight characters, representing the bundle type
- # and bundle signature, each four characters. If that is missing, four
- # '?' characters are used instead.
- signature_code = plist.get('CFBundleSignature', '????')
- if len(signature_code) != 4: # Wrong length resets everything, too.
- signature_code = '?' * 4
-
- dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo')
- fp = open(dest, 'w')
- fp.write('%s%s' % (package_type, signature_code))
- fp.close()
-
- def ExecFlock(self, lockfile, *cmd_list):
- """Emulates the most basic behavior of Linux's flock(1)."""
- # Rely on exception handling to report errors.
- fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
- fcntl.flock(fd, fcntl.LOCK_EX)
- return subprocess.call(cmd_list)
-
- def ExecFilterLibtool(self, *cmd_list):
- """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
- symbols'."""
- libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
- libtool_re5 = re.compile(
- r'^.*libtool: warning for library: ' +
- r'.* the table of contents is empty ' +
- r'\(no object file members in the library define global symbols\)$')
- env = os.environ.copy()
- # Ref:
- # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
- # The problem with this flag is that it resets the file mtime on the file to
- # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
- env['ZERO_AR_DATE'] = '1'
- libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
- _, err = libtoolout.communicate()
- for line in err.splitlines():
- if not libtool_re.match(line) and not libtool_re5.match(line):
- print >>sys.stderr, line
- # Unconditionally touch the output .a file on the command line if present
- # and the command succeeded. A bit hacky.
- if not libtoolout.returncode:
- for i in range(len(cmd_list) - 1):
- if cmd_list[i] == "-o" and cmd_list[i+1].endswith('.a'):
- os.utime(cmd_list[i+1], None)
- break
- return libtoolout.returncode
-
- def ExecPackageFramework(self, framework, version):
- """Takes a path to Something.framework and the Current version of that and
- sets up all the symlinks."""
- # Find the name of the binary based on the part before the ".framework".
- binary = os.path.basename(framework).split('.')[0]
-
- CURRENT = 'Current'
- RESOURCES = 'Resources'
- VERSIONS = 'Versions'
-
- if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)):
- # Binary-less frameworks don't seem to contain symlinks (see e.g.
- # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle).
- return
-
- # Move into the framework directory to set the symlinks correctly.
- pwd = os.getcwd()
- os.chdir(framework)
-
- # Set up the Current version.
- self._Relink(version, os.path.join(VERSIONS, CURRENT))
-
- # Set up the root symlinks.
- self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary)
- self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES)
-
- # Back to where we were before!
- os.chdir(pwd)
-
- def _Relink(self, dest, link):
- """Creates a symlink to |dest| named |link|. If |link| already exists,
- it is overwritten."""
- if os.path.lexists(link):
- os.remove(link)
- os.symlink(dest, link)
-
- def ExecCompileXcassets(self, keys, *inputs):
- """Compiles multiple .xcassets files into a single .car file.
-
-    This invokes 'actool' to compile all the input .xcassets files. The
-    |keys| argument is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contain an application icon
-    or a launch image.
-
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs do not contain any imagesets.
- """
- command_line = [
- 'xcrun', 'actool', '--output-format', 'human-readable-text',
- '--compress-pngs', '--notices', '--warnings', '--errors',
- ]
- is_iphone_target = 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ
- if is_iphone_target:
- platform = os.environ['CONFIGURATION'].split('-')[-1]
- if platform not in ('iphoneos', 'iphonesimulator'):
- platform = 'iphonesimulator'
- command_line.extend([
- '--platform', platform, '--target-device', 'iphone',
- '--target-device', 'ipad', '--minimum-deployment-target',
- os.environ['IPHONEOS_DEPLOYMENT_TARGET'], '--compile',
- os.path.abspath(os.environ['CONTENTS_FOLDER_PATH']),
- ])
- else:
- command_line.extend([
- '--platform', 'macosx', '--target-device', 'mac',
- '--minimum-deployment-target', os.environ['MACOSX_DEPLOYMENT_TARGET'],
- '--compile',
- os.path.abspath(os.environ['UNLOCALIZED_RESOURCES_FOLDER_PATH']),
- ])
- if keys:
- keys = json.loads(keys)
- for key, value in keys.iteritems():
- arg_name = '--' + key
- if isinstance(value, bool):
- if value:
- command_line.append(arg_name)
- elif isinstance(value, list):
- for v in value:
- command_line.append(arg_name)
- command_line.append(str(v))
- else:
- command_line.append(arg_name)
- command_line.append(str(value))
- # Note: actool crashes if inputs path are relative, so use os.path.abspath
- # to get absolute path name for inputs.
- command_line.extend(map(os.path.abspath, inputs))
- subprocess.check_call(command_line)
-
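-  # Example (illustrative): keys = '{"app-icon": "AppIcon"}' appends
-  # '--app-icon' followed by 'AppIcon' to the actool command line above.
-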
- def ExecMergeInfoPlist(self, output, *inputs):
- """Merge multiple .plist files into a single .plist file."""
- merged_plist = {}
- for path in inputs:
- plist = self._LoadPlistMaybeBinary(path)
- self._MergePlist(merged_plist, plist)
- plistlib.writePlist(merged_plist, output)
-
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
- """Code sign a bundle.
-
- This function tries to code sign an iOS bundle, following the same
- algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
-    2. pick the provisioning profile that best matches the bundle identifier,
-       and copy it into the bundle as embedded.mobileprovision,
-    3. copy Entitlements.plist from the user or the SDK next to the bundle,
- 4. code sign the bundle.
- """
- resource_rules_path = self._InstallResourceRules(resource_rules)
- substitutions, overrides = self._InstallProvisioningProfile(
- provisioning, self._GetCFBundleIdentifier())
- entitlements_path = self._InstallEntitlements(
- entitlements, substitutions, overrides)
- subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['FULL_PRODUCT_NAME'])])
-
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
- def _InstallProvisioningProfile(self, profile, bundle_identifier):
- """Installs embedded.mobileprovision into the bundle.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
-      A tuple containing two dictionaries: variable substitutions and values
-      to override when generating the entitlements file.
- """
- source_path, provisioning_data, team_id = self._FindProvisioningProfile(
- profile, bundle_identifier)
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'embedded.mobileprovision')
- shutil.copy2(source_path, target_path)
- substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
- return substitutions, provisioning_data['Entitlements']
-
- def _FindProvisioningProfile(self, profile, bundle_identifier):
- """Finds the .mobileprovision file to use for signing the bundle.
-
-    Checks all the installed provisioning profiles (or, if the user specified
-    the PROVISIONING_PROFILE variable, only that one) and selects the most
-    specific one that corresponds to the bundle identifier.
-
- Args:
- profile: string, optional, short name of the .mobileprovision file
- to use, if empty or the file is missing, the best file installed
- will be used
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-
- Returns:
- A tuple of the path to the selected provisioning profile, the data of
- the embedded plist in the provisioning profile and the team identifier
- to use for code signing.
-
- Raises:
- SystemExit: if no .mobileprovision can be used to sign the bundle.
- """
- profiles_dir = os.path.join(
- os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
- if not os.path.isdir(profiles_dir):
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- provisioning_profiles = None
- if profile:
- profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
- if os.path.exists(profile_path):
- provisioning_profiles = [profile_path]
- if not provisioning_profiles:
- provisioning_profiles = glob.glob(
- os.path.join(profiles_dir, '*.mobileprovision'))
- valid_provisioning_profiles = {}
- for profile_path in provisioning_profiles:
- profile_data = self._LoadProvisioningProfile(profile_path)
- app_id_pattern = profile_data.get(
- 'Entitlements', {}).get('application-identifier', '')
- for team_identifier in profile_data.get('TeamIdentifier', []):
- app_id = '%s.%s' % (team_identifier, bundle_identifier)
- if fnmatch.fnmatch(app_id, app_id_pattern):
- valid_provisioning_profiles[app_id_pattern] = (
- profile_path, profile_data, team_identifier)
- if not valid_provisioning_profiles:
- print >>sys.stderr, (
- 'cannot find mobile provisioning for %s' % bundle_identifier)
- sys.exit(1)
- # If the user has multiple provisioning profiles installed that can be
- # used for ${bundle_identifier}, pick the most specific one (ie. the
- # provisioning profile whose pattern is the longest).
- selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
- return valid_provisioning_profiles[selected_key]
-
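-  # Example (illustrative): for bundle identifier 'com.example.app', a profile
-  # whose application-identifier pattern is 'TEAM1.com.example.*' wins over
-  # one with 'TEAM1.*', because the longer pattern is the more specific match.
-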
- def _LoadProvisioningProfile(self, profile_path):
- """Extracts the plist embedded in a provisioning profile.
-
- Args:
- profile_path: string, path to the .mobileprovision file
-
- Returns:
- Content of the plist embedded in the provisioning profile as a dictionary.
- """
- with tempfile.NamedTemporaryFile() as temp:
- subprocess.check_call([
- 'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
- return self._LoadPlistMaybeBinary(temp.name)
-
- def _MergePlist(self, merged_plist, plist):
- """Merge |plist| into |merged_plist|."""
- for key, value in plist.iteritems():
- if isinstance(value, dict):
- merged_value = merged_plist.get(key, {})
- if isinstance(merged_value, dict):
- self._MergePlist(merged_value, value)
- merged_plist[key] = merged_value
- else:
- merged_plist[key] = value
- else:
- merged_plist[key] = value
-
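-  # Example (illustrative): merging {'A': {'x': 1}, 'B': 2} into
-  # {'A': {'y': 3}} yields {'A': {'y': 3, 'x': 1}, 'B': 2}; nested dicts are
-  # merged recursively, any other value type is simply overwritten.
-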
- def _LoadPlistMaybeBinary(self, plist_path):
- """Loads into a memory a plist possibly encoded in binary format.
-
- This is a wrapper around plistlib.readPlist that tries to convert the
- plist to the XML format if it can't be parsed (assuming that it is in
- the binary format).
-
- Args:
- plist_path: string, path to a plist file, in XML or binary format
-
- Returns:
- Content of the plist as a dictionary.
- """
- try:
- # First, try to read the file using plistlib that only supports XML,
- # and if an exception is raised, convert a temporary copy to XML and
- # load that copy.
- return plistlib.readPlist(plist_path)
- except:
- pass
- with tempfile.NamedTemporaryFile() as temp:
- shutil.copy2(plist_path, temp.name)
- subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
- return plistlib.readPlist(temp.name)
-
- def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
- """Constructs a dictionary of variable substitutions for Entitlements.plist.
-
- Args:
- bundle_identifier: string, value of CFBundleIdentifier from Info.plist
- app_identifier_prefix: string, value for AppIdentifierPrefix
-
- Returns:
- Dictionary of substitutions to apply when generating Entitlements.plist.
- """
- return {
- 'CFBundleIdentifier': bundle_identifier,
- 'AppIdentifierPrefix': app_identifier_prefix,
- }
-
- def _GetCFBundleIdentifier(self):
- """Extracts CFBundleIdentifier value from Info.plist in the bundle.
-
- Returns:
- Value of CFBundleIdentifier in the Info.plist located in the bundle.
- """
- info_plist_path = os.path.join(
- os.environ['TARGET_BUILD_DIR'],
- os.environ['INFOPLIST_PATH'])
- info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
- return info_plist_data['CFBundleIdentifier']
-
- def _InstallEntitlements(self, entitlements, substitutions, overrides):
- """Generates and install the ${BundleName}.xcent entitlements file.
-
- Expands variables "$(variable)" pattern in the source entitlements file,
- add extra entitlements defined in the .mobileprovision file and the copy
- the generated plist to "${BundlePath}.xcent".
-
- Args:
- entitlements: string, optional, path to the Entitlements.plist template
- to use, defaults to "${SDKROOT}/Entitlements.plist"
- substitutions: dictionary, variable substitutions
- overrides: dictionary, values to add to the entitlements
-
- Returns:
- Path to the generated entitlements file.
- """
- source_path = entitlements
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['PRODUCT_NAME'] + '.xcent')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'],
- 'Entitlements.plist')
- shutil.copy2(source_path, target_path)
- data = self._LoadPlistMaybeBinary(target_path)
- data = self._ExpandVariables(data, substitutions)
- if overrides:
- for key in overrides:
- if key not in data:
- data[key] = overrides[key]
- plistlib.writePlist(data, target_path)
- return target_path
-
- def _ExpandVariables(self, data, substitutions):
- """Expands variables "$(variable)" in data.
-
- Args:
- data: object, can be either string, list or dictionary
- substitutions: dictionary, variable substitutions to perform
-
- Returns:
-      Copy of data where each reference to "$(variable)" has been replaced
- by the corresponding value found in substitutions, or left intact if
- the key was not found.
- """
- if isinstance(data, str):
- for key, value in substitutions.iteritems():
- data = data.replace('$(%s)' % key, value)
- return data
- if isinstance(data, list):
- return [self._ExpandVariables(v, substitutions) for v in data]
- if isinstance(data, dict):
- return {k: self._ExpandVariables(data[k], substitutions) for k in data}
- return data
-
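-# Editor's sketch (not part of the original file): _ExpandVariables
-# substitutes each "$(key)" occurrence from the dictionary, recursing into
-# lists and dicts:
-#   self._ExpandVariables('$(CFBundleIdentifier).push',
-#                         {'CFBundleIdentifier': 'com.example.app'})
-#     -> 'com.example.app.push'
-# Unknown "$(...)" references are left intact.
-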
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
deleted file mode 100644
index ca67b122f0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
+++ /dev/null
@@ -1,1087 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module helps emulate Visual Studio 2008 behavior on top of other
-build systems, primarily ninja.
-"""
-
-import os
-import re
-import subprocess
-import sys
-
-from gyp.common import OrderedSet
-import gyp.MSVSUtil
-import gyp.MSVSVersion
-
-
-windows_quoter_regex = re.compile(r'(\\*)"')
-
-
-def QuoteForRspFile(arg):
- """Quote a command line argument so that it appears as one argument when
- processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
- Windows programs)."""
- # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
- # threads. This is actually the quoting rules for CommandLineToArgvW, not
- # for the shell, because the shell doesn't do anything in Windows. This
- # works more or less because most programs (including the compiler, etc.)
- # use that function to handle command line arguments.
-
- # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
- # preceding it, and results in n backslashes + the quote. So we substitute
- # in 2* what we match, +1 more, plus the quote.
- arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
-
- # %'s also need to be doubled otherwise they're interpreted as batch
- # positional arguments. Also make sure to escape the % so that they're
-  # passed literally through escaping so they can be reduced back to the
- # original %. Otherwise, trying to pass the literal representation that
- # looks like an environment variable to the shell (e.g. %PATH%) would fail.
- arg = arg.replace('%', '%%')
-
- # These commands are used in rsp files, so no escaping for the shell (via ^)
- # is necessary.
-
- # Finally, wrap the whole thing in quotes so that the above quote rule
- # applies and whitespace isn't a word break.
- return '"' + arg + '"'
-
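-# Editor's sketch (not part of the original file): representative results of
-# the quoting rules above, per CommandLineToArgvW semantics:
-#   QuoteForRspFile('plain')   -> '"plain"'
-#   QuoteForRspFile('a b')     -> '"a b"'
-#   QuoteForRspFile('a"b')     -> '"a\\"b"'     (literal quote escaped)
-#   QuoteForRspFile('%PATH%')  -> '"%%PATH%%"'  (%'s doubled for batch)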
-
-def EncodeRspFileList(args):
-  """Process a list of arguments using QuoteForRspFile."""
- # Note that the first argument is assumed to be the command. Don't add
- # quotes around it because then built-ins like 'echo', etc. won't work.
- # Take care to normpath only the path in the case of 'call ../x.bat' because
- # otherwise the whole thing is incorrectly interpreted as a path and not
- # normalized correctly.
- if not args: return ''
- if args[0].startswith('call '):
- call, program = args[0].split(' ', 1)
- program = call + ' ' + os.path.normpath(program)
- else:
- program = os.path.normpath(args[0])
- return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
-
-
-def _GenericRetrieve(root, default, path):
- """Given a list of dictionary keys |path| and a tree of dicts |root|, find
- value at path, or return |default| if any of the path doesn't exist."""
- if not root:
- return default
- if not path:
- return root
- return _GenericRetrieve(root.get(path[0]), default, path[1:])
-
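-# Editor's sketch (not in the original): _GenericRetrieve walks one dict
-# level per path element:
-#   tree = {'VCLinkerTool': {'OutputFile': 'a.exe'}}
-#   _GenericRetrieve(tree, None, ('VCLinkerTool', 'OutputFile'))  -> 'a.exe'
-#   _GenericRetrieve(tree, [], ('VCLinkerTool', 'Missing'))       -> []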
-
-def _AddPrefix(element, prefix):
- """Add |prefix| to |element| or each subelement if element is iterable."""
- if element is None:
- return element
- # Note, not Iterable because we don't want to handle strings like that.
- if isinstance(element, list) or isinstance(element, tuple):
- return [prefix + e for e in element]
- else:
- return prefix + element
-
-
-def _DoRemapping(element, map):
- """If |element| then remap it through |map|. If |element| is iterable then
- each item will be remapped. Any elements not found will be removed."""
- if map is not None and element is not None:
- if not callable(map):
- map = map.get # Assume it's a dict, otherwise a callable to do the remap.
- if isinstance(element, list) or isinstance(element, tuple):
- element = filter(None, [map(elem) for elem in element])
- else:
- element = map(element)
- return element
-
-
-def _AppendOrReturn(append, element):
- """If |append| is None, simply return |element|. If |append| is not None,
- then add |element| to it, adding each item in |element| if it's a list or
- tuple."""
- if append is not None and element is not None:
- if isinstance(element, list) or isinstance(element, tuple):
- append.extend(element)
- else:
- append.append(element)
- else:
- return element
-
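-# Editor's sketch (not in the original): these helpers compose inside
-# MsvsSettings._GetAndMunge, e.g. for TargetMachine:
-#   _DoRemapping(['1', '9'], {'1': 'X86'}.get)  -> ['X86']  (misses dropped)
-#   _AddPrefix(['X86'], '/MACHINE:')            -> ['/MACHINE:X86']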
-
-def _FindDirectXInstallation():
- """Try to find an installation location for the DirectX SDK. Check for the
- standard environment variable, and if that doesn't exist, try to find
- via the registry. May return None if not found in either location."""
- # Return previously calculated value, if there is one
- if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
- return _FindDirectXInstallation.dxsdk_dir
-
- dxsdk_dir = os.environ.get('DXSDK_DIR')
- if not dxsdk_dir:
- # Setup params to pass to and attempt to launch reg.exe.
- cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- for line in p.communicate()[0].splitlines():
- if 'InstallPath' in line:
- dxsdk_dir = line.split(' ')[3] + "\\"
-
- # Cache return value
- _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
- return dxsdk_dir
-
-
-def GetGlobalVSMacroEnv(vs_version):
- """Get a dict of variables mapping internal VS macro names to their gyp
- equivalents. Returns all variables that are independent of the target."""
- env = {}
- # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
- # Visual Studio is actually installed.
- if vs_version.Path():
- env['$(VSInstallDir)'] = vs_version.Path()
- env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
- # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
- # set. This happens when the SDK is sync'd via src-internal, rather than
- # by typical end-user installation of the SDK. If it's not set, we don't
- # want to leave the unexpanded variable in the path, so simply strip it.
- dxsdk_dir = _FindDirectXInstallation()
- env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
- # Try to find an installation location for the Windows DDK by checking
- # the WDK_DIR environment variable, may be None.
- env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
- return env
-
-def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
- """Finds msvs_system_include_dirs that are common to all targets, removes
- them from all targets, and returns an OrderedSet containing them."""
- all_system_includes = OrderedSet(
- configs[0].get('msvs_system_include_dirs', []))
- for config in configs[1:]:
- system_includes = config.get('msvs_system_include_dirs', [])
- all_system_includes = all_system_includes & OrderedSet(system_includes)
- if not all_system_includes:
- return None
- # Expand macros in all_system_includes.
- env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
- expanded_system_includes = OrderedSet([ExpandMacros(include, env)
- for include in all_system_includes])
- if any(['$' in include for include in expanded_system_includes]):
- # Some path relies on target-specific variables, bail.
- return None
-
- # Remove system includes shared by all targets from the targets.
- for config in configs:
- includes = config.get('msvs_system_include_dirs', [])
- if includes: # Don't insert a msvs_system_include_dirs key if not needed.
- # This must check the unexpanded includes list:
- new_includes = [i for i in includes if i not in all_system_includes]
- config['msvs_system_include_dirs'] = new_includes
- return expanded_system_includes
-
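-# Editor's sketch (not in the original): if every config lists
-# '$(DXSDK_DIR)include' in msvs_system_include_dirs, that entry is removed
-# from each config and returned (macro-expanded) in the shared OrderedSet;
-# if any expanded path still contains '$', the function bails with None.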
-
-class MsvsSettings(object):
- """A class that understands the gyp 'msvs_...' values (especially the
-  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
- class helps map those settings to command line options."""
-
- def __init__(self, spec, generator_flags):
- self.spec = spec
- self.vs_version = GetVSVersion(generator_flags)
-
- supported_fields = [
- ('msvs_configuration_attributes', dict),
- ('msvs_settings', dict),
- ('msvs_system_include_dirs', list),
- ('msvs_disabled_warnings', list),
- ('msvs_precompiled_header', str),
- ('msvs_precompiled_source', str),
- ('msvs_configuration_platform', str),
- ('msvs_target_platform', str),
- ]
- configs = spec['configurations']
- for field, default in supported_fields:
- setattr(self, field, {})
- for configname, config in configs.iteritems():
- getattr(self, field)[configname] = config.get(field, default())
-
- self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
-
- unsupported_fields = [
- 'msvs_prebuild',
- 'msvs_postbuild',
- ]
- unsupported = []
- for field in unsupported_fields:
- for config in configs.values():
- if field in config:
- unsupported += ["%s not supported (target %s)." %
- (field, spec['target_name'])]
- if unsupported:
- raise Exception('\n'.join(unsupported))
-
- def GetExtension(self):
- """Returns the extension for the target, with no leading dot.
-
- Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
- the target type.
- """
- ext = self.spec.get('product_extension', None)
- if ext:
- return ext
- return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
-
- def GetVSMacroEnv(self, base_to_build=None, config=None):
- """Get a dict of variables mapping internal VS macro names to their gyp
- equivalents."""
- target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
- target_name = self.spec.get('product_prefix', '') + \
- self.spec.get('product_name', self.spec['target_name'])
- target_dir = base_to_build + '\\' if base_to_build else ''
- target_ext = '.' + self.GetExtension()
- target_file_name = target_name + target_ext
-
- replacements = {
- '$(InputName)': '${root}',
- '$(InputPath)': '${source}',
- '$(IntDir)': '$!INTERMEDIATE_DIR',
- '$(OutDir)\\': target_dir,
- '$(PlatformName)': target_platform,
- '$(ProjectDir)\\': '',
- '$(ProjectName)': self.spec['target_name'],
- '$(TargetDir)\\': target_dir,
- '$(TargetExt)': target_ext,
- '$(TargetFileName)': target_file_name,
- '$(TargetName)': target_name,
- '$(TargetPath)': os.path.join(target_dir, target_file_name),
- }
- replacements.update(GetGlobalVSMacroEnv(self.vs_version))
- return replacements
-
- def ConvertVSMacros(self, s, base_to_build=None, config=None):
- """Convert from VS macro names to something equivalent."""
- env = self.GetVSMacroEnv(base_to_build, config=config)
- return ExpandMacros(s, env)
-
- def AdjustLibraries(self, libraries):
- """Strip -l from library if it's specified with that."""
- libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
- return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
-
- def _GetAndMunge(self, field, path, default, prefix, append, map):
- """Retrieve a value from |field| at |path| or return |default|. If
- |append| is specified, and the item is found, it will be appended to that
- object instead of returned. If |map| is specified, results will be
- remapped through |map| before being returned or appended."""
- result = _GenericRetrieve(field, default, path)
- result = _DoRemapping(result, map)
- result = _AddPrefix(result, prefix)
- return _AppendOrReturn(append, result)
-
- class _GetWrapper(object):
- def __init__(self, parent, field, base_path, append=None):
- self.parent = parent
- self.field = field
- self.base_path = [base_path]
- self.append = append
- def __call__(self, name, map=None, prefix='', default=None):
- return self.parent._GetAndMunge(self.field, self.base_path + [name],
- default=default, prefix=prefix, append=self.append, map=map)
-
- def GetArch(self, config):
- """Get architecture based on msvs_configuration_platform and
- msvs_target_platform. Returns either 'x86' or 'x64'."""
- configuration_platform = self.msvs_configuration_platform.get(config, '')
- platform = self.msvs_target_platform.get(config, '')
- if not platform: # If no specific override, use the configuration's.
- platform = configuration_platform
- # Map from platform to architecture.
- return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
-
- def _TargetConfig(self, config):
- """Returns the target-specific configuration."""
-    # There are two levels of architecture/platform specification in VS. The
- # first level is globally for the configuration (this is what we consider
- # "the" config at the gyp level, which will be something like 'Debug' or
- # 'Release_x64'), and a second target-specific configuration, which is an
- # override for the global one. |config| is remapped here to take into
- # account the local target-specific overrides to the global configuration.
- arch = self.GetArch(config)
- if arch == 'x64' and not config.endswith('_x64'):
- config += '_x64'
- if arch == 'x86' and config.endswith('_x64'):
- config = config.rsplit('_', 1)[0]
- return config
-
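-  # Editor's sketch (not in the original): with
-  #   msvs_target_platform == {'Debug': 'x64'}
-  # a gyp-level config named 'Debug' is remapped to 'Debug_x64', so settings
-  # are read from the x64 variant; conversely an x86 override strips '_x64'.
-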
- def _Setting(self, path, config,
- default=None, prefix='', append=None, map=None):
- """_GetAndMunge for msvs_settings."""
- return self._GetAndMunge(
- self.msvs_settings[config], path, default, prefix, append, map)
-
- def _ConfigAttrib(self, path, config,
- default=None, prefix='', append=None, map=None):
- """_GetAndMunge for msvs_configuration_attributes."""
- return self._GetAndMunge(
- self.msvs_configuration_attributes[config],
- path, default, prefix, append, map)
-
- def AdjustIncludeDirs(self, include_dirs, config):
- """Updates include_dirs to expand VS specific paths, and adds the system
- include dirs used for platform SDK and similar."""
- config = self._TargetConfig(config)
- includes = include_dirs + self.msvs_system_include_dirs[config]
- includes.extend(self._Setting(
- ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
- return [self.ConvertVSMacros(p, config=config) for p in includes]
-
- def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
- """Updates midl_include_dirs to expand VS specific paths, and adds the
- system include dirs used for platform SDK and similar."""
- config = self._TargetConfig(config)
- includes = midl_include_dirs + self.msvs_system_include_dirs[config]
- includes.extend(self._Setting(
- ('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
- return [self.ConvertVSMacros(p, config=config) for p in includes]
-
- def GetComputedDefines(self, config):
- """Returns the set of defines that are injected to the defines list based
- on other VS settings."""
- config = self._TargetConfig(config)
- defines = []
- if self._ConfigAttrib(['CharacterSet'], config) == '1':
- defines.extend(('_UNICODE', 'UNICODE'))
- if self._ConfigAttrib(['CharacterSet'], config) == '2':
- defines.append('_MBCS')
- defines.extend(self._Setting(
- ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
- return defines
-
- def GetCompilerPdbName(self, config, expand_special):
- """Get the pdb file name that should be used for compiler invocations, or
- None if there's no explicit name specified."""
- config = self._TargetConfig(config)
- pdbname = self._Setting(
- ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
- if pdbname:
- pdbname = expand_special(self.ConvertVSMacros(pdbname))
- return pdbname
-
- def GetMapFileName(self, config, expand_special):
-    """Gets the explicitly overridden map file name for a target or returns None
- if it's not set."""
- config = self._TargetConfig(config)
- map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
- if map_file:
- map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
- return map_file
-
- def GetOutputName(self, config, expand_special):
- """Gets the explicitly overridden output name for a target or returns None
- if it's not overridden."""
- config = self._TargetConfig(config)
- type = self.spec['type']
- root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
- # TODO(scottmg): Handle OutputDirectory without OutputFile.
- output_file = self._Setting((root, 'OutputFile'), config)
- if output_file:
- output_file = expand_special(self.ConvertVSMacros(
- output_file, config=config))
- return output_file
-
- def GetPDBName(self, config, expand_special, default):
- """Gets the explicitly overridden pdb name for a target or returns
- default if it's not overridden, or if no pdb will be generated."""
- config = self._TargetConfig(config)
- output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
- generate_debug_info = self._Setting(
- ('VCLinkerTool', 'GenerateDebugInformation'), config)
- if generate_debug_info == 'true':
- if output_file:
- return expand_special(self.ConvertVSMacros(output_file, config=config))
- else:
- return default
- else:
- return None
-
- def GetNoImportLibrary(self, config):
- """If NoImportLibrary: true, ninja will not expect the output to include
- an import library."""
- config = self._TargetConfig(config)
- noimplib = self._Setting(('NoImportLibrary',), config)
- return noimplib == 'true'
-
- def GetAsmflags(self, config):
- """Returns the flags that need to be added to ml invocations."""
- config = self._TargetConfig(config)
- asmflags = []
- safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
- if safeseh == 'true':
- asmflags.append('/safeseh')
- return asmflags
-
- def GetCflags(self, config):
- """Returns the flags that need to be added to .c and .cc compilations."""
- config = self._TargetConfig(config)
- cflags = []
- cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
- cl = self._GetWrapper(self, self.msvs_settings[config],
- 'VCCLCompilerTool', append=cflags)
- cl('Optimization',
- map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
- cl('InlineFunctionExpansion', prefix='/Ob')
- cl('DisableSpecificWarnings', prefix='/wd')
- cl('StringPooling', map={'true': '/GF'})
- cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
- cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
- cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
- cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
- cl('FloatingPointModel',
- map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
- default='0')
- cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
- cl('WholeProgramOptimization', map={'true': '/GL'})
- cl('WarningLevel', prefix='/W')
- cl('WarnAsError', map={'true': '/WX'})
- cl('CallingConvention',
- map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
- cl('DebugInformationFormat',
- map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
- cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
- cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
- cl('MinimalRebuild', map={'true': '/Gm'})
- cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
- cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
- cl('RuntimeLibrary',
- map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
- cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
- cl('DefaultCharIsUnsigned', map={'true': '/J'})
- cl('TreatWChar_tAsBuiltInType',
- map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
- cl('EnablePREfast', map={'true': '/analyze'})
- cl('AdditionalOptions', prefix='')
- cl('EnableEnhancedInstructionSet',
- map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
- prefix='/arch:')
- cflags.extend(['/FI' + f for f in self._Setting(
- ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
- if self.vs_version.short_name in ('2013', '2013e', '2015'):
- # New flag required in 2013 to maintain previous PDB behavior.
- cflags.append('/FS')
- # ninja handles parallelism by itself, don't have the compiler do it too.
- cflags = filter(lambda x: not x.startswith('/MP'), cflags)
- return cflags
-
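-  # Editor's sketch (not in the original): a settings fragment such as
-  #   'VCCLCompilerTool': {'Optimization': '2', 'WarningLevel': '4',
-  #                        'WarnAsError': 'true', 'RuntimeLibrary': '2'}
-  # maps through the tables above to /O2, /W4, /WX and /MD (in call order).
-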
- def _GetPchFlags(self, config, extension):
- """Get the flags to be added to the cflags for precompiled header support.
- """
- config = self._TargetConfig(config)
- # The PCH is only built once by a particular source file. Usage of PCH must
- # only be for the same language (i.e. C vs. C++), so only include the pch
- # flags when the language matches.
- if self.msvs_precompiled_header[config]:
- source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
- if _LanguageMatchesForPch(source_ext, extension):
- pch = os.path.split(self.msvs_precompiled_header[config])[1]
- return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
- return []
-
- def GetCflagsC(self, config):
- """Returns the flags that need to be added to .c compilations."""
- config = self._TargetConfig(config)
- return self._GetPchFlags(config, '.c')
-
- def GetCflagsCC(self, config):
- """Returns the flags that need to be added to .cc compilations."""
- config = self._TargetConfig(config)
- return ['/TP'] + self._GetPchFlags(config, '.cc')
-
- def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
- """Get and normalize the list of paths in AdditionalLibraryDirectories
- setting."""
- config = self._TargetConfig(config)
- libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
- config, default=[])
- libpaths = [os.path.normpath(
- gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
- for p in libpaths]
- return ['/LIBPATH:"' + p + '"' for p in libpaths]
-
- def GetLibFlags(self, config, gyp_to_build_path):
- """Returns the flags that need to be added to lib commands."""
- config = self._TargetConfig(config)
- libflags = []
- lib = self._GetWrapper(self, self.msvs_settings[config],
- 'VCLibrarianTool', append=libflags)
- libflags.extend(self._GetAdditionalLibraryDirectories(
- 'VCLibrarianTool', config, gyp_to_build_path))
- lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
- lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
- prefix='/MACHINE:')
- lib('AdditionalOptions')
- return libflags
-
- def GetDefFile(self, gyp_to_build_path):
- """Returns the .def file from sources, if any. Otherwise returns None."""
- spec = self.spec
- if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
- def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
- if len(def_files) == 1:
- return gyp_to_build_path(def_files[0])
- elif len(def_files) > 1:
- raise Exception("Multiple .def files")
- return None
-
- def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
- """.def files get implicitly converted to a ModuleDefinitionFile for the
- linker in the VS generator. Emulate that behaviour here."""
- def_file = self.GetDefFile(gyp_to_build_path)
- if def_file:
- ldflags.append('/DEF:"%s"' % def_file)
-
- def GetPGDName(self, config, expand_special):
- """Gets the explicitly overridden pgd name for a target or returns None
- if it's not overridden."""
- config = self._TargetConfig(config)
- output_file = self._Setting(
- ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
- if output_file:
- output_file = expand_special(self.ConvertVSMacros(
- output_file, config=config))
- return output_file
-
- def GetLdflags(self, config, gyp_to_build_path, expand_special,
- manifest_base_name, output_name, is_executable, build_dir):
- """Returns the flags that need to be added to link commands, and the
- manifest files."""
- config = self._TargetConfig(config)
- ldflags = []
- ld = self._GetWrapper(self, self.msvs_settings[config],
- 'VCLinkerTool', append=ldflags)
- self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
- ld('GenerateDebugInformation', map={'true': '/DEBUG'})
- ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
- prefix='/MACHINE:')
- ldflags.extend(self._GetAdditionalLibraryDirectories(
- 'VCLinkerTool', config, gyp_to_build_path))
- ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
- ld('TreatLinkerWarningAsErrors', prefix='/WX',
- map={'true': '', 'false': ':NO'})
- out = self.GetOutputName(config, expand_special)
- if out:
- ldflags.append('/OUT:' + out)
- pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
- if pdb:
- ldflags.append('/PDB:' + pdb)
- pgd = self.GetPGDName(config, expand_special)
- if pgd:
- ldflags.append('/PGD:' + pgd)
- map_file = self.GetMapFileName(config, expand_special)
- ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
- else '/MAP'})
- ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
- ld('AdditionalOptions', prefix='')
-
- minimum_required_version = self._Setting(
- ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
- if minimum_required_version:
- minimum_required_version = ',' + minimum_required_version
- ld('SubSystem',
- map={'1': 'CONSOLE%s' % minimum_required_version,
- '2': 'WINDOWS%s' % minimum_required_version},
- prefix='/SUBSYSTEM:')
-
- stack_reserve_size = self._Setting(
- ('VCLinkerTool', 'StackReserveSize'), config, default='')
- if stack_reserve_size:
- stack_commit_size = self._Setting(
- ('VCLinkerTool', 'StackCommitSize'), config, default='')
- if stack_commit_size:
- stack_commit_size = ',' + stack_commit_size
- ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
-
- ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
- ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
- ld('BaseAddress', prefix='/BASE:')
- ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
- ld('RandomizedBaseAddress',
- map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
- ld('DataExecutionPrevention',
- map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
- ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
- ld('ForceSymbolReferences', prefix='/INCLUDE:')
- ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
- ld('LinkTimeCodeGeneration',
- map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
- '4': ':PGUPDATE'},
- prefix='/LTCG')
- ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
- ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
- ld('EntryPointSymbol', prefix='/ENTRY:')
- ld('Profile', map={'true': '/PROFILE'})
- ld('LargeAddressAware',
- map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
- # TODO(scottmg): This should sort of be somewhere else (not really a flag).
- ld('AdditionalDependencies', prefix='')
-
- if self.GetArch(config) == 'x86':
- safeseh_default = 'true'
- else:
- safeseh_default = None
- ld('ImageHasSafeExceptionHandlers',
- map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
- default=safeseh_default)
-
- # If the base address is not specifically controlled, DYNAMICBASE should
- # be on by default.
- base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
- ldflags)
- if not base_flags:
- ldflags.append('/DYNAMICBASE')
-
- # If the NXCOMPAT flag has not been specified, default to on. Despite the
- # documentation that says this only defaults to on when the subsystem is
- # Vista or greater (which applies to the linker), the IDE defaults it on
- # unless it's explicitly off.
- if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
- ldflags.append('/NXCOMPAT')
-
- have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
- manifest_flags, intermediate_manifest, manifest_files = \
- self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
- is_executable and not have_def_file, build_dir)
- ldflags.extend(manifest_flags)
- return ldflags, intermediate_manifest, manifest_files
-
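-  # Editor's sketch (not in the original): SubSystem '1' with
-  # MinimumRequiredVersion '5.02' yields '/SUBSYSTEM:CONSOLE,5.02';
-  # StackReserveSize '1048576' with no commit size yields '/STACK:1048576';
-  # /DYNAMICBASE and /NXCOMPAT are appended unless explicitly disabled.
-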
- def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
- allow_isolation, build_dir):
- """Returns a 3-tuple:
- - the set of flags that need to be added to the link to generate
- a default manifest
- - the intermediate manifest that the linker will generate that should be
- used to assert it doesn't add anything to the merged one.
- - the list of all the manifest files to be merged by the manifest tool and
- included into the link."""
- generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
- config,
- default='true')
- if generate_manifest != 'true':
- # This means not only that the linker should not generate the intermediate
- # manifest but also that the manifest tool should do nothing even when
- # additional manifests are specified.
- return ['/MANIFEST:NO'], [], []
-
- output_name = name + '.intermediate.manifest'
- flags = [
- '/MANIFEST',
- '/ManifestFile:' + output_name,
- ]
-
- # Instead of using the MANIFESTUAC flags, we generate a .manifest to
- # include into the list of manifests. This allows us to avoid the need to
- # do two passes during linking. The /MANIFEST flag and /ManifestFile are
- # still used, and the intermediate manifest is used to assert that the
- # final manifest we get from merging all the additional manifest files
- # (plus the one we generate here) isn't modified by merging the
- # intermediate into it.
-
- # Always NO, because we generate a manifest file that has what we want.
- flags.append('/MANIFESTUAC:NO')
-
- config = self._TargetConfig(config)
- enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
- default='true')
- manifest_files = []
- generated_manifest_outer = \
-"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
-"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
-"</assembly>"
- if enable_uac == 'true':
- execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
- config, default='0')
- execution_level_map = {
- '0': 'asInvoker',
- '1': 'highestAvailable',
- '2': 'requireAdministrator'
- }
-
- ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
- default='false')
-
- inner = '''
-<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
- <security>
- <requestedPrivileges>
- <requestedExecutionLevel level='%s' uiAccess='%s' />
- </requestedPrivileges>
- </security>
-</trustInfo>''' % (execution_level_map[execution_level], ui_access)
- else:
- inner = ''
-
- generated_manifest_contents = generated_manifest_outer % inner
- generated_name = name + '.generated.manifest'
- # Need to join with the build_dir here as we're writing it during
- # generation time, but we return the un-joined version because the build
- # will occur in that directory. We only write the file if the contents
- # have changed so that simply regenerating the project files doesn't
- # cause a relink.
- build_dir_generated_name = os.path.join(build_dir, generated_name)
- gyp.common.EnsureDirExists(build_dir_generated_name)
- f = gyp.common.WriteOnDiff(build_dir_generated_name)
- f.write(generated_manifest_contents)
- f.close()
- manifest_files = [generated_name]
-
- if allow_isolation:
- flags.append('/ALLOWISOLATION')
-
- manifest_files += self._GetAdditionalManifestFiles(config,
- gyp_to_build_path)
- return flags, output_name, manifest_files
-
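-  # Editor's sketch (not in the original): with the defaults
-  # (GenerateManifest/EnableUAC 'true', UACExecutionLevel '0', UACUIAccess
-  # 'false'), the generated manifest requests level='asInvoker' and the
-  # returned flags are ['/MANIFEST',
-  # '/ManifestFile:<name>.intermediate.manifest', '/MANIFESTUAC:NO'].
-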
- def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
- """Gets additional manifest files that are added to the default one
- generated by the linker."""
- files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
- default=[])
- if isinstance(files, str):
- files = files.split(';')
- return [os.path.normpath(
- gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
- for f in files]
-
- def IsUseLibraryDependencyInputs(self, config):
- """Returns whether the target should be linked via Use Library Dependency
- Inputs (using component .objs of a given .lib)."""
- config = self._TargetConfig(config)
- uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
- return uldi == 'true'
-
- def IsEmbedManifest(self, config):
- """Returns whether manifest should be linked into binary."""
- config = self._TargetConfig(config)
- embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
- default='true')
- return embed == 'true'
-
- def IsLinkIncremental(self, config):
- """Returns whether the target should be linked incrementally."""
- config = self._TargetConfig(config)
- link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
- return link_inc != '1'
-
- def GetRcflags(self, config, gyp_to_ninja_path):
- """Returns the flags that need to be added to invocations of the resource
- compiler."""
- config = self._TargetConfig(config)
- rcflags = []
- rc = self._GetWrapper(self, self.msvs_settings[config],
- 'VCResourceCompilerTool', append=rcflags)
- rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
- rcflags.append('/I' + gyp_to_ninja_path('.'))
- rc('PreprocessorDefinitions', prefix='/d')
- # /l arg must be in hex without leading '0x'
- rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
- return rcflags
-
- def BuildCygwinBashCommandLine(self, args, path_to_base):
- """Build a command line that runs args via cygwin bash. We assume that all
- incoming paths are in Windows normpath'd form, so they need to be
- converted to posix style for the part of the command line that's passed to
- bash. We also have to do some Visual Studio macro emulation here because
- various rules use magic VS names for things. Also note that rules that
- contain ninja variables cannot be fixed here (for example ${source}), so
- the outer generator needs to make sure that the paths that are written out
- are in posix style, if the command line will be used here."""
- cygwin_dir = os.path.normpath(
- os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
- cd = ('cd %s' % path_to_base).replace('\\', '/')
- args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
- args = ["'%s'" % a.replace("'", "'\\''") for a in args]
- bash_cmd = ' '.join(args)
- cmd = (
- 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
- 'bash -c "%s ; %s"' % (cd, bash_cmd))
- return cmd
-
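-  # Editor's sketch (not in the original):
-  #   BuildCygwinBashCommandLine(['python', 'x.py'], 'a\\b')
-  # produces roughly:
-  #   call "a\b\setup_env.bat" && set CYGWIN=nontsec &&
-  #     bash -c "cd a/b ; 'python' 'x.py'"
-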
- def IsRuleRunUnderCygwin(self, rule):
- """Determine if an action should be run under cygwin. If the variable is
- unset, or set to 1 we use cygwin."""
- return int(rule.get('msvs_cygwin_shell',
- self.spec.get('msvs_cygwin_shell', 1))) != 0
-
- def _HasExplicitRuleForExtension(self, spec, extension):
- """Determine if there's an explicit rule for a particular extension."""
- for rule in spec.get('rules', []):
- if rule['extension'] == extension:
- return True
- return False
-
- def _HasExplicitIdlActions(self, spec):
- """Determine if an action should not run midl for .idl files."""
- return any([action.get('explicit_idl_action', 0)
- for action in spec.get('actions', [])])
-
- def HasExplicitIdlRulesOrActions(self, spec):
- """Determine if there's an explicit rule or action for idl files. When
- there isn't we need to generate implicit rules to build MIDL .idl files."""
- return (self._HasExplicitRuleForExtension(spec, 'idl') or
- self._HasExplicitIdlActions(spec))
-
- def HasExplicitAsmRules(self, spec):
- """Determine if there's an explicit rule for asm files. When there isn't we
- need to generate implicit rules to assemble .asm files."""
- return self._HasExplicitRuleForExtension(spec, 'asm')
-
- def GetIdlBuildData(self, source, config):
- """Determine the implicit outputs for an idl file. Returns output
- directory, outputs, and variables and flags that are required."""
- config = self._TargetConfig(config)
- midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
- def midl(name, default=None):
- return self.ConvertVSMacros(midl_get(name, default=default),
- config=config)
- tlb = midl('TypeLibraryName', default='${root}.tlb')
- header = midl('HeaderFileName', default='${root}.h')
- dlldata = midl('DLLDataFileName', default='dlldata.c')
- iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
- proxy = midl('ProxyFileName', default='${root}_p.c')
- # Note that .tlb is not included in the outputs as it is not always
- # generated depending on the content of the input idl file.
- outdir = midl('OutputDirectory', default='')
- output = [header, dlldata, iid, proxy]
- variables = [('tlb', tlb),
- ('h', header),
- ('dlldata', dlldata),
- ('iid', iid),
- ('proxy', proxy)]
- # TODO(scottmg): Are there configuration settings to set these flags?
- target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
- flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
- return outdir, output, variables, flags
-
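-  # Editor's sketch (not in the original): for foo.idl with default MIDL
-  # settings the implicit outputs are ['${root}.h', 'dlldata.c',
-  # '${root}_i.c', '${root}_p.c'] and the flags are
-  # ['/char', 'signed', '/env', 'win32' or 'x64', '/Oicf'].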
-
-def _LanguageMatchesForPch(source_ext, pch_source_ext):
- c_exts = ('.c',)
- cc_exts = ('.cc', '.cxx', '.cpp')
- return ((source_ext in c_exts and pch_source_ext in c_exts) or
- (source_ext in cc_exts and pch_source_ext in cc_exts))
-
-
-class PrecompiledHeader(object):
- """Helper to generate dependencies and build rules to handle generation of
- precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
- """
- def __init__(
- self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
- self.settings = settings
- self.config = config
- pch_source = self.settings.msvs_precompiled_source[self.config]
- self.pch_source = gyp_to_build_path(pch_source)
- filename, _ = os.path.splitext(pch_source)
- self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
-
- def _PchHeader(self):
- """Get the header that will appear in an #include line for all source
- files."""
- return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
-
- def GetObjDependencies(self, sources, objs, arch):
- """Given a list of sources files and the corresponding object files,
- returns a list of the pch files that should be depended upon. The
- additional wrapping in the return value is for interface compatibility
- with make.py on Mac, and xcode_emulation.py."""
- assert arch is None
- if not self._PchHeader():
- return []
- pch_ext = os.path.splitext(self.pch_source)[1]
- for source in sources:
- if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
- return [(None, None, self.output_obj)]
- return []
-
- def GetPchBuildCommands(self, arch):
- """Not used on Windows as there are no additional build steps required
- (instead, existing steps are modified in GetFlagsModifications below)."""
- return []
-
- def GetFlagsModifications(self, input, output, implicit, command,
- cflags_c, cflags_cc, expand_special):
- """Get the modified cflags and implicit dependencies that should be used
- for the pch compilation step."""
- if input == self.pch_source:
- pch_output = ['/Yc' + self._PchHeader()]
- if command == 'cxx':
- return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
- self.output_obj, [])
- elif command == 'cc':
- return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
- self.output_obj, [])
- return [], output, implicit
-
-
-vs_version = None
-def GetVSVersion(generator_flags):
- global vs_version
- if not vs_version:
- vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
- generator_flags.get('msvs_version', 'auto'),
- allow_fallback=False)
- return vs_version
-
-def _GetVsvarsSetupArgs(generator_flags, arch):
- vs = GetVSVersion(generator_flags)
- return vs.SetupScript()
-
-def ExpandMacros(string, expansions):
- """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
- for the canonical way to retrieve a suitable dict."""
- if '$' in string:
- for old, new in expansions.iteritems():
- assert '$(' not in new, new
- string = string.replace(old, new)
- return string
-
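-# Editor's sketch (not in the original):
-#   ExpandMacros('$(OutDir)\\foo.exe', {'$(OutDir)\\': 'out\\'})
-#     -> 'out\\foo.exe'
-# Strings containing no '$' are returned unchanged.
-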
-def _ExtractImportantEnvironment(output_of_set):
- """Extracts environment variables required for the toolchain to run from
- a textual dump output by the cmd.exe 'set' command."""
- envvars_to_save = (
- 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
- 'include',
- 'lib',
- 'libpath',
- 'path',
- 'pathext',
- 'systemroot',
- 'temp',
- 'tmp',
- )
- env = {}
- for line in output_of_set.splitlines():
- for envvar in envvars_to_save:
- if re.match(envvar + '=', line.lower()):
- var, setting = line.split('=', 1)
- if envvar == 'path':
- # Our own rules (for running gyp-win-tool) and other actions in
- # Chromium rely on python being in the path. Add the path to this
- # python here so that if it's not in the path when ninja is run
- # later, python will still be found.
- setting = os.path.dirname(sys.executable) + os.pathsep + setting
- env[var.upper()] = setting
- break
- for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
- if required not in env:
- raise Exception('Environment variable "%s" '
- 'required to be set to valid path' % required)
- return env
-
-def _FormatAsEnvironmentBlock(envvar_dict):
- """Format as an 'environment block' directly suitable for CreateProcess.
- Briefly this is a list of key=value\0, terminated by an additional \0. See
- CreateProcess documentation for more details."""
- block = ''
- nul = '\0'
- for key, value in envvar_dict.iteritems():
- block += key + '=' + value + nul
- block += nul
- return block
-
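-# Editor's sketch (not in the original):
-#   _FormatAsEnvironmentBlock({'PATH': 'C:\\bin'}) -> 'PATH=C:\\bin\0\0'
-# i.e. each key=value pair is NUL-terminated, plus one final NUL.
-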
-def _ExtractCLPath(output_of_where):
- """Gets the path to cl.exe based on the output of calling the environment
- setup batch file, followed by the equivalent of `where`."""
- # Take the first line, as that's the first found in the PATH.
- for line in output_of_where.strip().splitlines():
- if line.startswith('LOC:'):
- return line[len('LOC:'):].strip()
-
-def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
- system_includes, open_out):
- """It's not sufficient to have the absolute path to the compiler, linker,
- etc. on Windows, as those tools rely on .dlls being in the PATH. We also
- need to support both x86 and x64 compilers within the same build (to support
- msvs_target_platform hackery). Different architectures require a different
- compiler binary, and different supporting environment variables (INCLUDE,
- LIB, LIBPATH). So, we extract the environment here, wrap all invocations
- of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
- sets up the environment, and then we do not prefix the compiler with
- an absolute path, instead preferring something like "cl.exe" in the rule
- which will then run whichever the environment setup has put in the path.
-  When the following procedure for generating environment files does not
-  meet your requirements (e.g. for custom toolchains), you can pass
-  "-G ninja_use_custom_environment_files" to gyp to suppress file
-  generation and use custom environment files prepared by yourself."""
- archs = ('x86', 'x64')
- if generator_flags.get('ninja_use_custom_environment_files', 0):
- cl_paths = {}
- for arch in archs:
- cl_paths[arch] = 'cl.exe'
- return cl_paths
- vs = GetVSVersion(generator_flags)
- cl_paths = {}
- for arch in archs:
- # Extract environment variables for subprocesses.
- args = vs.SetupScript(arch)
- args.extend(('&&', 'set'))
- popen = subprocess.Popen(
- args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- variables, _ = popen.communicate()
- env = _ExtractImportantEnvironment(variables)
-
- # Inject system includes from gyp files into INCLUDE.
- if system_includes:
- system_includes = system_includes | OrderedSet(
- env.get('INCLUDE', '').split(';'))
- env['INCLUDE'] = ';'.join(system_includes)
-
- env_block = _FormatAsEnvironmentBlock(env)
- f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
- f.write(env_block)
- f.close()
-
- # Find cl.exe location for this architecture.
- args = vs.SetupScript(arch)
- args.extend(('&&',
- 'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
- popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
- output, _ = popen.communicate()
- cl_paths[arch] = _ExtractCLPath(output)
- return cl_paths
-
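-# Editor's sketch (not in the original): for each arch this runs, in effect,
-#   <setup script> <arch> && set
-# captures the resulting INCLUDE/LIB/PATH/... into environment.<arch>, and
-# locates cl.exe via 'for %i in (cl.exe) do @echo LOC:%~$PATH:i'.
-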
-def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
- """Emulate behavior of msvs_error_on_missing_sources present in the msvs
- generator: Check that all regular source files, i.e. not created at run time,
- exist on disk. Missing files cause needless recompilation when building via
- VS, and we want this check to match for people/bots that build using ninja,
- so they're not surprised when the VS build fails."""
- if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
- no_specials = filter(lambda x: '$' not in x, sources)
- relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
- missing = filter(lambda x: not os.path.exists(x), relative)
- if missing:
- # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
- # path for a slightly less crazy looking output.
- cleaned_up = [os.path.normpath(x) for x in missing]
- raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
-
-# Sets some values in default_variables, which are required for many
-# generators, run on Windows.
-def CalculateCommonVariables(default_variables, params):
- generator_flags = params.get('generator_flags', {})
-
- # Set a variable so conditions can be based on msvs_version.
- msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
- default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
- # To determine processor word size on Windows, in addition to checking
- # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
- # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
-  # contains the actual word size of the system when running through WOW64).
- if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
- '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
- default_variables['MSVS_OS_BITS'] = 64
- else:
- default_variables['MSVS_OS_BITS'] = 32
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
deleted file mode 100644
index d2948f06c0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py
+++ /dev/null
@@ -1,160 +0,0 @@
-# This file comes from
-# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
-# Do not edit! Edit the upstream one instead.
-
-"""Python module for generating .ninja files.
-
-Note that this is emphatically not a required piece of Ninja; it's
-just a helpful utility for build-file-generation systems that already
-use Python.
-"""
-
-import textwrap
-import re
-
-def escape_path(word):
- return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
-
-class Writer(object):
- def __init__(self, output, width=78):
- self.output = output
- self.width = width
-
- def newline(self):
- self.output.write('\n')
-
- def comment(self, text):
- for line in textwrap.wrap(text, self.width - 2):
- self.output.write('# ' + line + '\n')
-
- def variable(self, key, value, indent=0):
- if value is None:
- return
- if isinstance(value, list):
- value = ' '.join(filter(None, value)) # Filter out empty strings.
- self._line('%s = %s' % (key, value), indent)
-
- def pool(self, name, depth):
- self._line('pool %s' % name)
- self.variable('depth', depth, indent=1)
-
- def rule(self, name, command, description=None, depfile=None,
- generator=False, pool=None, restat=False, rspfile=None,
- rspfile_content=None, deps=None):
- self._line('rule %s' % name)
- self.variable('command', command, indent=1)
- if description:
- self.variable('description', description, indent=1)
- if depfile:
- self.variable('depfile', depfile, indent=1)
- if generator:
- self.variable('generator', '1', indent=1)
- if pool:
- self.variable('pool', pool, indent=1)
- if restat:
- self.variable('restat', '1', indent=1)
- if rspfile:
- self.variable('rspfile', rspfile, indent=1)
- if rspfile_content:
- self.variable('rspfile_content', rspfile_content, indent=1)
- if deps:
- self.variable('deps', deps, indent=1)
-
- def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
- variables=None):
- outputs = self._as_list(outputs)
- all_inputs = self._as_list(inputs)[:]
- out_outputs = list(map(escape_path, outputs))
- all_inputs = list(map(escape_path, all_inputs))
-
- if implicit:
- implicit = map(escape_path, self._as_list(implicit))
- all_inputs.append('|')
- all_inputs.extend(implicit)
- if order_only:
- order_only = map(escape_path, self._as_list(order_only))
- all_inputs.append('||')
- all_inputs.extend(order_only)
-
- self._line('build %s: %s' % (' '.join(out_outputs),
- ' '.join([rule] + all_inputs)))
-
- if variables:
- if isinstance(variables, dict):
- iterator = iter(variables.items())
- else:
- iterator = iter(variables)
-
- for key, val in iterator:
- self.variable(key, val, indent=1)
-
- return outputs
-
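-    # Editor's sketch (not in the upstream file): build() renders a ninja
-    # build statement, e.g.
-    #   w.build('out.o', 'cc', 'in.c', implicit='dep.h')
-    # writes:  build out.o: cc in.c | dep.h
-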
- def include(self, path):
- self._line('include %s' % path)
-
- def subninja(self, path):
- self._line('subninja %s' % path)
-
- def default(self, paths):
- self._line('default %s' % ' '.join(self._as_list(paths)))
-
- def _count_dollars_before_index(self, s, i):
- """Returns the number of '$' characters right in front of s[i]."""
- dollar_count = 0
- dollar_index = i - 1
- while dollar_index > 0 and s[dollar_index] == '$':
- dollar_count += 1
- dollar_index -= 1
- return dollar_count
-
- def _line(self, text, indent=0):
- """Write 'text' word-wrapped at self.width characters."""
- leading_space = ' ' * indent
- while len(leading_space) + len(text) > self.width:
- # The text is too wide; wrap if possible.
-
- # Find the rightmost space that would obey our width constraint and
- # that's not an escaped space.
- available_space = self.width - len(leading_space) - len(' $')
- space = available_space
- while True:
- space = text.rfind(' ', 0, space)
- if space < 0 or \
- self._count_dollars_before_index(text, space) % 2 == 0:
- break
-
- if space < 0:
- # No such space; just use the first unescaped space we can find.
- space = available_space - 1
- while True:
- space = text.find(' ', space + 1)
- if space < 0 or \
- self._count_dollars_before_index(text, space) % 2 == 0:
- break
- if space < 0:
- # Give up on breaking.
- break
-
- self.output.write(leading_space + text[0:space] + ' $\n')
- text = text[space+1:]
-
- # Subsequent lines are continuations, so indent them.
- leading_space = ' ' * (indent+2)
-
- self.output.write(leading_space + text + '\n')
-
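-    # Editor's sketch (not in the upstream file): with width=20,
-    #   _line('build a: cc some long input')
-    # wraps at an unescaped space and emits:
-    #   build a: cc some $
-    #     long input
-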
- def _as_list(self, input):
- if input is None:
- return []
- if isinstance(input, list):
- return input
- return [input]
-
-
-def escape(string):
- """Escape a string such that it can be embedded into a Ninja file without
- further interpretation."""
- assert '\n' not in string, 'Ninja syntax does not allow newlines'
- # We only have one special metacharacter: '$'.
- return string.replace('$', '$$')
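-
-# Editor's sketch (not in the upstream file):
-#   escape('100$')       -> '100$$'
-#   escape_path('a b:c') -> 'a$ b$:c'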
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py
deleted file mode 100644
index fd6b7276be..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/ordered_dict.py
+++ /dev/null
@@ -1,288 +0,0 @@
-# Unmodified from http://code.activestate.com/recipes/576693/
-# other than to add MIT license header (as specified on page, but not in code).
-# Linked from Python documentation here:
-# http://docs.python.org/2/library/collections.html#collections.OrderedDict
-#
-# This should be deleted once Py2.7 is available on all bots, see
-# http://crbug.com/241769.
-#
-# Copyright (c) 2009 Raymond Hettinger.
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-
-try:
- from thread import get_ident as _get_ident
-except ImportError:
- from dummy_thread import get_ident as _get_ident
-
-try:
- from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
- pass
-
-
-class OrderedDict(dict):
- 'Dictionary that remembers insertion order'
- # An inherited dict maps keys to values.
- # The inherited dict provides __getitem__, __len__, __contains__, and get.
- # The remaining methods are order-aware.
- # Big-O running times for all methods are the same as for regular dictionaries.
-
- # The internal self.__map dictionary maps keys to links in a doubly linked list.
- # The circular doubly linked list starts and ends with a sentinel element.
- # The sentinel element never gets deleted (this simplifies the algorithm).
- # Each link is stored as a list of length three: [PREV, NEXT, KEY].
-
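-    # Editor's sketch (not in the recipe): after inserting 'a' then 'b', the
-    # circular list is root <-> [.., .., 'a'] <-> [.., .., 'b'] <-> root;
-    # iteration follows the NEXT pointers starting at root[1].
-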
- def __init__(self, *args, **kwds):
- '''Initialize an ordered dictionary. Signature is the same as for
- regular dictionaries, but keyword arguments are not recommended
- because their insertion order is arbitrary.
-
- '''
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__root
- except AttributeError:
- self.__root = root = [] # sentinel node
- root[:] = [root, root, None]
- self.__map = {}
- self.__update(*args, **kwds)
-
- def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
- 'od.__setitem__(i, y) <==> od[i]=y'
- # Setting a new item creates a new link which goes at the end of the linked
- # list, and the inherited dictionary is updated with the new key/value pair.
- if key not in self:
- root = self.__root
- last = root[0]
- last[1] = root[0] = self.__map[key] = [last, root, key]
- dict_setitem(self, key, value)
-
- def __delitem__(self, key, dict_delitem=dict.__delitem__):
- 'od.__delitem__(y) <==> del od[y]'
- # Deleting an existing item uses self.__map to find the link which is
- # then removed by updating the links in the predecessor and successor nodes.
- dict_delitem(self, key)
- link_prev, link_next, key = self.__map.pop(key)
- link_prev[1] = link_next
- link_next[0] = link_prev
-
- def __iter__(self):
- 'od.__iter__() <==> iter(od)'
- root = self.__root
- curr = root[1]
- while curr is not root:
- yield curr[2]
- curr = curr[1]
-
- def __reversed__(self):
- 'od.__reversed__() <==> reversed(od)'
- root = self.__root
- curr = root[0]
- while curr is not root:
- yield curr[2]
- curr = curr[0]
-
- def clear(self):
- 'od.clear() -> None. Remove all items from od.'
- try:
- for node in self.__map.itervalues():
- del node[:]
- root = self.__root
- root[:] = [root, root, None]
- self.__map.clear()
- except AttributeError:
- pass
- dict.clear(self)
-
- def popitem(self, last=True):
- '''od.popitem() -> (k, v), return and remove a (key, value) pair.
- Pairs are returned in LIFO order if last is true or FIFO order if false.
-
- '''
- if not self:
- raise KeyError('dictionary is empty')
- root = self.__root
- if last:
- link = root[0]
- link_prev = link[0]
- link_prev[1] = root
- root[0] = link_prev
- else:
- link = root[1]
- link_next = link[1]
- root[1] = link_next
- link_next[0] = root
- key = link[2]
- del self.__map[key]
- value = dict.pop(self, key)
- return key, value
-
- # -- the following methods do not depend on the internal structure --
-
- def keys(self):
- 'od.keys() -> list of keys in od'
- return list(self)
-
- def values(self):
- 'od.values() -> list of values in od'
- return [self[key] for key in self]
-
- def items(self):
- 'od.items() -> list of (key, value) pairs in od'
- return [(key, self[key]) for key in self]
-
- def iterkeys(self):
- 'od.iterkeys() -> an iterator over the keys in od'
- return iter(self)
-
- def itervalues(self):
- 'od.itervalues() -> an iterator over the values in od'
- for k in self:
- yield self[k]
-
- def iteritems(self):
- 'od.iteritems() -> an iterator over the (key, value) items in od'
- for k in self:
- yield (k, self[k])
-
- # Suppress 'OrderedDict.update: Method has no argument':
- # pylint: disable=E0211
- def update(*args, **kwds):
- '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
-
- If E is a dict instance, does: for k in E: od[k] = E[k]
- If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
- Or if E is an iterable of items, does: for k, v in E: od[k] = v
- In either case, this is followed by: for k, v in F.items(): od[k] = v
-
- '''
- if len(args) > 2:
- raise TypeError('update() takes at most 2 positional '
- 'arguments (%d given)' % (len(args),))
- elif not args:
- raise TypeError('update() takes at least 1 argument (0 given)')
- self = args[0]
- # Make progressively weaker assumptions about "other"
- other = ()
- if len(args) == 2:
- other = args[1]
- if isinstance(other, dict):
- for key in other:
- self[key] = other[key]
- elif hasattr(other, 'keys'):
- for key in other.keys():
- self[key] = other[key]
- else:
- for key, value in other:
- self[key] = value
- for key, value in kwds.items():
- self[key] = value
-
- __update = update # let subclasses override update without breaking __init__
-
- __marker = object()
-
- def pop(self, key, default=__marker):
- '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
- If key is not found, d is returned if given, otherwise KeyError is raised.
-
- '''
- if key in self:
- result = self[key]
- del self[key]
- return result
- if default is self.__marker:
- raise KeyError(key)
- return default
-
- def setdefault(self, key, default=None):
- 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
- if key in self:
- return self[key]
- self[key] = default
- return default
-
- def __repr__(self, _repr_running={}):
- 'od.__repr__() <==> repr(od)'
- call_key = id(self), _get_ident()
- if call_key in _repr_running:
- return '...'
- _repr_running[call_key] = 1
- try:
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, self.items())
- finally:
- del _repr_running[call_key]
-
- def __reduce__(self):
- 'Return state information for pickling'
- items = [[k, self[k]] for k in self]
- inst_dict = vars(self).copy()
- for k in vars(OrderedDict()):
- inst_dict.pop(k, None)
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def copy(self):
- 'od.copy() -> a shallow copy of od'
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
- and values equal to v (which defaults to None).
-
- '''
- d = cls()
- for key in iterable:
- d[key] = value
- return d
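- # Example: OrderedDict.fromkeys('ab') yields
- # OrderedDict([('a', None), ('b', None)]).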
-
- def __eq__(self, other):
- '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
- while comparison to a regular mapping is order-insensitive.
-
- '''
- if isinstance(other, OrderedDict):
- return len(self)==len(other) and self.items() == other.items()
- return dict.__eq__(self, other)
-
- def __ne__(self, other):
- return not self == other
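- # Example: OrderedDict([(1, 'a'), (2, 'b')]) != OrderedDict([(2, 'b'), (1, 'a')])
- # because order matters between OrderedDicts, yet both equal {1: 'a', 2: 'b'}.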
-
- # -- the following methods are only used in Python 2.7 --
-
- def viewkeys(self):
- "od.viewkeys() -> a set-like object providing a view on od's keys"
- return KeysView(self)
-
- def viewvalues(self):
- "od.viewvalues() -> an object providing a view on od's values"
- return ValuesView(self)
-
- def viewitems(self):
- "od.viewitems() -> a set-like object providing a view on od's items"
- return ItemsView(self)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
deleted file mode 100644
index 74c98c5a79..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""A clone of the default copy.deepcopy that doesn't handle cyclic
-structures or complex types except for dicts and lists. This is
-because gyp copies structures so large that even small per-copy overhead
-adds up to seconds in a project the size of Chromium."""
-
-class Error(Exception):
- pass
-
-__all__ = ["Error", "deepcopy"]
-
-def deepcopy(x):
- """Deep copy operation on gyp objects such as strings, ints, dicts
- and lists. More than twice as fast as copy.deepcopy but much less
- generic."""
-
- try:
- return _deepcopy_dispatch[type(x)](x)
- except KeyError:
- raise Error(('Unsupported type %s for deepcopy. Use copy.deepcopy '
- 'or expand simple_copy support.') % type(x))
-
-_deepcopy_dispatch = d = {}
-
-def _deepcopy_atomic(x):
- return x
-
-for x in (type(None), int, long, float,
- bool, str, unicode, type):
- d[x] = _deepcopy_atomic
-
-def _deepcopy_list(x):
- return [deepcopy(a) for a in x]
-d[list] = _deepcopy_list
-
-def _deepcopy_dict(x):
- y = {}
- for key, value in x.iteritems():
- y[deepcopy(key)] = deepcopy(value)
- return y
-d[dict] = _deepcopy_dict
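-# Example: deepcopy({'a': [1, {'b': 2}]}) returns an independent copy, while
-# deepcopy(set()) raises Error because only dicts, lists and atoms are handled.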
-
-del d
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
deleted file mode 100755
index bb6f1ea436..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
+++ /dev/null
@@ -1,314 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utility functions for Windows builds.
-
-These functions are executed via gyp-win-tool when using the ninja generator.
-"""
-
-import os
-import re
-import shutil
-import subprocess
-import stat
-import string
-import sys
-
-BASE_DIR = os.path.dirname(os.path.abspath(__file__))
-
-# A regex matching an argument corresponding to the output filename passed to
-# link.exe.
-_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
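-# E.g. it matches '/OUT:ui.dll' with group 'out' == 'ui.dll'; arguments that
-# name no output file do not match.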
-
-def main(args):
- executor = WinTool()
- exit_code = executor.Dispatch(args)
- if exit_code is not None:
- sys.exit(exit_code)
-
-
-class WinTool(object):
- """This class performs all the Windows tooling steps. The methods can either
- be executed directly, or dispatched from an argument list."""
-
- def _UseSeparateMspdbsrv(self, env, args):
- """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
- shared one."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- if args[0] != 'link.exe':
- return
-
- # Use the output filename passed to the linker to generate an endpoint name
- # for mspdbsrv.exe.
- endpoint_name = None
- for arg in args:
- m = _LINK_EXE_OUT_ARG.match(arg)
- if m:
- endpoint_name = re.sub(r'\W+', '',
- '%s_%d' % (m.group('out'), os.getpid()))
- break
-
- if endpoint_name is None:
- return
-
- # Adds the appropriate environment variable. This will be read by link.exe
- # to know which instance of mspdbsrv.exe it should connect to (if it's
- # not set then the default endpoint is used).
- env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
-
- def Dispatch(self, args):
- """Dispatches a string command to a method."""
- if len(args) < 1:
- raise Exception("Not enough arguments")
-
- method = "Exec%s" % self._CommandifyName(args[0])
- return getattr(self, method)(*args[1:])
-
- def _CommandifyName(self, name_string):
- """Transforms a tool name like recursive-mirror to RecursiveMirror."""
- return name_string.title().replace('-', '')
-
- def _GetEnv(self, arch):
- """Gets the saved environment from a file for a given architecture."""
- # The environment is saved as an "environment block" (see CreateProcess
- # and msvs_emulation for details). We convert to a dict here.
- # Drop the last 2 NULs: one terminates the final entry, one terminates the block.
- pairs = open(arch).read()[:-2].split('\0')
- kvs = [item.split('=', 1) for item in pairs]
- return dict(kvs)
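- # Illustrative (hypothetical values): a file containing
- # 'PATH=C:\\tools\0TMP=C:\\temp\0\0' parses to
- # {'PATH': 'C:\\tools', 'TMP': 'C:\\temp'}.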
-
- def ExecStamp(self, path):
- """Simple stamp command."""
- open(path, 'w').close()
-
- def ExecRecursiveMirror(self, source, dest):
- """Emulation of rm -rf out && cp -af in out."""
- if os.path.exists(dest):
- if os.path.isdir(dest):
- def _on_error(fn, path, excinfo):
- # The operation failed, possibly because the file is set to
- # read-only. If that's why, make it writable and try the op again.
- if not os.access(path, os.W_OK):
- os.chmod(path, stat.S_IWRITE)
- fn(path)
- shutil.rmtree(dest, onerror=_on_error)
- else:
- if not os.access(dest, os.W_OK):
- # Attempt to make the file writable before deleting it.
- os.chmod(dest, stat.S_IWRITE)
- os.unlink(dest)
-
- if os.path.isdir(source):
- shutil.copytree(source, dest)
- else:
- shutil.copy2(source, dest)
-
- def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
- """Filter diagnostic output from link that looks like:
- ' Creating library ui.dll.lib and object ui.dll.exp'
- This happens when there are exports from the dll or exe.
- """
- env = self._GetEnv(arch)
- if use_separate_mspdbsrv == 'True':
- self._UseSeparateMspdbsrv(env, args)
- link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
- shell=True,
- env=env,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- out, _ = link.communicate()
- for line in out.splitlines():
- if (not line.startswith(' Creating library ') and
- not line.startswith('Generating code') and
- not line.startswith('Finished generating code')):
- print line
- return link.returncode
-
- def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
- mt, rc, intermediate_manifest, *manifests):
- """A wrapper for handling creating a manifest resource and then executing
- a link command."""
- # The 'normal' way to do manifests is to have link generate a manifest
- # based on gathering dependencies from the object files, then merge that
- # manifest with other manifests supplied as sources, convert the merged
- # manifest to a resource, and then *relink*, including the compiled
- # version of the manifest resource. This breaks incremental linking, and
- # is generally overly complicated. Instead, we merge all the manifests
- # provided (along with one that includes what would normally be in the
- # linker-generated one, see msvs_emulation.py), and include that into the
- # first and only link. We still tell link to generate a manifest, but we
- # only use that to assert that our simpler process did not miss anything.
- variables = {
- 'python': sys.executable,
- 'arch': arch,
- 'out': out,
- 'ldcmd': ldcmd,
- 'resname': resname,
- 'mt': mt,
- 'rc': rc,
- 'intermediate_manifest': intermediate_manifest,
- 'manifests': ' '.join(manifests),
- }
- add_to_ld = ''
- if manifests:
- subprocess.check_call(
- '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
- '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
- if embed_manifest == 'True':
- subprocess.check_call(
- '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
- ' %(out)s.manifest.rc %(resname)s' % variables)
- subprocess.check_call(
- '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
- '%(out)s.manifest.rc' % variables)
- add_to_ld = ' %(out)s.manifest.res' % variables
- subprocess.check_call(ldcmd + add_to_ld)
-
- # Run mt.exe on the theoretically complete manifest we generated, merging
- # it with the one the linker generated to confirm that the linker
- # generated one does not add anything. This is strictly unnecessary for
- # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
- # used in a #pragma comment.
- if manifests:
- # Merge the intermediate one with ours to .assert.manifest, then check
- # that .assert.manifest is identical to ours.
- subprocess.check_call(
- '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
- '-manifest %(out)s.manifest %(intermediate_manifest)s '
- '-out:%(out)s.assert.manifest' % variables)
- assert_manifest = '%(out)s.assert.manifest' % variables
- our_manifest = '%(out)s.manifest' % variables
- # Load and normalize the manifests. mt.exe sometimes removes whitespace,
- # and sometimes doesn't unfortunately.
- with open(our_manifest, 'rb') as our_f:
- with open(assert_manifest, 'rb') as assert_f:
- our_data = our_f.read().translate(None, string.whitespace)
- assert_data = assert_f.read().translate(None, string.whitespace)
- if our_data != assert_data:
- os.unlink(out)
- def dump(filename):
- sys.stderr.write('%s\n-----\n' % filename)
- with open(filename, 'rb') as f:
- sys.stderr.write(f.read() + '\n-----\n')
- dump(intermediate_manifest)
- dump(our_manifest)
- dump(assert_manifest)
- sys.stderr.write(
- 'Linker generated manifest "%s" added to final manifest "%s" '
- '(result in "%s"). '
- 'Were /MANIFEST switches used in #pragma statements? ' % (
- intermediate_manifest, our_manifest, assert_manifest))
- return 1
-
- def ExecManifestWrapper(self, arch, *args):
- """Run manifest tool with environment set. Strip out undesirable warning
- (some XML blocks are recognized by the OS loader, but not the manifest
- tool)."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if line and 'manifest authoring warning 81010002' not in line:
- print line
- return popen.returncode
-
- def ExecManifestToRc(self, arch, *args):
- """Creates a resource file pointing a SxS assembly manifest.
- |args| is tuple containing path to resource file, path to manifest file
- and resource name which can be "1" (for executables) or "2" (for DLLs)."""
- manifest_path, resource_path, resource_name = args
- with open(resource_path, 'wb') as output:
- output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
- resource_name,
- os.path.abspath(manifest_path).replace('\\', '/')))
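- # E.g. for resource_name "2" and a hypothetical manifest path, this writes:
- # #include <windows.h>
- # 2 RT_MANIFEST "C:/path/to/foo.dll.manifest"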
-
- def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
- *flags):
- """Filter noisy filenames output from MIDL compile step that isn't
- quietable via command line flags.
- """
- args = ['midl', '/nologo'] + list(flags) + [
- '/out', outdir,
- '/tlb', tlb,
- '/h', h,
- '/dlldata', dlldata,
- '/iid', iid,
- '/proxy', proxy,
- idl]
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- # Filter junk out of stdout, and write filtered versions. Output we want
- # to filter is pairs of lines that look like this:
- # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
- # objidl.idl
- lines = out.splitlines()
- prefixes = ('Processing ', '64 bit Processing ')
- processing = set(os.path.basename(x)
- for x in lines if x.startswith(prefixes))
- for line in lines:
- if not line.startswith(prefixes) and line not in processing:
- print line
- return popen.returncode
-
- def ExecAsmWrapper(self, arch, *args):
- """Filter logo banner from invocations of asm.exe."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if (not line.startswith('Copyright (C) Microsoft Corporation') and
- not line.startswith('Microsoft (R) Macro Assembler') and
- not line.startswith(' Assembling: ') and
- line):
- print line
- return popen.returncode
-
- def ExecRcWrapper(self, arch, *args):
- """Filter logo banner from invocations of rc.exe. Older versions of RC
- don't support the /nologo flag."""
- env = self._GetEnv(arch)
- popen = subprocess.Popen(args, shell=True, env=env,
- stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, _ = popen.communicate()
- for line in out.splitlines():
- if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
- not line.startswith('Copyright (C) Microsoft Corporation') and
- line):
- print line
- return popen.returncode
-
- def ExecActionWrapper(self, arch, rspfile, *dir):
- """Runs an action command line from a response file using the environment
- for |arch|. If |dir| is supplied, use that as the working directory."""
- env = self._GetEnv(arch)
- # TODO(scottmg): This is a temporary hack to get some specific variables
- # through to actions that are set after gyp-time. http://crbug.com/333738.
- for k, v in os.environ.iteritems():
- if k not in env:
- env[k] = v
- args = open(rspfile).read()
- dir = dir[0] if dir else None
- return subprocess.call(args, shell=True, env=env, cwd=dir)
-
- def ExecClCompile(self, project_dir, selected_files):
- """Executed by msvs-ninja projects when the 'ClCompile' target is used to
- build selected C/C++ files."""
- project_dir = os.path.relpath(project_dir, BASE_DIR)
- selected_files = selected_files.split(';')
- ninja_targets = [os.path.join(project_dir, filename) + '^^'
- for filename in selected_files]
- cmd = ['ninja.exe']
- cmd.extend(ninja_targets)
- return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
deleted file mode 100644
index b06bdc4e8b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
+++ /dev/null
@@ -1,1629 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-This module contains classes that help to emulate xcodebuild behavior on top of
-other build systems, such as make and ninja.
-"""
-
-import copy
-import gyp.common
-import os
-import os.path
-import re
-import shlex
-import subprocess
-import sys
-import tempfile
-from gyp.common import GypError
-
-# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
-# "xcodebuild" is called too quickly (it has been found to return an
-# incorrect version number).
-XCODE_VERSION_CACHE = None
-
-# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance
-# corresponding to the installed version of Xcode.
-XCODE_ARCHS_DEFAULT_CACHE = None
-
-
-def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
- """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
- and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
- mapping = {'$(ARCHS_STANDARD)': archs}
- if archs_including_64_bit:
- mapping['$(ARCHS_STANDARD_INCLUDING_64_BIT)'] = archs_including_64_bit
- return mapping
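-# Example: XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']) returns
-# {'$(ARCHS_STANDARD)': ['i386'],
-#  '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['i386', 'x86_64']}.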
-
-class XcodeArchsDefault(object):
- """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
- macros and implementing filtering by VALID_ARCHS. The expansion of macros
- depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
- on the version of Xcode.
- """
-
- # Match variable like $(ARCHS_STANDARD).
- variable_pattern = re.compile(r'\$\([a-zA-Z_][a-zA-Z0-9_]*\)$')
-
- def __init__(self, default, mac, iphonesimulator, iphoneos):
- self._default = (default,)
- self._archs = {'mac': mac, 'ios': iphoneos, 'iossim': iphonesimulator}
-
- def _VariableMapping(self, sdkroot):
- """Returns the dictionary of variable mapping depending on the SDKROOT."""
- sdkroot = sdkroot.lower()
- if 'iphoneos' in sdkroot:
- return self._archs['ios']
- elif 'iphonesimulator' in sdkroot:
- return self._archs['iossim']
- else:
- return self._archs['mac']
-
- def _ExpandArchs(self, archs, sdkroot):
- """Expands variables references in ARCHS, and remove duplicates."""
- variable_mapping = self._VariableMapping(sdkroot)
- expanded_archs = []
- for arch in archs:
- if self.variable_pattern.match(arch):
- variable = arch
- try:
- variable_expansion = variable_mapping[variable]
- for arch in variable_expansion:
- if arch not in expanded_archs:
- expanded_archs.append(arch)
- except KeyError:
- print 'Warning: Ignoring unsupported variable "%s".' % variable
- elif arch not in expanded_archs:
- expanded_archs.append(arch)
- return expanded_archs
-
- def ActiveArchs(self, archs, valid_archs, sdkroot):
- """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
- is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
- values present in VALID_ARCHS are kept)."""
- expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or '')
- if valid_archs:
- filtered_archs = []
- for arch in expanded_archs:
- if arch in valid_archs:
- filtered_archs.append(arch)
- expanded_archs = filtered_archs
- return expanded_archs
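- # E.g. ActiveArchs(['$(ARCHS_STANDARD)'], ['x86_64'], 'macosx') expands the
- # macro through the 'mac' mapping, then keeps only 'x86_64'.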
-
-
-def GetXcodeArchsDefault():
- """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
- installed version of Xcode. The default values used by Xcode for ARCHS
- and the expansion of the variables depends on the version of Xcode used.
-
- For all versions prior to Xcode 5.0 and from Xcode 5.1 onward, Xcode uses
- $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 use
- $(ARCHS_STANDARD_INCLUDING_64_BIT). That variable was added in Xcode 5.0
- and deprecated in Xcode 5.1.
-
- For the "macosx" SDKROOT, all versions starting with Xcode 5.0 include the
- 64-bit architecture as part of $(ARCHS_STANDARD) and default to building
- only it.
-
- For the "iphoneos" and "iphonesimulator" SDKROOTs, 64-bit architectures are
- part of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1,
- they are also part of $(ARCHS_STANDARD).
-
- All those rules are encoded in the construction of the |XcodeArchsDefault|
- object used for the detected version of Xcode. The object is cached for
- performance reasons."""
- global XCODE_ARCHS_DEFAULT_CACHE
- if XCODE_ARCHS_DEFAULT_CACHE:
- return XCODE_ARCHS_DEFAULT_CACHE
- xcode_version, _ = XcodeVersion()
- if xcode_version < '0500':
- XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
- '$(ARCHS_STANDARD)',
- XcodeArchsVariableMapping(['i386']),
- XcodeArchsVariableMapping(['i386']),
- XcodeArchsVariableMapping(['armv7']))
- elif xcode_version < '0510':
- XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
- '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
- XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
- XcodeArchsVariableMapping(['i386'], ['i386', 'x86_64']),
- XcodeArchsVariableMapping(
- ['armv7', 'armv7s'],
- ['armv7', 'armv7s', 'arm64']))
- else:
- XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault(
- '$(ARCHS_STANDARD)',
- XcodeArchsVariableMapping(['x86_64'], ['x86_64']),
- XcodeArchsVariableMapping(['i386', 'x86_64'], ['i386', 'x86_64']),
- XcodeArchsVariableMapping(
- ['armv7', 'armv7s', 'arm64'],
- ['armv7', 'armv7s', 'arm64']))
- return XCODE_ARCHS_DEFAULT_CACHE
-
-
-class XcodeSettings(object):
- """A class that understands the gyp 'xcode_settings' object."""
-
- # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
- # at class-level for efficiency.
- _sdk_path_cache = {}
- _sdk_root_cache = {}
-
- # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
- # cached at class-level for efficiency.
- _plist_cache = {}
-
- # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
- # cached at class-level for efficiency.
- _codesigning_key_cache = {}
-
- def __init__(self, spec):
- self.spec = spec
-
- self.isIOS = False
-
- # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
- # This means self.xcode_settings[config] always contains all settings
- # for that config -- the per-target settings as well. Settings that are
- # the same for all configs are implicitly per-target settings.
- self.xcode_settings = {}
- configs = spec['configurations']
- for configname, config in configs.iteritems():
- self.xcode_settings[configname] = config.get('xcode_settings', {})
- self._ConvertConditionalKeys(configname)
- if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
- None):
- self.isIOS = True
-
- # This is only non-None temporarily during the execution of some methods.
- self.configname = None
-
- # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
- self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
-
- def _ConvertConditionalKeys(self, configname):
- """Converts or warns on conditional keys. Xcode supports conditional keys,
- such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
- with some keys converted while the rest force a warning."""
- settings = self.xcode_settings[configname]
- conditional_keys = [key for key in settings if key.endswith(']')]
- for key in conditional_keys:
- # If you need more, speak up at http://crbug.com/122592
- if key.endswith("[sdk=iphoneos*]"):
- if configname.endswith("iphoneos"):
- new_key = key.split("[")[0]
- settings[new_key] = settings[key]
- else:
- print 'Warning: Conditional keys not implemented, ignoring:', \
- ' '.join(conditional_keys)
- del settings[key]
-
- def _Settings(self):
- assert self.configname
- return self.xcode_settings[self.configname]
-
- def _Test(self, test_key, cond_key, default):
- return self._Settings().get(test_key, default) == cond_key
-
- def _Appendf(self, lst, test_key, format_str, default=None):
- if test_key in self._Settings():
- lst.append(format_str % str(self._Settings()[test_key]))
- elif default:
- lst.append(format_str % str(default))
-
- def _WarnUnimplemented(self, test_key):
- if test_key in self._Settings():
- print 'Warning: Ignoring not yet implemented key "%s".' % test_key
-
- def IsBinaryOutputFormat(self, configname):
- default = "binary" if self.isIOS else "xml"
- format = self.xcode_settings[configname].get('INFOPLIST_OUTPUT_FORMAT',
- default)
- return format == "binary"
-
- def _IsBundle(self):
- return int(self.spec.get('mac_bundle', 0)) != 0
-
- def _IsIosAppExtension(self):
- return int(self.spec.get('ios_app_extension', 0)) != 0
-
- def _IsIosWatchKitExtension(self):
- return int(self.spec.get('ios_watchkit_extension', 0)) != 0
-
- def _IsIosWatchApp(self):
- return int(self.spec.get('ios_watch_app', 0)) != 0
-
- def GetFrameworkVersion(self):
- """Returns the framework version of the current target. Only valid for
- bundles."""
- assert self._IsBundle()
- return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
-
- def GetWrapperExtension(self):
- """Returns the bundle extension (.app, .framework, .plugin, etc). Only
- valid for bundles."""
- assert self._IsBundle()
- if self.spec['type'] in ('loadable_module', 'shared_library'):
- default_wrapper_extension = {
- 'loadable_module': 'bundle',
- 'shared_library': 'framework',
- }[self.spec['type']]
- wrapper_extension = self.GetPerTargetSetting(
- 'WRAPPER_EXTENSION', default=default_wrapper_extension)
- return '.' + self.spec.get('product_extension', wrapper_extension)
- elif self.spec['type'] == 'executable':
- if self._IsIosAppExtension() or self._IsIosWatchKitExtension():
- return '.' + self.spec.get('product_extension', 'appex')
- else:
- return '.' + self.spec.get('product_extension', 'app')
- else:
- assert False, "Don't know extension for '%s', target '%s'" % (
- self.spec['type'], self.spec['target_name'])
-
- def GetProductName(self):
- """Returns PRODUCT_NAME."""
- return self.spec.get('product_name', self.spec['target_name'])
-
- def GetFullProductName(self):
- """Returns FULL_PRODUCT_NAME."""
- if self._IsBundle():
- return self.GetWrapperName()
- else:
- return self._GetStandaloneBinaryPath()
-
- def GetWrapperName(self):
- """Returns the directory name of the bundle represented by this target.
- Only valid for bundles."""
- assert self._IsBundle()
- return self.GetProductName() + self.GetWrapperExtension()
-
- def GetBundleContentsFolderPath(self):
- """Returns the qualified path to the bundle's contents folder. E.g.
- Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
- if self.isIOS:
- return self.GetWrapperName()
- assert self._IsBundle()
- if self.spec['type'] == 'shared_library':
- return os.path.join(
- self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
- else:
- # loadable_modules have a 'Contents' folder like executables.
- return os.path.join(self.GetWrapperName(), 'Contents')
-
- def GetBundleResourceFolder(self):
- """Returns the qualified path to the bundle's resource folder. E.g.
- Chromium.app/Contents/Resources. Only valid for bundles."""
- assert self._IsBundle()
- if self.isIOS:
- return self.GetBundleContentsFolderPath()
- return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
-
- def GetBundlePlistPath(self):
- """Returns the qualified path to the bundle's plist file. E.g.
- Chromium.app/Contents/Info.plist. Only valid for bundles."""
- assert self._IsBundle()
- if self.spec['type'] in ('executable', 'loadable_module'):
- return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
- else:
- return os.path.join(self.GetBundleContentsFolderPath(),
- 'Resources', 'Info.plist')
-
- def GetProductType(self):
- """Returns the PRODUCT_TYPE of this target."""
- if self._IsIosAppExtension():
- assert self._IsBundle(), ('ios_app_extension flag requires mac_bundle '
- '(target %s)' % self.spec['target_name'])
- return 'com.apple.product-type.app-extension'
- if self._IsIosWatchKitExtension():
- assert self._IsBundle(), ('ios_watchkit_extension flag requires '
- 'mac_bundle (target %s)' % self.spec['target_name'])
- return 'com.apple.product-type.watchkit-extension'
- if self._IsIosWatchApp():
- assert self._IsBundle(), ('ios_watch_app flag requires mac_bundle '
- '(target %s)' % self.spec['target_name'])
- return 'com.apple.product-type.application.watchapp'
- if self._IsBundle():
- return {
- 'executable': 'com.apple.product-type.application',
- 'loadable_module': 'com.apple.product-type.bundle',
- 'shared_library': 'com.apple.product-type.framework',
- }[self.spec['type']]
- else:
- return {
- 'executable': 'com.apple.product-type.tool',
- 'loadable_module': 'com.apple.product-type.library.dynamic',
- 'shared_library': 'com.apple.product-type.library.dynamic',
- 'static_library': 'com.apple.product-type.library.static',
- }[self.spec['type']]
-
- def GetMachOType(self):
- """Returns the MACH_O_TYPE of this target."""
- # Weird, but matches Xcode.
- if not self._IsBundle() and self.spec['type'] == 'executable':
- return ''
- return {
- 'executable': 'mh_execute',
- 'static_library': 'staticlib',
- 'shared_library': 'mh_dylib',
- 'loadable_module': 'mh_bundle',
- }[self.spec['type']]
-
- def _GetBundleBinaryPath(self):
- """Returns the name of the bundle binary of by this target.
- E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
- assert self._IsBundle()
- if self.spec['type'] in ('shared_library',) or self.isIOS:
- path = self.GetBundleContentsFolderPath()
- elif self.spec['type'] in ('executable', 'loadable_module'):
- path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
- return os.path.join(path, self.GetExecutableName())
-
- def _GetStandaloneExecutableSuffix(self):
- if 'product_extension' in self.spec:
- return '.' + self.spec['product_extension']
- return {
- 'executable': '',
- 'static_library': '.a',
- 'shared_library': '.dylib',
- 'loadable_module': '.so',
- }[self.spec['type']]
-
- def _GetStandaloneExecutablePrefix(self):
- return self.spec.get('product_prefix', {
- 'executable': '',
- 'static_library': 'lib',
- 'shared_library': 'lib',
- # Non-bundled loadable_modules are called foo.so for some reason
- # (that is, .so and no prefix) with the xcode build -- match that.
- 'loadable_module': '',
- }[self.spec['type']])
-
- def _GetStandaloneBinaryPath(self):
- """Returns the name of the non-bundle binary represented by this target.
- E.g. hello_world. Only valid for non-bundles."""
- assert not self._IsBundle()
- assert self.spec['type'] in (
- 'executable', 'shared_library', 'static_library', 'loadable_module'), (
- 'Unexpected type %s' % self.spec['type'])
- target = self.spec['target_name']
- if self.spec['type'] == 'static_library':
- if target[:3] == 'lib':
- target = target[3:]
- elif self.spec['type'] in ('loadable_module', 'shared_library'):
- if target[:3] == 'lib':
- target = target[3:]
-
- target_prefix = self._GetStandaloneExecutablePrefix()
- target = self.spec.get('product_name', target)
- target_ext = self._GetStandaloneExecutableSuffix()
- return target_prefix + target + target_ext
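- # E.g. a shared_library target 'libfoo' with no product overrides yields
- # 'libfoo.dylib': the 'lib' is stripped, then the 'lib' prefix and '.dylib'
- # suffix are re-applied.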
-
- def GetExecutableName(self):
- """Returns the executable name of the bundle represented by this target.
- E.g. Chromium."""
- if self._IsBundle():
- return self.spec.get('product_name', self.spec['target_name'])
- else:
- return self._GetStandaloneBinaryPath()
-
- def GetExecutablePath(self):
- """Returns the directory name of the bundle represented by this target. E.g.
- Chromium.app/Contents/MacOS/Chromium."""
- if self._IsBundle():
- return self._GetBundleBinaryPath()
- else:
- return self._GetStandaloneBinaryPath()
-
- def GetActiveArchs(self, configname):
- """Returns the architectures this target should be built for."""
- config_settings = self.xcode_settings[configname]
- xcode_archs_default = GetXcodeArchsDefault()
- return xcode_archs_default.ActiveArchs(
- config_settings.get('ARCHS'),
- config_settings.get('VALID_ARCHS'),
- config_settings.get('SDKROOT'))
-
- def _GetSdkVersionInfoItem(self, sdk, infoitem):
- # xcodebuild requires Xcode and can't run on Command Line Tools-only
- # systems from 10.7 onward.
- # Since the CLT has no SDK paths anyway, returning None is the
- # most sensible route and should still do the right thing.
- try:
- return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
- except:
- pass
-
- def _SdkRoot(self, configname):
- if configname is None:
- configname = self.configname
- return self.GetPerConfigSetting('SDKROOT', configname, default='')
-
- def _SdkPath(self, configname=None):
- sdk_root = self._SdkRoot(configname)
- if sdk_root.startswith('/'):
- return sdk_root
- return self._XcodeSdkPath(sdk_root)
-
- def _XcodeSdkPath(self, sdk_root):
- if sdk_root not in XcodeSettings._sdk_path_cache:
- sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
- XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
- if sdk_root:
- XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
- return XcodeSettings._sdk_path_cache[sdk_root]
-
- def _AppendPlatformVersionMinFlags(self, lst):
- self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
- if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
- # TODO: Implement this better?
- sdk_path_basename = os.path.basename(self._SdkPath())
- if sdk_path_basename.lower().startswith('iphonesimulator'):
- self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
- '-mios-simulator-version-min=%s')
- else:
- self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
- '-miphoneos-version-min=%s')
-
- def GetCflags(self, configname, arch=None):
- """Returns flags that need to be added to .c, .cc, .m, and .mm
- compilations."""
- # These functions (and the similar ones below) do not offer complete
- # emulation of all xcode_settings keys. They're implemented on demand.
-
- self.configname = configname
- cflags = []
-
- sdk_root = self._SdkPath()
- if 'SDKROOT' in self._Settings() and sdk_root:
- cflags.append('-isysroot %s' % sdk_root)
-
- if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
- cflags.append('-Wconstant-conversion')
-
- if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
- cflags.append('-funsigned-char')
-
- if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
- cflags.append('-fasm-blocks')
-
- if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
- if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
- cflags.append('-mdynamic-no-pic')
- else:
- pass
- # TODO: In this case, it depends on the target. xcode passes
- # -mdynamic-no-pic by default for executables and possibly static libs,
- # according to mento
-
- if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
- cflags.append('-mpascal-strings')
-
- self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
-
- if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
- dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
- if dbg_format == 'dwarf':
- cflags.append('-gdwarf-2')
- elif dbg_format == 'stabs':
- raise NotImplementedError('stabs debug format is not supported yet.')
- elif dbg_format == 'dwarf-with-dsym':
- cflags.append('-gdwarf-2')
- else:
- raise NotImplementedError('Unknown debug format %s' % dbg_format)
-
- if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
- cflags.append('-fstrict-aliasing')
- elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
- cflags.append('-fno-strict-aliasing')
-
- if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
- cflags.append('-fvisibility=hidden')
-
- if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
- cflags.append('-Werror')
-
- if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
- cflags.append('-Wnewline-eof')
-
- # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
- # llvm-gcc. It also requires a fairly recent libtool, and
- # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
- # path to the libLTO.dylib that matches the used clang.
- if self._Test('LLVM_LTO', 'YES', default='NO'):
- cflags.append('-flto')
-
- self._AppendPlatformVersionMinFlags(cflags)
-
- # TODO:
- if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
- self._WarnUnimplemented('COPY_PHASE_STRIP')
- self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
- self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
-
- # TODO: This is exported correctly, but assigning to it is not supported.
- self._WarnUnimplemented('MACH_O_TYPE')
- self._WarnUnimplemented('PRODUCT_TYPE')
-
- if arch is not None:
- archs = [arch]
- else:
- assert self.configname
- archs = self.GetActiveArchs(self.configname)
- if len(archs) != 1:
- # TODO: Supporting fat binaries will be annoying.
- self._WarnUnimplemented('ARCHS')
- archs = ['i386']
- cflags.append('-arch ' + archs[0])
-
- if archs[0] in ('i386', 'x86_64'):
- if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
- cflags.append('-msse3')
- if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
- default='NO'):
- cflags.append('-mssse3') # Note 3rd 's'.
- if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
- cflags.append('-msse4.1')
- if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
- cflags.append('-msse4.2')
-
- cflags += self._Settings().get('WARNING_CFLAGS', [])
-
- if sdk_root:
- framework_root = sdk_root
- else:
- framework_root = ''
- config = self.spec['configurations'][self.configname]
- framework_dirs = config.get('mac_framework_dirs', [])
- for directory in framework_dirs:
- cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
-
- self.configname = None
- return cflags
-
- def GetCflagsC(self, configname):
- """Returns flags that need to be added to .c, and .m compilations."""
- self.configname = configname
- cflags_c = []
- if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
- cflags_c.append('-ansi')
- else:
- self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
- cflags_c += self._Settings().get('OTHER_CFLAGS', [])
- self.configname = None
- return cflags_c
-
- def GetCflagsCC(self, configname):
- """Returns flags that need to be added to .cc, and .mm compilations."""
- self.configname = configname
- cflags_cc = []
-
- clang_cxx_language_standard = self._Settings().get(
- 'CLANG_CXX_LANGUAGE_STANDARD')
- # Note: Don't map c++0x to c++11 so that c++0x can be used with older
- # clangs that don't understand c++11 yet (like Xcode 4.2's).
- if clang_cxx_language_standard:
- cflags_cc.append('-std=%s' % clang_cxx_language_standard)
-
- self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
-
- if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
- cflags_cc.append('-fno-rtti')
- if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
- cflags_cc.append('-fno-exceptions')
- if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
- cflags_cc.append('-fvisibility-inlines-hidden')
- if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
- cflags_cc.append('-fno-threadsafe-statics')
- # Note: This flag is a no-op for clang, it only has an effect for gcc.
- if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
- cflags_cc.append('-Wno-invalid-offsetof')
-
- other_ccflags = []
-
- for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
- # TODO: More general variable expansion. Missing in many other places too.
- if flag in ('$inherited', '$(inherited)', '${inherited}'):
- flag = '$OTHER_CFLAGS'
- if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
- other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
- else:
- other_ccflags.append(flag)
- cflags_cc += other_ccflags
-
- self.configname = None
- return cflags_cc
-
- def _AddObjectiveCGarbageCollectionFlags(self, flags):
- gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
- if gc_policy == 'supported':
- flags.append('-fobjc-gc')
- elif gc_policy == 'required':
- flags.append('-fobjc-gc-only')
-
- def _AddObjectiveCARCFlags(self, flags):
- if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
- flags.append('-fobjc-arc')
-
- def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
- if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
- 'YES', default='NO'):
- flags.append('-Wobjc-missing-property-synthesis')
-
- def GetCflagsObjC(self, configname):
- """Returns flags that need to be added to .m compilations."""
- self.configname = configname
- cflags_objc = []
- self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
- self._AddObjectiveCARCFlags(cflags_objc)
- self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
- self.configname = None
- return cflags_objc
-
- def GetCflagsObjCC(self, configname):
- """Returns flags that need to be added to .mm compilations."""
- self.configname = configname
- cflags_objcc = []
- self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
- self._AddObjectiveCARCFlags(cflags_objcc)
- self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
- if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
- cflags_objcc.append('-fobjc-call-cxx-cdtors')
- self.configname = None
- return cflags_objcc
-
- def GetInstallNameBase(self):
- """Return DYLIB_INSTALL_NAME_BASE for this target."""
- # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
- if (self.spec['type'] != 'shared_library' and
- (self.spec['type'] != 'loadable_module' or self._IsBundle())):
- return None
- install_base = self.GetPerTargetSetting(
- 'DYLIB_INSTALL_NAME_BASE',
- default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
- return install_base
-
- def _StandardizePath(self, path):
- """Do :standardizepath processing for path."""
- # I'm not quite sure what :standardizepath does. Just call normpath(),
- # but don't let @executable_path/../foo collapse to foo.
- if '/' in path:
- prefix, rest = '', path
- if path.startswith('@'):
- prefix, rest = path.split('/', 1)
- rest = os.path.normpath(rest) # :standardizepath
- path = os.path.join(prefix, rest)
- return path
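- # E.g. '@loader_path/./../lib' becomes '@loader_path/../lib': the '@' prefix
- # is preserved while the remainder is normpath()ed.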
-
- def GetInstallName(self):
- """Return LD_DYLIB_INSTALL_NAME for this target."""
- # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
- if (self.spec['type'] != 'shared_library' and
- (self.spec['type'] != 'loadable_module' or self._IsBundle())):
- return None
-
- default_install_name = \
- '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
- install_name = self.GetPerTargetSetting(
- 'LD_DYLIB_INSTALL_NAME', default=default_install_name)
-
- # Hardcode support for the variables used in chromium for now, to
- # unblock people using the make build.
- if '$' in install_name:
- assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
- '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
- 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
- 'yet in target \'%s\' (got \'%s\')' %
- (self.spec['target_name'], install_name))
-
- install_name = install_name.replace(
- '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
- self._StandardizePath(self.GetInstallNameBase()))
- if self._IsBundle():
- # These are only valid for bundles, hence the |if|.
- install_name = install_name.replace(
- '$(WRAPPER_NAME)', self.GetWrapperName())
- install_name = install_name.replace(
- '$(PRODUCT_NAME)', self.GetProductName())
- else:
- assert '$(WRAPPER_NAME)' not in install_name
- assert '$(PRODUCT_NAME)' not in install_name
-
- install_name = install_name.replace(
- '$(EXECUTABLE_PATH)', self.GetExecutablePath())
- return install_name
-
- def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
- """Checks if ldflag contains a filename and if so remaps it from
- gyp-directory-relative to build-directory-relative."""
- # This list is expanded on demand.
- # They get matched as:
- # -exported_symbols_list file
- # -Wl,exported_symbols_list file
- # -Wl,exported_symbols_list,file
- LINKER_FILE = r'(\S+)'
- WORD = r'\S+'
- linker_flags = [
- ['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
- ['-unexported_symbols_list', LINKER_FILE],
- ['-reexported_symbols_list', LINKER_FILE],
- ['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
- ]
- for flag_pattern in linker_flags:
- regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
- m = regex.match(ldflag)
- if m:
- ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
- ldflag[m.end(1):]
- # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
- # TODO(thakis): Update ffmpeg.gyp):
- if ldflag.startswith('-L'):
- ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
- return ldflag
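- # E.g. in '-Wl,-exported_symbols_list,foo.syms' only 'foo.syms' is remapped
- # to a build-directory-relative path; the flag text itself is untouched.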
-
- def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
- """Returns flags that need to be passed to the linker.
-
- Args:
- configname: The name of the configuration to get ld flags for.
- product_dir: The directory where products such as static and dynamic
- libraries are placed. This is added to the library search path.
- gyp_to_build_path: A function that converts paths relative to the
- current gyp file to paths relative to the build directory.
- """
- self.configname = configname
- ldflags = []
-
- # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
- # can contain entries that depend on this. Explicitly absolutify these.
- for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
- ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
-
- if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
- ldflags.append('-Wl,-dead_strip')
-
- if self._Test('PREBINDING', 'YES', default='NO'):
- ldflags.append('-Wl,-prebind')
-
- self._Appendf(
- ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
- self._Appendf(
- ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
-
- self._AppendPlatformVersionMinFlags(ldflags)
-
- if 'SDKROOT' in self._Settings() and self._SdkPath():
- ldflags.append('-isysroot ' + self._SdkPath())
-
- for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
- ldflags.append('-L' + gyp_to_build_path(library_path))
-
- if 'ORDER_FILE' in self._Settings():
- ldflags.append('-Wl,-order_file ' +
- '-Wl,' + gyp_to_build_path(
- self._Settings()['ORDER_FILE']))
-
- if arch is not None:
- archs = [arch]
- else:
- assert self.configname
- archs = self.GetActiveArchs(self.configname)
- if len(archs) != 1:
- # TODO: Supporting fat binaries will be annoying.
- self._WarnUnimplemented('ARCHS')
- archs = ['i386']
- ldflags.append('-arch ' + archs[0])
-
- # Xcode adds the product directory by default.
- ldflags.append('-L' + product_dir)
-
- install_name = self.GetInstallName()
- if install_name and self.spec['type'] != 'loadable_module':
- ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
-
- for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
- ldflags.append('-Wl,-rpath,' + rpath)
-
- sdk_root = self._SdkPath()
- if not sdk_root:
- sdk_root = ''
- config = self.spec['configurations'][self.configname]
- framework_dirs = config.get('mac_framework_dirs', [])
- for directory in framework_dirs:
- ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
-
- is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension()
- if sdk_root and is_extension:
- # Add the link flags for extensions. These flags are common to all
- # extensions and provide the loader and main function.
- # They reflect the compilation options used by xcode to compile
- # extensions.
- ldflags.append('-lpkstart')
- if XcodeVersion() < '0900':
- ldflags.append(sdk_root +
- '/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
- ldflags.append('-fapplication-extension')
- ldflags.append('-Xlinker -rpath '
- '-Xlinker @executable_path/../../Frameworks')
-
- self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
-
- self.configname = None
- return ldflags
-
- def GetLibtoolflags(self, configname):
- """Returns flags that need to be passed to the static linker.
-
- Args:
- configname: The name of the configuration to get ld flags for.
- """
- self.configname = configname
- libtoolflags = []
-
- for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
- libtoolflags.append(libtoolflag)
- # TODO(thakis): ARCHS?
-
- self.configname = None
- return libtoolflags
-
- def GetPerTargetSettings(self):
- """Gets a list of all the per-target settings. This will only fetch keys
- whose values are the same across all configurations."""
- first_pass = True
- result = {}
- for configname in sorted(self.xcode_settings.keys()):
- if first_pass:
- result = dict(self.xcode_settings[configname])
- first_pass = False
- else:
- for key, value in self.xcode_settings[configname].iteritems():
- if key not in result:
- continue
- elif result[key] != value:
- del result[key]
- return result
-
- def GetPerConfigSetting(self, setting, configname, default=None):
- if configname in self.xcode_settings:
- return self.xcode_settings[configname].get(setting, default)
- else:
- return self.GetPerTargetSetting(setting, default)
-
- def GetPerTargetSetting(self, setting, default=None):
- """Tries to get xcode_settings.setting from spec. Assumes that the setting
- has the same value in all configurations and throws otherwise."""
- is_first_pass = True
- result = None
- for configname in sorted(self.xcode_settings.keys()):
- if is_first_pass:
- result = self.xcode_settings[configname].get(setting, None)
- is_first_pass = False
- else:
- assert result == self.xcode_settings[configname].get(setting, None), (
- "Expected per-target setting for '%s', got per-config setting "
- "(target %s)" % (setting, self.spec['target_name']))
- if result is None:
- return default
- return result
-
- def _GetStripPostbuilds(self, configname, output_binary, quiet):
- """Returns a list of shell commands that contain the shell commands
- neccessary to strip this target's binary. These should be run as postbuilds
- before the actual postbuilds run."""
- self.configname = configname
-
- result = []
- if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
- self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
-
- default_strip_style = 'debugging'
- if self.spec['type'] == 'loadable_module' and self._IsBundle():
- default_strip_style = 'non-global'
- elif self.spec['type'] == 'executable':
- default_strip_style = 'all'
-
- strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
- strip_flags = {
- 'all': '',
- 'non-global': '-x',
- 'debugging': '-S',
- }[strip_style]
-
- explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
- if explicit_strip_flags:
- strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
-
- if not quiet:
- result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
- result.append('strip %s %s' % (strip_flags, output_binary))
-
- self.configname = None
- return result
-
- def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
- """Returns a list of shell commands that contain the shell commands
- neccessary to massage this target's debug information. These should be run
- as postbuilds before the actual postbuilds run."""
- self.configname = configname
-
- # For static libraries, no dSYMs are created.
- result = []
- if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
- self._Test(
- 'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
- self.spec['type'] != 'static_library'):
- if not quiet:
- result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
- result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
-
- self.configname = None
- return result
-
- def _GetTargetPostbuilds(self, configname, output, output_binary,
- quiet=False):
- """Returns a list of shell commands that contain the shell commands
- to run as postbuilds for this target, before the actual postbuilds."""
- # dSYMs need to build before stripping happens.
- return (
- self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
- self._GetStripPostbuilds(configname, output_binary, quiet))
-
- def _GetIOSPostbuilds(self, configname, output_binary):
- """Return a shell command to codesign the iOS output binary so it can
- be deployed to a device. This should be run as the very last step of the
- build."""
- if not (self.isIOS and self.spec['type'] == 'executable'):
- return []
-
- settings = self.xcode_settings[configname]
- key = self._GetIOSCodeSignIdentityKey(settings)
- if not key:
- return []
-
- # Warn for any unimplemented signing xcode keys.
- unimpl = ['OTHER_CODE_SIGN_FLAGS']
- unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
- if unimpl:
- print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
- ', '.join(sorted(unimpl)))
-
- return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
- os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
- settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
- settings.get('CODE_SIGN_ENTITLEMENTS', ''),
- settings.get('PROVISIONING_PROFILE', ''))
- ]
-
- def _GetIOSCodeSignIdentityKey(self, settings):
- identity = settings.get('CODE_SIGN_IDENTITY')
- if not identity:
- return None
- if identity not in XcodeSettings._codesigning_key_cache:
- output = subprocess.check_output(
- ['security', 'find-identity', '-p', 'codesigning', '-v'])
- for line in output.splitlines():
- if identity in line:
- fingerprint = line.split()[1]
- cache = XcodeSettings._codesigning_key_cache
- assert identity not in cache or fingerprint == cache[identity], (
- "Multiple codesigning fingerprints for identity: %s" % identity)
- XcodeSettings._codesigning_key_cache[identity] = fingerprint
- return XcodeSettings._codesigning_key_cache.get(identity, '')
-
- def AddImplicitPostbuilds(self, configname, output, output_binary,
- postbuilds=[], quiet=False):
- """Returns a list of shell commands that should run before and after
- |postbuilds|."""
- assert output_binary is not None
- pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
- post = self._GetIOSPostbuilds(configname, output_binary)
- return pre + postbuilds + post
-
- def _AdjustLibrary(self, library, config_name=None):
- if library.endswith('.framework'):
- l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
- else:
- m = self.library_re.match(library)
- if m:
- l = '-l' + m.group(1)
- else:
- l = library
-
- sdk_root = self._SdkPath(config_name)
- if not sdk_root:
- sdk_root = ''
- # Xcode 7 started shipping with ".tbd" (text based stubs) files instead of
- # ".dylib" without providing a real support for them. What it does, for
- # "/usr/lib" libraries, is do "-L/usr/lib -lname" which is dependent on the
- # library order and cause collision when building Chrome.
- #
- # Instead substitude ".tbd" to ".dylib" in the generated project when the
- # following conditions are both true:
- # - library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
- # - the ".dylib" file does not exists but a ".tbd" file do.
- library = l.replace('$(SDKROOT)', sdk_root)
- if l.startswith('$(SDKROOT)'):
- basename, ext = os.path.splitext(library)
- if ext == '.dylib' and not os.path.exists(library):
- tbd_library = basename + '.tbd'
- if os.path.exists(tbd_library):
- library = tbd_library
- return library
-
- def AdjustLibraries(self, libraries, config_name=None):
- """Transforms entries like 'Cocoa.framework' in libraries into entries like
- '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
- """
- libraries = [self._AdjustLibrary(library, config_name)
- for library in libraries]
- return libraries
-
- def _BuildMachineOSBuild(self):
- return GetStdout(['sw_vers', '-buildVersion'])
-
- def _XcodeIOSDeviceFamily(self, configname):
- family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
- return [int(x) for x in family.split(',')]
-
- def GetExtraPlistItems(self, configname=None):
- """Returns a dictionary with extra items to insert into Info.plist."""
- if configname not in XcodeSettings._plist_cache:
- cache = {}
- cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
-
- xcode, xcode_build = XcodeVersion()
- cache['DTXcode'] = xcode
- cache['DTXcodeBuild'] = xcode_build
-
- sdk_root = self._SdkRoot(configname)
- if not sdk_root:
- sdk_root = self._DefaultSdkRoot()
- cache['DTSDKName'] = sdk_root
- if xcode >= '0430':
- cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductBuildVersion')
- else:
- cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
-
- if self.isIOS:
- cache['DTPlatformName'] = cache['DTSDKName']
- if configname.endswith("iphoneos"):
- cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
- sdk_root, 'ProductVersion')
- cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
- else:
- cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
- XcodeSettings._plist_cache[configname] = cache
-
- # Include extra plist items that are per-target, not per global
- # XcodeSettings.
- items = dict(XcodeSettings._plist_cache[configname])
- if self.isIOS:
- items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
- return items
-
- def _DefaultSdkRoot(self):
- """Returns the default SDKROOT to use.
-
- Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
- project, then the environment variable was empty. Starting with this
- version, Xcode uses the name of the newest SDK installed.
- """
- xcode_version, xcode_build = XcodeVersion()
- if xcode_version < '0500':
- return ''
- default_sdk_path = self._XcodeSdkPath('')
- default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
- if default_sdk_root:
- return default_sdk_root
- try:
- all_sdks = GetStdout(['xcodebuild', '-showsdks'])
- except:
- # If xcodebuild fails, there will be no valid SDKs
- return ''
- for line in all_sdks.splitlines():
- items = line.split()
- if len(items) >= 3 and items[-2] == '-sdk':
- sdk_root = items[-1]
- sdk_path = self._XcodeSdkPath(sdk_root)
- if sdk_path == default_sdk_path:
- return sdk_root
- return ''
-
-
-class MacPrefixHeader(object):
- """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
-
- This feature consists of several pieces:
- * If GCC_PREFIX_HEADER is present, all compilations in that project get an
- additional |-include path_to_prefix_header| cflag.
- * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
- instead compiled, and all other compilations in the project get an
- additional |-include path_to_compiled_header| instead.
-    + Compiled prefix headers have the extension gch. There is one gch file
-      for every language used in the project (c, cc, m, mm), since gch files
-      for different languages aren't compatible.
-    + gch files themselves are built with the target's normal cflags, but
-      they obviously don't get the |-include| flag. Instead, they need a -x
-      flag that describes their language.
-    + All o files in the target need to depend on the gch file, to make sure
-      it's built before any o file is built.
-
- This class helps with some of these tasks, but it needs help from the build
- system for writing dependencies to the gch files, for writing build commands
- for the gch files, and for figuring out the location of the gch files.
- """
- def __init__(self, xcode_settings,
- gyp_path_to_build_path, gyp_path_to_build_output):
- """If xcode_settings is None, all methods on this class are no-ops.
-
- Args:
- gyp_path_to_build_path: A function that takes a gyp-relative path,
- and returns a path relative to the build directory.
- gyp_path_to_build_output: A function that takes a gyp-relative path and
- a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
- to where the output of precompiling that path for that language
- should be placed (without the trailing '.gch').
- """
- # This doesn't support per-configuration prefix headers. Good enough
- # for now.
- self.header = None
- self.compile_headers = False
- if xcode_settings:
- self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
- self.compile_headers = xcode_settings.GetPerTargetSetting(
- 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
- self.compiled_headers = {}
- if self.header:
- if self.compile_headers:
- for lang in ['c', 'cc', 'm', 'mm']:
- self.compiled_headers[lang] = gyp_path_to_build_output(
- self.header, lang)
- self.header = gyp_path_to_build_path(self.header)
-
- def _CompiledHeader(self, lang, arch):
- assert self.compile_headers
- h = self.compiled_headers[lang]
- if arch:
- h += '.' + arch
- return h
-
- def GetInclude(self, lang, arch=None):
- """Gets the cflags to include the prefix header for language |lang|."""
- if self.compile_headers and lang in self.compiled_headers:
- return '-include %s' % self._CompiledHeader(lang, arch)
- elif self.header:
- return '-include %s' % self.header
- else:
- return ''
-
- def _Gch(self, lang, arch):
-    """Returns the file name of the compiled prefix header for language |lang|."""
- assert self.compile_headers
- return self._CompiledHeader(lang, arch) + '.gch'
-
- def GetObjDependencies(self, sources, objs, arch=None):
- """Given a list of source files and the corresponding object files, returns
- a list of (source, object, gch) tuples, where |gch| is the build-directory
-    relative path to the gch file each object file depends on. |sources[i]|
-    has to be the source file belonging to |objs[i]|."""
- if not self.header or not self.compile_headers:
- return []
-
- result = []
- for source, obj in zip(sources, objs):
- ext = os.path.splitext(source)[1]
- lang = {
- '.c': 'c',
- '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
- '.m': 'm',
- '.mm': 'mm',
- }.get(ext, None)
- if lang:
- result.append((source, obj, self._Gch(lang, arch)))
- return result
-
- def GetPchBuildCommands(self, arch=None):
- """Returns [(path_to_gch, language_flag, language, header)].
- |path_to_gch| and |header| are relative to the build directory.
- """
- if not self.header or not self.compile_headers:
- return []
- return [
- (self._Gch('c', arch), '-x c-header', 'c', self.header),
- (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
- (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
- (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
- ]
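-
-  # A minimal usage sketch (hypothetical lambdas, not part of gyp): a build
-  # system constructs one MacPrefixHeader per target and queries it, e.g.
-  #   pch = MacPrefixHeader(xcode_settings,
-  #                         lambda path: path,
-  #                         lambda path, lang: 'pchs/' + path + '.' + lang)
-  #   pch.GetInclude('cc')        # e.g. '-include pchs/<header>.cc'
-  #   pch.GetPchBuildCommands()   # build rules for the four .gch files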
-
-
-def XcodeVersion():
- """Returns a tuple of version and build version of installed Xcode."""
- # `xcodebuild -version` output looks like
- # Xcode 4.6.3
- # Build version 4H1503
- # or like
- # Xcode 3.2.6
- # Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
- # BuildVersion: 10M2518
- # Convert that to '0463', '4H1503'.
- global XCODE_VERSION_CACHE
- if XCODE_VERSION_CACHE:
- return XCODE_VERSION_CACHE
- try:
- version_list = GetStdout(['xcodebuild', '-version']).splitlines()
- # In some circumstances xcodebuild exits 0 but doesn't return
- # the right results; for example, a user on 10.7 or 10.8 with
-    # a bogus path set via xcode-select.
-    # In that case this may be a CLT-only install, so fall back to
- # checking that version.
- if len(version_list) < 2:
- raise GypError("xcodebuild returned unexpected results")
- except:
- version = CLTVersion()
- if version:
- version = re.match(r'(\d\.\d\.?\d*)', version).groups()[0]
- else:
- raise GypError("No Xcode or CLT version detected!")
- # The CLT has no build information, so we return an empty string.
- version_list = [version, '']
- version = version_list[0]
- build = version_list[-1]
- # Be careful to convert "4.2" to "0420":
- version = version.split()[-1].replace('.', '')
- version = (version + '0' * (3 - len(version))).zfill(4)
- if build:
- build = build.split()[-1]
- XCODE_VERSION_CACHE = (version, build)
- return XCODE_VERSION_CACHE
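-
-# For example (values illustrative): output of 'Xcode 4.6.3' with
-# 'Build version 4H1503' yields ('0463', '4H1503'); a two-component
-# version like '4.2' is padded to '0420' before use in comparisons.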
-
-
-# This function is ported from the logic in Homebrew's CLT version check.
-def CLTVersion():
- """Returns the version of command-line tools from pkgutil."""
- # pkgutil output looks like
- # package-id: com.apple.pkg.CLTools_Executables
- # version: 5.0.1.0.1.1382131676
- # volume: /
- # location: /
- # install-time: 1382544035
- # groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
- STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
- FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
- MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
-
- regex = re.compile('version: (?P<version>.+)')
- for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
- try:
- output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
- return re.search(regex, output).groupdict()['version']
- except:
- continue
-
-
-def GetStdout(cmdlist):
- """Returns the content of standard output returned by invoking |cmdlist|.
-  Raises |GypError| if the command exits with a non-zero return code."""
- job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
- out = job.communicate()[0]
- if job.returncode != 0:
- sys.stderr.write(out + '\n')
- raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
- return out.rstrip('\n')
-
-
-def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
- """Merges the global xcode_settings dictionary into each configuration of the
- target represented by spec. For keys that are both in the global and the local
-  xcode_settings dict, the local key gets precedence.
- """
- # The xcode generator special-cases global xcode_settings and does something
- # that amounts to merging in the global xcode_settings into each local
- # xcode_settings dict.
- global_xcode_settings = global_dict.get('xcode_settings', {})
- for config in spec['configurations'].values():
- if 'xcode_settings' in config:
- new_settings = global_xcode_settings.copy()
- new_settings.update(config['xcode_settings'])
- config['xcode_settings'] = new_settings
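-
-# For example (illustrative dicts): merging a global xcode_settings of
-# {'SDKROOT': 'macosx'} into a configuration that carries
-# {'SDKROOT': 'iphoneos'} leaves {'SDKROOT': 'iphoneos'}: local keys win.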
-
-
-def IsMacBundle(flavor, spec):
-  """Returns whether |spec| should be treated as a bundle.
-
-  Bundles are directories with a certain subdirectory structure, instead of
-  just a single file. Bundle rules not only produce a binary but also package
-  resources into that directory."""
- is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
- if is_mac_bundle:
- assert spec['type'] != 'none', (
- 'mac_bundle targets cannot have type none (target "%s")' %
- spec['target_name'])
- return is_mac_bundle
-
-
-def GetMacBundleResources(product_dir, xcode_settings, resources):
- """Yields (output, resource) pairs for every resource in |resources|.
- Only call this for mac bundle targets.
-
- Args:
- product_dir: Path to the directory containing the output bundle,
- relative to the build directory.
- xcode_settings: The XcodeSettings of the current target.
- resources: A list of bundle resources, relative to the build directory.
- """
- dest = os.path.join(product_dir,
- xcode_settings.GetBundleResourceFolder())
- for res in resources:
- output = dest
-
- # The make generator doesn't support it, so forbid it everywhere
-    # to keep the generators more interchangeable.
- assert ' ' not in res, (
- "Spaces in resource filenames not supported (%s)" % res)
-
- # Split into (path,file).
- res_parts = os.path.split(res)
-
- # Now split the path into (prefix,maybe.lproj).
- lproj_parts = os.path.split(res_parts[0])
- # If the resource lives in a .lproj bundle, add that to the destination.
- if lproj_parts[1].endswith('.lproj'):
- output = os.path.join(output, lproj_parts[1])
-
- output = os.path.join(output, res_parts[1])
- # Compiled XIB files are referred to by .nib.
- if output.endswith('.xib'):
- output = os.path.splitext(output)[0] + '.nib'
- # Compiled storyboard files are referred to by .storyboardc.
- if output.endswith('.storyboard'):
- output = os.path.splitext(output)[0] + '.storyboardc'
-
- yield output, res
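-
-# For example (paths illustrative): a resource 'en.lproj/MainMenu.xib' yields
-# the output '<product_dir>/<resource_folder>/en.lproj/MainMenu.nib': the
-# .lproj directory is preserved and the compiled .xib is referred to by .nib.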
-
-
-def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
- """Returns (info_plist, dest_plist, defines, extra_env), where:
- * |info_plist| is the source plist path, relative to the
- build directory,
- * |dest_plist| is the destination plist path, relative to the
- build directory,
- * |defines| is a list of preprocessor defines (empty if the plist
-      shouldn't be preprocessed),
- * |extra_env| is a dict of env variables that should be exported when
- invoking |mac_tool copy-info-plist|.
-
- Only call this for mac bundle targets.
-
- Args:
- product_dir: Path to the directory containing the output bundle,
- relative to the build directory.
- xcode_settings: The XcodeSettings of the current target.
-    gyp_path_to_build_path: A function that converts paths relative to the
-      current gyp file to paths relative to the build directory.
- """
- info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
- if not info_plist:
- return None, None, [], {}
-
- # The make generator doesn't support it, so forbid it everywhere
-  # to keep the generators more interchangeable.
- assert ' ' not in info_plist, (
- "Spaces in Info.plist filenames not supported (%s)" % info_plist)
-
- info_plist = gyp_path_to_build_path(info_plist)
-
- # If explicitly set to preprocess the plist, invoke the C preprocessor and
- # specify any defines as -D flags.
- if xcode_settings.GetPerTargetSetting(
- 'INFOPLIST_PREPROCESS', default='NO') == 'YES':
- # Create an intermediate file based on the path.
- defines = shlex.split(xcode_settings.GetPerTargetSetting(
- 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
- else:
- defines = []
-
- dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
- extra_env = xcode_settings.GetPerTargetSettings()
-
- return info_plist, dest_plist, defines, extra_env
-
-
-def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
- additional_settings=None):
- """Return the environment variables that Xcode would set. See
- http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
- for a full list.
-
- Args:
- xcode_settings: An XcodeSettings object. If this is None, this function
- returns an empty dict.
- built_products_dir: Absolute path to the built products dir.
- srcroot: Absolute path to the source root.
- configuration: The build configuration name.
- additional_settings: An optional dict with more values to add to the
- result.
- """
- if not xcode_settings: return {}
-
- # This function is considered a friend of XcodeSettings, so let it reach into
- # its implementation details.
- spec = xcode_settings.spec
-
-  # These are filled in on an as-needed basis.
- env = {
- 'BUILT_FRAMEWORKS_DIR' : built_products_dir,
- 'BUILT_PRODUCTS_DIR' : built_products_dir,
- 'CONFIGURATION' : configuration,
- 'PRODUCT_NAME' : xcode_settings.GetProductName(),
- # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
- 'SRCROOT' : srcroot,
- 'SOURCE_ROOT': '${SRCROOT}',
- # This is not true for static libraries, but currently the env is only
- # written for bundles:
- 'TARGET_BUILD_DIR' : built_products_dir,
- 'TEMP_DIR' : '${TMPDIR}',
- }
- if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
- env['SDKROOT'] = xcode_settings._SdkPath(configuration)
- else:
- env['SDKROOT'] = ''
-
- if spec['type'] in (
- 'executable', 'static_library', 'shared_library', 'loadable_module'):
- env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
- env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
- env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
- mach_o_type = xcode_settings.GetMachOType()
- if mach_o_type:
- env['MACH_O_TYPE'] = mach_o_type
- env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
- if xcode_settings._IsBundle():
- env['CONTENTS_FOLDER_PATH'] = \
- xcode_settings.GetBundleContentsFolderPath()
- env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
- xcode_settings.GetBundleResourceFolder()
- env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
- env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
-
- install_name = xcode_settings.GetInstallName()
- if install_name:
- env['LD_DYLIB_INSTALL_NAME'] = install_name
- install_name_base = xcode_settings.GetInstallNameBase()
- if install_name_base:
- env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
-  # XcodeVersion() returns a (version, build) tuple; compare only the version
-  # component: comparing the whole tuple to a string is always true under
-  # Python 2 and would defeat the check.
-  if XcodeVersion()[0] >= '0500' and not env.get('SDKROOT'):
- sdk_root = xcode_settings._SdkRoot(configuration)
- if not sdk_root:
- sdk_root = xcode_settings._XcodeSdkPath('')
- if sdk_root is None:
- sdk_root = ''
- env['SDKROOT'] = sdk_root
-
- if not additional_settings:
- additional_settings = {}
- else:
- # Flatten lists to strings.
- for k in additional_settings:
- if not isinstance(additional_settings[k], str):
- additional_settings[k] = ' '.join(additional_settings[k])
- additional_settings.update(env)
-
- for k in additional_settings:
- additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
-
- return additional_settings
-
-
-def _NormalizeEnvVarReferences(str):
- """Takes a string containing variable references in the form ${FOO}, $(FOO),
- or $FOO, and returns a string with all variable references in the form ${FOO}.
- """
- # $FOO -> ${FOO}
- str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
-
- # $(FOO) -> ${FOO}
- matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
- for match in matches:
- to_replace, variable = match
-    # |match| is a (to_replace, variable) tuple; assert on the captured
-    # variable name so the check (and its message) operate on a string.
-    assert '$(' not in variable, '$($(FOO)) variables not supported: ' + variable
- str = str.replace(to_replace, '${' + variable + '}')
-
- return str
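-
-# For example: _NormalizeEnvVarReferences('$FOO/$(BAR)/${BAZ}') returns
-# '${FOO}/${BAR}/${BAZ}'.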
-
-
-def ExpandEnvVars(string, expansions):
- """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
- expansions list. If the variable expands to something that references
- another variable, this variable is expanded as well if it's in env --
- until no variables present in env are left."""
- for k, v in reversed(expansions):
- string = string.replace('${' + k + '}', v)
- string = string.replace('$(' + k + ')', v)
- string = string.replace('$' + k, v)
- return string
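-
-# For example, with expansions [('A', '1'), ('B', '$(A)2')] (topologically
-# sorted, so 'B' may refer to 'A'), ExpandEnvVars('$B', expansions) first
-# turns '$B' into '$(A)2' and then '$(A)' into '1', yielding '12'.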
-
-
-def _TopologicallySortedEnvVarKeys(env):
- """Takes a dict |env| whose values are strings that can refer to other keys,
- for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
- env such that key2 is after key1 in L if env[key2] refers to env[key1].
-
- Throws an Exception in case of dependency cycles.
- """
- # Since environment variables can refer to other variables, the evaluation
- # order is important. Below is the logic to compute the dependency graph
- # and sort it.
- regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
- def GetEdges(node):
-    # Use a definition of edges such that user_of_variable -> used_variable.
- # This happens to be easier in this case, since a variable's
- # definition contains all variables it references in a single string.
- # We can then reverse the result of the topological sort at the end.
- # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
- matches = set([v for v in regex.findall(env[node]) if v in env])
- for dependee in matches:
- assert '${' not in dependee, 'Nested variables not supported: ' + dependee
- return matches
-
- try:
- # Topologically sort, and then reverse, because we used an edge definition
- # that's inverted from the expected result of this function (see comment
- # above).
- order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
- order.reverse()
- return order
- except gyp.common.CycleError, e:
- raise GypError(
- 'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
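-
-# For example: with env = {'FOO': 'a', 'BAR': '${FOO}/b'}, BAR refers to FOO,
-# so _TopologicallySortedEnvVarKeys(env) returns ['FOO', 'BAR'].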
-
-
-def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
- configuration, additional_settings=None):
- env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
- additional_settings)
- return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
-
-
-def GetSpecPostbuildCommands(spec, quiet=False):
- """Returns the list of postbuilds explicitly defined on |spec|, in a form
- executable by a shell."""
- postbuilds = []
- for postbuild in spec.get('postbuilds', []):
- if not quiet:
- postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
- spec['target_name'], postbuild['postbuild_name']))
- postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
- return postbuilds
-
-
-def _HasIOSTarget(targets):
- """Returns true if any target contains the iOS specific key
- IPHONEOS_DEPLOYMENT_TARGET."""
- for target_dict in targets.values():
- for config in target_dict['configurations'].values():
- if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
- return True
- return False
-
-
-def _AddIOSDeviceConfigurations(targets):
-  """Clone each configuration and add -iphoneos and -iphonesimulator variants.
-  Configure the -iphoneos variants to build for iOS devices with the correct
-  architectures."""
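-  # For example, a 'Debug' configuration is cloned into 'Debug-iphoneos'
-  # (with SDKROOT set to 'iphoneos' for the 'target' toolset), while the
-  # original dict is reused as 'Debug-iphonesimulator'.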
- for target_dict in targets.itervalues():
- toolset = target_dict['toolset']
- configs = target_dict['configurations']
- for config_name, config_dict in dict(configs).iteritems():
- iphoneos_config_dict = copy.deepcopy(config_dict)
- configs[config_name + '-iphoneos'] = iphoneos_config_dict
- configs[config_name + '-iphonesimulator'] = config_dict
- if toolset == 'target':
- iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
- return targets
-
-def CloneConfigurationForDeviceAndEmulator(target_dicts):
-  """If |target_dicts| contains any iOS targets, automatically create -iphoneos
-  configurations for iOS device builds."""
- if _HasIOSTarget(target_dicts):
- return _AddIOSDeviceConfigurations(target_dicts)
- return target_dicts
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
deleted file mode 100644
index 3820d6bf04..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright (c) 2014 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Xcode-ninja wrapper project file generator.
-
-This updates the data structures passed to the Xcode gyp generator to build
-with ninja instead. The Xcode project itself is transformed into a list of
-executable targets, each with a build step to build with ninja, and a target
-with every source and resource file. This appears to sidestep some of the
-major performance headaches experienced using complex projects and large number
-of targets within Xcode.
-"""
-
-import errno
-import gyp.generator.ninja
-import os
-import re
-import xml.sax.saxutils
-
-
-def _WriteWorkspace(main_gyp, sources_gyp, params):
- """ Create a workspace to wrap main and sources gyp paths. """
- (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
- workspace_path = build_file_root + '.xcworkspace'
- options = params['options']
- if options.generator_output:
- workspace_path = os.path.join(options.generator_output, workspace_path)
- try:
- os.makedirs(workspace_path)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
- output_string = '<?xml version="1.0" encoding="UTF-8"?>\n' + \
- '<Workspace version = "1.0">\n'
- for gyp_name in [main_gyp, sources_gyp]:
- name = os.path.splitext(os.path.basename(gyp_name))[0] + '.xcodeproj'
- name = xml.sax.saxutils.quoteattr("group:" + name)
- output_string += ' <FileRef location = %s></FileRef>\n' % name
- output_string += '</Workspace>\n'
-
- workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata")
-
- try:
- with open(workspace_file, 'r') as input_file:
- input_string = input_file.read()
- if input_string == output_string:
- return
- except IOError:
- # Ignore errors if the file doesn't exist.
- pass
-
- with open(workspace_file, 'w') as output_file:
- output_file.write(output_string)
-
-def _TargetFromSpec(old_spec, params):
- """ Create fake target for xcode-ninja wrapper. """
- # Determine ninja top level build dir (e.g. /path/to/out).
- ninja_toplevel = None
- jobs = 0
- if params:
- options = params['options']
- ninja_toplevel = \
- os.path.join(options.toplevel_dir,
- gyp.generator.ninja.ComputeOutputDir(params))
- jobs = params.get('generator_flags', {}).get('xcode_ninja_jobs', 0)
-
- target_name = old_spec.get('target_name')
- product_name = old_spec.get('product_name', target_name)
- product_extension = old_spec.get('product_extension')
-
- ninja_target = {}
- ninja_target['target_name'] = target_name
- ninja_target['product_name'] = product_name
- if product_extension:
- ninja_target['product_extension'] = product_extension
- ninja_target['toolset'] = old_spec.get('toolset')
- ninja_target['default_configuration'] = old_spec.get('default_configuration')
- ninja_target['configurations'] = {}
-
- # Tell Xcode to look in |ninja_toplevel| for build products.
- new_xcode_settings = {}
- if ninja_toplevel:
- new_xcode_settings['CONFIGURATION_BUILD_DIR'] = \
- "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel
-
- if 'configurations' in old_spec:
- for config in old_spec['configurations'].iterkeys():
- old_xcode_settings = \
- old_spec['configurations'][config].get('xcode_settings', {})
- if 'IPHONEOS_DEPLOYMENT_TARGET' in old_xcode_settings:
- new_xcode_settings['CODE_SIGNING_REQUIRED'] = "NO"
- new_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET'] = \
- old_xcode_settings['IPHONEOS_DEPLOYMENT_TARGET']
- ninja_target['configurations'][config] = {}
- ninja_target['configurations'][config]['xcode_settings'] = \
- new_xcode_settings
-
- ninja_target['mac_bundle'] = old_spec.get('mac_bundle', 0)
- ninja_target['ios_app_extension'] = old_spec.get('ios_app_extension', 0)
- ninja_target['ios_watchkit_extension'] = \
- old_spec.get('ios_watchkit_extension', 0)
- ninja_target['ios_watchkit_app'] = old_spec.get('ios_watchkit_app', 0)
- ninja_target['type'] = old_spec['type']
- if ninja_toplevel:
- ninja_target['actions'] = [
- {
- 'action_name': 'Compile and copy %s via ninja' % target_name,
- 'inputs': [],
- 'outputs': [],
- 'action': [
- 'env',
- 'PATH=%s' % os.environ['PATH'],
- 'ninja',
- '-C',
- new_xcode_settings['CONFIGURATION_BUILD_DIR'],
- target_name,
- ],
- 'message': 'Compile and copy %s via ninja' % target_name,
- },
- ]
- if jobs > 0:
- ninja_target['actions'][0]['action'].extend(('-j', jobs))
- return ninja_target
-
-def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
- """Limit targets for Xcode wrapper.
-
- Xcode sometimes performs poorly with too many targets, so only include
- proper executable targets, with filters to customize.
- Arguments:
-    target_extras: Regular expression; targets matching it are always included.
- executable_target_pattern: Regular expression limiting executable targets.
- spec: Specifications for target.
- """
- target_name = spec.get('target_name')
- # Always include targets matching target_extras.
- if target_extras is not None and re.search(target_extras, target_name):
- return True
-
- # Otherwise just show executable targets.
- if spec.get('type', '') == 'executable' and \
- spec.get('product_extension', '') != 'bundle':
-
- # If there is a filter and the target does not match, exclude the target.
- if executable_target_pattern is not None:
- if not re.search(executable_target_pattern, target_name):
- return False
- return True
- return False
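-
-# For example (illustrative): with executable_target_pattern '^chrome$' and
-# target_extras None, an executable target named 'chrome' is kept while
-# 'base_unittests' is filtered out.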
-
-def CreateWrapper(target_list, target_dicts, data, params):
- """Initialize targets for the ninja wrapper.
-
- This sets up the necessary variables in the targets to generate Xcode projects
- that use ninja as an external builder.
- Arguments:
- target_list: List of target pairs: 'base/base.gyp:base'.
- target_dicts: Dict of target properties keyed on target pair.
- data: Dict of flattened build files keyed on gyp path.
- params: Dict of global options for gyp.
- """
- orig_gyp = params['build_files'][0]
- for gyp_name, gyp_dict in data.iteritems():
- if gyp_name == orig_gyp:
- depth = gyp_dict['_DEPTH']
-
- # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE
- # and prepend .ninja before the .gyp extension.
- generator_flags = params.get('generator_flags', {})
- main_gyp = generator_flags.get('xcode_ninja_main_gyp', None)
- if main_gyp is None:
- (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
- main_gyp = build_file_root + ".ninja" + build_file_ext
-
- # Create new |target_list|, |target_dicts| and |data| data structures.
- new_target_list = []
- new_target_dicts = {}
- new_data = {}
-
- # Set base keys needed for |data|.
- new_data[main_gyp] = {}
- new_data[main_gyp]['included_files'] = []
- new_data[main_gyp]['targets'] = []
- new_data[main_gyp]['xcode_settings'] = \
- data[orig_gyp].get('xcode_settings', {})
-
- # Normally the xcode-ninja generator includes only valid executable targets.
- # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
- # executable targets that match the pattern. (Default all)
- executable_target_pattern = \
- generator_flags.get('xcode_ninja_executable_target_pattern', None)
-
- # For including other non-executable targets, add the matching target name
- # to the |xcode_ninja_target_pattern| regular expression. (Default none)
- target_extras = generator_flags.get('xcode_ninja_target_pattern', None)
-
- for old_qualified_target in target_list:
- spec = target_dicts[old_qualified_target]
- if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
- # Add to new_target_list.
- target_name = spec.get('target_name')
- new_target_name = '%s:%s#target' % (main_gyp, target_name)
- new_target_list.append(new_target_name)
-
- # Add to new_target_dicts.
- new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
-
- # Add to new_data.
- for old_target in data[old_qualified_target.split(':')[0]]['targets']:
- if old_target['target_name'] == target_name:
- new_data_target = {}
- new_data_target['target_name'] = old_target['target_name']
- new_data_target['toolset'] = old_target['toolset']
- new_data[main_gyp]['targets'].append(new_data_target)
-
- # Create sources target.
- sources_target_name = 'sources_for_indexing'
- sources_target = _TargetFromSpec(
- { 'target_name' : sources_target_name,
- 'toolset': 'target',
- 'default_configuration': 'Default',
- 'mac_bundle': '0',
- 'type': 'executable'
- }, None)
-
- # Tell Xcode to look everywhere for headers.
- sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
-
- sources = []
- for target, target_dict in target_dicts.iteritems():
- base = os.path.dirname(target)
- files = target_dict.get('sources', []) + \
- target_dict.get('mac_bundle_resources', [])
- for action in target_dict.get('actions', []):
- files.extend(action.get('inputs', []))
- # Remove files starting with $. These are mostly intermediate files for the
- # build system.
- files = [ file for file in files if not file.startswith('$')]
-
- # Make sources relative to root build file.
- relative_path = os.path.dirname(main_gyp)
- sources += [ os.path.relpath(os.path.join(base, file), relative_path)
- for file in files ]
-
- sources_target['sources'] = sorted(set(sources))
-
-  # Put sources_to_index in its own gyp.
- sources_gyp = \
- os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp")
- fully_qualified_target_name = \
- '%s:%s#target' % (sources_gyp, sources_target_name)
-
- # Add to new_target_list, new_target_dicts and new_data.
- new_target_list.append(fully_qualified_target_name)
- new_target_dicts[fully_qualified_target_name] = sources_target
- new_data_target = {}
- new_data_target['target_name'] = sources_target['target_name']
- new_data_target['_DEPTH'] = depth
- new_data_target['toolset'] = "target"
- new_data[sources_gyp] = {}
- new_data[sources_gyp]['targets'] = []
- new_data[sources_gyp]['included_files'] = []
- new_data[sources_gyp]['xcode_settings'] = \
- data[orig_gyp].get('xcode_settings', {})
- new_data[sources_gyp]['targets'].append(new_data_target)
-
- # Write workspace to file.
- _WriteWorkspace(main_gyp, sources_gyp, params)
- return (new_target_list, new_target_dicts, new_data)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
deleted file mode 100644
index d08b7f7770..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
+++ /dev/null
@@ -1,2927 +0,0 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Xcode project file generator.
-
-This module is both an Xcode project file generator and documentation of the
-Xcode project file format. Knowledge of the project file format was gained
-based on extensive experience with Xcode, and by making changes to projects in
-Xcode.app and observing the resultant changes in the associated project files.
-
-XCODE PROJECT FILES
-
-The generator targets the file format as written by Xcode 3.2 (specifically,
-3.2.6), but past experience has taught that the format has not changed
-significantly in the past several years, and future versions of Xcode are able
-to read older project files.
-
-Xcode project files are "bundled": the project "file" from an end-user's
-perspective is actually a directory with an ".xcodeproj" extension. The
-project file from this module's perspective is actually a file inside this
-directory, always named "project.pbxproj". This file contains a complete
-description of the project and is all that is needed to use the xcodeproj.
-Other files contained in the xcodeproj directory are simply used to store
-per-user settings, such as the state of various UI elements in the Xcode
-application.
-
-The project.pbxproj file is a property list, stored in a format almost
-identical to the NeXTstep property list format. The file is able to carry
-Unicode data, and is encoded in UTF-8. The root element in the property list
-is a dictionary that contains several properties of minimal interest, and two
-properties of immense interest. The most important property is a dictionary
-named "objects". The entire structure of the project is represented by the
-children of this property. The objects dictionary is keyed by unique 96-bit
-values represented by 24 uppercase hexadecimal characters. Each value in the
-objects dictionary is itself a dictionary, describing an individual object.
-
-Each object in the dictionary is a member of a class, which is identified by
-the "isa" property of each object. A variety of classes are represented in a
-project file. Objects can refer to other objects by ID, using the 24-character
-hexadecimal object key. A project's objects form a tree, with a root object
-of class PBXProject at the root. As an example, the PBXProject object serves
-as parent to an XCConfigurationList object defining the build configurations
-used in the project, a PBXGroup object serving as a container for all files
-referenced in the project, and a list of target objects, each of which defines
-a target in the project. There are several different types of target object,
-such as PBXNativeTarget and PBXAggregateTarget. In this module, this
-relationship is expressed by having each target type derive from an abstract
-base named XCTarget.
-
-The project.pbxproj file's root dictionary also contains a property, sibling to
-the "objects" dictionary, named "rootObject". The value of rootObject is a
-24-character object key referring to the root PBXProject object in the
-objects dictionary.
-
-In Xcode, every file used as input to a target or produced as a final product
-of a target must appear somewhere in the hierarchy rooted at the PBXGroup
-object referenced by the PBXProject's mainGroup property. A PBXGroup is
-generally represented as a folder in the Xcode application. PBXGroups can
-contain other PBXGroups as well as PBXFileReferences, which are pointers to
-actual files.
-
-Each XCTarget contains a list of build phases, represented in this module by
-the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
-are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
-"Compile Sources" and "Link Binary With Libraries" phases displayed in the
-Xcode application. Files used as input to these phases (for example, source
-files in the former case and libraries and frameworks in the latter) are
-represented by PBXBuildFile objects, referenced by elements of "files" lists
-in XCBuildPhase objects. Each PBXBuildFile object refers to a PBXFileReference
-object as a "weak" reference: it does not "own" the PBXFileReference, which is
-owned by the root object's mainGroup or a descendant group. In most cases, the
-layer of indirection between an XCBuildPhase and a PBXFileReference via a
-PBXBuildFile appears extraneous, but there's actually one reason for this:
-file-specific compiler flags are added to the PBXBuildFile object so as to
-allow a single file to be a member of multiple targets while having distinct
-compiler flags for each. These flags can be modified in the Xcode application
-in the "Build" tab of a File Info window.
-
-When a project is open in the Xcode application, Xcode will rewrite it. As
-such, this module is careful to adhere to the formatting used by Xcode, to
-avoid insignificant changes appearing in the file when it is used in the
-Xcode application. This will keep version control repositories happy, and
-makes it possible to compare a project file used in Xcode to one generated by
-this module to determine if any significant changes were made in the
-application.
-
-Xcode has its own way of assigning 24-character identifiers to each object,
-which is not duplicated here. Because the identifier is only generated
-once, when an object is created, and is then left unchanged, there is no need
-to attempt to duplicate Xcode's behavior in this area. The generator is free
-to select any identifier, even at random, to refer to the objects it creates,
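-      # Each matching output line looks roughly like (values illustrative):
-      #   1) 0123456789ABCDEF0123456789ABCDEF01234567 "iPhone Developer: ..."
-      # so the second whitespace-separated field is the hex fingerprint.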
-and Xcode will retain those identifiers and use them when subsequently
-rewriting the project file. However, the generator would choose new random
-identifiers each time the project files are generated, leading to difficulties
-comparing "used" project files to "pristine" ones produced by this module,
-and causing the appearance of changes as every object identifier is changed
-when updated projects are checked in to a version control repository. To
-mitigate this problem, this module chooses identifiers in a more deterministic
-way, by hashing a description of each object as well as its parent and ancestor
-objects. This strategy should result in minimal "shift" in IDs as successive
-generations of project files are produced.
-
-THIS MODULE
-
-This module introduces several classes, all derived from the XCObject class.
-Nearly all of the "brains" are built into the XCObject class, which understands
-how to create and modify objects, maintain the proper tree structure, compute
-identifiers, and print objects. For the most part, classes derived from
-XCObject need only provide a _schema class object, a dictionary that
-expresses what properties objects of the class may contain.
-
-Given this structure, it's possible to build a minimal project file by creating
-objects of the appropriate types and making the proper connections:
-
- config_list = XCConfigurationList()
- group = PBXGroup()
- project = PBXProject({'buildConfigurationList': config_list,
- 'mainGroup': group})
-
-With the project object set up, it can be added to an XCProjectFile object.
-XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject
-subclass that does not actually correspond to a class type found in a project
-file. Rather, it is used to represent the project file's root dictionary.
-Printing an XCProjectFile will print the entire project file, including the
-full "objects" dictionary.
-
- project_file = XCProjectFile({'rootObject': project})
- project_file.ComputeIDs()
- project_file.Print()
-
-Xcode project files are always encoded in UTF-8. This module will accept
-strings of either the str class or the unicode class. Strings of class str
-are assumed to already be encoded in UTF-8. Obviously, if you're just using
-ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset.
-Strings of class unicode are handled properly and encoded in UTF-8 when
-a project file is output.
-"""
-
-import gyp.common
-import posixpath
-import re
-import struct
-import sys
-
-# hashlib is supplied as of Python 2.5 as the replacement interface for sha
-# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if
-# available, avoiding a deprecation warning under 2.6. Import sha otherwise,
-# preserving 2.4 compatibility.
-try:
- import hashlib
- _new_sha1 = hashlib.sha1
-except ImportError:
- import sha
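-    # TARGETED_DEVICE_FAMILY is a comma-separated list, e.g. '1,2' for
-    # iPhone and iPad; convert it to a list of ints for UIDeviceFamily.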
- _new_sha1 = sha.new
-
-
-# See XCObject._EncodeString. This pattern is used to determine when a string
-# can be printed unquoted. Strings that match this pattern may be printed
-# unquoted. Strings that do not match must be quoted and may be further
-# transformed to be properly encoded. Note that this expression matches the
-# characters listed with "+", for 1 or more occurrences: if a string is empty,
-# it must not match this pattern, because it needs to be encoded as "".
-_unquoted = re.compile('^[A-Za-z0-9$./_]+$')
-
-# Strings that match this pattern are quoted regardless of what _unquoted says.
-# Oddly, Xcode will quote any string with a run of three or more underscores.
-_quoted = re.compile('___')
-
-# This pattern should match any character that needs to be escaped by
-# XCObject._EncodeString. See that function.
-_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
-
-
-# Used by SourceTreeAndPathFromPath
-_path_leading_variable = re.compile(r'^\$\((.*?)\)(/(.*))?$')
-
-def SourceTreeAndPathFromPath(input_path):
- """Given input_path, returns a tuple with sourceTree and path values.
-
- Examples:
- input_path (source_tree, output_path)
- '$(VAR)/path' ('VAR', 'path')
- '$(VAR)' ('VAR', None)
- 'path' (None, 'path')
- """
-
- source_group_match = _path_leading_variable.match(input_path)
- if source_group_match:
- source_tree = source_group_match.group(1)
- output_path = source_group_match.group(3) # This may be None.
- else:
- source_tree = None
- output_path = input_path
-
- return (source_tree, output_path)
-
-def ConvertVariablesToShellSyntax(input_string):
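-  # e.g. '$(BUILT_PRODUCTS_DIR)/lib' -> '${BUILT_PRODUCTS_DIR}/lib'.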
- return re.sub(r'\$\((.*?)\)', '${\\1}', input_string)
-
-class XCObject(object):
- """The abstract base of all class types used in Xcode project files.
-
- Class variables:
- _schema: A dictionary defining the properties of this class. The keys to
- _schema are string property keys as used in project files. Values
- are a list of four or five elements:
- [ is_list, property_type, is_strong, is_required, default ]
- is_list: True if the property described is a list, as opposed
- to a single element.
- property_type: The type to use as the value of the property,
- or if is_list is True, the type to use for each
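-    # Lines from `xcodebuild -showsdks` end like (illustrative):
-    #   ... -sdk macosx10.8
-    # and the token following '-sdk' is the SDK root name.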
- element of the value's list. property_type must
- be an XCObject subclass, or one of the built-in
- types str, int, or dict.
- is_strong: If property_type is an XCObject subclass, is_strong
- is True to assert that this class "owns," or serves
- as parent, to the property value (or, if is_list is
- True, values). is_strong must be False if
- property_type is not an XCObject subclass.
- is_required: True if the property is required for the class.
- Note that is_required being True does not preclude
- an empty string ("", in the case of property_type
- str) or list ([], in the case of is_list True) from
- being set for the property.
-          default: Optional. If is_required is True, default may be set
- to provide a default value for objects that do not supply
- their own value. If is_required is True and default
- is not provided, users of the class must supply their own
- value for the property.
- Note that although the values of the array are expressed in
- boolean terms, subclasses provide values as integers to conserve
- horizontal space.
- _should_print_single_line: False in XCObject. Subclasses whose objects
- should be written to the project file in the
- alternate single-line format, such as
- PBXFileReference and PBXBuildFile, should
- set this to True.
- _encode_transforms: Used by _EncodeString to encode unprintable characters.
- The index into this list is the ordinal of the
- character to transform; each value is a string
- used to represent the character in the output. XCObject
- provides an _encode_transforms list suitable for most
- XCObject subclasses.
- _alternate_encode_transforms: Provided for subclasses that wish to use
- the alternate encoding rules. Xcode seems
- to use these rules when printing objects in
- single-line format. Subclasses that desire
- this behavior should set _encode_transforms
- to _alternate_encode_transforms.
- _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
- to construct this object's ID. Most classes that need custom
- hashing behavior should do it by overriding Hashables,
- but in some cases an object's parent may wish to push a
- hashable value into its child, and it can do so by appending
- to _hashables.
- Attributes:
- id: The object's identifier, a 24-character uppercase hexadecimal string.
- Usually, objects being created should not set id until the entire
-       project file structure is built. At that point, ComputeIDs() should
- be called on the root object to assign deterministic values for id to
- each object in the tree.
- parent: The object's parent. This is set by a parent XCObject when a child
- object is added to it.
- _properties: The object's property dictionary. An object's properties are
- described by its class' _schema variable.
- """
-
- _schema = {}
- _should_print_single_line = False
-
- # See _EncodeString.
- _encode_transforms = []
- i = 0
- while i < ord(' '):
- _encode_transforms.append('\\U%04x' % i)
- i = i + 1
- _encode_transforms[7] = '\\a'
- _encode_transforms[8] = '\\b'
- _encode_transforms[9] = '\\t'
- _encode_transforms[10] = '\\n'
- _encode_transforms[11] = '\\v'
- _encode_transforms[12] = '\\f'
- _encode_transforms[13] = '\\n'
-
- _alternate_encode_transforms = list(_encode_transforms)
- _alternate_encode_transforms[9] = chr(9)
- _alternate_encode_transforms[10] = chr(10)
- _alternate_encode_transforms[11] = chr(11)
-
- def __init__(self, properties=None, id=None, parent=None):
- self.id = id
- self.parent = parent
- self._properties = {}
- self._hashables = []
- self._SetDefaultsFromSchema()
- self.UpdateProperties(properties)
-
- def __repr__(self):
- try:
- name = self.Name()
- except NotImplementedError:
- return '<%s at 0x%x>' % (self.__class__.__name__, id(self))
- return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
- def Copy(self):
- """Make a copy of this object.
-
- The new object will have its own copy of lists and dicts. Any XCObject
- objects owned by this object (marked "strong") will be copied in the
- new object, even those found in lists. If this object has any weak
- references to other XCObjects, the same references are added to the new
- object without making a copy.
- """
-
- that = self.__class__(id=self.id, parent=self.parent)
- for key, value in self._properties.iteritems():
- is_strong = self._schema[key][2]
-
- if isinstance(value, XCObject):
- if is_strong:
- new_value = value.Copy()
- new_value.parent = that
- that._properties[key] = new_value
- else:
- that._properties[key] = value
- elif isinstance(value, str) or isinstance(value, unicode) or \
- isinstance(value, int):
- that._properties[key] = value
- elif isinstance(value, list):
- if is_strong:
- # If is_strong is True, each element is an XCObject, so it's safe to
- # call Copy.
- that._properties[key] = []
- for item in value:
- new_item = item.Copy()
- new_item.parent = that
- that._properties[key].append(new_item)
- else:
- that._properties[key] = value[:]
- elif isinstance(value, dict):
- # dicts are never strong.
- if is_strong:
- raise TypeError('Strong dict for key ' + key + ' in ' + \
- self.__class__.__name__)
- else:
- that._properties[key] = value.copy()
- else:
- raise TypeError('Unexpected type ' + value.__class__.__name__ + \
- ' for key ' + key + ' in ' + self.__class__.__name__)
-
- return that
-
- def Name(self):
- """Return the name corresponding to an object.
-
- Not all objects necessarily need to be nameable, and not all that do have
- a "name" property. Override as needed.
- """
-
- # If the schema indicates that "name" is required, try to access the
- # property even if it doesn't exist. This will result in a KeyError
- # being raised for the property that should be present, which seems more
- # appropriate than NotImplementedError in this case.
- if 'name' in self._properties or \
- ('name' in self._schema and self._schema['name'][3]):
- return self._properties['name']
-
- raise NotImplementedError(self.__class__.__name__ + ' must implement Name')
-
- def Comment(self):
- """Return a comment string for the object.
-
- Most objects just use their name as the comment, but PBXProject uses
- different values.
-
- The returned comment is not escaped and does not have any comment marker
- strings applied to it.
- """
-
- return self.Name()
-
- def Hashables(self):
- hashables = [self.__class__.__name__]
-
- name = self.Name()
- if name != None:
- hashables.append(name)
-
- hashables.extend(self._hashables)
-
- return hashables
-
- def HashablesForChild(self):
- return None
-
- def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
- """Set "id" properties deterministically.
-
- An object's "id" property is set based on a hash of its class type and
- name, as well as the class type and name of all ancestor objects. As
- such, it is only advisable to call ComputeIDs once an entire project file
- tree is built.
-
- If recursive is True, recurse into all descendant objects and update their
- hashes.
-
- If overwrite is True, any existing value set in the "id" property will be
- replaced.
- """
-
- def _HashUpdate(hash, data):
- """Update hash with data's length and contents.
-
- If the hash were updated only with the value of data, it would be
- possible for clowns to induce collisions by manipulating the names of
-      their objects. By adding the length, it's far less likely that ID
-      collisions will be encountered, intentionally or not.
- """
-
- hash.update(struct.pack('>i', len(data)))
- hash.update(data)
-
- if seed_hash is None:
- seed_hash = _new_sha1()
-
- hash = seed_hash.copy()
-
- hashables = self.Hashables()
- assert len(hashables) > 0
- for hashable in hashables:
- _HashUpdate(hash, hashable)
-
- if recursive:
- hashables_for_child = self.HashablesForChild()
- if hashables_for_child is None:
- child_hash = hash
- else:
- assert len(hashables_for_child) > 0
- child_hash = seed_hash.copy()
- for hashable in hashables_for_child:
- _HashUpdate(child_hash, hashable)
-
- for child in self.Children():
- child.ComputeIDs(recursive, overwrite, child_hash)
-
- if overwrite or self.id is None:
-      # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest
-      # is 160 bits. Instead of throwing out 64 bits of the digest, xor them
- # into the portion that gets used.
- assert hash.digest_size % 4 == 0
- digest_int_count = hash.digest_size / 4
- digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest())
- id_ints = [0, 0, 0]
- for index in xrange(0, digest_int_count):
- id_ints[index % 3] ^= digest_ints[index]
- self.id = '%08X%08X%08X' % tuple(id_ints)
-
- def EnsureNoIDCollisions(self):
- """Verifies that no two objects have the same ID. Checks all descendants.
- """
-
- ids = {}
- descendants = self.Descendants()
- for descendant in descendants:
- if descendant.id in ids:
- other = ids[descendant.id]
- raise KeyError(
- 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \
- (descendant.id, str(descendant._properties),
- str(other._properties), self._properties['rootObject'].Name()))
- ids[descendant.id] = descendant
-
- def Children(self):
- """Returns a list of all of this object's owned (strong) children."""
-
- children = []
- for property, attributes in self._schema.iteritems():
- (is_list, property_type, is_strong) = attributes[0:3]
- if is_strong and property in self._properties:
- if not is_list:
- children.append(self._properties[property])
- else:
- children.extend(self._properties[property])
- return children
-
- def Descendants(self):
- """Returns a list of all of this object's descendants, including this
- object.
- """
-
- children = self.Children()
- descendants = [self]
- for child in children:
- descendants.extend(child.Descendants())
- return descendants
-
- def PBXProjectAncestor(self):
- # The base case for recursion is defined at PBXProject.PBXProjectAncestor.
- if self.parent:
- return self.parent.PBXProjectAncestor()
- return None
-
- def _EncodeComment(self, comment):
-    """Encodes a comment to be placed in the project file output, mimicking
- Xcode behavior.
- """
-
- # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If
- # the string already contains a "*/", it is turned into "(*)/". This keeps
- # the file writer from outputting something that would be treated as the
- # end of a comment in the middle of something intended to be entirely a
- # comment.
-
- return '/* ' + comment.replace('*/', '(*)/') + ' */'
-
- def _EncodeTransform(self, match):
- # This function works closely with _EncodeString. It will only be called
- # by re.sub with match.group(0) containing a character matched by the
-    # _escaped expression.
- char = match.group(0)
-
- # Backslashes (\) and quotation marks (") are always replaced with a
- # backslash-escaped version of the same. Everything else gets its
- # replacement from the class' _encode_transforms array.
- if char == '\\':
- return '\\\\'
- if char == '"':
- return '\\"'
- return self._encode_transforms[ord(char)]
-
- def _EncodeString(self, value):
-    """Encodes a string to be placed in the project file output, mimicking
- Xcode behavior.
- """
-
- # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
- # $ (dollar sign), . (period), and _ (underscore) is present. Also use
- # quotation marks to represent empty strings.
- #
- # Escape " (double-quote) and \ (backslash) by preceding them with a
- # backslash.
- #
- # Some characters below the printable ASCII range are encoded specially:
- # 7 ^G BEL is encoded as "\a"
- # 8 ^H BS is encoded as "\b"
- # 11 ^K VT is encoded as "\v"
- # 12 ^L NP is encoded as "\f"
- # 127 ^? DEL is passed through as-is without escaping
- # - In PBXFileReference and PBXBuildFile objects:
- # 9 ^I HT is passed through as-is without escaping
- # 10 ^J NL is passed through as-is without escaping
- # 13 ^M CR is passed through as-is without escaping
- # - In other objects:
- # 9 ^I HT is encoded as "\t"
- # 10 ^J NL is encoded as "\n"
- # 13 ^M CR is encoded as "\n" rendering it indistinguishable from
- # 10 ^J NL
- # All other characters within the ASCII control character range (0 through
- # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
- # in hexadecimal. For example, character 14 (^N SO) is encoded as "\U000e".
- # Characters above the ASCII range are passed through to the output encoded
- # as UTF-8 without any escaping. These mappings are contained in the
- # class' _encode_transforms list.
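-    #
-    # For example (illustrative): 'Class.m' matches _unquoted and is returned
-    # unquoted; 'foo bar' becomes "foo bar"; 'say "hi"' becomes "say \"hi\"";
-    # and the empty string becomes "".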
-
- if _unquoted.search(value) and not _quoted.search(value):
- return value
-
- return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
-
- def _XCPrint(self, file, tabs, line):
- file.write('\t' * tabs + line)
-
- def _XCPrintableValue(self, tabs, value, flatten_list=False):
- """Returns a representation of value that may be printed in a project file,
-    mimicking Xcode's behavior.
-
- _XCPrintableValue can handle str and int values, XCObjects (which are
- made printable by returning their id property), and list and dict objects
- composed of any of the above types. When printing a list or dict, and
- _should_print_single_line is False, the tabs parameter is used to determine
- how much to indent the lines corresponding to the items in the list or
- dict.
-
- If flatten_list is True, single-element lists will be transformed into
- strings.
- """
-
- printable = ''
- comment = None
-
- if self._should_print_single_line:
- sep = ' '
- element_tabs = ''
- end_tabs = ''
- else:
- sep = '\n'
- element_tabs = '\t' * (tabs + 1)
- end_tabs = '\t' * tabs
-
- if isinstance(value, XCObject):
- printable += value.id
- comment = value.Comment()
- elif isinstance(value, str):
- printable += self._EncodeString(value)
- elif isinstance(value, unicode):
- printable += self._EncodeString(value.encode('utf-8'))
- elif isinstance(value, int):
- printable += str(value)
- elif isinstance(value, list):
- if flatten_list and len(value) <= 1:
- if len(value) == 0:
- printable += self._EncodeString('')
- else:
- printable += self._EncodeString(value[0])
- else:
- printable = '(' + sep
- for item in value:
- printable += element_tabs + \
- self._XCPrintableValue(tabs + 1, item, flatten_list) + \
- ',' + sep
- printable += end_tabs + ')'
- elif isinstance(value, dict):
- printable = '{' + sep
- for item_key, item_value in sorted(value.iteritems()):
- printable += element_tabs + \
- self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \
- self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \
- sep
- printable += end_tabs + '}'
- else:
- raise TypeError("Can't make " + value.__class__.__name__ + ' printable')
-
- if comment != None:
- printable += ' ' + self._EncodeComment(comment)
-
- return printable
-
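- # Sketch of the multi-line form described above for a dict at tabs=1, with
- # indentation tabs shown here as spaces (the single-line form uses spaces
- # instead of newlines and tabs):
- # {
- #   name = "My Sources";
- #   path = src;
- # }
- 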
- def _XCKVPrint(self, file, tabs, key, value):
- """Prints a key and value, members of an XCObject's _properties dictionary,
- to file.
-
- tabs is an int identifying the indentation level. If the class'
- _should_print_single_line variable is True, tabs is ignored and the
- key-value pair will be followed by a space instead of a newline.
- """
-
- if self._should_print_single_line:
- printable = ''
- after_kv = ' '
- else:
- printable = '\t' * tabs
- after_kv = '\n'
-
- # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
- # objects without comments. Sometimes it prints them with comments, but
- # the majority of the time, it doesn't. To avoid unnecessary changes to
- # the project file after Xcode opens it, don't write comments for
- # remoteGlobalIDString. This is a sucky hack and it would certainly be
- # cleaner to extend the schema to indicate whether or not a comment should
- # be printed, but since this is the only case where the problem occurs and
- # Xcode itself can't seem to make up its mind, the hack will suffice.
- #
- # Also see PBXContainerItemProxy._schema['remoteGlobalIDString'].
- if key == 'remoteGlobalIDString' and isinstance(self,
- PBXContainerItemProxy):
- value_to_print = value.id
- else:
- value_to_print = value
-
- # PBXBuildFile's settings property is represented in the output as a dict,
- # but a hack here has it represented as a string. Arrange to strip off the
- # quotes so that it shows up in the output as expected.
- if key == 'settings' and isinstance(self, PBXBuildFile):
- strip_value_quotes = True
- else:
- strip_value_quotes = False
-
- # In another one-off, let's set flatten_list on buildSettings properties
- # of XCBuildConfiguration objects, because that's how Xcode treats them.
- if key == 'buildSettings' and isinstance(self, XCBuildConfiguration):
- flatten_list = True
- else:
- flatten_list = False
-
- try:
- printable_key = self._XCPrintableValue(tabs, key, flatten_list)
- printable_value = self._XCPrintableValue(tabs, value_to_print,
- flatten_list)
- if strip_value_quotes and len(printable_value) > 1 and \
- printable_value[0] == '"' and printable_value[-1] == '"':
- printable_value = printable_value[1:-1]
- printable += printable_key + ' = ' + printable_value + ';' + after_kv
- except TypeError, e:
- gyp.common.ExceptionAppend(e,
- 'while printing key "%s"' % key)
- raise
-
- self._XCPrint(file, 0, printable)
-
- def Print(self, file=sys.stdout):
- """Prints a reprentation of this object to file, adhering to Xcode output
- formatting.
- """
-
- self.VerifyHasRequiredProperties()
-
- if self._should_print_single_line:
- # When printing an object in a single line, Xcode doesn't put any space
- # between the beginning of a dictionary (or presumably a list) and the
- # first contained item, so you wind up with snippets like
- # ...CDEF = {isa = PBXFileReference; fileRef = 0123...
- # If it were me, I would have put a space in there after the opening
- # curly, but I guess this is just another one of those inconsistencies
- # between how Xcode prints PBXFileReference and PBXBuildFile objects as
- # compared to other objects. Mimic Xcode's behavior here by using an
- # empty string for sep.
- sep = ''
- end_tabs = 0
- else:
- sep = '\n'
- end_tabs = 2
-
- # Start the object. For example, '\t\tPBXProject = {\n'.
- self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep)
-
- # "isa" isn't in the _properties dictionary, it's an intrinsic property
- # of the class which the object belongs to. Xcode always outputs "isa"
- # as the first element of an object dictionary.
- self._XCKVPrint(file, 3, 'isa', self.__class__.__name__)
-
- # The remaining elements of an object dictionary are sorted alphabetically.
- for property, value in sorted(self._properties.iteritems()):
- self._XCKVPrint(file, 3, property, value)
-
- # End the object.
- self._XCPrint(file, end_tabs, '};\n')
-
- def UpdateProperties(self, properties, do_copy=False):
- """Merge the supplied properties into the _properties dictionary.
-
- The input properties must adhere to the class schema or a KeyError or
- TypeError exception will be raised. If adding an object of an XCObject
- subclass and the schema indicates a strong relationship, the object's
- parent will be set to this object.
-
- If do_copy is True, then lists, dicts, strong-owned XCObjects, and
- strong-owned XCObjects in lists will be copied instead of having their
- references added.
- """
-
- if properties is None:
- return
-
- for property, value in properties.iteritems():
- # Make sure the property is in the schema.
- if not property in self._schema:
- raise KeyError(property + ' not in ' + self.__class__.__name__)
-
- # Make sure the property conforms to the schema.
- (is_list, property_type, is_strong) = self._schema[property][0:3]
- if is_list:
- if value.__class__ != list:
- raise TypeError(
- property + ' of ' + self.__class__.__name__ + \
- ' must be list, not ' + value.__class__.__name__)
- for item in value:
- if not isinstance(item, property_type) and \
- not (item.__class__ == unicode and property_type == str):
- # Accept unicode where str is specified. str is treated as
- # UTF-8-encoded.
- raise TypeError(
- 'item of ' + property + ' of ' + self.__class__.__name__ + \
- ' must be ' + property_type.__name__ + ', not ' + \
- item.__class__.__name__)
- elif not isinstance(value, property_type) and \
- not (value.__class__ == unicode and property_type == str):
- # Accept unicode where str is specified. str is treated as
- # UTF-8-encoded.
- raise TypeError(
- property + ' of ' + self.__class__.__name__ + ' must be ' + \
- property_type.__name__ + ', not ' + value.__class__.__name__)
-
- # Checks passed, perform the assignment.
- if do_copy:
- if isinstance(value, XCObject):
- if is_strong:
- self._properties[property] = value.Copy()
- else:
- self._properties[property] = value
- elif isinstance(value, str) or isinstance(value, unicode) or \
- isinstance(value, int):
- self._properties[property] = value
- elif isinstance(value, list):
- if is_strong:
- # If is_strong is True, each element is an XCObject, so it's safe
- # to call Copy.
- self._properties[property] = []
- for item in value:
- self._properties[property].append(item.Copy())
- else:
- self._properties[property] = value[:]
- elif isinstance(value, dict):
- self._properties[property] = value.copy()
- else:
- raise TypeError("Don't know how to copy a " + \
- value.__class__.__name__ + ' object for ' + \
- property + ' in ' + self.__class__.__name__)
- else:
- self._properties[property] = value
-
- # Set up the child's back-reference to this object. Don't use |value|
- # any more because it may not be right if do_copy is true.
- if is_strong:
- if not is_list:
- self._properties[property].parent = self
- else:
- for item in self._properties[property]:
- item.parent = self
-
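- # Hypothetical usage sketch (PBXGroup and its schema are defined below):
- # group = PBXGroup()
- # group.UpdateProperties({'name': 'Source', 'path': 'src'})
- # group.GetProperty('path') # => 'src'
- # A key absent from the schema raises KeyError; a value of the wrong type
- # raises TypeError, per the checks above.
- 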
- def HasProperty(self, key):
- return key in self._properties
-
- def GetProperty(self, key):
- return self._properties[key]
-
- def SetProperty(self, key, value):
- self.UpdateProperties({key: value})
-
- def DelProperty(self, key):
- if key in self._properties:
- del self._properties[key]
-
- def AppendProperty(self, key, value):
- # TODO(mark): Support ExtendProperty too (and make this call that)?
-
- # Schema validation.
- if not key in self._schema:
- raise KeyError(key + ' not in ' + self.__class__.__name__)
-
- (is_list, property_type, is_strong) = self._schema[key][0:3]
- if not is_list:
- raise TypeError(key + ' of ' + self.__class__.__name__ + ' must be list')
- if not isinstance(value, property_type):
- raise TypeError('item of ' + key + ' of ' + self.__class__.__name__ + \
- ' must be ' + property_type.__name__ + ', not ' + \
- value.__class__.__name__)
-
- # If the property doesn't exist yet, create a new empty list to receive the
- # item.
- if not key in self._properties:
- self._properties[key] = []
-
- # Set up the ownership link.
- if is_strong:
- value.parent = self
-
- # Store the item.
- self._properties[key].append(value)
-
- def VerifyHasRequiredProperties(self):
- """Ensure that all properties identified as required by the schema are
- set.
- """
-
- # TODO(mark): A stronger verification mechanism is needed. Some
- # subclasses need to perform validation beyond what the schema can enforce.
- for property, attributes in self._schema.iteritems():
- (is_list, property_type, is_strong, is_required) = attributes[0:4]
- if is_required and not property in self._properties:
- raise KeyError(self.__class__.__name__ + ' requires ' + property)
-
- def _SetDefaultsFromSchema(self):
- """Assign object default values according to the schema. This will not
- overwrite properties that have already been set."""
-
- defaults = {}
- for property, attributes in self._schema.iteritems():
- (is_list, property_type, is_strong, is_required) = attributes[0:4]
- if is_required and len(attributes) >= 5 and \
- not property in self._properties:
- default = attributes[4]
-
- defaults[property] = default
-
- if len(defaults) > 0:
- # Use do_copy=True so that each new object gets its own copy of strong
- # objects, lists, and dicts.
- self.UpdateProperties(defaults, do_copy=True)
-
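- # A worked reading of one schema entry, in the attribute order consumed by
- # UpdateProperties and _SetDefaultsFromSchema above (is_list, property_type,
- # is_strong, is_required, optional default):
- # 'sourceTree': [0, str, 0, 1, '<group>']
- # declares a non-list, weakly-held, required str property that defaults to
- # '<group>'.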
-
-class XCHierarchicalElement(XCObject):
- """Abstract base for PBXGroup and PBXFileReference. Not represented in a
- project file."""
-
- # TODO(mark): Do name and path belong here? Probably so.
- # If path is set and name is not, name may have a default value. Name will
- # be set to the basename of path, if the basename of path is different from
- # the full value of path. If path is already just a leaf name, name will
- # not be set.
- _schema = XCObject._schema.copy()
- _schema.update({
- 'comments': [0, str, 0, 0],
- 'fileEncoding': [0, str, 0, 0],
- 'includeInIndex': [0, int, 0, 0],
- 'indentWidth': [0, int, 0, 0],
- 'lineEnding': [0, int, 0, 0],
- 'sourceTree': [0, str, 0, 1, '<group>'],
- 'tabWidth': [0, int, 0, 0],
- 'usesTabs': [0, int, 0, 0],
- 'wrapsLines': [0, int, 0, 0],
- })
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCObject.__init__(self, properties, id, parent)
- if 'path' in self._properties and not 'name' in self._properties:
- path = self._properties['path']
- name = posixpath.basename(path)
- if name != '' and path != name:
- self.SetProperty('name', name)
-
- if 'path' in self._properties and \
- (not 'sourceTree' in self._properties or \
- self._properties['sourceTree'] == '<group>'):
- # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
- # the variable out and make the path be relative to that variable by
- # assigning the variable name as the sourceTree.
- (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path'])
- if source_tree != None:
- self._properties['sourceTree'] = source_tree
- if path != None:
- self._properties['path'] = path
- if source_tree != None and path is None and \
- not 'name' in self._properties:
- # The path was of the form "$(SDKROOT)" with no path following it.
- # This object is now relative to that variable, so it has no path
- # attribute of its own. It does, however, keep a name.
- del self._properties['path']
- self._properties['name'] = source_tree
-
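- # For example, per the logic above (a sketch): a path of
- # '$(SDKROOT)/usr/include' is stored as sourceTree 'SDKROOT' with path
- # 'usr/include'; a bare '$(SDKROOT)' keeps only name 'SDKROOT' and no path.
- 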
- def Name(self):
- if 'name' in self._properties:
- return self._properties['name']
- elif 'path' in self._properties:
- return self._properties['path']
- else:
- # This happens in the case of the root PBXGroup.
- return None
-
- def Hashables(self):
- """Custom hashables for XCHierarchicalElements.
-
- XCHierarchicalElements are special. Generally, their hashes shouldn't
- change if the paths don't change. The normal XCObject implementation of
- Hashables adds a hashable for each object, which means that if
- the hierarchical structure changes (possibly due to changes caused when
- TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
- the hashes will change. For example, if a project file initially contains
- a/b/f1 and the groups a and b become collapsed into a single group a/b,
- f1 will have a single parent, a/b. If someone later adds a/f2 to the
- project file, a/b can no longer be collapsed, and f1 winds up with parent
- b and grandparent a. That would
- be sufficient to change f1's hash.
-
- To counteract this problem, hashables for all XCHierarchicalElements except
- for the main group (which has neither a name nor a path) are taken to be
- just the set of path components. Because hashables are inherited from
- parents, this provides assurance that a/b/f1 has the same set of hashables
- whether its parent is b or a/b.
-
- The main group is a special case. As it is permitted to have no name or
- path, it is permitted to use the standard XCObject hash mechanism. This
- is not considered a problem because there can be only one main group.
- """
-
- if self == self.PBXProjectAncestor()._properties['mainGroup']:
- # super
- return XCObject.Hashables(self)
-
- hashables = []
-
- # Put the name in first, ensuring that if TakeOverOnlyChild collapses
- # children into a top-level group like "Source", the name always goes
- # into the list of hashables without interfering with path components.
- if 'name' in self._properties:
- # Make it less likely for people to manipulate hashes by following the
- # pattern of always pushing an object type value onto the list first.
- hashables.append(self.__class__.__name__ + '.name')
- hashables.append(self._properties['name'])
-
- # NOTE: This still has the problem that if an absolute path is encountered,
- # including paths with a sourceTree, they'll still inherit their parents'
- # hashables, even though the paths aren't relative to their parents. This
- # is not expected to be much of a problem in practice.
- path = self.PathFromSourceTreeAndPath()
- if path != None:
- components = path.split(posixpath.sep)
- for component in components:
- hashables.append(self.__class__.__name__ + '.path')
- hashables.append(component)
-
- hashables.extend(self._hashables)
-
- return hashables
-
- def Compare(self, other):
- # Allow comparison of these types. PBXGroup has the highest sort rank;
- # PBXVariantGroup is treated as equal to PBXFileReference.
- valid_class_types = {
- PBXFileReference: 'file',
- PBXGroup: 'group',
- PBXVariantGroup: 'file',
- }
- self_type = valid_class_types[self.__class__]
- other_type = valid_class_types[other.__class__]
-
- if self_type == other_type:
- # If the two objects are of the same sort rank, compare their names.
- return cmp(self.Name(), other.Name())
-
- # Otherwise, sort groups before everything else.
- if self_type == 'group':
- return -1
- return 1
-
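- # Sketch: a_group.Compare(a_file_ref) => -1, because groups sort first;
- # two PBXFileReferences fall through to cmp() of their Name() values.
- 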
- def CompareRootGroup(self, other):
- # This function should be used only to compare direct children of the
- # containing PBXProject's mainGroup. These groups should appear in the
- # listed order.
- # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the
- # generator should have a way of influencing this list rather than having
- # to hardcode for the generator here.
- order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products',
- 'Build']
-
- # If the groups aren't in the listed order, do a name comparison.
- # Otherwise, groups in the listed order should come before those that
- # aren't.
- self_name = self.Name()
- other_name = other.Name()
- self_in = isinstance(self, PBXGroup) and self_name in order
- other_in = isinstance(other, PBXGroup) and other_name in order
- if not self_in and not other_in:
- return self.Compare(other)
- if self_name in order and not other_name in order:
- return -1
- if other_name in order and not self_name in order:
- return 1
-
- # If both groups are in the listed order, go by the defined order.
- self_index = order.index(self_name)
- other_index = order.index(other_name)
- if self_index < other_index:
- return -1
- if self_index > other_index:
- return 1
- return 0
-
- def PathFromSourceTreeAndPath(self):
- # Turn the object's sourceTree and path properties into a single flat
- # string of a form comparable to the path parameter. If there's a
- # sourceTree property other than "<group>", wrap it in $(...) for the
- # comparison.
- components = []
- if self._properties['sourceTree'] != '<group>':
- components.append('$(' + self._properties['sourceTree'] + ')')
- if 'path' in self._properties:
- components.append(self._properties['path'])
-
- if len(components) > 0:
- return posixpath.join(*components)
-
- return None
-
- def FullPath(self):
- # Returns a full path to self relative to the project file, or relative
- # to some other source tree. Start with self, and walk up the chain of
- # parents prepending their paths, if any, until no more parents are
- # available (project-relative path) or until a path relative to some
- # source tree is found.
- xche = self
- path = None
- while isinstance(xche, XCHierarchicalElement) and \
- (path is None or \
- (not path.startswith('/') and not path.startswith('$'))):
- this_path = xche.PathFromSourceTreeAndPath()
- if this_path != None and path != None:
- path = posixpath.join(this_path, path)
- elif this_path != None:
- path = this_path
- xche = xche.parent
-
- return path
-
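- # Sketch: for a file reference f1 inside '<group>'-relative groups a and b,
- # FullPath() returns 'a/b/f1'; if b instead had sourceTree 'SDKROOT', the
- # upward walk would stop there and return '$(SDKROOT)/b/f1'.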
-
-class PBXGroup(XCHierarchicalElement):
- """
- Attributes:
- _children_by_path: Maps pathnames of children of this PBXGroup to the
- actual child XCHierarchicalElement objects.
- _variant_children_by_name_and_path: Maps (name, path) tuples of
- PBXVariantGroup children to the actual child PBXVariantGroup objects.
- """
-
- _schema = XCHierarchicalElement._schema.copy()
- _schema.update({
- 'children': [1, XCHierarchicalElement, 1, 1, []],
- 'name': [0, str, 0, 0],
- 'path': [0, str, 0, 0],
- })
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCHierarchicalElement.__init__(self, properties, id, parent)
- self._children_by_path = {}
- self._variant_children_by_name_and_path = {}
- for child in self._properties.get('children', []):
- self._AddChildToDicts(child)
-
- def Hashables(self):
- # super
- hashables = XCHierarchicalElement.Hashables(self)
-
- # It is not sufficient to rely on name and parent alone to build a unique
- # hashable: a node could have two child PBXGroups sharing a common name.
- # To add entropy, the hashable is enhanced with the names of all of its
- # children.
- for child in self._properties.get('children', []):
- child_name = child.Name()
- if child_name != None:
- hashables.append(child_name)
-
- return hashables
-
- def HashablesForChild(self):
- # To avoid a circular reference, the hashables used to compute a child id
- # do not include the child names.
- return XCHierarchicalElement.Hashables(self)
-
- def _AddChildToDicts(self, child):
- # Sets up this PBXGroup object's dicts to reference the child properly.
- child_path = child.PathFromSourceTreeAndPath()
- if child_path:
- if child_path in self._children_by_path:
- raise ValueError('Found multiple children with path ' + child_path)
- self._children_by_path[child_path] = child
-
- if isinstance(child, PBXVariantGroup):
- child_name = child._properties.get('name', None)
- key = (child_name, child_path)
- if key in self._variant_children_by_name_and_path:
- raise ValueError('Found multiple PBXVariantGroup children with ' + \
- 'name ' + str(child_name) + ' and path ' + \
- str(child_path))
- self._variant_children_by_name_and_path[key] = child
-
- def AppendChild(self, child):
- # Callers should use this instead of calling
- # AppendProperty('children', child) directly because this function
- # maintains the group's dicts.
- self.AppendProperty('children', child)
- self._AddChildToDicts(child)
-
- def GetChildByName(self, name):
- # This is not currently optimized with a dict as GetChildByPath is, because
- # it has few callers. Most callers probably want GetChildByPath. This
- # function is only useful to get children that have names but no paths,
- # which is rare. The children of the main group ("Source", "Products",
- # etc.) are pretty much the only case where this is likely to come up.
- #
- # TODO(mark): Maybe this should raise an error if more than one child is
- # present with the same name.
- if not 'children' in self._properties:
- return None
-
- for child in self._properties['children']:
- if child.Name() == name:
- return child
-
- return None
-
- def GetChildByPath(self, path):
- if not path:
- return None
-
- if path in self._children_by_path:
- return self._children_by_path[path]
-
- return None
-
- def GetChildByRemoteObject(self, remote_object):
- # This method is a little bit esoteric. Given a remote_object, which
- # should be a PBXFileReference in another project file, this method will
- # return this group's PBXReferenceProxy object serving as a local proxy
- # for the remote PBXFileReference.
- #
- # This function might benefit from a dict optimization as GetChildByPath
- # for some workloads, but profiling shows that it's not currently a
- # problem.
- if not 'children' in self._properties:
- return None
-
- for child in self._properties['children']:
- if not isinstance(child, PBXReferenceProxy):
- continue
-
- container_proxy = child._properties['remoteRef']
- if container_proxy._properties['remoteGlobalIDString'] == remote_object:
- return child
-
- return None
-
- def AddOrGetFileByPath(self, path, hierarchical):
- """Returns an existing or new file reference corresponding to path.
-
- If hierarchical is True, this method will create or use the necessary
- hierarchical group structure corresponding to path. Otherwise, it will
- look in and create an item in the current group only.
-
- If an existing matching reference is found, it is returned, otherwise, a
- new one will be created, added to the correct group, and returned.
-
- If path identifies a directory by virtue of carrying a trailing slash,
- this method returns a PBXFileReference of "folder" type. If path
- identifies a variant, by virtue of it identifying a file inside a directory
- with an ".lproj" extension, this method returns a PBXVariantGroup
- containing the variant named by path, and possibly other variants. For
- all other paths, a "normal" PBXFileReference will be returned.
- """
-
- # Adding or getting a directory? Directories end with a trailing slash.
- is_dir = False
- if path.endswith('/'):
- is_dir = True
- path = posixpath.normpath(path)
- if is_dir:
- path = path + '/'
-
- # Adding or getting a variant? Variants are files inside directories
- # with an ".lproj" extension. Xcode uses variants for localization. For
- # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named
- # MainMenu.nib inside path/to, and give it a variant named Language. In
- # this example, grandparent would be set to path/to and parent_root would
- # be set to Language.
- variant_name = None
- parent = posixpath.dirname(path)
- grandparent = posixpath.dirname(parent)
- parent_basename = posixpath.basename(parent)
- (parent_root, parent_ext) = posixpath.splitext(parent_basename)
- if parent_ext == '.lproj':
- variant_name = parent_root
- if grandparent == '':
- grandparent = None
-
- # Putting a directory inside a variant group is not currently supported.
- assert not is_dir or variant_name is None
-
- path_split = path.split(posixpath.sep)
- if len(path_split) == 1 or \
- ((is_dir or variant_name != None) and len(path_split) == 2) or \
- not hierarchical:
- # The PBXFileReference or PBXVariantGroup will be added to or gotten from
- # this PBXGroup, no recursion necessary.
- if variant_name is None:
- # Add or get a PBXFileReference.
- file_ref = self.GetChildByPath(path)
- if file_ref != None:
- assert file_ref.__class__ == PBXFileReference
- else:
- file_ref = PBXFileReference({'path': path})
- self.AppendChild(file_ref)
- else:
- # Add or get a PBXVariantGroup. The variant group name is the same
- # as the basename (MainMenu.nib in the example above). grandparent
- # specifies the path to the variant group itself, and path_split[-2:]
- # is the path of the specific variant relative to its group.
- variant_group_name = posixpath.basename(path)
- variant_group_ref = self.AddOrGetVariantGroupByNameAndPath(
- variant_group_name, grandparent)
- variant_path = posixpath.sep.join(path_split[-2:])
- variant_ref = variant_group_ref.GetChildByPath(variant_path)
- if variant_ref != None:
- assert variant_ref.__class__ == PBXFileReference
- else:
- variant_ref = PBXFileReference({'name': variant_name,
- 'path': variant_path})
- variant_group_ref.AppendChild(variant_ref)
- # The caller is interested in the variant group, not the specific
- # variant file.
- file_ref = variant_group_ref
- return file_ref
- else:
- # Hierarchical recursion. Add or get a PBXGroup corresponding to the
- # outermost path component, and then recurse into it, chopping off that
- # path component.
- next_dir = path_split[0]
- group_ref = self.GetChildByPath(next_dir)
- if group_ref != None:
- assert group_ref.__class__ == PBXGroup
- else:
- group_ref = PBXGroup({'path': next_dir})
- self.AppendChild(group_ref)
- return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]),
- hierarchical)
-
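- # Hypothetical usage sketch:
- # group.AddOrGetFileByPath('src/widget.cc', hierarchical=True)
- # => PBXFileReference for widget.cc inside a 'src' PBXGroup
- # group.AddOrGetFileByPath('res/en.lproj/Main.nib', hierarchical=True)
- # => PBXVariantGroup 'Main.nib' under 'res', holding an 'en' variant
- # group.AddOrGetFileByPath('docs/', hierarchical=False)
- # => folder-type PBXFileReference added directly to this group
- 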
- def AddOrGetVariantGroupByNameAndPath(self, name, path):
- """Returns an existing or new PBXVariantGroup for name and path.
-
- If a PBXVariantGroup identified by the name and path arguments is already
- present as a child of this object, it is returned. Otherwise, a new
- PBXVariantGroup with the correct properties is created, added as a child,
- and returned.
-
- This method will generally be called by AddOrGetFileByPath, which knows
- when to create a variant group based on the structure of the pathnames
- passed to it.
- """
-
- key = (name, path)
- if key in self._variant_children_by_name_and_path:
- variant_group_ref = self._variant_children_by_name_and_path[key]
- assert variant_group_ref.__class__ == PBXVariantGroup
- return variant_group_ref
-
- variant_group_properties = {'name': name}
- if path != None:
- variant_group_properties['path'] = path
- variant_group_ref = PBXVariantGroup(variant_group_properties)
- self.AppendChild(variant_group_ref)
-
- return variant_group_ref
-
- def TakeOverOnlyChild(self, recurse=False):
- """If this PBXGroup has only one child and it's also a PBXGroup, take
- it over by making all of its children this object's children.
-
- This function will continue to take over only children when those children
- are groups. If there are three PBXGroups representing a, b, and c, with
- c inside b and b inside a, and a and b have no other children, this will
- result in a taking over both b and c, forming a PBXGroup for a/b/c.
-
- If recurse is True, this function will recurse into children and ask them
- to collapse themselves by taking over only children as well. Assuming
- an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
- (d1, d2, and f are files, the rest are groups), recursion will result in
- a group for a/b/c containing a group for d3/e.
- """
-
- # At this stage, check that child class types are PBXGroup exactly,
- # instead of using isinstance. The only subclass of PBXGroup,
- # PBXVariantGroup, should not participate in reparenting in the same way:
- # reparenting by merging different object types would be wrong.
- while len(self._properties['children']) == 1 and \
- self._properties['children'][0].__class__ == PBXGroup:
- # Loop to take over the innermost only-child group possible.
-
- child = self._properties['children'][0]
-
- # Assume the child's properties, including its children. Save a copy
- # of this object's old properties, because they'll still be needed.
- # This object retains its existing id and parent attributes.
- old_properties = self._properties
- self._properties = child._properties
- self._children_by_path = child._children_by_path
-
- if not 'sourceTree' in self._properties or \
- self._properties['sourceTree'] == '<group>':
- # The child was relative to its parent. Fix up the path. Note that
- # children with a sourceTree other than "<group>" are not relative to
- # their parents, so no path fix-up is needed in that case.
- if 'path' in old_properties:
- if 'path' in self._properties:
- # Both the original parent and child have paths set.
- self._properties['path'] = posixpath.join(old_properties['path'],
- self._properties['path'])
- else:
- # Only the original parent has a path, use it.
- self._properties['path'] = old_properties['path']
- if 'sourceTree' in old_properties:
- # The original parent had a sourceTree set, use it.
- self._properties['sourceTree'] = old_properties['sourceTree']
-
- # If the original parent had a name set, keep using it. If the original
- # parent didn't have a name but the child did, let the child's name
- # live on. If the name attribute seems unnecessary now, get rid of it.
- if 'name' in old_properties and old_properties['name'] != None and \
- old_properties['name'] != self.Name():
- self._properties['name'] = old_properties['name']
- if 'name' in self._properties and 'path' in self._properties and \
- self._properties['name'] == self._properties['path']:
- del self._properties['name']
-
- # Notify all children of their new parent.
- for child in self._properties['children']:
- child.parent = self
-
- # If asked to recurse, recurse.
- if recurse:
- for child in self._properties['children']:
- if child.__class__ == PBXGroup:
- child.TakeOverOnlyChild(recurse)
-
- def SortGroup(self):
- self._properties['children'] = \
- sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y))
-
- # Recurse.
- for child in self._properties['children']:
- if isinstance(child, PBXGroup):
- child.SortGroup()
-
-
-class XCFileLikeElement(XCHierarchicalElement):
- # Abstract base for objects that can be used as the fileRef property of
- # PBXBuildFile.
-
- def PathHashables(self):
- # A PBXBuildFile that refers to this object will call this method to
- # obtain additional hashables specific to this XCFileLikeElement. Don't
- # just use this object's hashables; they're not specific and unique enough
- # on their own (without access to the parent hashables). Instead, provide
- # hashables that identify this object by path by getting its hashables as
- # well as the hashables of ancestor XCHierarchicalElement objects.
-
- hashables = []
- xche = self
- while xche != None and isinstance(xche, XCHierarchicalElement):
- xche_hashables = xche.Hashables()
- for index in xrange(0, len(xche_hashables)):
- hashables.insert(index, xche_hashables[index])
- xche = xche.parent
- return hashables
-
-
-class XCContainerPortal(XCObject):
- # Abstract base for objects that can be used as the containerPortal property
- # of PBXContainerItemProxy.
- pass
-
-
-class XCRemoteObject(XCObject):
- # Abstract base for objects that can be used as the remoteGlobalIDString
- # property of PBXContainerItemProxy.
- pass
-
-
-class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject):
- _schema = XCFileLikeElement._schema.copy()
- _schema.update({
- 'explicitFileType': [0, str, 0, 0],
- 'lastKnownFileType': [0, str, 0, 0],
- 'name': [0, str, 0, 0],
- 'path': [0, str, 0, 1],
- })
-
- # Weird output rules for PBXFileReference.
- _should_print_single_line = True
- # super
- _encode_transforms = XCFileLikeElement._alternate_encode_transforms
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCFileLikeElement.__init__(self, properties, id, parent)
- if 'path' in self._properties and self._properties['path'].endswith('/'):
- self._properties['path'] = self._properties['path'][:-1]
- is_dir = True
- else:
- is_dir = False
-
- if 'path' in self._properties and \
- not 'lastKnownFileType' in self._properties and \
- not 'explicitFileType' in self._properties:
- # TODO(mark): This is the replacement for a replacement for a quick hack.
- # It is no longer incredibly sucky, but this list needs to be extended.
- extension_map = {
- 'a': 'archive.ar',
- 'app': 'wrapper.application',
- 'bdic': 'file',
- 'bundle': 'wrapper.cfbundle',
- 'c': 'sourcecode.c.c',
- 'cc': 'sourcecode.cpp.cpp',
- 'cpp': 'sourcecode.cpp.cpp',
- 'css': 'text.css',
- 'cxx': 'sourcecode.cpp.cpp',
- 'dart': 'sourcecode',
- 'dylib': 'compiled.mach-o.dylib',
- 'framework': 'wrapper.framework',
- 'gyp': 'sourcecode',
- 'gypi': 'sourcecode',
- 'h': 'sourcecode.c.h',
- 'hxx': 'sourcecode.cpp.h',
- 'icns': 'image.icns',
- 'java': 'sourcecode.java',
- 'js': 'sourcecode.javascript',
- 'kext': 'wrapper.kext',
- 'm': 'sourcecode.c.objc',
- 'mm': 'sourcecode.cpp.objcpp',
- 'nib': 'wrapper.nib',
- 'o': 'compiled.mach-o.objfile',
- 'pdf': 'image.pdf',
- 'pl': 'text.script.perl',
- 'plist': 'text.plist.xml',
- 'pm': 'text.script.perl',
- 'png': 'image.png',
- 'py': 'text.script.python',
- 'r': 'sourcecode.rez',
- 'rez': 'sourcecode.rez',
- 's': 'sourcecode.asm',
- 'storyboard': 'file.storyboard',
- 'strings': 'text.plist.strings',
- 'swift': 'sourcecode.swift',
- 'ttf': 'file',
- 'xcassets': 'folder.assetcatalog',
- 'xcconfig': 'text.xcconfig',
- 'xcdatamodel': 'wrapper.xcdatamodel',
- 'xcdatamodeld': 'wrapper.xcdatamodeld',
- 'xib': 'file.xib',
- 'y': 'sourcecode.yacc',
- }
-
- prop_map = {
- 'dart': 'explicitFileType',
- 'gyp': 'explicitFileType',
- 'gypi': 'explicitFileType',
- }
-
- if is_dir:
- file_type = 'folder'
- prop_name = 'lastKnownFileType'
- else:
- basename = posixpath.basename(self._properties['path'])
- (root, ext) = posixpath.splitext(basename)
- # Check the map using a lowercase extension.
- # TODO(mark): Maybe it should try with the original case first and fall
- # back to lowercase, in case there are any instances where case
- # matters. There currently aren't.
- if ext != '':
- ext = ext[1:].lower()
-
- # TODO(mark): "text" is the default value, but "file" is appropriate
- # for unrecognized files not containing text. Xcode seems to choose
- # based on content.
- file_type = extension_map.get(ext, 'text')
- prop_name = prop_map.get(ext, 'lastKnownFileType')
-
- self._properties[prop_name] = file_type
-
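- # Per the map above (a sketch): PBXFileReference({'path': 'main.mm'}) picks
- # up lastKnownFileType 'sourcecode.cpp.objcpp'; a '.gyp' file gets
- # explicitFileType 'sourcecode'; a path ending in '/' is typed as a folder.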
-
-class PBXVariantGroup(PBXGroup, XCFileLikeElement):
- """PBXVariantGroup is used by Xcode to represent localizations."""
- # No additions to the schema relative to PBXGroup.
- pass
-
-
-# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below
-# because it uses PBXContainerItemProxy, defined below.
-
-
-class XCBuildConfiguration(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'baseConfigurationReference': [0, PBXFileReference, 0, 0],
- 'buildSettings': [0, dict, 0, 1, {}],
- 'name': [0, str, 0, 1],
- })
-
- def HasBuildSetting(self, key):
- return key in self._properties['buildSettings']
-
- def GetBuildSetting(self, key):
- return self._properties['buildSettings'][key]
-
- def SetBuildSetting(self, key, value):
- # TODO(mark): If a list, copy?
- self._properties['buildSettings'][key] = value
-
- def AppendBuildSetting(self, key, value):
- if not key in self._properties['buildSettings']:
- self._properties['buildSettings'][key] = []
- self._properties['buildSettings'][key].append(value)
-
- def DelBuildSetting(self, key):
- if key in self._properties['buildSettings']:
- del self._properties['buildSettings'][key]
-
- def SetBaseConfiguration(self, value):
- self._properties['baseConfigurationReference'] = value
-
-class XCConfigurationList(XCObject):
- # _configs is the default list of configurations.
- _configs = [ XCBuildConfiguration({'name': 'Debug'}),
- XCBuildConfiguration({'name': 'Release'}) ]
-
- _schema = XCObject._schema.copy()
- _schema.update({
- 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs],
- 'defaultConfigurationIsVisible': [0, int, 0, 1, 1],
- 'defaultConfigurationName': [0, str, 0, 1, 'Release'],
- })
-
- def Name(self):
- return 'Build configuration list for ' + \
- self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"'
-
- def ConfigurationNamed(self, name):
- """Convenience accessor to obtain an XCBuildConfiguration by name."""
- for configuration in self._properties['buildConfigurations']:
- if configuration._properties['name'] == name:
- return configuration
-
- raise KeyError(name)
-
- def DefaultConfiguration(self):
- """Convenience accessor to obtain the default XCBuildConfiguration."""
- return self.ConfigurationNamed(self._properties['defaultConfigurationName'])
-
- def HasBuildSetting(self, key):
- """Determines the state of a build setting in all XCBuildConfiguration
- child objects.
-
- If all child objects have key in their build settings, and the value is the
- same in all child objects, returns 1.
-
- If no child objects have the key in their build settings, returns 0.
-
- If some, but not all, child objects have the key in their build settings,
- or if any children have different values for the key, returns -1.
- """
-
- has = None
- value = None
- for configuration in self._properties['buildConfigurations']:
- configuration_has = configuration.HasBuildSetting(key)
- if has is None:
- has = configuration_has
- elif has != configuration_has:
- return -1
-
- if configuration_has:
- configuration_value = configuration.GetBuildSetting(key)
- if value is None:
- value = configuration_value
- elif value != configuration_value:
- return -1
-
- if not has:
- return 0
-
- return 1
-
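- # Sketch, for the default Debug/Release pair: if both configurations set
- # 'ARCHS' to the same value, the result is 1; if neither sets it, 0; if
- # only one sets it, or if their values disagree, -1.
- 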
- def GetBuildSetting(self, key):
- """Gets the build setting for key.
-
- All child XCBuildConfiguration objects must have the same value set for the
- setting, or a ValueError will be raised.
- """
-
- # TODO(mark): This is wrong for build settings that are lists. The list
- # contents should be compared (and a list copy returned?)
-
- value = None
- for configuration in self._properties['buildConfigurations']:
- configuration_value = configuration.GetBuildSetting(key)
- if value is None:
- value = configuration_value
- else:
- if value != configuration_value:
- raise ValueError('Variant values for ' + key)
-
- return value
-
- def SetBuildSetting(self, key, value):
- """Sets the build setting for key to value in all child
- XCBuildConfiguration objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.SetBuildSetting(key, value)
-
- def AppendBuildSetting(self, key, value):
- """Appends value to the build setting for key, which is treated as a list,
- in all child XCBuildConfiguration objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.AppendBuildSetting(key, value)
-
- def DelBuildSetting(self, key):
- """Deletes the build setting key from all child XCBuildConfiguration
- objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.DelBuildSetting(key)
-
- def SetBaseConfiguration(self, value):
- """Sets the build configuration in all child XCBuildConfiguration objects.
- """
-
- for configuration in self._properties['buildConfigurations']:
- configuration.SetBaseConfiguration(value)
-
-
-class PBXBuildFile(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'fileRef': [0, XCFileLikeElement, 0, 1],
- 'settings': [0, str, 0, 0], # hack, it's a dict
- })
-
- # Weird output rules for PBXBuildFile.
- _should_print_single_line = True
- _encode_transforms = XCObject._alternate_encode_transforms
-
- def Name(self):
- # Example: "main.cc in Sources"
- return self._properties['fileRef'].Name() + ' in ' + self.parent.Name()
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # It is not sufficient to just rely on Name() to get the
- # XCFileLikeElement's name, because that is not a complete pathname.
- # PathHashables returns hashables unique enough that no two
- # PBXBuildFiles should wind up with the same set of hashables, unless
- # someone adds the same file multiple times to the same target. That
- # would be considered invalid anyway.
- hashables.extend(self._properties['fileRef'].PathHashables())
-
- return hashables
-
-
-class XCBuildPhase(XCObject):
- """Abstract base for build phase classes. Not represented in a project
- file.
-
- Attributes:
- _files_by_path: A dict mapping each path of a child in the files list by
- path (keys) to the corresponding PBXBuildFile children (values).
- _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
- to the corresponding PBXBuildFile children (values).
- """
-
- # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
- # actually have a "files" list. XCBuildPhase should not have "files" but
- # another abstract subclass of it should provide this, and concrete build
- # phase types that do have "files" lists should be derived from that new
- # abstract subclass. XCBuildPhase should only provide buildActionMask and
- # runOnlyForDeploymentPostprocessing, and not files or the various
- # file-related methods and attributes.
-
- _schema = XCObject._schema.copy()
- _schema.update({
- 'buildActionMask': [0, int, 0, 1, 0x7fffffff],
- 'files': [1, PBXBuildFile, 1, 1, []],
- 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0],
- })
-
- def __init__(self, properties=None, id=None, parent=None):
- # super
- XCObject.__init__(self, properties, id, parent)
-
- self._files_by_path = {}
- self._files_by_xcfilelikeelement = {}
- for pbxbuildfile in self._properties.get('files', []):
- self._AddBuildFileToDicts(pbxbuildfile)
-
- def FileGroup(self, path):
- # Subclasses must override this by returning a two-element tuple. The
- # first item in the tuple should be the PBXGroup to which "path" should be
- # added, either as a child or deeper descendant. The second item should
- # be a boolean indicating whether files should be added into hierarchical
- # groups or one single flat group.
- raise NotImplementedError(
- self.__class__.__name__ + ' must implement FileGroup')
-
- def _AddPathToDict(self, pbxbuildfile, path):
- """Adds path to the dict tracking paths belonging to this build phase.
-
- If the path is already a member of this build phase, raises an exception.
- """
-
- if path in self._files_by_path:
- raise ValueError('Found multiple build files with path ' + path)
- self._files_by_path[path] = pbxbuildfile
-
- def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
- """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
-
- If path is specified, then it is the path that is being added to the
- phase, and pbxbuildfile must contain either a PBXFileReference directly
- referencing that path, or it must contain a PBXVariantGroup that itself
- contains a PBXFileReference referencing the path.
-
- If path is not specified, either the PBXFileReference's path or the paths
- of all children of the PBXVariantGroup are taken as being added to the
- phase.
-
- If the path is already present in the phase, raises an exception.
-
- If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
- is already present in the phase, referenced by a different PBXBuildFile
- object, raises an exception. This does not raise an exception when
- a PBXFileReference or PBXVariantGroup reappears and is referenced by the
- same PBXBuildFile that has already introduced it, because in the case
- of PBXVariantGroup objects, they may correspond to multiple paths that are
- not all added simultaneously. When this situation occurs, the path needs
- to be added to _files_by_path, but nothing needs to change in
- _files_by_xcfilelikeelement, and the caller should have avoided adding
- the PBXBuildFile if it is already present in the list of children.
- """
-
- xcfilelikeelement = pbxbuildfile._properties['fileRef']
-
- paths = []
- if path != None:
- # It's best when the caller provides the path.
- if isinstance(xcfilelikeelement, PBXVariantGroup):
- paths.append(path)
- else:
- # If the caller didn't provide a path, there can be either multiple
- # paths (PBXVariantGroup) or one.
- if isinstance(xcfilelikeelement, PBXVariantGroup):
- for variant in xcfilelikeelement._properties['children']:
- paths.append(variant.FullPath())
- else:
- paths.append(xcfilelikeelement.FullPath())
-
- # Add the paths first, because if something's going to raise, the
- # messages provided by _AddPathToDict are more useful owing to its
- # having access to a real pathname and not just an object's Name().
- for a_path in paths:
- self._AddPathToDict(pbxbuildfile, a_path)
-
- # If another PBXBuildFile references this XCFileLikeElement, there's a
- # problem.
- if xcfilelikeelement in self._files_by_xcfilelikeelement and \
- self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile:
- raise ValueError('Found multiple build files for ' + \
- xcfilelikeelement.Name())
- self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile
-
- def AppendBuildFile(self, pbxbuildfile, path=None):
- # Callers should use this instead of calling
- # AppendProperty('files', pbxbuildfile) directly because this function
- # maintains the object's dicts. Better yet, callers can just call AddFile
- # with a pathname and not worry about building their own PBXBuildFile
- # objects.
- self.AppendProperty('files', pbxbuildfile)
- self._AddBuildFileToDicts(pbxbuildfile, path)
-
- def AddFile(self, path, settings=None):
- (file_group, hierarchical) = self.FileGroup(path)
- file_ref = file_group.AddOrGetFileByPath(path, hierarchical)
-
- if file_ref in self._files_by_xcfilelikeelement and \
- isinstance(file_ref, PBXVariantGroup):
- # There's already a PBXBuildFile in this phase corresponding to the
- # PBXVariantGroup. path just provides a new variant that belongs to
- # the group. Add the path to the dict.
- pbxbuildfile = self._files_by_xcfilelikeelement[file_ref]
- self._AddBuildFileToDicts(pbxbuildfile, path)
- else:
- # Add a new PBXBuildFile to get file_ref into the phase.
- if settings is None:
- pbxbuildfile = PBXBuildFile({'fileRef': file_ref})
- else:
- pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings})
- self.AppendBuildFile(pbxbuildfile, path)
-
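- # Hypothetical usage sketch:
- # sources_phase.AddFile('src/main.cc')
- # asks FileGroup for the right PBXGroup, gets or creates the
- # PBXFileReference there, and appends a wrapping PBXBuildFile to 'files'.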
-
-class PBXHeadersBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Headers'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXResourcesBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Resources'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXSourcesBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Sources'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
-
-class PBXFrameworksBuildPhase(XCBuildPhase):
- # No additions to the schema relative to XCBuildPhase.
-
- def Name(self):
- return 'Frameworks'
-
- def FileGroup(self, path):
- (root, ext) = posixpath.splitext(path)
- if ext != '':
- ext = ext[1:].lower()
- if ext == 'o':
- # .o files are added to Xcode Frameworks phases, but conceptually aren't
- # frameworks, they're more like sources or intermediates. Redirect them
- # to show up in one of those other groups.
- return self.PBXProjectAncestor().RootGroupForPath(path)
- else:
- return (self.PBXProjectAncestor().FrameworksGroup(), False)
-
-
-class PBXShellScriptBuildPhase(XCBuildPhase):
- _schema = XCBuildPhase._schema.copy()
- _schema.update({
- 'inputPaths': [1, str, 0, 1, []],
- 'name': [0, str, 0, 0],
- 'outputPaths': [1, str, 0, 1, []],
- 'shellPath': [0, str, 0, 1, '/bin/sh'],
- 'shellScript': [0, str, 0, 1],
- 'showEnvVarsInLog': [0, int, 0, 0],
- })
-
- def Name(self):
- if 'name' in self._properties:
- return self._properties['name']
-
- return 'ShellScript'
-
-
-class PBXCopyFilesBuildPhase(XCBuildPhase):
- _schema = XCBuildPhase._schema.copy()
- _schema.update({
- 'dstPath': [0, str, 0, 1],
- 'dstSubfolderSpec': [0, int, 0, 1],
- 'name': [0, str, 0, 0],
- })
-
- # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is
- # "DIR", match group 3 is "path" or None.
- path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$')
-
- # path_tree_to_subfolder maps names of Xcode variables to the associated
- # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object.
- path_tree_to_subfolder = {
- 'BUILT_FRAMEWORKS_DIR': 10, # Frameworks Directory
- 'BUILT_PRODUCTS_DIR': 16, # Products Directory
- # Other types that can be chosen via the Xcode UI.
- # TODO(mark): Map Xcode variable names to these.
- # : 1, # Wrapper
- # : 6, # Executables: 6
- # : 7, # Resources
- # : 15, # Java Resources
- # : 11, # Shared Frameworks
- # : 12, # Shared Support
- # : 13, # PlugIns
- }
-
- def Name(self):
- if 'name' in self._properties:
- return self._properties['name']
-
- return 'CopyFiles'
-
- def FileGroup(self, path):
- return self.PBXProjectAncestor().RootGroupForPath(path)
-
- def SetDestination(self, path):
- """Set the dstSubfolderSpec and dstPath properties from path.
-
- path may be specified in the same notation used for XCHierarchicalElements,
- specifically, "$(DIR)/path".
- """
-
- path_tree_match = self.path_tree_re.search(path)
- if path_tree_match:
- # Everything else needs to be relative to an Xcode variable.
- path_tree = path_tree_match.group(1)
- relative_path = path_tree_match.group(3)
-
- if path_tree in self.path_tree_to_subfolder:
- subfolder = self.path_tree_to_subfolder[path_tree]
- if relative_path is None:
- relative_path = ''
- else:
- # The path starts with an unrecognized Xcode variable
- # name like $(SRCROOT). Xcode will still handle this
- # as an "absolute path" that starts with the variable.
- subfolder = 0
- relative_path = path
- elif path.startswith('/'):
- # Special case. Absolute paths are in dstSubfolderSpec 0.
- subfolder = 0
- relative_path = path[1:]
- else:
- raise ValueError('Can\'t use path %s in a %s' % \
- (path, self.__class__.__name__))
-
- self._properties['dstPath'] = relative_path
- self._properties['dstSubfolderSpec'] = subfolder
-
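- # Hypothetical usage sketch:
- # phase.SetDestination('$(BUILT_PRODUCTS_DIR)/plugins')
- # => dstSubfolderSpec 16, dstPath 'plugins'
- # phase.SetDestination('/usr/local/lib')
- # => dstSubfolderSpec 0, dstPath 'usr/local/lib'
- # phase.SetDestination('$(SRCROOT)/out')
- # => dstSubfolderSpec 0, dstPath '$(SRCROOT)/out' (variable not in
- # path_tree_to_subfolder above)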
-
-class PBXBuildRule(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'compilerSpec': [0, str, 0, 1],
- 'filePatterns': [0, str, 0, 0],
- 'fileType': [0, str, 0, 1],
- 'isEditable': [0, int, 0, 1, 1],
- 'outputFiles': [1, str, 0, 1, []],
- 'script': [0, str, 0, 0],
- })
-
- def Name(self):
- # Not very inspired, but it's what Xcode uses.
- return self.__class__.__name__
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # Use the hashables of the weak objects that this object refers to.
- hashables.append(self._properties['fileType'])
- if 'filePatterns' in self._properties:
- hashables.append(self._properties['filePatterns'])
- return hashables
-
-
-class PBXContainerItemProxy(XCObject):
- # When referencing an item in this project file, containerPortal is the
- # PBXProject root object of this project file. When referencing an item in
- # another project file, containerPortal is a PBXFileReference identifying
- # the other project file.
- #
- # When serving as a proxy to an XCTarget (in this project file or another),
- # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
- # project file), proxyType is 2. Type 2 is used for references to the
- # products of the other project file's targets.
- #
- # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
- # a comment, indicating that it's tracked internally simply as a string, but
- # sometimes it's printed with a comment (usually when the object is initially
- # created), indicating that it's tracked as a project file object at least
- # sometimes. This module always tracks it as an object, but contains a hack
- # to prevent it from printing the comment in the project file output. See
- # _XCKVPrint.
- _schema = XCObject._schema.copy()
- _schema.update({
- 'containerPortal': [0, XCContainerPortal, 0, 1],
- 'proxyType': [0, int, 0, 1],
- 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1],
- 'remoteInfo': [0, str, 0, 1],
- })
-
- def __repr__(self):
- props = self._properties
- name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo'])
- return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
- def Name(self):
- # Admittedly not the best name, but it's what Xcode uses.
- return self.__class__.__name__
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # Use the hashables of the weak objects that this object refers to.
- hashables.extend(self._properties['containerPortal'].Hashables())
- hashables.extend(self._properties['remoteGlobalIDString'].Hashables())
- return hashables
-
-
-class PBXTargetDependency(XCObject):
- # The "target" property accepts an XCTarget object, and obviously not
- # NoneType. But XCTarget is defined below, so it can't be put into the
- # schema yet. The definition of PBXTargetDependency can't be moved below
- # XCTarget because XCTarget's own schema references PBXTargetDependency.
- # Python doesn't deal well with this circular relationship, and doesn't have
- # a real way to do forward declarations. To work around, the type of
- # the "target" property is reset below, after XCTarget is defined.
- #
- # At least one of "name" and "target" is required.
- _schema = XCObject._schema.copy()
- _schema.update({
- 'name': [0, str, 0, 0],
- 'target': [0, None.__class__, 0, 0],
- 'targetProxy': [0, PBXContainerItemProxy, 1, 1],
- })
-
- def __repr__(self):
- name = self._properties.get('name') or self._properties['target'].Name()
- return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self))
-
- def Name(self):
- # Admittedly not the best name, but it's what Xcode uses.
- return self.__class__.__name__
-
- def Hashables(self):
- # super
- hashables = XCObject.Hashables(self)
-
- # Use the hashables of the weak objects that this object refers to.
- hashables.extend(self._properties['targetProxy'].Hashables())
- return hashables
-
-
-class PBXReferenceProxy(XCFileLikeElement):
- _schema = XCFileLikeElement._schema.copy()
- _schema.update({
- 'fileType': [0, str, 0, 1],
- 'path': [0, str, 0, 1],
- 'remoteRef': [0, PBXContainerItemProxy, 1, 1],
- })
-
-
-class XCTarget(XCRemoteObject):
- # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
- # to allow PBXProject to be used in the remoteGlobalIDString property of
- # PBXContainerItemProxy.
- #
- # Setting a "name" property at instantiation may also affect "productName",
- # which may in turn affect the "PRODUCT_NAME" build setting in children of
- # "buildConfigurationList". See __init__ below.
- _schema = XCRemoteObject._schema.copy()
- _schema.update({
- 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
- XCConfigurationList()],
- 'buildPhases': [1, XCBuildPhase, 1, 1, []],
- 'dependencies': [1, PBXTargetDependency, 1, 1, []],
- 'name': [0, str, 0, 1],
- 'productName': [0, str, 0, 1],
- })
-
- def __init__(self, properties=None, id=None, parent=None,
- force_outdir=None, force_prefix=None, force_extension=None):
- # super
- XCRemoteObject.__init__(self, properties, id, parent)
-
- # Set up additional defaults not expressed in the schema. If a "name"
- # property was supplied, set "productName" if it is not present. Also set
- # the "PRODUCT_NAME" build setting in each configuration, but only if
- # the setting is not present in any build configuration.
- if 'name' in self._properties:
- if not 'productName' in self._properties:
- self.SetProperty('productName', self._properties['name'])
-
- if 'productName' in self._properties:
- if 'buildConfigurationList' in self._properties:
- configs = self._properties['buildConfigurationList']
- if configs.HasBuildSetting('PRODUCT_NAME') == 0:
- configs.SetBuildSetting('PRODUCT_NAME',
- self._properties['productName'])
-
- def AddDependency(self, other):
- pbxproject = self.PBXProjectAncestor()
- other_pbxproject = other.PBXProjectAncestor()
- if pbxproject == other_pbxproject:
- # Add a dependency to another target in the same project file.
- container = PBXContainerItemProxy({'containerPortal': pbxproject,
- 'proxyType': 1,
- 'remoteGlobalIDString': other,
- 'remoteInfo': other.Name()})
- dependency = PBXTargetDependency({'target': other,
- 'targetProxy': container})
- self.AppendProperty('dependencies', dependency)
- else:
- # Add a dependency to a target in a different project file.
- other_project_ref = \
- pbxproject.AddOrGetProjectReference(other_pbxproject)[1]
- container = PBXContainerItemProxy({
- 'containerPortal': other_project_ref,
- 'proxyType': 1,
- 'remoteGlobalIDString': other,
- 'remoteInfo': other.Name(),
- })
- dependency = PBXTargetDependency({'name': other.Name(),
- 'targetProxy': container})
- self.AppendProperty('dependencies', dependency)
-
- # Proxy all of these through to the build configuration list.
-
- def ConfigurationNamed(self, name):
- return self._properties['buildConfigurationList'].ConfigurationNamed(name)
-
- def DefaultConfiguration(self):
- return self._properties['buildConfigurationList'].DefaultConfiguration()
-
- def HasBuildSetting(self, key):
- return self._properties['buildConfigurationList'].HasBuildSetting(key)
-
- def GetBuildSetting(self, key):
- return self._properties['buildConfigurationList'].GetBuildSetting(key)
-
- def SetBuildSetting(self, key, value):
- return self._properties['buildConfigurationList'].SetBuildSetting(key, \
- value)
-
- def AppendBuildSetting(self, key, value):
- return self._properties['buildConfigurationList'].AppendBuildSetting(key, \
- value)
-
- def DelBuildSetting(self, key):
- return self._properties['buildConfigurationList'].DelBuildSetting(key)
-
-
-# Redefine the type of the "target" property. See PBXTargetDependency._schema
-# above.
-PBXTargetDependency._schema['target'][1] = XCTarget
-
-
-class PBXNativeTarget(XCTarget):
- # buildPhases is overridden in the schema to be able to set defaults.
- #
- # NOTE: Contrary to most objects, it is advisable to set parent when
- # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject
- # object. A parent reference is required for a PBXNativeTarget during
- # construction to be able to set up the target defaults for productReference,
-  # because a PBXFileReference object must be created for the target's product
-  # and added to the PBXProject's mainGroup hierarchy.
- _schema = XCTarget._schema.copy()
- _schema.update({
- 'buildPhases': [1, XCBuildPhase, 1, 1,
- [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]],
- 'buildRules': [1, PBXBuildRule, 1, 1, []],
- 'productReference': [0, PBXFileReference, 0, 1],
- 'productType': [0, str, 0, 1],
- })
-
- # Mapping from Xcode product-types to settings. The settings are:
- # filetype : used for explicitFileType in the project file
- # prefix : the prefix for the file name
- # suffix : the suffix for the file name
- _product_filetypes = {
- 'com.apple.product-type.application': ['wrapper.application',
- '', '.app'],
- 'com.apple.product-type.application.watchapp': ['wrapper.application',
- '', '.app'],
- 'com.apple.product-type.watchkit-extension': ['wrapper.app-extension',
- '', '.appex'],
- 'com.apple.product-type.app-extension': ['wrapper.app-extension',
- '', '.appex'],
- 'com.apple.product-type.bundle': ['wrapper.cfbundle',
- '', '.bundle'],
- 'com.apple.product-type.framework': ['wrapper.framework',
- '', '.framework'],
- 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
- 'lib', '.dylib'],
- 'com.apple.product-type.library.static': ['archive.ar',
- 'lib', '.a'],
- 'com.apple.product-type.tool': ['compiled.mach-o.executable',
- '', ''],
- 'com.apple.product-type.bundle.unit-test': ['wrapper.cfbundle',
- '', '.xctest'],
- 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib',
- '', '.so'],
- 'com.apple.product-type.kernel-extension': ['wrapper.kext',
- '', '.kext'],
- }
-
- def __init__(self, properties=None, id=None, parent=None,
- force_outdir=None, force_prefix=None, force_extension=None):
- # super
- XCTarget.__init__(self, properties, id, parent)
-
- if 'productName' in self._properties and \
- 'productType' in self._properties and \
- not 'productReference' in self._properties and \
- self._properties['productType'] in self._product_filetypes:
- products_group = None
- pbxproject = self.PBXProjectAncestor()
- if pbxproject != None:
- products_group = pbxproject.ProductsGroup()
-
- if products_group != None:
- (filetype, prefix, suffix) = \
- self._product_filetypes[self._properties['productType']]
- # Xcode does not have a distinct type for loadable modules that are
- # pure BSD targets (not in a bundle wrapper). GYP allows such modules
- # to be specified by setting a target type to loadable_module without
- # having mac_bundle set. These are mapped to the pseudo-product type
- # com.googlecode.gyp.xcode.bundle.
- #
- # By picking up this special type and converting it to a dynamic
- # library (com.apple.product-type.library.dynamic) with fix-ups,
- # single-file loadable modules can be produced.
- #
- # MACH_O_TYPE is changed to mh_bundle to produce the proper file type
- # (as opposed to mh_dylib). In order for linking to succeed,
- # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
- # cleared. They are meaningless for type mh_bundle.
- #
- # Finally, the .so extension is forcibly applied over the default
- # (.dylib), unless another forced extension is already selected.
- # .dylib is plainly wrong, and .bundle is used by loadable_modules in
- # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
- # choice because it's used as the extension on many other systems that
- # don't distinguish between linkable shared libraries and non-linkable
- # loadable modules, but there's precedent: Python loadable modules on
- # Mac OS X use an .so extension.
- if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle':
- self._properties['productType'] = \
- 'com.apple.product-type.library.dynamic'
- self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle')
- self.SetBuildSetting('DYLIB_CURRENT_VERSION', '')
- self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '')
- if force_extension is None:
- force_extension = suffix[1:]
-
-        if self._properties['productType'] == \
-           'com.apple.product-type.bundle.unit-test':
- if force_extension is None:
- force_extension = suffix[1:]
-
- if force_extension is not None:
- # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
- # Extension override.
- suffix = '.' + force_extension
- if filetype.startswith('wrapper.'):
- self.SetBuildSetting('WRAPPER_EXTENSION', force_extension)
- else:
- self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension)
-
- if filetype.startswith('compiled.mach-o.executable'):
- product_name = self._properties['productName']
- product_name += suffix
- suffix = ''
- self.SetProperty('productName', product_name)
- self.SetBuildSetting('PRODUCT_NAME', product_name)
-
-        # Xcode handles most prefixes based on the target type; however, there
- # are exceptions. If a "BSD Dynamic Library" target is added in the
- # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
- # behavior.
- if force_prefix is not None:
- prefix = force_prefix
- if filetype.startswith('wrapper.'):
- self.SetBuildSetting('WRAPPER_PREFIX', prefix)
- else:
- self.SetBuildSetting('EXECUTABLE_PREFIX', prefix)
-
- if force_outdir is not None:
- self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir)
-
- # TODO(tvl): Remove the below hack.
- # http://code.google.com/p/gyp/issues/detail?id=122
-
- # Some targets include the prefix in the target_name. These targets
- # really should just add a product_name setting that doesn't include
- # the prefix. For example:
- # target_name = 'libevent', product_name = 'event'
- # This check cleans up for them.
- product_name = self._properties['productName']
- prefix_len = len(prefix)
- if prefix_len and (product_name[:prefix_len] == prefix):
- product_name = product_name[prefix_len:]
- self.SetProperty('productName', product_name)
- self.SetBuildSetting('PRODUCT_NAME', product_name)
-
- ref_props = {
- 'explicitFileType': filetype,
- 'includeInIndex': 0,
- 'path': prefix + product_name + suffix,
- 'sourceTree': 'BUILT_PRODUCTS_DIR',
- }
- file_ref = PBXFileReference(ref_props)
- products_group.AppendChild(file_ref)
- self.SetProperty('productReference', file_ref)
-
- def GetBuildPhaseByType(self, type):
- if not 'buildPhases' in self._properties:
- return None
-
- the_phase = None
- for phase in self._properties['buildPhases']:
- if isinstance(phase, type):
-        # Some phases may be present multiple times in a well-formed project
-        # file, but phases like PBXSourcesBuildPhase may only be present
-        # singly, and this function is intended for those singular phases.
-        # Loop over the entire list of phases and assert if more than one of
-        # the desired type is found.
- assert the_phase is None
- the_phase = phase
-
- return the_phase
-
- def HeadersPhase(self):
- headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
- if headers_phase is None:
- headers_phase = PBXHeadersBuildPhase()
-
- # The headers phase should come before the resources, sources, and
- # frameworks phases, if any.
- insert_at = len(self._properties['buildPhases'])
- for index in xrange(0, len(self._properties['buildPhases'])):
- phase = self._properties['buildPhases'][index]
- if isinstance(phase, PBXResourcesBuildPhase) or \
- isinstance(phase, PBXSourcesBuildPhase) or \
- isinstance(phase, PBXFrameworksBuildPhase):
- insert_at = index
- break
-
- self._properties['buildPhases'].insert(insert_at, headers_phase)
- headers_phase.parent = self
-
- return headers_phase
-
- def ResourcesPhase(self):
- resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
- if resources_phase is None:
- resources_phase = PBXResourcesBuildPhase()
-
- # The resources phase should come before the sources and frameworks
- # phases, if any.
- insert_at = len(self._properties['buildPhases'])
- for index in xrange(0, len(self._properties['buildPhases'])):
- phase = self._properties['buildPhases'][index]
- if isinstance(phase, PBXSourcesBuildPhase) or \
- isinstance(phase, PBXFrameworksBuildPhase):
- insert_at = index
- break
-
- self._properties['buildPhases'].insert(insert_at, resources_phase)
- resources_phase.parent = self
-
- return resources_phase
-
- def SourcesPhase(self):
- sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase)
- if sources_phase is None:
- sources_phase = PBXSourcesBuildPhase()
- self.AppendProperty('buildPhases', sources_phase)
-
- return sources_phase
-
- def FrameworksPhase(self):
- frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase)
- if frameworks_phase is None:
- frameworks_phase = PBXFrameworksBuildPhase()
- self.AppendProperty('buildPhases', frameworks_phase)
-
- return frameworks_phase
-
- def AddDependency(self, other):
- # super
- XCTarget.AddDependency(self, other)
-
- static_library_type = 'com.apple.product-type.library.static'
- shared_library_type = 'com.apple.product-type.library.dynamic'
- framework_type = 'com.apple.product-type.framework'
- if isinstance(other, PBXNativeTarget) and \
- 'productType' in self._properties and \
- self._properties['productType'] != static_library_type and \
- 'productType' in other._properties and \
- (other._properties['productType'] == static_library_type or \
- ((other._properties['productType'] == shared_library_type or \
- other._properties['productType'] == framework_type) and \
- ((not other.HasBuildSetting('MACH_O_TYPE')) or
- other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))):
-
- file_ref = other.GetProperty('productReference')
-
- pbxproject = self.PBXProjectAncestor()
- other_pbxproject = other.PBXProjectAncestor()
- if pbxproject != other_pbxproject:
- other_project_product_group = \
- pbxproject.AddOrGetProjectReference(other_pbxproject)[0]
- file_ref = other_project_product_group.GetChildByRemoteObject(file_ref)
-
- self.FrameworksPhase().AppendProperty('files',
- PBXBuildFile({'fileRef': file_ref}))
-
-
-class PBXAggregateTarget(XCTarget):
- pass
-
-
-class PBXProject(XCContainerPortal):
-  # A PBXProject is really just an XCObject; the XCContainerPortal thing is
- # just to allow PBXProject to be used in the containerPortal property of
- # PBXContainerItemProxy.
- """
-
- Attributes:
- path: "sample.xcodeproj". TODO(mark) Document me!
- _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each
- value is a reference to the dict in the
- projectReferences list associated with the keyed
- PBXProject.
- """
-
- _schema = XCContainerPortal._schema.copy()
- _schema.update({
- 'attributes': [0, dict, 0, 0],
- 'buildConfigurationList': [0, XCConfigurationList, 1, 1,
- XCConfigurationList()],
- 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'],
- 'hasScannedForEncodings': [0, int, 0, 1, 1],
- 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()],
- 'projectDirPath': [0, str, 0, 1, ''],
- 'projectReferences': [1, dict, 0, 0],
- 'projectRoot': [0, str, 0, 1, ''],
- 'targets': [1, XCTarget, 1, 1, []],
- })
-
- def __init__(self, properties=None, id=None, parent=None, path=None):
- self.path = path
- self._other_pbxprojects = {}
- # super
- return XCContainerPortal.__init__(self, properties, id, parent)
-
- def Name(self):
- name = self.path
- if name[-10:] == '.xcodeproj':
- name = name[:-10]
- return posixpath.basename(name)
-
- def Path(self):
- return self.path
-
- def Comment(self):
- return 'Project object'
-
- def Children(self):
- # super
- children = XCContainerPortal.Children(self)
-
- # Add children that the schema doesn't know about. Maybe there's a more
- # elegant way around this, but this is the only case where we need to own
- # objects in a dictionary (that is itself in a list), and three lines for
- # a one-off isn't that big a deal.
- if 'projectReferences' in self._properties:
- for reference in self._properties['projectReferences']:
- children.append(reference['ProductGroup'])
-
- return children
-
- def PBXProjectAncestor(self):
- return self
-
- def _GroupByName(self, name):
- if not 'mainGroup' in self._properties:
- self.SetProperty('mainGroup', PBXGroup())
-
- main_group = self._properties['mainGroup']
- group = main_group.GetChildByName(name)
- if group is None:
- group = PBXGroup({'name': name})
- main_group.AppendChild(group)
-
- return group
-
- # SourceGroup and ProductsGroup are created by default in Xcode's own
- # templates.
- def SourceGroup(self):
- return self._GroupByName('Source')
-
- def ProductsGroup(self):
- return self._GroupByName('Products')
-
- # IntermediatesGroup is used to collect source-like files that are generated
- # by rules or script phases and are placed in intermediate directories such
- # as DerivedSources.
- def IntermediatesGroup(self):
- return self._GroupByName('Intermediates')
-
- # FrameworksGroup and ProjectsGroup are top-level groups used to collect
- # frameworks and projects.
- def FrameworksGroup(self):
- return self._GroupByName('Frameworks')
-
- def ProjectsGroup(self):
- return self._GroupByName('Projects')
-
- def RootGroupForPath(self, path):
- """Returns a PBXGroup child of this object to which path should be added.
-
- This method is intended to choose between SourceGroup and
- IntermediatesGroup on the basis of whether path is present in a source
- directory or an intermediates directory. For the purposes of this
- determination, any path located within a derived file directory such as
- PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
- directory.
-
- The returned value is a two-element tuple. The first element is the
- PBXGroup, and the second element specifies whether that group should be
- organized hierarchically (True) or as a single flat list (False).
- """
-
- # TODO(mark): make this a class variable and bind to self on call?
- # Also, this list is nowhere near exhaustive.
- # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by
- # gyp.generator.xcode. There should probably be some way for that module
- # to push the names in, rather than having to hard-code them here.
- source_tree_groups = {
- 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
- 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
- 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True),
- 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True),
- }
-
- (source_tree, path) = SourceTreeAndPathFromPath(path)
- if source_tree != None and source_tree in source_tree_groups:
- (group_func, hierarchical) = source_tree_groups[source_tree]
- group = group_func()
- return (group, hierarchical)
-
- # TODO(mark): make additional choices based on file extension.
-
- return (self.SourceGroup(), True)
-
- def AddOrGetFileInRootGroup(self, path):
- """Returns a PBXFileReference corresponding to path in the correct group
- according to RootGroupForPath's heuristics.
-
- If an existing PBXFileReference for path exists, it will be returned.
- Otherwise, one will be created and returned.
- """
-
- (group, hierarchical) = self.RootGroupForPath(path)
- return group.AddOrGetFileByPath(path, hierarchical)
-
- def RootGroupsTakeOverOnlyChildren(self, recurse=False):
- """Calls TakeOverOnlyChild for all groups in the main group."""
-
- for group in self._properties['mainGroup']._properties['children']:
- if isinstance(group, PBXGroup):
- group.TakeOverOnlyChild(recurse)
-
- def SortGroups(self):
- # Sort the children of the mainGroup (like "Source" and "Products")
- # according to their defined order.
- self._properties['mainGroup']._properties['children'] = \
- sorted(self._properties['mainGroup']._properties['children'],
- cmp=lambda x,y: x.CompareRootGroup(y))
-
- # Sort everything else by putting group before files, and going
- # alphabetically by name within sections of groups and files. SortGroup
- # is recursive.
- for group in self._properties['mainGroup']._properties['children']:
- if not isinstance(group, PBXGroup):
- continue
-
- if group.Name() == 'Products':
- # The Products group is a special case. Instead of sorting
- # alphabetically, sort things in the order of the targets that
- # produce the products. To do this, just build up a new list of
- # products based on the targets.
- products = []
- for target in self._properties['targets']:
- if not isinstance(target, PBXNativeTarget):
- continue
- product = target._properties['productReference']
- # Make sure that the product is already in the products group.
- assert product in group._properties['children']
- products.append(product)
-
- # Make sure that this process doesn't miss anything that was already
- # in the products group.
- assert len(products) == len(group._properties['children'])
- group._properties['children'] = products
- else:
- group.SortGroup()
-
- def AddOrGetProjectReference(self, other_pbxproject):
- """Add a reference to another project file (via PBXProject object) to this
- one.
-
- Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in
- this project file that contains a PBXReferenceProxy object for each
- product of each PBXNativeTarget in the other project file. ProjectRef is
- a PBXFileReference to the other project file.
-
- If this project file already references the other project file, the
- existing ProductGroup and ProjectRef are returned. The ProductGroup will
- still be updated if necessary.
- """
-
- if not 'projectReferences' in self._properties:
- self._properties['projectReferences'] = []
-
- product_group = None
- project_ref = None
-
- if not other_pbxproject in self._other_pbxprojects:
- # This project file isn't yet linked to the other one. Establish the
- # link.
- product_group = PBXGroup({'name': 'Products'})
-
- # ProductGroup is strong.
- product_group.parent = self
-
- # There's nothing unique about this PBXGroup, and if left alone, it will
- # wind up with the same set of hashables as all other PBXGroup objects
- # owned by the projectReferences list. Add the hashables of the
- # remote PBXProject that it's related to.
- product_group._hashables.extend(other_pbxproject.Hashables())
-
- # The other project reports its path as relative to the same directory
- # that this project's path is relative to. The other project's path
- # is not necessarily already relative to this project. Figure out the
- # pathname that this project needs to use to refer to the other one.
- this_path = posixpath.dirname(self.Path())
- projectDirPath = self.GetProperty('projectDirPath')
- if projectDirPath:
- if posixpath.isabs(projectDirPath[0]):
- this_path = projectDirPath
- else:
- this_path = posixpath.join(this_path, projectDirPath)
- other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path)
-
- # ProjectRef is weak (it's owned by the mainGroup hierarchy).
- project_ref = PBXFileReference({
- 'lastKnownFileType': 'wrapper.pb-project',
- 'path': other_path,
- 'sourceTree': 'SOURCE_ROOT',
- })
- self.ProjectsGroup().AppendChild(project_ref)
-
- ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref}
- self._other_pbxprojects[other_pbxproject] = ref_dict
- self.AppendProperty('projectReferences', ref_dict)
-
- # Xcode seems to sort this list case-insensitively
- self._properties['projectReferences'] = \
- sorted(self._properties['projectReferences'], cmp=lambda x,y:
- cmp(x['ProjectRef'].Name().lower(),
- y['ProjectRef'].Name().lower()))
- else:
-      # The link already exists. Pull out the relevant data.
- project_ref_dict = self._other_pbxprojects[other_pbxproject]
- product_group = project_ref_dict['ProductGroup']
- project_ref = project_ref_dict['ProjectRef']
-
- self._SetUpProductReferences(other_pbxproject, product_group, project_ref)
-
- inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False)
- targets = other_pbxproject.GetProperty('targets')
- if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets):
- dir_path = project_ref._properties['path']
- product_group._hashables.extend(dir_path)
-
- return [product_group, project_ref]
-
- def _AllSymrootsUnique(self, target, inherit_unique_symroot):
- # Returns True if all configurations have a unique 'SYMROOT' attribute.
-    # The value of inherit_unique_symroot decides whether a configuration is
-    # assumed to inherit a unique 'SYMROOT' attribute from its parent when it
-    # doesn't define an explicit value for 'SYMROOT'.
- symroots = self._DefinedSymroots(target)
-    for s in symroots:
- if (s is not None and not self._IsUniqueSymrootForTarget(s) or
- s is None and not inherit_unique_symroot):
- return False
- return True if symroots else inherit_unique_symroot
-
- def _DefinedSymroots(self, target):
- # Returns all values for the 'SYMROOT' attribute defined in all
- # configurations for this target. If any configuration doesn't define the
-    # 'SYMROOT' attribute, None is added to the returned set. If no
-    # configuration defines the 'SYMROOT' attribute, an empty set is
-    # returned.
- config_list = target.GetProperty('buildConfigurationList')
- symroots = set()
- for config in config_list.GetProperty('buildConfigurations'):
- setting = config.GetProperty('buildSettings')
- if 'SYMROOT' in setting:
- symroots.add(setting['SYMROOT'])
- else:
- symroots.add(None)
- if len(symroots) == 1 and None in symroots:
- return set()
- return symroots
-
- def _IsUniqueSymrootForTarget(self, symroot):
-    # Returns True if the given 'SYMROOT' value is unique for the target. A
-    # value is considered unique if the Xcode macro '$SRCROOT' appears in it
-    # in any form.
- uniquifier = ['$SRCROOT', '$(SRCROOT)']
- if any(x in symroot for x in uniquifier):
- return True
- return False
-
- def _SetUpProductReferences(self, other_pbxproject, product_group,
- project_ref):
- # TODO(mark): This only adds references to products in other_pbxproject
- # when they don't exist in this pbxproject. Perhaps it should also
- # remove references from this pbxproject that are no longer present in
- # other_pbxproject. Perhaps it should update various properties if they
- # change.
- for target in other_pbxproject._properties['targets']:
- if not isinstance(target, PBXNativeTarget):
- continue
-
- other_fileref = target._properties['productReference']
- if product_group.GetChildByRemoteObject(other_fileref) is None:
- # Xcode sets remoteInfo to the name of the target and not the name
- # of its product, despite this proxy being a reference to the product.
- container_item = PBXContainerItemProxy({
- 'containerPortal': project_ref,
- 'proxyType': 2,
- 'remoteGlobalIDString': other_fileref,
- 'remoteInfo': target.Name()
- })
- # TODO(mark): Does sourceTree get copied straight over from the other
- # project? Can the other project ever have lastKnownFileType here
- # instead of explicitFileType? (Use it if so?) Can path ever be
- # unset? (I don't think so.) Can other_fileref have name set, and
- # does it impact the PBXReferenceProxy if so? These are the questions
- # that perhaps will be answered one day.
- reference_proxy = PBXReferenceProxy({
- 'fileType': other_fileref._properties['explicitFileType'],
- 'path': other_fileref._properties['path'],
- 'sourceTree': other_fileref._properties['sourceTree'],
- 'remoteRef': container_item,
- })
-
- product_group.AppendChild(reference_proxy)
-
- def SortRemoteProductReferences(self):
- # For each remote project file, sort the associated ProductGroup in the
- # same order that the targets are sorted in the remote project file. This
- # is the sort order used by Xcode.
-
- def CompareProducts(x, y, remote_products):
- # x and y are PBXReferenceProxy objects. Go through their associated
- # PBXContainerItem to get the remote PBXFileReference, which will be
- # present in the remote_products list.
- x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString']
- y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString']
- x_index = remote_products.index(x_remote)
- y_index = remote_products.index(y_remote)
-
- # Use the order of each remote PBXFileReference in remote_products to
- # determine the sort order.
- return cmp(x_index, y_index)
-
- for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems():
- # Build up a list of products in the remote project file, ordered the
- # same as the targets that produce them.
- remote_products = []
- for target in other_pbxproject._properties['targets']:
- if not isinstance(target, PBXNativeTarget):
- continue
- remote_products.append(target._properties['productReference'])
-
- # Sort the PBXReferenceProxy children according to the list of remote
- # products.
- product_group = ref_dict['ProductGroup']
- product_group._properties['children'] = sorted(
- product_group._properties['children'],
- cmp=lambda x, y, rp=remote_products: CompareProducts(x, y, rp))
-
-
-class XCProjectFile(XCObject):
- _schema = XCObject._schema.copy()
- _schema.update({
- 'archiveVersion': [0, int, 0, 1, 1],
- 'classes': [0, dict, 0, 1, {}],
- 'objectVersion': [0, int, 0, 1, 46],
- 'rootObject': [0, PBXProject, 1, 1],
- })
-
- def ComputeIDs(self, recursive=True, overwrite=True, hash=None):
- # Although XCProjectFile is implemented here as an XCObject, it's not a
- # proper object in the Xcode sense, and it certainly doesn't have its own
- # ID. Pass through an attempt to update IDs to the real root object.
- if recursive:
- self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash)
-
- def Print(self, file=sys.stdout):
- self.VerifyHasRequiredProperties()
-
- # Add the special "objects" property, which will be caught and handled
- # separately during printing. This structure allows a fairly standard
-    # loop to do the normal printing.
- self._properties['objects'] = {}
- self._XCPrint(file, 0, '// !$*UTF8*$!\n')
- if self._should_print_single_line:
- self._XCPrint(file, 0, '{ ')
- else:
- self._XCPrint(file, 0, '{\n')
- for property, value in sorted(self._properties.iteritems(),
- cmp=lambda x, y: cmp(x, y)):
- if property == 'objects':
- self._PrintObjects(file)
- else:
- self._XCKVPrint(file, 1, property, value)
- self._XCPrint(file, 0, '}\n')
- del self._properties['objects']
-
- def _PrintObjects(self, file):
- if self._should_print_single_line:
- self._XCPrint(file, 0, 'objects = {')
- else:
- self._XCPrint(file, 1, 'objects = {\n')
-
- objects_by_class = {}
- for object in self.Descendants():
- if object == self:
- continue
- class_name = object.__class__.__name__
- if not class_name in objects_by_class:
- objects_by_class[class_name] = []
- objects_by_class[class_name].append(object)
-
- for class_name in sorted(objects_by_class):
- self._XCPrint(file, 0, '\n')
- self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n')
- for object in sorted(objects_by_class[class_name],
- cmp=lambda x, y: cmp(x.id, y.id)):
- object.Print(file)
- self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n')
-
- if self._should_print_single_line:
- self._XCPrint(file, 0, '}; ')
- else:
- self._XCPrint(file, 1, '};\n')
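
For orientation, the classes deleted above form gyp's in-memory model of an
Xcode project file. Below is a minimal editorial sketch (not part of the
diff) of how a generator typically drove this API. It assumes gyp's pylib
directory is on sys.path so the module imports as gyp.xcodeproj_file, and it
is Python 2 like the module itself; the target names 'event' and 'sample'
are hypothetical.

    import gyp.xcodeproj_file as xf

    # The root PBXProject and the XCProjectFile wrapper that prints it.
    project = xf.PBXProject(path='sample.xcodeproj')
    project_file = xf.XCProjectFile({'rootObject': project})

    # Targets want a parent at construction time so PBXNativeTarget can
    # create the product PBXFileReference in the project's Products group.
    lib = xf.PBXNativeTarget(
        {'name': 'event',
         'productType': 'com.apple.product-type.library.static'},
        parent=project)
    app = xf.PBXNativeTarget(
        {'name': 'sample',
         'productType': 'com.apple.product-type.application'},
        parent=project)
    project.AppendProperty('targets', lib)
    project.AppendProperty('targets', app)

    # AddDependency builds the PBXContainerItemProxy/PBXTargetDependency
    # pair shown above; because 'lib' is a static library, its product is
    # also linked into the app's frameworks build phase.
    app.AddDependency(lib)

    project.SortGroups()
    project_file.ComputeIDs()
    project_file.Print()  # writes the project.pbxproj plist to stdout
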
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
deleted file mode 100644
index 5de848158d..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Applies a fix to CR LF TAB handling in xml.dom.
-
-Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293
-Working around this: http://bugs.python.org/issue5752
-TODO(bradnelson): Consider dropping this when we drop XP support.
-"""
-
-
-import xml.dom.minidom
-
-
-def _Replacement_write_data(writer, data, is_attrib=False):
- """Writes datachars to writer."""
- data = data.replace("&", "&amp;").replace("<", "&lt;")
- data = data.replace("\"", "&quot;").replace(">", "&gt;")
- if is_attrib:
- data = data.replace(
- "\r", "&#xD;").replace(
- "\n", "&#xA;").replace(
- "\t", "&#x9;")
- writer.write(data)
-
-
-def _Replacement_writexml(self, writer, indent="", addindent="", newl=""):
- # indent = current indentation
- # addindent = indentation to add to higher levels
- # newl = newline string
- writer.write(indent+"<" + self.tagName)
-
- attrs = self._get_attributes()
- a_names = attrs.keys()
- a_names.sort()
-
- for a_name in a_names:
- writer.write(" %s=\"" % a_name)
- _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True)
- writer.write("\"")
- if self.childNodes:
- writer.write(">%s" % newl)
- for node in self.childNodes:
- node.writexml(writer, indent + addindent, addindent, newl)
- writer.write("%s</%s>%s" % (indent, self.tagName, newl))
- else:
- writer.write("/>%s" % newl)
-
-
-class XmlFix(object):
- """Object to manage temporary patching of xml.dom.minidom."""
-
- def __init__(self):
- # Preserve current xml.dom.minidom functions.
- self.write_data = xml.dom.minidom._write_data
- self.writexml = xml.dom.minidom.Element.writexml
- # Inject replacement versions of a function and a method.
- xml.dom.minidom._write_data = _Replacement_write_data
- xml.dom.minidom.Element.writexml = _Replacement_writexml
-
- def Cleanup(self):
- if self.write_data:
- xml.dom.minidom._write_data = self.write_data
- xml.dom.minidom.Element.writexml = self.writexml
- self.write_data = None
-
- def __del__(self):
- self.Cleanup()
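
The XmlFix class deleted above follows a simple install/restore protocol:
the constructor monkey-patches xml.dom.minidom, and Cleanup puts the
original functions back. A minimal usage sketch (editorial, not part of the
diff), assuming pylib is on sys.path so the module imports as gyp.xml_fix:

    import sys
    import xml.dom.minidom

    from gyp.xml_fix import XmlFix

    fix = XmlFix()  # installs the replacement _write_data/writexml
    try:
        doc = xml.dom.minidom.parseString('<a b="one&#x9;two"/>')
        # With the patch active, the TAB inside the attribute is written
        # back out as &#x9; instead of a literal tab character.
        doc.writexml(sys.stdout)
    finally:
        fix.Cleanup()  # restores the original minidom functions
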
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples
deleted file mode 100755
index 804b618998..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os.path
-import shutil
-import sys
-
-
-gyps = [
- 'app/app.gyp',
- 'base/base.gyp',
- 'build/temp_gyp/googleurl.gyp',
- 'build/all.gyp',
- 'build/common.gypi',
- 'build/external_code.gypi',
- 'chrome/test/security_tests/security_tests.gyp',
- 'chrome/third_party/hunspell/hunspell.gyp',
- 'chrome/chrome.gyp',
- 'media/media.gyp',
- 'net/net.gyp',
- 'printing/printing.gyp',
- 'sdch/sdch.gyp',
- 'skia/skia.gyp',
- 'testing/gmock.gyp',
- 'testing/gtest.gyp',
- 'third_party/bzip2/bzip2.gyp',
- 'third_party/icu38/icu38.gyp',
- 'third_party/libevent/libevent.gyp',
- 'third_party/libjpeg/libjpeg.gyp',
- 'third_party/libpng/libpng.gyp',
- 'third_party/libxml/libxml.gyp',
- 'third_party/libxslt/libxslt.gyp',
- 'third_party/lzma_sdk/lzma_sdk.gyp',
- 'third_party/modp_b64/modp_b64.gyp',
- 'third_party/npapi/npapi.gyp',
- 'third_party/sqlite/sqlite.gyp',
- 'third_party/zlib/zlib.gyp',
- 'v8/tools/gyp/v8.gyp',
- 'webkit/activex_shim/activex_shim.gyp',
- 'webkit/activex_shim_dll/activex_shim_dll.gyp',
- 'webkit/build/action_csspropertynames.py',
- 'webkit/build/action_cssvaluekeywords.py',
- 'webkit/build/action_jsconfig.py',
- 'webkit/build/action_makenames.py',
- 'webkit/build/action_maketokenizer.py',
- 'webkit/build/action_useragentstylesheets.py',
- 'webkit/build/rule_binding.py',
- 'webkit/build/rule_bison.py',
- 'webkit/build/rule_gperf.py',
- 'webkit/tools/test_shell/test_shell.gyp',
- 'webkit/webkit.gyp',
-]
-
-
-def Main(argv):
- if len(argv) != 3 or argv[1] not in ['push', 'pull']:
- print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0]
- return 1
-
- path_to_chrome = argv[2]
-
- for g in gyps:
- chrome_file = os.path.join(path_to_chrome, g)
- local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1])
- if argv[1] == 'push':
- print 'Copying %s to %s' % (local_file, chrome_file)
- shutil.copyfile(local_file, chrome_file)
- elif argv[1] == 'pull':
- print 'Copying %s to %s' % (chrome_file, local_file)
- shutil.copyfile(chrome_file, local_file)
- else:
- assert False
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(Main(sys.argv))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples.bat b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples.bat
deleted file mode 100644
index 568325565c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/samples/samples.bat
+++ /dev/null
@@ -1,5 +0,0 @@
-@rem Copyright (c) 2009 Google Inc. All rights reserved.
-@rem Use of this source code is governed by a BSD-style license that can be
-@rem found in the LICENSE file.
-
-@python %~dp0/samples %*
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/setup.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/setup.py
deleted file mode 100755
index 75a42558d8..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/setup.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from setuptools import setup
-
-setup(
- name='gyp',
- version='0.1',
- description='Generate Your Projects',
- author='Chromium Authors',
- author_email='chromium-dev@googlegroups.com',
- url='http://code.google.com/p/gyp',
- package_dir = {'': 'pylib'},
- packages=['gyp', 'gyp.generator'],
- entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
-)
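
The setup.py deleted above registers a single console script, gyp, mapped to
gyp.script_main. As a short editorial sketch (not part of the diff), the
installed entry point is equivalent to:

    # script_main is defined in gyp/pylib/gyp/__init__.py and wraps
    # main(sys.argv[1:]); this is what the `gyp` console script invokes.
    import sys

    import gyp

    sys.exit(gyp.script_main())
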
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/README b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/README
deleted file mode 100644
index 712e4efbb7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/README
+++ /dev/null
@@ -1,15 +0,0 @@
-pretty_vcproj:
- Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2]
-
-  The key/value pairs are used to resolve vsprops names.
-
- For example, if I want to diff the base.vcproj project:
-
-  pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt
- pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt
-
- And you can use your favorite diff tool to see the changes.
-
-  Note: In the case of base.vcproj, the original vcproj is one level up from the generated one.
- I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt
- before you perform the diff. \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/README b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/README
deleted file mode 100644
index 2492a2c2f8..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/README
+++ /dev/null
@@ -1,5 +0,0 @@
-The Specifications directory contains syntax formatters for Xcode 3. These do not appear to be supported in Xcode 4 yet. To use them with Xcode 3, install both the gyp.pbfilespec and gyp.xclangspec files in
-
-~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-
-and restart Xcode. \ No newline at end of file
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
deleted file mode 100644
index 85e2e268a5..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- gyp.pbfilespec
- GYP source file spec for Xcode 3
-
- There is not much documentation available regarding the format
- of .pbfilespec files. As a starting point, see for instance the
- outdated documentation at:
- http://maxao.free.fr/xcode-plugin-interface/specifications.html
- and the files in:
- /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
-
- Place this file in directory:
- ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-*/
-
-(
- {
- Identifier = sourcecode.gyp;
- BasedOn = sourcecode;
- Name = "GYP Files";
- Extensions = ("gyp", "gypi");
- MIMETypes = ("text/gyp");
- Language = "xcode.lang.gyp";
- IsTextFile = YES;
- IsSourceFile = YES;
- }
-)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
deleted file mode 100644
index ab1312e6ee..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- Copyright (c) 2011 Google Inc. All rights reserved.
- Use of this source code is governed by a BSD-style license that can be
- found in the LICENSE file.
-
- gyp.xclangspec
- GYP language specification for Xcode 3
-
- There is not much documentation available regarding the format
- of .xclangspec files. As a starting point, see for instance the
- outdated documentation at:
- http://maxao.free.fr/xcode-plugin-interface/specifications.html
- and the files in:
- /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/
-
- Place this file in directory:
- ~/Library/Application Support/Developer/Shared/Xcode/Specifications/
-*/
-
-(
-
- {
- Identifier = "xcode.lang.gyp.keyword";
- Syntax = {
- Words = (
- "and",
- "or",
- "<!",
- "<",
- );
- Type = "xcode.syntax.keyword";
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.target.declarator";
- Syntax = {
- Words = (
- "'target_name'",
- );
- Type = "xcode.syntax.identifier.type";
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.string.singlequote";
- Syntax = {
- IncludeRules = (
- "xcode.lang.string",
- "xcode.lang.gyp.keyword",
- "xcode.lang.number",
- );
- Start = "'";
- End = "'";
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.comma";
- Syntax = {
- Words = ( ",", );
-
- };
- },
-
- {
- Identifier = "xcode.lang.gyp";
- Description = "GYP Coloring";
- BasedOn = "xcode.lang.simpleColoring";
- IncludeInMenu = YES;
- Name = "GYP";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer.toplevel";
- IncludeRules = (
- "xcode.lang.gyp.dictionary",
- );
- Type = "xcode.syntax.plain";
- };
- },
-
- // The following rule returns tokens to the other rules
- {
- Identifier = "xcode.lang.gyp.lexer";
- Syntax = {
- IncludeRules = (
- "xcode.lang.gyp.comment",
- "xcode.lang.string",
-        "xcode.lang.gyp.targetname.declarator",
- "xcode.lang.gyp.string.singlequote",
- "xcode.lang.number",
- "xcode.lang.gyp.comma",
- );
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.lexer.toplevel";
- Syntax = {
- IncludeRules = (
- "xcode.lang.gyp.comment",
- );
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.assignment";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Rules = (
- "xcode.lang.gyp.assignment.lhs",
- ":",
- "xcode.lang.gyp.assignment.rhs",
- );
- };
-
- },
-
- {
- Identifier = "xcode.lang.gyp.target.declaration";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Rules = (
- "xcode.lang.gyp.target.declarator",
- ":",
- "xcode.lang.gyp.target.name",
- );
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.target.name";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Rules = (
- "xcode.lang.gyp.string.singlequote",
- );
- Type = "xcode.syntax.definition.function";
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.assignment.lhs";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Rules = (
- "xcode.lang.gyp.string.singlequote",
- );
- Type = "xcode.syntax.identifier.type";
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.assignment.rhs";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Rules = (
- "xcode.lang.gyp.string.singlequote?",
- "xcode.lang.gyp.array?",
- "xcode.lang.gyp.dictionary?",
- "xcode.lang.number?",
- );
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.dictionary";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Start = "{";
- End = "}";
- Foldable = YES;
- Recursive = YES;
- IncludeRules = (
- "xcode.lang.gyp.target.declaration",
- "xcode.lang.gyp.assignment",
- );
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.array";
- Syntax = {
- Tokenizer = "xcode.lang.gyp.lexer";
- Start = "[";
- End = "]";
- Foldable = YES;
- Recursive = YES;
- IncludeRules = (
- "xcode.lang.gyp.array",
- "xcode.lang.gyp.dictionary",
- "xcode.lang.gyp.string.singlequote",
- );
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.todo.mark";
- Syntax = {
- StartChars = "T";
- Match = (
- "^\(TODO\(.*\):[ \t]+.*\)$", // include "TODO: " in the markers list
- );
- // This is the order of captures. All of the match strings above need the same order.
- CaptureTypes = (
- "xcode.syntax.mark"
- );
- Type = "xcode.syntax.comment";
- };
- },
-
- {
- Identifier = "xcode.lang.gyp.comment";
- BasedOn = "xcode.lang.comment"; // for text macros
- Syntax = {
- Start = "#";
- End = "\n";
- IncludeRules = (
- "xcode.lang.url",
- "xcode.lang.url.mail",
- "xcode.lang.comment.mark",
- "xcode.lang.gyp.todo.mark",
- );
- Type = "xcode.syntax.comment";
- };
- },
-)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/README b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/README
deleted file mode 100644
index eeef39f41b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/README
+++ /dev/null
@@ -1,12 +0,0 @@
-How to install gyp-mode for emacs:
-
-Add the following to your ~/.emacs (replace ... with the path to your gyp
-checkout).
-
-(setq load-path (cons ".../tools/emacs" load-path))
-(require 'gyp)
-
-Restart emacs (or eval-region the added lines) and you should be all set.
-
-Please note that ert, which is included in Emacs 24 and available separately
-from https://github.com/ohler/ert, is required for running the tests.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
deleted file mode 100644
index 11b8497886..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp-tests.el
+++ /dev/null
@@ -1,63 +0,0 @@
-;;; gyp-tests.el - unit tests for gyp-mode.
-
-;; Copyright (c) 2012 Google Inc. All rights reserved.
-;; Use of this source code is governed by a BSD-style license that can be
-;; found in the LICENSE file.
-
-;; The recommended way to run these tests is to run them from the command-line,
-;; with the run-unit-tests.sh script.
-
-(require 'cl)
-(require 'ert)
-(require 'gyp)
-
-(defconst samples (directory-files "testdata" t ".gyp$")
- "List of golden samples to check")
-
-(defun fontify (filename)
- (with-temp-buffer
- (insert-file-contents-literally filename)
- (gyp-mode)
- (font-lock-fontify-buffer)
- (buffer-string)))
-
-(defun read-golden-sample (filename)
- (with-temp-buffer
- (insert-file-contents-literally (concat filename ".fontified"))
- (read (current-buffer))))
-
-(defun equivalent-face (face)
- "For the purposes of face comparison, we're not interested in the
- differences between certain faces. For example, the difference between
- font-lock-comment-delimiter and font-lock-comment-face."
- (case face
- ((font-lock-comment-delimiter-face) font-lock-comment-face)
- (t face)))
-
-(defun text-face-properties (s)
- "Extract the text properties from s"
- (let ((result (list t)))
- (dotimes (i (length s))
- (setq result (cons (equivalent-face (get-text-property i 'face s))
- result)))
- (nreverse result)))
-
-(ert-deftest test-golden-samples ()
- "Check that fontification produces the same results as the golden samples"
- (dolist (sample samples)
- (let ((golden (read-golden-sample sample))
- (fontified (fontify sample)))
- (should (equal golden fontified))
- (should (equal (text-face-properties golden)
- (text-face-properties fontified))))))
-
-(defun create-golden-sample (filename)
- "Create a golden sample by fontifying filename and writing out the printable
-   representation of the fontified buffer (with text properties) to
-   FILENAME.fontified."
- (with-temp-file (concat filename ".fontified")
- (print (fontify filename) (current-buffer))))
-
-(defun create-golden-samples ()
- "Recreate the golden samples"
- (dolist (sample samples) (create-golden-sample sample)))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp.el b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp.el
deleted file mode 100644
index b98b155ced..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/gyp.el
+++ /dev/null
@@ -1,275 +0,0 @@
-;;; gyp.el - font-lock-mode support for gyp files.
-
-;; Copyright (c) 2012 Google Inc. All rights reserved.
-;; Use of this source code is governed by a BSD-style license that can be
-;; found in the LICENSE file.
-
-;; Put this somewhere in your load-path and
-;; (require 'gyp)
-
-(require 'python)
-(require 'cl)
-
-(when (string-match "python-mode.el" (symbol-file 'python-mode 'defun))
- (error (concat "python-mode must be loaded from python.el (bundled with "
- "recent emacsen), not from the older and less maintained "
- "python-mode.el")))
-
-(defadvice python-indent-calculate-levels (after gyp-outdent-closing-parens
- activate)
- "De-indent closing parens, braces, and brackets in gyp-mode."
- (when (and (eq major-mode 'gyp-mode)
- (string-match "^ *[])}][],)}]* *$"
- (buffer-substring-no-properties
- (line-beginning-position) (line-end-position))))
- (setf (first python-indent-levels)
- (- (first python-indent-levels) python-continuation-offset))))
-
-(defadvice python-indent-guess-indent-offset (around
- gyp-indent-guess-indent-offset
- activate)
- "Guess correct indent offset in gyp-mode."
- (or (and (not (eq major-mode 'gyp-mode))
- ad-do-it)
- (save-excursion
- (save-restriction
- (widen)
- (goto-char (point-min))
- ;; Find first line ending with an opening brace that is not a comment.
- (or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
- (forward-line)
- (/= (current-indentation) 0)
- (set (make-local-variable 'python-indent-offset)
- (current-indentation))
- (set (make-local-variable 'python-continuation-offset)
- (current-indentation)))
- (message "Can't guess gyp indent offset, using default: %s"
- python-continuation-offset))))))
-
-(define-derived-mode gyp-mode python-mode "Gyp"
- "Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
- ;; gyp-parse-history is a stack of (POSITION . PARSE-STATE) tuples,
- ;; with greater positions at the top of the stack. PARSE-STATE
- ;; is a list of section symbols (see gyp-section-name and gyp-parse-to)
- ;; with most nested section symbol at the front of the list.
- (set (make-local-variable 'gyp-parse-history) '((1 . (list))))
- (gyp-add-font-lock-keywords))
-
-(defun gyp-set-indentation ()
- "Hook function to configure python indentation to suit gyp mode."
- (set (make-local-variable 'python-indent-offset) 2)
- (set (make-local-variable 'python-continuation-offset) 2)
- (set (make-local-variable 'python-indent-guess-indent-offset) t)
- (python-indent-guess-indent-offset))
-
-(add-hook 'gyp-mode-hook 'gyp-set-indentation)
-
-(add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
-(add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
-(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
-
-;;; Font-lock support
-
-(defconst gyp-dependencies-regexp
- (regexp-opt (list "dependencies" "export_dependent_settings"))
- "Regular expression to introduce 'dependencies' section")
-
-(defconst gyp-sources-regexp
- (regexp-opt (list "action" "files" "include_dirs" "includes" "inputs"
- "libraries" "outputs" "sources"))
- "Regular expression to introduce 'sources' sections")
-
-(defconst gyp-conditions-regexp
- (regexp-opt (list "conditions" "target_conditions"))
- "Regular expression to introduce conditions sections")
-
-(defconst gyp-variables-regexp
- "^variables"
- "Regular expression to introduce variables sections")
-
-(defconst gyp-defines-regexp
- "^defines"
- "Regular expression to introduce 'defines' sections")
-
-(defconst gyp-targets-regexp
- "^targets"
- "Regular expression to introduce 'targets' sections")
-
-(defun gyp-section-name (section)
- "Map the sections we are interested in from SECTION to symbol.
-
- SECTION is a string from the buffer that introduces a section. The result is
- a symbol representing the kind of section.
-
- This allows us to treat (for the purposes of font-lock) several different
- section names as the same kind of section. For example, a 'sources section
-   can be introduced by the 'sources', 'inputs', or 'outputs' keywords.
-
- 'other is the default section kind when a more specific match is not made."
- (cond ((string-match-p gyp-dependencies-regexp section) 'dependencies)
- ((string-match-p gyp-sources-regexp section) 'sources)
- ((string-match-p gyp-variables-regexp section) 'variables)
- ((string-match-p gyp-conditions-regexp section) 'conditions)
- ((string-match-p gyp-targets-regexp section) 'targets)
- ((string-match-p gyp-defines-regexp section) 'defines)
- (t 'other)))
-
-(defun gyp-invalidate-parse-states-after (target-point)
- "Erase any parse information after target-point."
- (while (> (caar gyp-parse-history) target-point)
- (setq gyp-parse-history (cdr gyp-parse-history))))
-
-(defun gyp-parse-point ()
- "The point of the last parse state added by gyp-parse-to."
- (caar gyp-parse-history))
-
-(defun gyp-parse-sections ()
- "A list of section symbols holding at the last parse state point."
- (cdar gyp-parse-history))
-
-(defun gyp-inside-dictionary-p ()
- "Predicate returning true if the parser is inside a dictionary."
- (not (eq (cadar gyp-parse-history) 'list)))
-
-(defun gyp-add-parse-history (point sections)
- "Add parse state SECTIONS to the parse history at POINT so that parsing can be
- resumed instantly."
- (while (>= (caar gyp-parse-history) point)
- (setq gyp-parse-history (cdr gyp-parse-history)))
- (setq gyp-parse-history (cons (cons point sections) gyp-parse-history)))
-
-(defun gyp-parse-to (target-point)
- "Parses from (point) to TARGET-POINT adding the parse state information to
- gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a
- string literal has been parsed. Returns nil if no further parsing can be
- done, otherwise returns the position of the start of a parsed string, leaving
- the point at the end of the string."
- (let ((parsing t)
- string-start)
- (while parsing
- (setq string-start nil)
- ;; Parse up to a character that starts a sexp, or if the nesting
- ;; level decreases.
- (let ((state (parse-partial-sexp (gyp-parse-point)
- target-point
- -1
- t))
- (sections (gyp-parse-sections)))
- (if (= (nth 0 state) -1)
- (setq sections (cdr sections)) ; pop out a level
- (cond ((looking-at-p "['\"]") ; a string
- (setq string-start (point))
- (goto-char (scan-sexps (point) 1))
- (if (gyp-inside-dictionary-p)
- ;; Look for sections inside a dictionary
- (let ((section (gyp-section-name
- (buffer-substring-no-properties
- (+ 1 string-start)
- (- (point) 1)))))
- (setq sections (cons section (cdr sections)))))
- ;; Stop after the string so it can be fontified.
- (setq target-point (point)))
- ((looking-at-p "{")
- ;; Inside a dictionary. Increase nesting.
- (forward-char 1)
- (setq sections (cons 'unknown sections)))
- ((looking-at-p "\\[")
- ;; Inside a list. Increase nesting
- (forward-char 1)
- (setq sections (cons 'list sections)))
- ((not (eobp))
- ;; other
- (forward-char 1))))
- (gyp-add-parse-history (point) sections)
- (setq parsing (< (point) target-point))))
- string-start))
-
-(defun gyp-section-at-point ()
- "Transform the last parse state, which is a list of nested sections and return
- the section symbol that should be used to determine font-lock information for
- the string. Can return nil indicating the string should not have any attached
- section."
- (let ((sections (gyp-parse-sections)))
- (cond
- ((eq (car sections) 'conditions)
- ;; conditions can occur in a variables section, but we still want to
- ;; highlight it as a keyword.
- nil)
- ((and (eq (car sections) 'list)
- (eq (cadr sections) 'list))
- ;; conditions and sources can have items in [[ ]]
- (caddr sections))
- (t (cadr sections)))))
-
-(defun gyp-section-match (limit)
- "Parse from (point) to LIMIT returning by means of match data what was
- matched. The group of the match indicates what style font-lock should apply.
- See also `gyp-add-font-lock-keywords'."
- (gyp-invalidate-parse-states-after (point))
- (let ((group nil)
- (string-start t))
- (while (and (< (point) limit)
- (not group)
- string-start)
- (setq string-start (gyp-parse-to limit))
- (if string-start
- (setq group (case (gyp-section-at-point)
- ('dependencies 1)
- ('variables 2)
- ('conditions 2)
- ('sources 3)
- ('defines 4)
- (nil nil)))))
- (if group
- (progn
- ;; Set the match data to indicate to the font-lock mechanism the
- ;; highlighting to be performed.
- (set-match-data (append (list string-start (point))
- (make-list (* (1- group) 2) nil)
- (list (1+ string-start) (1- (point)))))
- t))))
-
-;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for
-;;; canonical list of keywords.
-(defun gyp-add-font-lock-keywords ()
- "Add gyp-mode keywords to font-lock mechanism."
- ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match
- ;; so that we can do the font-locking in a single font-lock pass.
- (font-lock-add-keywords
- nil
- (list
- ;; Top-level keywords
- (list (concat "['\"]\\("
- (regexp-opt (list "action" "action_name" "actions" "cflags"
- "cflags_cc" "conditions" "configurations"
- "copies" "defines" "dependencies" "destination"
- "direct_dependent_settings"
- "export_dependent_settings" "extension" "files"
- "include_dirs" "includes" "inputs" "ldflags" "libraries"
- "link_settings" "mac_bundle" "message"
- "msvs_external_rule" "outputs" "product_name"
- "process_outputs_as_sources" "rules" "rule_name"
- "sources" "suppress_wildcard"
- "target_conditions" "target_defaults"
- "target_defines" "target_name" "toolsets"
- "targets" "type" "variables" "xcode_settings"))
- "[!/+=]?\\)") 1 'font-lock-keyword-face t)
- ;; Type of target
- (list (concat "['\"]\\("
- (regexp-opt (list "loadable_module" "static_library"
- "shared_library" "executable" "none"))
- "\\)") 1 'font-lock-type-face t)
- (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1
- 'font-lock-function-name-face t)
- (list 'gyp-section-match
- (list 1 'font-lock-function-name-face t t) ; dependencies
- (list 2 'font-lock-variable-name-face t t) ; variables, conditions
- (list 3 'font-lock-constant-face t t) ; sources
- (list 4 'font-lock-preprocessor-face t t)) ; preprocessor
- ;; Variable expansion
- (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
- ;; Command expansion
- (list "<!@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t)
- )))
-
-(provide 'gyp)
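The deleted gyp.el above keeps a stack of enclosing sections while scanning: every `{` pushes an `unknown` marker, every `[` pushes `list`, and a key string seen directly inside a dictionary relabels the top of the stack; `gyp-section-at-point` then reads that stack to pick a font-lock group. Below is a minimal Python sketch of the same stack discipline — an illustration only, not the elisp implementation, and it ignores comments, escapes, and the parse-state caching gyp.el does:

    def section_stack_at(text, pos):
        """Return the stack of enclosing GYP sections at offset pos."""
        stack = []
        i = 0
        while i < min(pos, len(text)):
            ch = text[i]
            if ch == "'":                        # skip over a quoted string
                end = text.find("'", i + 1)
                if end == -1:
                    break                        # unterminated string: give up
                if stack and stack[-1] != 'list':
                    stack[-1] = text[i + 1:end]  # key strings relabel the dict level
                i = end + 1
            elif ch == '{':
                stack.append('unknown')          # entering a dictionary
                i += 1
            elif ch == '[':
                stack.append('list')             # entering a list
                i += 1
            elif ch in '}]':
                if stack:
                    stack.pop()
                i += 1
            else:
                i += 1
        return stack

    snippet = "{ 'targets': [ { 'sources': [ 'a.cc',"
    print(section_stack_at(snippet, len(snippet)))
    # -> ['targets', 'list', 'sources', 'list']

Given such a stack, a string nested as ['targets', 'list', 'sources', 'list'] can be attributed to the 'sources' section, which is how the mode decides to fontify file names with font-lock-constant-face.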
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
deleted file mode 100755
index 6e62b9b28c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/run-unit-tests.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-emacs --no-site-file --no-init-file --batch \
- --load ert.el --load gyp.el --load gyp-tests.el \
- -f ert-run-tests-batch-and-exit
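The deleted runner above is just a batch Emacs invocation of ERT against gyp.el and its test data; ERT's batch runner exits non-zero when a test fails. If you wanted to drive the same suite from a Python harness — a hypothetical wrapper, not something that existed in this tree — the equivalent call would be:

    import subprocess

    subprocess.run(
        ['emacs', '--no-site-file', '--no-init-file', '--batch',
         '--load', 'ert.el', '--load', 'gyp.el', '--load', 'gyp-tests.el',
         '-f', 'ert-run-tests-batch-and-exit'],
        check=True)  # raises CalledProcessError if any test failed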
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
deleted file mode 100644
index 29300fe1b8..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp
+++ /dev/null
@@ -1,1105 +0,0 @@
-# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'variables': {
- 'chromium_code': 1,
- # Override to dynamically link the PulseAudio library.
- 'use_pulseaudio%': 0,
- # Override to dynamically link the cras (ChromeOS audio) library.
- 'use_cras%': 0,
- },
- 'targets': [
- {
- 'target_name': 'media',
- 'type': '<(component)',
- 'dependencies': [
- 'yuv_convert',
- '../base/base.gyp:base',
- '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
- '../build/temp_gyp/googleurl.gyp:googleurl',
- '../crypto/crypto.gyp:crypto',
- '../third_party/openmax/openmax.gyp:il',
- '../ui/ui.gyp:ui',
- ],
- 'defines': [
- 'MEDIA_IMPLEMENTATION',
- ],
- 'include_dirs': [
- '..',
- ],
- 'sources': [
- 'audio/android/audio_manager_android.cc',
- 'audio/android/audio_manager_android.h',
- 'audio/android/audio_track_output_android.cc',
- 'audio/android/audio_track_output_android.h',
- 'audio/android/opensles_input.cc',
- 'audio/android/opensles_input.h',
- 'audio/android/opensles_output.cc',
- 'audio/android/opensles_output.h',
- 'audio/async_socket_io_handler.h',
- 'audio/async_socket_io_handler_posix.cc',
- 'audio/async_socket_io_handler_win.cc',
- 'audio/audio_buffers_state.cc',
- 'audio/audio_buffers_state.h',
- 'audio/audio_io.h',
- 'audio/audio_input_controller.cc',
- 'audio/audio_input_controller.h',
- 'audio/audio_input_stream_impl.cc',
- 'audio/audio_input_stream_impl.h',
- 'audio/audio_device_name.cc',
- 'audio/audio_device_name.h',
- 'audio/audio_manager.cc',
- 'audio/audio_manager.h',
- 'audio/audio_manager_base.cc',
- 'audio/audio_manager_base.h',
- 'audio/audio_output_controller.cc',
- 'audio/audio_output_controller.h',
- 'audio/audio_output_dispatcher.cc',
- 'audio/audio_output_dispatcher.h',
- 'audio/audio_output_dispatcher_impl.cc',
- 'audio/audio_output_dispatcher_impl.h',
- 'audio/audio_output_mixer.cc',
- 'audio/audio_output_mixer.h',
- 'audio/audio_output_proxy.cc',
- 'audio/audio_output_proxy.h',
- 'audio/audio_parameters.cc',
- 'audio/audio_parameters.h',
- 'audio/audio_util.cc',
- 'audio/audio_util.h',
- 'audio/cross_process_notification.cc',
- 'audio/cross_process_notification.h',
- 'audio/cross_process_notification_win.cc',
- 'audio/cross_process_notification_posix.cc',
- 'audio/fake_audio_input_stream.cc',
- 'audio/fake_audio_input_stream.h',
- 'audio/fake_audio_output_stream.cc',
- 'audio/fake_audio_output_stream.h',
- 'audio/linux/audio_manager_linux.cc',
- 'audio/linux/audio_manager_linux.h',
- 'audio/linux/alsa_input.cc',
- 'audio/linux/alsa_input.h',
- 'audio/linux/alsa_output.cc',
- 'audio/linux/alsa_output.h',
- 'audio/linux/alsa_util.cc',
- 'audio/linux/alsa_util.h',
- 'audio/linux/alsa_wrapper.cc',
- 'audio/linux/alsa_wrapper.h',
- 'audio/linux/cras_output.cc',
- 'audio/linux/cras_output.h',
- 'audio/openbsd/audio_manager_openbsd.cc',
- 'audio/openbsd/audio_manager_openbsd.h',
- 'audio/mac/audio_input_mac.cc',
- 'audio/mac/audio_input_mac.h',
- 'audio/mac/audio_low_latency_input_mac.cc',
- 'audio/mac/audio_low_latency_input_mac.h',
- 'audio/mac/audio_low_latency_output_mac.cc',
- 'audio/mac/audio_low_latency_output_mac.h',
- 'audio/mac/audio_manager_mac.cc',
- 'audio/mac/audio_manager_mac.h',
- 'audio/mac/audio_output_mac.cc',
- 'audio/mac/audio_output_mac.h',
- 'audio/null_audio_sink.cc',
- 'audio/null_audio_sink.h',
- 'audio/pulse/pulse_output.cc',
- 'audio/pulse/pulse_output.h',
- 'audio/sample_rates.cc',
- 'audio/sample_rates.h',
- 'audio/simple_sources.cc',
- 'audio/simple_sources.h',
- 'audio/win/audio_low_latency_input_win.cc',
- 'audio/win/audio_low_latency_input_win.h',
- 'audio/win/audio_low_latency_output_win.cc',
- 'audio/win/audio_low_latency_output_win.h',
- 'audio/win/audio_manager_win.cc',
- 'audio/win/audio_manager_win.h',
- 'audio/win/avrt_wrapper_win.cc',
- 'audio/win/avrt_wrapper_win.h',
- 'audio/win/device_enumeration_win.cc',
- 'audio/win/device_enumeration_win.h',
- 'audio/win/wavein_input_win.cc',
- 'audio/win/wavein_input_win.h',
- 'audio/win/waveout_output_win.cc',
- 'audio/win/waveout_output_win.h',
- 'base/android/media_jni_registrar.cc',
- 'base/android/media_jni_registrar.h',
- 'base/audio_decoder.cc',
- 'base/audio_decoder.h',
- 'base/audio_decoder_config.cc',
- 'base/audio_decoder_config.h',
- 'base/audio_renderer.h',
- 'base/audio_renderer_mixer.cc',
- 'base/audio_renderer_mixer.h',
- 'base/audio_renderer_mixer_input.cc',
- 'base/audio_renderer_mixer_input.h',
- 'base/bitstream_buffer.h',
- 'base/buffers.cc',
- 'base/buffers.h',
- 'base/byte_queue.cc',
- 'base/byte_queue.h',
- 'base/channel_layout.cc',
- 'base/channel_layout.h',
- 'base/clock.cc',
- 'base/clock.h',
- 'base/composite_filter.cc',
- 'base/composite_filter.h',
- 'base/data_buffer.cc',
- 'base/data_buffer.h',
- 'base/data_source.cc',
- 'base/data_source.h',
- 'base/decoder_buffer.cc',
- 'base/decoder_buffer.h',
- 'base/decrypt_config.cc',
- 'base/decrypt_config.h',
- 'base/decryptor.h',
- 'base/decryptor_client.h',
- 'base/demuxer.cc',
- 'base/demuxer.h',
- 'base/demuxer_stream.cc',
- 'base/demuxer_stream.h',
- 'base/djb2.cc',
- 'base/djb2.h',
- 'base/filter_collection.cc',
- 'base/filter_collection.h',
- 'base/filter_host.h',
- 'base/filters.cc',
- 'base/filters.h',
- 'base/h264_bitstream_converter.cc',
- 'base/h264_bitstream_converter.h',
- 'base/media.h',
- 'base/media_android.cc',
- 'base/media_export.h',
- 'base/media_log.cc',
- 'base/media_log.h',
- 'base/media_log_event.h',
- 'base/media_posix.cc',
- 'base/media_switches.cc',
- 'base/media_switches.h',
- 'base/media_win.cc',
- 'base/message_loop_factory.cc',
- 'base/message_loop_factory.h',
- 'base/pipeline.cc',
- 'base/pipeline.h',
- 'base/pipeline_status.cc',
- 'base/pipeline_status.h',
- 'base/ranges.cc',
- 'base/ranges.h',
- 'base/seekable_buffer.cc',
- 'base/seekable_buffer.h',
- 'base/state_matrix.cc',
- 'base/state_matrix.h',
- 'base/stream_parser.cc',
- 'base/stream_parser.h',
- 'base/stream_parser_buffer.cc',
- 'base/stream_parser_buffer.h',
- 'base/video_decoder.cc',
- 'base/video_decoder.h',
- 'base/video_decoder_config.cc',
- 'base/video_decoder_config.h',
- 'base/video_frame.cc',
- 'base/video_frame.h',
- 'base/video_renderer.h',
- 'base/video_util.cc',
- 'base/video_util.h',
- 'crypto/aes_decryptor.cc',
- 'crypto/aes_decryptor.h',
- 'ffmpeg/ffmpeg_common.cc',
- 'ffmpeg/ffmpeg_common.h',
- 'ffmpeg/file_protocol.cc',
- 'ffmpeg/file_protocol.h',
- 'filters/audio_file_reader.cc',
- 'filters/audio_file_reader.h',
- 'filters/audio_renderer_algorithm.cc',
- 'filters/audio_renderer_algorithm.h',
- 'filters/audio_renderer_impl.cc',
- 'filters/audio_renderer_impl.h',
- 'filters/bitstream_converter.cc',
- 'filters/bitstream_converter.h',
- 'filters/chunk_demuxer.cc',
- 'filters/chunk_demuxer.h',
- 'filters/chunk_demuxer_client.h',
- 'filters/dummy_demuxer.cc',
- 'filters/dummy_demuxer.h',
- 'filters/ffmpeg_audio_decoder.cc',
- 'filters/ffmpeg_audio_decoder.h',
- 'filters/ffmpeg_demuxer.cc',
- 'filters/ffmpeg_demuxer.h',
- 'filters/ffmpeg_h264_bitstream_converter.cc',
- 'filters/ffmpeg_h264_bitstream_converter.h',
- 'filters/ffmpeg_glue.cc',
- 'filters/ffmpeg_glue.h',
- 'filters/ffmpeg_video_decoder.cc',
- 'filters/ffmpeg_video_decoder.h',
- 'filters/file_data_source.cc',
- 'filters/file_data_source.h',
- 'filters/gpu_video_decoder.cc',
- 'filters/gpu_video_decoder.h',
- 'filters/in_memory_url_protocol.cc',
- 'filters/in_memory_url_protocol.h',
- 'filters/source_buffer_stream.cc',
- 'filters/source_buffer_stream.h',
- 'filters/video_frame_generator.cc',
- 'filters/video_frame_generator.h',
- 'filters/video_renderer_base.cc',
- 'filters/video_renderer_base.h',
- 'video/capture/fake_video_capture_device.cc',
- 'video/capture/fake_video_capture_device.h',
- 'video/capture/linux/video_capture_device_linux.cc',
- 'video/capture/linux/video_capture_device_linux.h',
- 'video/capture/mac/video_capture_device_mac.h',
- 'video/capture/mac/video_capture_device_mac.mm',
- 'video/capture/mac/video_capture_device_qtkit_mac.h',
- 'video/capture/mac/video_capture_device_qtkit_mac.mm',
- 'video/capture/video_capture.h',
- 'video/capture/video_capture_device.h',
- 'video/capture/video_capture_device_dummy.cc',
- 'video/capture/video_capture_device_dummy.h',
- 'video/capture/video_capture_proxy.cc',
- 'video/capture/video_capture_proxy.h',
- 'video/capture/video_capture_types.h',
- 'video/capture/win/filter_base_win.cc',
- 'video/capture/win/filter_base_win.h',
- 'video/capture/win/pin_base_win.cc',
- 'video/capture/win/pin_base_win.h',
- 'video/capture/win/sink_filter_observer_win.h',
- 'video/capture/win/sink_filter_win.cc',
- 'video/capture/win/sink_filter_win.h',
- 'video/capture/win/sink_input_pin_win.cc',
- 'video/capture/win/sink_input_pin_win.h',
- 'video/capture/win/video_capture_device_win.cc',
- 'video/capture/win/video_capture_device_win.h',
- 'video/picture.cc',
- 'video/picture.h',
- 'video/video_decode_accelerator.cc',
- 'video/video_decode_accelerator.h',
- 'webm/webm_constants.h',
- 'webm/webm_cluster_parser.cc',
- 'webm/webm_cluster_parser.h',
- 'webm/webm_content_encodings.cc',
- 'webm/webm_content_encodings.h',
- 'webm/webm_content_encodings_client.cc',
- 'webm/webm_content_encodings_client.h',
- 'webm/webm_info_parser.cc',
- 'webm/webm_info_parser.h',
- 'webm/webm_parser.cc',
- 'webm/webm_parser.h',
- 'webm/webm_stream_parser.cc',
- 'webm/webm_stream_parser.h',
- 'webm/webm_tracks_parser.cc',
- 'webm/webm_tracks_parser.h',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- '..',
- ],
- },
- 'conditions': [
- # Android doesn't use ffmpeg, so make the dependency conditional
- # and exclude the sources which depend on ffmpeg.
- ['OS != "android"', {
- 'dependencies': [
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- }],
- ['OS == "android"', {
- 'sources!': [
- 'base/media_posix.cc',
- 'ffmpeg/ffmpeg_common.cc',
- 'ffmpeg/ffmpeg_common.h',
- 'ffmpeg/file_protocol.cc',
- 'ffmpeg/file_protocol.h',
- 'filters/audio_file_reader.cc',
- 'filters/audio_file_reader.h',
- 'filters/bitstream_converter.cc',
- 'filters/bitstream_converter.h',
- 'filters/chunk_demuxer.cc',
- 'filters/chunk_demuxer.h',
- 'filters/chunk_demuxer_client.h',
- 'filters/ffmpeg_audio_decoder.cc',
- 'filters/ffmpeg_audio_decoder.h',
- 'filters/ffmpeg_demuxer.cc',
- 'filters/ffmpeg_demuxer.h',
- 'filters/ffmpeg_h264_bitstream_converter.cc',
- 'filters/ffmpeg_h264_bitstream_converter.h',
- 'filters/ffmpeg_glue.cc',
- 'filters/ffmpeg_glue.h',
- 'filters/ffmpeg_video_decoder.cc',
- 'filters/ffmpeg_video_decoder.h',
- 'filters/gpu_video_decoder.cc',
- 'filters/gpu_video_decoder.h',
- 'webm/webm_cluster_parser.cc',
- 'webm/webm_cluster_parser.h',
- 'webm/webm_stream_parser.cc',
- 'webm/webm_stream_parser.h',
- ],
- }],
- # The 'android' condition below was added temporarily and should be
- # removed downstream, because there is no Java environment set up
- # upstream yet.
- ['OS == "android"', {
- 'sources!':[
- 'audio/android/audio_track_output_android.cc',
- ],
- 'sources':[
- 'audio/android/audio_track_output_stub_android.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-lOpenSLES',
- ],
- },
- }],
- ['OS=="linux" or OS=="freebsd" or OS=="solaris"', {
- 'link_settings': {
- 'libraries': [
- '-lasound',
- ],
- },
- }],
- ['OS=="openbsd"', {
- 'sources/': [ ['exclude', '/alsa_' ],
- ['exclude', '/audio_manager_linux' ] ],
- 'link_settings': {
- 'libraries': [
- ],
- },
- }],
- ['OS!="openbsd"', {
- 'sources!': [
- 'audio/openbsd/audio_manager_openbsd.cc',
- 'audio/openbsd/audio_manager_openbsd.h',
- ],
- }],
- ['OS=="linux"', {
- 'variables': {
- 'conditions': [
- ['sysroot!=""', {
- 'pkg-config': '../build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
- }, {
- 'pkg-config': 'pkg-config'
- }],
- ],
- },
- 'conditions': [
- ['use_cras == 1', {
- 'cflags': [
- '<!@(<(pkg-config) --cflags libcras)',
- ],
- 'link_settings': {
- 'libraries': [
- '<!@(<(pkg-config) --libs libcras)',
- ],
- },
- 'defines': [
- 'USE_CRAS',
- ],
- }, { # else: use_cras == 0
- 'sources!': [
- 'audio/linux/cras_output.cc',
- 'audio/linux/cras_output.h',
- ],
- }],
- ],
- }],
- ['os_posix == 1', {
- 'conditions': [
- ['use_pulseaudio == 1', {
- 'cflags': [
- '<!@(pkg-config --cflags libpulse)',
- ],
- 'link_settings': {
- 'libraries': [
- '<!@(pkg-config --libs-only-l libpulse)',
- ],
- },
- 'defines': [
- 'USE_PULSEAUDIO',
- ],
- }, { # else: use_pulseaudio == 0
- 'sources!': [
- 'audio/pulse/pulse_output.cc',
- 'audio/pulse/pulse_output.h',
- ],
- }],
- ],
- }],
- ['os_posix == 1 and OS != "android"', {
- # Video capture isn't supported in Android yet.
- 'sources!': [
- 'video/capture/video_capture_device_dummy.cc',
- 'video/capture/video_capture_device_dummy.h',
- ],
- }],
- ['OS=="mac"', {
- 'link_settings': {
- 'libraries': [
- '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
- '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
- '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
- '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
- '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
- ],
- },
- }],
- ['OS=="win"', {
- 'sources!': [
- 'audio/pulse/pulse_output.cc',
- 'audio/pulse/pulse_output.h',
- 'video/capture/video_capture_device_dummy.cc',
- 'video/capture/video_capture_device_dummy.h',
- ],
- }],
- ['proprietary_codecs==1 or branding=="Chrome"', {
- 'sources': [
- 'mp4/avc.cc',
- 'mp4/avc.h',
- 'mp4/box_definitions.cc',
- 'mp4/box_definitions.h',
- 'mp4/box_reader.cc',
- 'mp4/box_reader.h',
- 'mp4/cenc.cc',
- 'mp4/cenc.h',
- 'mp4/mp4_stream_parser.cc',
- 'mp4/mp4_stream_parser.h',
- 'mp4/offset_byte_queue.cc',
- 'mp4/offset_byte_queue.h',
- 'mp4/track_run_iterator.cc',
- 'mp4/track_run_iterator.h',
- ],
- }],
- ],
- },
- {
- 'target_name': 'yuv_convert',
- 'type': 'static_library',
- 'include_dirs': [
- '..',
- ],
- 'conditions': [
- ['order_profiling != 0', {
- 'target_conditions' : [
- ['_toolset=="target"', {
- 'cflags!': [ '-finstrument-functions' ],
- }],
- ],
- }],
- [ 'target_arch == "ia32" or target_arch == "x64"', {
- 'dependencies': [
- 'yuv_convert_simd_x86',
- ],
- }],
- [ 'target_arch == "arm"', {
- 'dependencies': [
- 'yuv_convert_simd_arm',
- ],
- }],
- ],
- 'sources': [
- 'base/yuv_convert.cc',
- 'base/yuv_convert.h',
- ],
- },
- {
- 'target_name': 'yuv_convert_simd_x86',
- 'type': 'static_library',
- 'include_dirs': [
- '..',
- ],
- 'sources': [
- 'base/simd/convert_rgb_to_yuv_c.cc',
- 'base/simd/convert_rgb_to_yuv_sse2.cc',
- 'base/simd/convert_rgb_to_yuv_ssse3.asm',
- 'base/simd/convert_rgb_to_yuv_ssse3.cc',
- 'base/simd/convert_rgb_to_yuv_ssse3.inc',
- 'base/simd/convert_yuv_to_rgb_c.cc',
- 'base/simd/convert_yuv_to_rgb_x86.cc',
- 'base/simd/convert_yuv_to_rgb_mmx.asm',
- 'base/simd/convert_yuv_to_rgb_mmx.inc',
- 'base/simd/convert_yuv_to_rgb_sse.asm',
- 'base/simd/filter_yuv.h',
- 'base/simd/filter_yuv_c.cc',
- 'base/simd/filter_yuv_mmx.cc',
- 'base/simd/filter_yuv_sse2.cc',
- 'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
- 'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
- 'base/simd/linear_scale_yuv_to_rgb_sse.asm',
- 'base/simd/scale_yuv_to_rgb_mmx.asm',
- 'base/simd/scale_yuv_to_rgb_mmx.inc',
- 'base/simd/scale_yuv_to_rgb_sse.asm',
- 'base/simd/yuv_to_rgb_table.cc',
- 'base/simd/yuv_to_rgb_table.h',
- ],
- 'conditions': [
- ['order_profiling != 0', {
- 'target_conditions' : [
- ['_toolset=="target"', {
- 'cflags!': [ '-finstrument-functions' ],
- }],
- ],
- }],
- [ 'target_arch == "x64"', {
- # Source files optimized for X64 systems.
- 'sources': [
- 'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
- 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
- ],
- }],
- [ 'os_posix == 1 and OS != "mac" and OS != "android"', {
- 'cflags': [
- '-msse2',
- ],
- }],
- [ 'OS == "mac"', {
- 'configurations': {
- 'Debug': {
- 'xcode_settings': {
- # gcc on the mac builds horribly unoptimized sse code in debug
- # mode. Since this is rarely going to be debugged, run with full
- # optimizations in Debug as well as Release.
- 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
- },
- },
- },
- }],
- [ 'OS=="win"', {
- 'variables': {
- 'yasm_flags': [
- '-DWIN32',
- '-DMSVC',
- '-DCHROMIUM',
- '-Isimd',
- ],
- },
- }],
- [ 'OS=="mac"', {
- 'variables': {
- 'yasm_flags': [
- '-DPREFIX',
- '-DMACHO',
- '-DCHROMIUM',
- '-Isimd',
- ],
- },
- }],
- [ 'os_posix==1 and OS!="mac"', {
- 'variables': {
- 'conditions': [
- [ 'target_arch=="ia32"', {
- 'yasm_flags': [
- '-DX86_32',
- '-DELF',
- '-DCHROMIUM',
- '-Isimd',
- ],
- }, {
- 'yasm_flags': [
- '-DARCH_X86_64',
- '-DELF',
- '-DPIC',
- '-DCHROMIUM',
- '-Isimd',
- ],
- }],
- ],
- },
- }],
- ],
- 'variables': {
- 'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
- },
- 'msvs_2010_disable_uldi_when_referenced': 1,
- 'includes': [
- '../third_party/yasm/yasm_compile.gypi',
- ],
- },
- {
- 'target_name': 'yuv_convert_simd_arm',
- 'type': 'static_library',
- 'include_dirs': [
- '..',
- ],
- 'sources': [
- 'base/simd/convert_rgb_to_yuv_c.cc',
- 'base/simd/convert_rgb_to_yuv.h',
- 'base/simd/convert_yuv_to_rgb_c.cc',
- 'base/simd/convert_yuv_to_rgb.h',
- 'base/simd/filter_yuv.h',
- 'base/simd/filter_yuv_c.cc',
- 'base/simd/yuv_to_rgb_table.cc',
- 'base/simd/yuv_to_rgb_table.h',
- ],
- },
- {
- 'target_name': 'media_unittests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'media_test_support',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../base/base.gyp:base_i18n',
- '../base/base.gyp:test_support_base',
- '../testing/gmock.gyp:gmock',
- '../testing/gtest.gyp:gtest',
- '../ui/ui.gyp:ui',
- ],
- 'sources': [
- 'audio/async_socket_io_handler_unittest.cc',
- 'audio/audio_input_controller_unittest.cc',
- 'audio/audio_input_device_unittest.cc',
- 'audio/audio_input_unittest.cc',
- 'audio/audio_input_volume_unittest.cc',
- 'audio/audio_low_latency_input_output_unittest.cc',
- 'audio/audio_output_controller_unittest.cc',
- 'audio/audio_output_proxy_unittest.cc',
- 'audio/audio_parameters_unittest.cc',
- 'audio/audio_util_unittest.cc',
- 'audio/cross_process_notification_unittest.cc',
- 'audio/linux/alsa_output_unittest.cc',
- 'audio/mac/audio_low_latency_input_mac_unittest.cc',
- 'audio/mac/audio_output_mac_unittest.cc',
- 'audio/simple_sources_unittest.cc',
- 'audio/win/audio_low_latency_input_win_unittest.cc',
- 'audio/win/audio_low_latency_output_win_unittest.cc',
- 'audio/win/audio_output_win_unittest.cc',
- 'base/audio_renderer_mixer_unittest.cc',
- 'base/audio_renderer_mixer_input_unittest.cc',
- 'base/buffers_unittest.cc',
- 'base/clock_unittest.cc',
- 'base/composite_filter_unittest.cc',
- 'base/data_buffer_unittest.cc',
- 'base/decoder_buffer_unittest.cc',
- 'base/djb2_unittest.cc',
- 'base/fake_audio_render_callback.cc',
- 'base/fake_audio_render_callback.h',
- 'base/filter_collection_unittest.cc',
- 'base/h264_bitstream_converter_unittest.cc',
- 'base/pipeline_unittest.cc',
- 'base/ranges_unittest.cc',
- 'base/run_all_unittests.cc',
- 'base/seekable_buffer_unittest.cc',
- 'base/state_matrix_unittest.cc',
- 'base/test_data_util.cc',
- 'base/test_data_util.h',
- 'base/video_frame_unittest.cc',
- 'base/video_util_unittest.cc',
- 'base/yuv_convert_unittest.cc',
- 'crypto/aes_decryptor_unittest.cc',
- 'ffmpeg/ffmpeg_common_unittest.cc',
- 'filters/audio_renderer_algorithm_unittest.cc',
- 'filters/audio_renderer_impl_unittest.cc',
- 'filters/bitstream_converter_unittest.cc',
- 'filters/chunk_demuxer_unittest.cc',
- 'filters/ffmpeg_audio_decoder_unittest.cc',
- 'filters/ffmpeg_decoder_unittest.h',
- 'filters/ffmpeg_demuxer_unittest.cc',
- 'filters/ffmpeg_glue_unittest.cc',
- 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
- 'filters/ffmpeg_video_decoder_unittest.cc',
- 'filters/file_data_source_unittest.cc',
- 'filters/pipeline_integration_test.cc',
- 'filters/pipeline_integration_test_base.cc',
- 'filters/source_buffer_stream_unittest.cc',
- 'filters/video_renderer_base_unittest.cc',
- 'video/capture/video_capture_device_unittest.cc',
- 'webm/cluster_builder.cc',
- 'webm/cluster_builder.h',
- 'webm/webm_cluster_parser_unittest.cc',
- 'webm/webm_content_encodings_client_unittest.cc',
- 'webm/webm_parser_unittest.cc',
- ],
- 'conditions': [
- ['os_posix==1 and OS!="mac"', {
- 'conditions': [
- ['linux_use_tcmalloc==1', {
- 'dependencies': [
- '../base/allocator/allocator.gyp:allocator',
- ],
- }],
- ],
- }],
- ['OS != "android"', {
- 'dependencies': [
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- }],
- ['OS == "android"', {
- 'sources!': [
- 'audio/audio_input_volume_unittest.cc',
- 'base/test_data_util.cc',
- 'base/test_data_util.h',
- 'ffmpeg/ffmpeg_common_unittest.cc',
- 'filters/ffmpeg_audio_decoder_unittest.cc',
- 'filters/bitstream_converter_unittest.cc',
- 'filters/chunk_demuxer_unittest.cc',
- 'filters/ffmpeg_demuxer_unittest.cc',
- 'filters/ffmpeg_glue_unittest.cc',
- 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
- 'filters/ffmpeg_video_decoder_unittest.cc',
- 'filters/pipeline_integration_test.cc',
- 'filters/pipeline_integration_test_base.cc',
- 'mp4/mp4_stream_parser_unittest.cc',
- 'webm/webm_cluster_parser_unittest.cc',
- ],
- }],
- ['OS == "linux"', {
- 'conditions': [
- ['use_cras == 1', {
- 'sources': [
- 'audio/linux/cras_output_unittest.cc',
- ],
- 'defines': [
- 'USE_CRAS',
- ],
- }],
- ],
- }],
- [ 'target_arch=="ia32" or target_arch=="x64"', {
- 'sources': [
- 'base/simd/convert_rgb_to_yuv_unittest.cc',
- ],
- }],
- ['proprietary_codecs==1 or branding=="Chrome"', {
- 'sources': [
- 'mp4/avc_unittest.cc',
- 'mp4/box_reader_unittest.cc',
- 'mp4/mp4_stream_parser_unittest.cc',
- 'mp4/offset_byte_queue_unittest.cc',
- ],
- }],
- ],
- },
- {
- 'target_name': 'media_test_support',
- 'type': 'static_library',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- '../testing/gmock.gyp:gmock',
- '../testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'audio/test_audio_input_controller_factory.cc',
- 'audio/test_audio_input_controller_factory.h',
- 'base/mock_callback.cc',
- 'base/mock_callback.h',
- 'base/mock_data_source_host.cc',
- 'base/mock_data_source_host.h',
- 'base/mock_demuxer_host.cc',
- 'base/mock_demuxer_host.h',
- 'base/mock_filter_host.cc',
- 'base/mock_filter_host.h',
- 'base/mock_filters.cc',
- 'base/mock_filters.h',
- ],
- },
- {
- 'target_name': 'scaler_bench',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../skia/skia.gyp:skia',
- ],
- 'sources': [
- 'tools/scaler_bench/scaler_bench.cc',
- ],
- },
- {
- 'target_name': 'qt_faststart',
- 'type': 'executable',
- 'sources': [
- 'tools/qt_faststart/qt_faststart.c'
- ],
- },
- {
- 'target_name': 'seek_tester',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- ],
- 'sources': [
- 'tools/seek_tester/seek_tester.cc',
- ],
- },
- ],
- 'conditions': [
- ['OS=="win"', {
- 'targets': [
- {
- 'target_name': 'player_wtl',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
- '../ui/ui.gyp:ui',
- ],
- 'include_dirs': [
- '<(DEPTH)/third_party/wtl/include',
- ],
- 'sources': [
- 'tools/player_wtl/list.h',
- 'tools/player_wtl/mainfrm.h',
- 'tools/player_wtl/movie.cc',
- 'tools/player_wtl/movie.h',
- 'tools/player_wtl/player_wtl.cc',
- 'tools/player_wtl/player_wtl.rc',
- 'tools/player_wtl/props.h',
- 'tools/player_wtl/seek.h',
- 'tools/player_wtl/resource.h',
- 'tools/player_wtl/view.h',
- ],
- 'msvs_settings': {
- 'VCLinkerTool': {
- 'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
- },
- },
- 'defines': [
- '_CRT_SECURE_NO_WARNINGS=1',
- ],
- },
- ],
- }],
- ['OS == "win" or toolkit_uses_gtk == 1', {
- 'targets': [
- {
- 'target_name': 'shader_bench',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../ui/gl/gl.gyp:gl',
- ],
- 'sources': [
- 'tools/shader_bench/shader_bench.cc',
- 'tools/shader_bench/cpu_color_painter.cc',
- 'tools/shader_bench/cpu_color_painter.h',
- 'tools/shader_bench/gpu_color_painter.cc',
- 'tools/shader_bench/gpu_color_painter.h',
- 'tools/shader_bench/gpu_painter.cc',
- 'tools/shader_bench/gpu_painter.h',
- 'tools/shader_bench/painter.cc',
- 'tools/shader_bench/painter.h',
- 'tools/shader_bench/window.cc',
- 'tools/shader_bench/window.h',
- ],
- 'conditions': [
- ['toolkit_uses_gtk == 1', {
- 'dependencies': [
- '../build/linux/system.gyp:gtk',
- ],
- 'sources': [
- 'tools/shader_bench/window_linux.cc',
- ],
- }],
- ['OS=="win"', {
- 'dependencies': [
- '../third_party/angle/src/build_angle.gyp:libEGL',
- '../third_party/angle/src/build_angle.gyp:libGLESv2',
- ],
- 'sources': [
- 'tools/shader_bench/window_win.cc',
- ],
- }],
- ],
- },
- ],
- }],
- ['OS == "linux" and target_arch != "arm"', {
- 'targets': [
- {
- 'target_name': 'tile_render_bench',
- 'type': 'executable',
- 'dependencies': [
- '../base/base.gyp:base',
- '../ui/gl/gl.gyp:gl',
- ],
- 'libraries': [
- '-lGL',
- '-ldl',
- ],
- 'sources': [
- 'tools/tile_render_bench/tile_render_bench.cc',
- ],
- },
- ],
- }],
- ['os_posix == 1 and OS != "mac" and OS != "android"', {
- 'targets': [
- {
- 'target_name': 'player_x11',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../ui/gl/gl.gyp:gl',
- ],
- 'link_settings': {
- 'libraries': [
- '-ldl',
- '-lX11',
- '-lXrender',
- '-lXext',
- ],
- },
- 'sources': [
- 'tools/player_x11/data_source_logger.cc',
- 'tools/player_x11/data_source_logger.h',
- 'tools/player_x11/gl_video_renderer.cc',
- 'tools/player_x11/gl_video_renderer.h',
- 'tools/player_x11/player_x11.cc',
- 'tools/player_x11/x11_video_renderer.cc',
- 'tools/player_x11/x11_video_renderer.h',
- ],
- },
- ],
- }],
- ['OS == "android"', {
- 'targets': [
- {
- 'target_name': 'player_android',
- 'type': 'static_library',
- 'sources': [
- 'base/android/media_player_bridge.cc',
- 'base/android/media_player_bridge.h',
- ],
- 'dependencies': [
- '../base/base.gyp:base',
- ],
- 'include_dirs': [
- '<(SHARED_INTERMEDIATE_DIR)/media',
- ],
- 'actions': [
- {
- 'action_name': 'generate-jni-headers',
- 'inputs': [
- '../base/android/jni_generator/jni_generator.py',
- 'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
- ],
- 'outputs': [
- '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
- ],
- 'action': [
- 'python',
- '<(DEPTH)/base/android/jni_generator/jni_generator.py',
- '-o',
- '<@(_inputs)',
- '<@(_outputs)',
- ],
- },
- ],
- },
- {
- 'target_name': 'media_java',
- 'type': 'none',
- 'dependencies': [ '../base/base.gyp:base_java' ],
- 'variables': {
- 'package_name': 'media',
- 'java_in_dir': 'base/android/java',
- },
- 'includes': [ '../build/java.gypi' ],
- },
-
- ],
- }, { # OS != "android"
- # Android does not use ffmpeg, so disable the targets which require it.
- 'targets': [
- {
- 'target_name': 'ffmpeg_unittests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'media_test_support',
- '../base/base.gyp:base',
- '../base/base.gyp:base_i18n',
- '../base/base.gyp:test_support_base',
- '../base/base.gyp:test_support_perf',
- '../testing/gtest.gyp:gtest',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'ffmpeg/ffmpeg_unittest.cc',
- ],
- 'conditions': [
- ['toolkit_uses_gtk == 1', {
- 'dependencies': [
- # Needed for the following #include chain:
- # base/run_all_unittests.cc
- # ../base/test_suite.h
- # gtk/gtk.h
- '../build/linux/system.gyp:gtk',
- ],
- 'conditions': [
- ['linux_use_tcmalloc==1', {
- 'dependencies': [
- '../base/allocator/allocator.gyp:allocator',
- ],
- }],
- ],
- }],
- ],
- },
- {
- 'target_name': 'ffmpeg_regression_tests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'media_test_support',
- '../base/base.gyp:test_support_base',
- '../testing/gmock.gyp:gmock',
- '../testing/gtest.gyp:gtest',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'base/test_data_util.cc',
- 'base/run_all_unittests.cc',
- 'ffmpeg/ffmpeg_regression_tests.cc',
- 'filters/pipeline_integration_test_base.cc',
- ],
- 'conditions': [
- ['os_posix==1 and OS!="mac"', {
- 'conditions': [
- ['linux_use_tcmalloc==1', {
- 'dependencies': [
- '../base/allocator/allocator.gyp:allocator',
- ],
- }],
- ],
- }],
- ],
- },
- {
- 'target_name': 'ffmpeg_tests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'test/ffmpeg_tests/ffmpeg_tests.cc',
- ],
- },
- {
- 'target_name': 'media_bench',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'tools/media_bench/media_bench.cc',
- ],
- },
- ],
- }]
- ],
-}
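The media.gyp fixture above exists to exercise fontification, but it also shows most of the GYP constructs the mode has to recognize: nested 'targets', 'conditions' entries of the form [expr, then-dict, optional else-dict], and '!'-suffixed keys such as 'sources!' that exclude items. As a rough Python sketch of those semantics — a deliberate simplification; real GYP's input machinery does recursive merging, 'sources/' regex filters, variable expansion, and more:

    def apply_conditions(target, variables):
        """Fold a GYP-style 'conditions' list into a target dict (simplified)."""
        for entry in target.pop('conditions', []):
            expr, then_dict = entry[0], entry[1]
            else_dict = entry[2] if len(entry) > 2 else {}
            # GYP condition expressions are Python expressions over the variables.
            chosen = then_dict if eval(expr, {}, dict(variables)) else else_dict
            for key, value in chosen.items():
                if key.endswith('!'):            # e.g. 'sources!': exclusion list
                    base = key[:-1]
                    target[base] = [v for v in target.get(base, [])
                                    if v not in value]
                elif isinstance(value, list):
                    target.setdefault(key, []).extend(value)
                else:
                    target[key] = value
        return target

    target = {'sources': ['base/media.h', 'base/media_posix.cc'],
              'conditions': [['OS == "android"',
                              {'sources!': ['base/media_posix.cc']}]]}
    print(apply_conditions(target, {'OS': 'android'}))
    # -> {'sources': ['base/media.h']}

So, for instance, the ['OS == "android"', {'sources!': [...]}] entries in the file above strip the ffmpeg-dependent sources out of the 'media' target when building for Android, exactly as the comments in the fixture describe.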
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
deleted file mode 100644
index 962b7b2c43..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/emacs/testdata/media.gyp.fontified
+++ /dev/null
@@ -1,1107 +0,0 @@
-
-#("# Copyright (c) 2012 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'variables': {
- 'chromium_code': 1,
- # Override to dynamically link the PulseAudio library.
- 'use_pulseaudio%': 0,
- # Override to dynamically link the cras (ChromeOS audio) library.
- 'use_cras%': 0,
- },
- 'targets': [
- {
- 'target_name': 'media',
- 'type': '<(component)',
- 'dependencies': [
- 'yuv_convert',
- '../base/base.gyp:base',
- '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
- '../build/temp_gyp/googleurl.gyp:googleurl',
- '../crypto/crypto.gyp:crypto',
- '../third_party/openmax/openmax.gyp:il',
- '../ui/ui.gyp:ui',
- ],
- 'defines': [
- 'MEDIA_IMPLEMENTATION',
- ],
- 'include_dirs': [
- '..',
- ],
- 'sources': [
- 'audio/android/audio_manager_android.cc',
- 'audio/android/audio_manager_android.h',
- 'audio/android/audio_track_output_android.cc',
- 'audio/android/audio_track_output_android.h',
- 'audio/android/opensles_input.cc',
- 'audio/android/opensles_input.h',
- 'audio/android/opensles_output.cc',
- 'audio/android/opensles_output.h',
- 'audio/async_socket_io_handler.h',
- 'audio/async_socket_io_handler_posix.cc',
- 'audio/async_socket_io_handler_win.cc',
- 'audio/audio_buffers_state.cc',
- 'audio/audio_buffers_state.h',
- 'audio/audio_io.h',
- 'audio/audio_input_controller.cc',
- 'audio/audio_input_controller.h',
- 'audio/audio_input_stream_impl.cc',
- 'audio/audio_input_stream_impl.h',
- 'audio/audio_device_name.cc',
- 'audio/audio_device_name.h',
- 'audio/audio_manager.cc',
- 'audio/audio_manager.h',
- 'audio/audio_manager_base.cc',
- 'audio/audio_manager_base.h',
- 'audio/audio_output_controller.cc',
- 'audio/audio_output_controller.h',
- 'audio/audio_output_dispatcher.cc',
- 'audio/audio_output_dispatcher.h',
- 'audio/audio_output_dispatcher_impl.cc',
- 'audio/audio_output_dispatcher_impl.h',
- 'audio/audio_output_mixer.cc',
- 'audio/audio_output_mixer.h',
- 'audio/audio_output_proxy.cc',
- 'audio/audio_output_proxy.h',
- 'audio/audio_parameters.cc',
- 'audio/audio_parameters.h',
- 'audio/audio_util.cc',
- 'audio/audio_util.h',
- 'audio/cross_process_notification.cc',
- 'audio/cross_process_notification.h',
- 'audio/cross_process_notification_win.cc',
- 'audio/cross_process_notification_posix.cc',
- 'audio/fake_audio_input_stream.cc',
- 'audio/fake_audio_input_stream.h',
- 'audio/fake_audio_output_stream.cc',
- 'audio/fake_audio_output_stream.h',
- 'audio/linux/audio_manager_linux.cc',
- 'audio/linux/audio_manager_linux.h',
- 'audio/linux/alsa_input.cc',
- 'audio/linux/alsa_input.h',
- 'audio/linux/alsa_output.cc',
- 'audio/linux/alsa_output.h',
- 'audio/linux/alsa_util.cc',
- 'audio/linux/alsa_util.h',
- 'audio/linux/alsa_wrapper.cc',
- 'audio/linux/alsa_wrapper.h',
- 'audio/linux/cras_output.cc',
- 'audio/linux/cras_output.h',
- 'audio/openbsd/audio_manager_openbsd.cc',
- 'audio/openbsd/audio_manager_openbsd.h',
- 'audio/mac/audio_input_mac.cc',
- 'audio/mac/audio_input_mac.h',
- 'audio/mac/audio_low_latency_input_mac.cc',
- 'audio/mac/audio_low_latency_input_mac.h',
- 'audio/mac/audio_low_latency_output_mac.cc',
- 'audio/mac/audio_low_latency_output_mac.h',
- 'audio/mac/audio_manager_mac.cc',
- 'audio/mac/audio_manager_mac.h',
- 'audio/mac/audio_output_mac.cc',
- 'audio/mac/audio_output_mac.h',
- 'audio/null_audio_sink.cc',
- 'audio/null_audio_sink.h',
- 'audio/pulse/pulse_output.cc',
- 'audio/pulse/pulse_output.h',
- 'audio/sample_rates.cc',
- 'audio/sample_rates.h',
- 'audio/simple_sources.cc',
- 'audio/simple_sources.h',
- 'audio/win/audio_low_latency_input_win.cc',
- 'audio/win/audio_low_latency_input_win.h',
- 'audio/win/audio_low_latency_output_win.cc',
- 'audio/win/audio_low_latency_output_win.h',
- 'audio/win/audio_manager_win.cc',
- 'audio/win/audio_manager_win.h',
- 'audio/win/avrt_wrapper_win.cc',
- 'audio/win/avrt_wrapper_win.h',
- 'audio/win/device_enumeration_win.cc',
- 'audio/win/device_enumeration_win.h',
- 'audio/win/wavein_input_win.cc',
- 'audio/win/wavein_input_win.h',
- 'audio/win/waveout_output_win.cc',
- 'audio/win/waveout_output_win.h',
- 'base/android/media_jni_registrar.cc',
- 'base/android/media_jni_registrar.h',
- 'base/audio_decoder.cc',
- 'base/audio_decoder.h',
- 'base/audio_decoder_config.cc',
- 'base/audio_decoder_config.h',
- 'base/audio_renderer.h',
- 'base/audio_renderer_mixer.cc',
- 'base/audio_renderer_mixer.h',
- 'base/audio_renderer_mixer_input.cc',
- 'base/audio_renderer_mixer_input.h',
- 'base/bitstream_buffer.h',
- 'base/buffers.cc',
- 'base/buffers.h',
- 'base/byte_queue.cc',
- 'base/byte_queue.h',
- 'base/channel_layout.cc',
- 'base/channel_layout.h',
- 'base/clock.cc',
- 'base/clock.h',
- 'base/composite_filter.cc',
- 'base/composite_filter.h',
- 'base/data_buffer.cc',
- 'base/data_buffer.h',
- 'base/data_source.cc',
- 'base/data_source.h',
- 'base/decoder_buffer.cc',
- 'base/decoder_buffer.h',
- 'base/decrypt_config.cc',
- 'base/decrypt_config.h',
- 'base/decryptor.h',
- 'base/decryptor_client.h',
- 'base/demuxer.cc',
- 'base/demuxer.h',
- 'base/demuxer_stream.cc',
- 'base/demuxer_stream.h',
- 'base/djb2.cc',
- 'base/djb2.h',
- 'base/filter_collection.cc',
- 'base/filter_collection.h',
- 'base/filter_host.h',
- 'base/filters.cc',
- 'base/filters.h',
- 'base/h264_bitstream_converter.cc',
- 'base/h264_bitstream_converter.h',
- 'base/media.h',
- 'base/media_android.cc',
- 'base/media_export.h',
- 'base/media_log.cc',
- 'base/media_log.h',
- 'base/media_log_event.h',
- 'base/media_posix.cc',
- 'base/media_switches.cc',
- 'base/media_switches.h',
- 'base/media_win.cc',
- 'base/message_loop_factory.cc',
- 'base/message_loop_factory.h',
- 'base/pipeline.cc',
- 'base/pipeline.h',
- 'base/pipeline_status.cc',
- 'base/pipeline_status.h',
- 'base/ranges.cc',
- 'base/ranges.h',
- 'base/seekable_buffer.cc',
- 'base/seekable_buffer.h',
- 'base/state_matrix.cc',
- 'base/state_matrix.h',
- 'base/stream_parser.cc',
- 'base/stream_parser.h',
- 'base/stream_parser_buffer.cc',
- 'base/stream_parser_buffer.h',
- 'base/video_decoder.cc',
- 'base/video_decoder.h',
- 'base/video_decoder_config.cc',
- 'base/video_decoder_config.h',
- 'base/video_frame.cc',
- 'base/video_frame.h',
- 'base/video_renderer.h',
- 'base/video_util.cc',
- 'base/video_util.h',
- 'crypto/aes_decryptor.cc',
- 'crypto/aes_decryptor.h',
- 'ffmpeg/ffmpeg_common.cc',
- 'ffmpeg/ffmpeg_common.h',
- 'ffmpeg/file_protocol.cc',
- 'ffmpeg/file_protocol.h',
- 'filters/audio_file_reader.cc',
- 'filters/audio_file_reader.h',
- 'filters/audio_renderer_algorithm.cc',
- 'filters/audio_renderer_algorithm.h',
- 'filters/audio_renderer_impl.cc',
- 'filters/audio_renderer_impl.h',
- 'filters/bitstream_converter.cc',
- 'filters/bitstream_converter.h',
- 'filters/chunk_demuxer.cc',
- 'filters/chunk_demuxer.h',
- 'filters/chunk_demuxer_client.h',
- 'filters/dummy_demuxer.cc',
- 'filters/dummy_demuxer.h',
- 'filters/ffmpeg_audio_decoder.cc',
- 'filters/ffmpeg_audio_decoder.h',
- 'filters/ffmpeg_demuxer.cc',
- 'filters/ffmpeg_demuxer.h',
- 'filters/ffmpeg_h264_bitstream_converter.cc',
- 'filters/ffmpeg_h264_bitstream_converter.h',
- 'filters/ffmpeg_glue.cc',
- 'filters/ffmpeg_glue.h',
- 'filters/ffmpeg_video_decoder.cc',
- 'filters/ffmpeg_video_decoder.h',
- 'filters/file_data_source.cc',
- 'filters/file_data_source.h',
- 'filters/gpu_video_decoder.cc',
- 'filters/gpu_video_decoder.h',
- 'filters/in_memory_url_protocol.cc',
- 'filters/in_memory_url_protocol.h',
- 'filters/source_buffer_stream.cc',
- 'filters/source_buffer_stream.h',
- 'filters/video_frame_generator.cc',
- 'filters/video_frame_generator.h',
- 'filters/video_renderer_base.cc',
- 'filters/video_renderer_base.h',
- 'video/capture/fake_video_capture_device.cc',
- 'video/capture/fake_video_capture_device.h',
- 'video/capture/linux/video_capture_device_linux.cc',
- 'video/capture/linux/video_capture_device_linux.h',
- 'video/capture/mac/video_capture_device_mac.h',
- 'video/capture/mac/video_capture_device_mac.mm',
- 'video/capture/mac/video_capture_device_qtkit_mac.h',
- 'video/capture/mac/video_capture_device_qtkit_mac.mm',
- 'video/capture/video_capture.h',
- 'video/capture/video_capture_device.h',
- 'video/capture/video_capture_device_dummy.cc',
- 'video/capture/video_capture_device_dummy.h',
- 'video/capture/video_capture_proxy.cc',
- 'video/capture/video_capture_proxy.h',
- 'video/capture/video_capture_types.h',
- 'video/capture/win/filter_base_win.cc',
- 'video/capture/win/filter_base_win.h',
- 'video/capture/win/pin_base_win.cc',
- 'video/capture/win/pin_base_win.h',
- 'video/capture/win/sink_filter_observer_win.h',
- 'video/capture/win/sink_filter_win.cc',
- 'video/capture/win/sink_filter_win.h',
- 'video/capture/win/sink_input_pin_win.cc',
- 'video/capture/win/sink_input_pin_win.h',
- 'video/capture/win/video_capture_device_win.cc',
- 'video/capture/win/video_capture_device_win.h',
- 'video/picture.cc',
- 'video/picture.h',
- 'video/video_decode_accelerator.cc',
- 'video/video_decode_accelerator.h',
- 'webm/webm_constants.h',
- 'webm/webm_cluster_parser.cc',
- 'webm/webm_cluster_parser.h',
- 'webm/webm_content_encodings.cc',
- 'webm/webm_content_encodings.h',
- 'webm/webm_content_encodings_client.cc',
- 'webm/webm_content_encodings_client.h',
- 'webm/webm_info_parser.cc',
- 'webm/webm_info_parser.h',
- 'webm/webm_parser.cc',
- 'webm/webm_parser.h',
- 'webm/webm_stream_parser.cc',
- 'webm/webm_stream_parser.h',
- 'webm/webm_tracks_parser.cc',
- 'webm/webm_tracks_parser.h',
- ],
- 'direct_dependent_settings': {
- 'include_dirs': [
- '..',
- ],
- },
- 'conditions': [
- # Android doesn't use ffmpeg, so make the dependency conditional
- # and exclude the sources which depend on ffmpeg.
- ['OS != \"android\"', {
- 'dependencies': [
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- }],
- ['OS == \"android\"', {
- 'sources!': [
- 'base/media_posix.cc',
- 'ffmpeg/ffmpeg_common.cc',
- 'ffmpeg/ffmpeg_common.h',
- 'ffmpeg/file_protocol.cc',
- 'ffmpeg/file_protocol.h',
- 'filters/audio_file_reader.cc',
- 'filters/audio_file_reader.h',
- 'filters/bitstream_converter.cc',
- 'filters/bitstream_converter.h',
- 'filters/chunk_demuxer.cc',
- 'filters/chunk_demuxer.h',
- 'filters/chunk_demuxer_client.h',
- 'filters/ffmpeg_audio_decoder.cc',
- 'filters/ffmpeg_audio_decoder.h',
- 'filters/ffmpeg_demuxer.cc',
- 'filters/ffmpeg_demuxer.h',
- 'filters/ffmpeg_h264_bitstream_converter.cc',
- 'filters/ffmpeg_h264_bitstream_converter.h',
- 'filters/ffmpeg_glue.cc',
- 'filters/ffmpeg_glue.h',
- 'filters/ffmpeg_video_decoder.cc',
- 'filters/ffmpeg_video_decoder.h',
- 'filters/gpu_video_decoder.cc',
- 'filters/gpu_video_decoder.h',
- 'webm/webm_cluster_parser.cc',
- 'webm/webm_cluster_parser.h',
- 'webm/webm_stream_parser.cc',
- 'webm/webm_stream_parser.h',
- ],
- }],
- # The 'android' condition below was added temporarily and should be
- # removed downstream, because there is no Java environment set up
- # upstream yet.
- ['OS == \"android\"', {
- 'sources!':[
- 'audio/android/audio_track_output_android.cc',
- ],
- 'sources':[
- 'audio/android/audio_track_output_stub_android.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-lOpenSLES',
- ],
- },
- }],
- ['OS==\"linux\" or OS==\"freebsd\" or OS==\"solaris\"', {
- 'link_settings': {
- 'libraries': [
- '-lasound',
- ],
- },
- }],
- ['OS==\"openbsd\"', {
- 'sources/': [ ['exclude', '/alsa_' ],
- ['exclude', '/audio_manager_linux' ] ],
- 'link_settings': {
- 'libraries': [
- ],
- },
- }],
- ['OS!=\"openbsd\"', {
- 'sources!': [
- 'audio/openbsd/audio_manager_openbsd.cc',
- 'audio/openbsd/audio_manager_openbsd.h',
- ],
- }],
- ['OS==\"linux\"', {
- 'variables': {
- 'conditions': [
- ['sysroot!=\"\"', {
- 'pkg-config': '../build/linux/pkg-config-wrapper \"<(sysroot)\" \"<(target_arch)\"',
- }, {
- 'pkg-config': 'pkg-config'
- }],
- ],
- },
- 'conditions': [
- ['use_cras == 1', {
- 'cflags': [
- '<!@(<(pkg-config) --cflags libcras)',
- ],
- 'link_settings': {
- 'libraries': [
- '<!@(<(pkg-config) --libs libcras)',
- ],
- },
- 'defines': [
- 'USE_CRAS',
- ],
- }, { # else: use_cras == 0
- 'sources!': [
- 'audio/linux/cras_output.cc',
- 'audio/linux/cras_output.h',
- ],
- }],
- ],
- }],
- ['os_posix == 1', {
- 'conditions': [
- ['use_pulseaudio == 1', {
- 'cflags': [
- '<!@(pkg-config --cflags libpulse)',
- ],
- 'link_settings': {
- 'libraries': [
- '<!@(pkg-config --libs-only-l libpulse)',
- ],
- },
- 'defines': [
- 'USE_PULSEAUDIO',
- ],
- }, { # else: use_pulseaudio == 0
- 'sources!': [
- 'audio/pulse/pulse_output.cc',
- 'audio/pulse/pulse_output.h',
- ],
- }],
- ],
- }],
- ['os_posix == 1 and OS != \"android\"', {
- # Video capture isn't supported in Android yet.
- 'sources!': [
- 'video/capture/video_capture_device_dummy.cc',
- 'video/capture/video_capture_device_dummy.h',
- ],
- }],
- ['OS==\"mac\"', {
- 'link_settings': {
- 'libraries': [
- '$(SDKROOT)/System/Library/Frameworks/AudioUnit.framework',
- '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
- '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
- '$(SDKROOT)/System/Library/Frameworks/CoreVideo.framework',
- '$(SDKROOT)/System/Library/Frameworks/QTKit.framework',
- ],
- },
- }],
- ['OS==\"win\"', {
- 'sources!': [
- 'audio/pulse/pulse_output.cc',
- 'audio/pulse/pulse_output.h',
- 'video/capture/video_capture_device_dummy.cc',
- 'video/capture/video_capture_device_dummy.h',
- ],
- }],
- ['proprietary_codecs==1 or branding==\"Chrome\"', {
- 'sources': [
- 'mp4/avc.cc',
- 'mp4/avc.h',
- 'mp4/box_definitions.cc',
- 'mp4/box_definitions.h',
- 'mp4/box_reader.cc',
- 'mp4/box_reader.h',
- 'mp4/cenc.cc',
- 'mp4/cenc.h',
- 'mp4/mp4_stream_parser.cc',
- 'mp4/mp4_stream_parser.h',
- 'mp4/offset_byte_queue.cc',
- 'mp4/offset_byte_queue.h',
- 'mp4/track_run_iterator.cc',
- 'mp4/track_run_iterator.h',
- ],
- }],
- ],
- },
- {
- 'target_name': 'yuv_convert',
- 'type': 'static_library',
- 'include_dirs': [
- '..',
- ],
- 'conditions': [
- ['order_profiling != 0', {
- 'target_conditions' : [
- ['_toolset==\"target\"', {
- 'cflags!': [ '-finstrument-functions' ],
- }],
- ],
- }],
- [ 'target_arch == \"ia32\" or target_arch == \"x64\"', {
- 'dependencies': [
- 'yuv_convert_simd_x86',
- ],
- }],
- [ 'target_arch == \"arm\"', {
- 'dependencies': [
- 'yuv_convert_simd_arm',
- ],
- }],
- ],
- 'sources': [
- 'base/yuv_convert.cc',
- 'base/yuv_convert.h',
- ],
- },
- {
- 'target_name': 'yuv_convert_simd_x86',
- 'type': 'static_library',
- 'include_dirs': [
- '..',
- ],
- 'sources': [
- 'base/simd/convert_rgb_to_yuv_c.cc',
- 'base/simd/convert_rgb_to_yuv_sse2.cc',
- 'base/simd/convert_rgb_to_yuv_ssse3.asm',
- 'base/simd/convert_rgb_to_yuv_ssse3.cc',
- 'base/simd/convert_rgb_to_yuv_ssse3.inc',
- 'base/simd/convert_yuv_to_rgb_c.cc',
- 'base/simd/convert_yuv_to_rgb_x86.cc',
- 'base/simd/convert_yuv_to_rgb_mmx.asm',
- 'base/simd/convert_yuv_to_rgb_mmx.inc',
- 'base/simd/convert_yuv_to_rgb_sse.asm',
- 'base/simd/filter_yuv.h',
- 'base/simd/filter_yuv_c.cc',
- 'base/simd/filter_yuv_mmx.cc',
- 'base/simd/filter_yuv_sse2.cc',
- 'base/simd/linear_scale_yuv_to_rgb_mmx.asm',
- 'base/simd/linear_scale_yuv_to_rgb_mmx.inc',
- 'base/simd/linear_scale_yuv_to_rgb_sse.asm',
- 'base/simd/scale_yuv_to_rgb_mmx.asm',
- 'base/simd/scale_yuv_to_rgb_mmx.inc',
- 'base/simd/scale_yuv_to_rgb_sse.asm',
- 'base/simd/yuv_to_rgb_table.cc',
- 'base/simd/yuv_to_rgb_table.h',
- ],
- 'conditions': [
- ['order_profiling != 0', {
- 'target_conditions' : [
- ['_toolset==\"target\"', {
- 'cflags!': [ '-finstrument-functions' ],
- }],
- ],
- }],
- [ 'target_arch == \"x64\"', {
- # Source files optimized for X64 systems.
- 'sources': [
- 'base/simd/linear_scale_yuv_to_rgb_mmx_x64.asm',
- 'base/simd/scale_yuv_to_rgb_sse2_x64.asm',
- ],
- }],
- [ 'os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
- 'cflags': [
- '-msse2',
- ],
- }],
- [ 'OS == \"mac\"', {
- 'configurations': {
- 'Debug': {
- 'xcode_settings': {
- # gcc on the mac builds horribly unoptimized sse code in debug
- # mode. Since this is rarely going to be debugged, run with full
- # optimizations in Debug as well as Release.
- 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
- },
- },
- },
- }],
- [ 'OS==\"win\"', {
- 'variables': {
- 'yasm_flags': [
- '-DWIN32',
- '-DMSVC',
- '-DCHROMIUM',
- '-Isimd',
- ],
- },
- }],
- [ 'OS==\"mac\"', {
- 'variables': {
- 'yasm_flags': [
- '-DPREFIX',
- '-DMACHO',
- '-DCHROMIUM',
- '-Isimd',
- ],
- },
- }],
- [ 'os_posix==1 and OS!=\"mac\"', {
- 'variables': {
- 'conditions': [
- [ 'target_arch==\"ia32\"', {
- 'yasm_flags': [
- '-DX86_32',
- '-DELF',
- '-DCHROMIUM',
- '-Isimd',
- ],
- }, {
- 'yasm_flags': [
- '-DARCH_X86_64',
- '-DELF',
- '-DPIC',
- '-DCHROMIUM',
- '-Isimd',
- ],
- }],
- ],
- },
- }],
- ],
- 'variables': {
- 'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/media',
- },
- 'msvs_2010_disable_uldi_when_referenced': 1,
- 'includes': [
- '../third_party/yasm/yasm_compile.gypi',
- ],
- },
- {
- 'target_name': 'yuv_convert_simd_arm',
- 'type': 'static_library',
- 'include_dirs': [
- '..',
- ],
- 'sources': [
- 'base/simd/convert_rgb_to_yuv_c.cc',
- 'base/simd/convert_rgb_to_yuv.h',
- 'base/simd/convert_yuv_to_rgb_c.cc',
- 'base/simd/convert_yuv_to_rgb.h',
- 'base/simd/filter_yuv.h',
- 'base/simd/filter_yuv_c.cc',
- 'base/simd/yuv_to_rgb_table.cc',
- 'base/simd/yuv_to_rgb_table.h',
- ],
- },
- {
- 'target_name': 'media_unittests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'media_test_support',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../base/base.gyp:base_i18n',
- '../base/base.gyp:test_support_base',
- '../testing/gmock.gyp:gmock',
- '../testing/gtest.gyp:gtest',
- '../ui/ui.gyp:ui',
- ],
- 'sources': [
- 'audio/async_socket_io_handler_unittest.cc',
- 'audio/audio_input_controller_unittest.cc',
- 'audio/audio_input_device_unittest.cc',
- 'audio/audio_input_unittest.cc',
- 'audio/audio_input_volume_unittest.cc',
- 'audio/audio_low_latency_input_output_unittest.cc',
- 'audio/audio_output_controller_unittest.cc',
- 'audio/audio_output_proxy_unittest.cc',
- 'audio/audio_parameters_unittest.cc',
- 'audio/audio_util_unittest.cc',
- 'audio/cross_process_notification_unittest.cc',
- 'audio/linux/alsa_output_unittest.cc',
- 'audio/mac/audio_low_latency_input_mac_unittest.cc',
- 'audio/mac/audio_output_mac_unittest.cc',
- 'audio/simple_sources_unittest.cc',
- 'audio/win/audio_low_latency_input_win_unittest.cc',
- 'audio/win/audio_low_latency_output_win_unittest.cc',
- 'audio/win/audio_output_win_unittest.cc',
- 'base/audio_renderer_mixer_unittest.cc',
- 'base/audio_renderer_mixer_input_unittest.cc',
- 'base/buffers_unittest.cc',
- 'base/clock_unittest.cc',
- 'base/composite_filter_unittest.cc',
- 'base/data_buffer_unittest.cc',
- 'base/decoder_buffer_unittest.cc',
- 'base/djb2_unittest.cc',
- 'base/fake_audio_render_callback.cc',
- 'base/fake_audio_render_callback.h',
- 'base/filter_collection_unittest.cc',
- 'base/h264_bitstream_converter_unittest.cc',
- 'base/pipeline_unittest.cc',
- 'base/ranges_unittest.cc',
- 'base/run_all_unittests.cc',
- 'base/seekable_buffer_unittest.cc',
- 'base/state_matrix_unittest.cc',
- 'base/test_data_util.cc',
- 'base/test_data_util.h',
- 'base/video_frame_unittest.cc',
- 'base/video_util_unittest.cc',
- 'base/yuv_convert_unittest.cc',
- 'crypto/aes_decryptor_unittest.cc',
- 'ffmpeg/ffmpeg_common_unittest.cc',
- 'filters/audio_renderer_algorithm_unittest.cc',
- 'filters/audio_renderer_impl_unittest.cc',
- 'filters/bitstream_converter_unittest.cc',
- 'filters/chunk_demuxer_unittest.cc',
- 'filters/ffmpeg_audio_decoder_unittest.cc',
- 'filters/ffmpeg_decoder_unittest.h',
- 'filters/ffmpeg_demuxer_unittest.cc',
- 'filters/ffmpeg_glue_unittest.cc',
- 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
- 'filters/ffmpeg_video_decoder_unittest.cc',
- 'filters/file_data_source_unittest.cc',
- 'filters/pipeline_integration_test.cc',
- 'filters/pipeline_integration_test_base.cc',
- 'filters/source_buffer_stream_unittest.cc',
- 'filters/video_renderer_base_unittest.cc',
- 'video/capture/video_capture_device_unittest.cc',
- 'webm/cluster_builder.cc',
- 'webm/cluster_builder.h',
- 'webm/webm_cluster_parser_unittest.cc',
- 'webm/webm_content_encodings_client_unittest.cc',
- 'webm/webm_parser_unittest.cc',
- ],
- 'conditions': [
- ['os_posix==1 and OS!=\"mac\"', {
- 'conditions': [
- ['linux_use_tcmalloc==1', {
- 'dependencies': [
- '../base/allocator/allocator.gyp:allocator',
- ],
- }],
- ],
- }],
- ['OS != \"android\"', {
- 'dependencies': [
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- }],
- ['OS == \"android\"', {
- 'sources!': [
- 'audio/audio_input_volume_unittest.cc',
- 'base/test_data_util.cc',
- 'base/test_data_util.h',
- 'ffmpeg/ffmpeg_common_unittest.cc',
- 'filters/ffmpeg_audio_decoder_unittest.cc',
- 'filters/bitstream_converter_unittest.cc',
- 'filters/chunk_demuxer_unittest.cc',
- 'filters/ffmpeg_demuxer_unittest.cc',
- 'filters/ffmpeg_glue_unittest.cc',
- 'filters/ffmpeg_h264_bitstream_converter_unittest.cc',
- 'filters/ffmpeg_video_decoder_unittest.cc',
- 'filters/pipeline_integration_test.cc',
- 'filters/pipeline_integration_test_base.cc',
- 'mp4/mp4_stream_parser_unittest.cc',
- 'webm/webm_cluster_parser_unittest.cc',
- ],
- }],
- ['OS == \"linux\"', {
- 'conditions': [
- ['use_cras == 1', {
- 'sources': [
- 'audio/linux/cras_output_unittest.cc',
- ],
- 'defines': [
- 'USE_CRAS',
- ],
- }],
- ],
- }],
- [ 'target_arch==\"ia32\" or target_arch==\"x64\"', {
- 'sources': [
- 'base/simd/convert_rgb_to_yuv_unittest.cc',
- ],
- }],
- ['proprietary_codecs==1 or branding==\"Chrome\"', {
- 'sources': [
- 'mp4/avc_unittest.cc',
- 'mp4/box_reader_unittest.cc',
- 'mp4/mp4_stream_parser_unittest.cc',
- 'mp4/offset_byte_queue_unittest.cc',
- ],
- }],
- ],
- },
- {
- 'target_name': 'media_test_support',
- 'type': 'static_library',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- '../testing/gmock.gyp:gmock',
- '../testing/gtest.gyp:gtest',
- ],
- 'sources': [
- 'audio/test_audio_input_controller_factory.cc',
- 'audio/test_audio_input_controller_factory.h',
- 'base/mock_callback.cc',
- 'base/mock_callback.h',
- 'base/mock_data_source_host.cc',
- 'base/mock_data_source_host.h',
- 'base/mock_demuxer_host.cc',
- 'base/mock_demuxer_host.h',
- 'base/mock_filter_host.cc',
- 'base/mock_filter_host.h',
- 'base/mock_filters.cc',
- 'base/mock_filters.h',
- ],
- },
- {
- 'target_name': 'scaler_bench',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../skia/skia.gyp:skia',
- ],
- 'sources': [
- 'tools/scaler_bench/scaler_bench.cc',
- ],
- },
- {
- 'target_name': 'qt_faststart',
- 'type': 'executable',
- 'sources': [
- 'tools/qt_faststart/qt_faststart.c'
- ],
- },
- {
- 'target_name': 'seek_tester',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- ],
- 'sources': [
- 'tools/seek_tester/seek_tester.cc',
- ],
- },
- ],
- 'conditions': [
- ['OS==\"win\"', {
- 'targets': [
- {
- 'target_name': 'player_wtl',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
- '../ui/ui.gyp:ui',
- ],
- 'include_dirs': [
- '<(DEPTH)/third_party/wtl/include',
- ],
- 'sources': [
- 'tools/player_wtl/list.h',
- 'tools/player_wtl/mainfrm.h',
- 'tools/player_wtl/movie.cc',
- 'tools/player_wtl/movie.h',
- 'tools/player_wtl/player_wtl.cc',
- 'tools/player_wtl/player_wtl.rc',
- 'tools/player_wtl/props.h',
- 'tools/player_wtl/seek.h',
- 'tools/player_wtl/resource.h',
- 'tools/player_wtl/view.h',
- ],
- 'msvs_settings': {
- 'VCLinkerTool': {
- 'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
- },
- },
- 'defines': [
- '_CRT_SECURE_NO_WARNINGS=1',
- ],
- },
- ],
- }],
- ['OS == \"win\" or toolkit_uses_gtk == 1', {
- 'targets': [
- {
- 'target_name': 'shader_bench',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../ui/gl/gl.gyp:gl',
- ],
- 'sources': [
- 'tools/shader_bench/shader_bench.cc',
- 'tools/shader_bench/cpu_color_painter.cc',
- 'tools/shader_bench/cpu_color_painter.h',
- 'tools/shader_bench/gpu_color_painter.cc',
- 'tools/shader_bench/gpu_color_painter.h',
- 'tools/shader_bench/gpu_painter.cc',
- 'tools/shader_bench/gpu_painter.h',
- 'tools/shader_bench/painter.cc',
- 'tools/shader_bench/painter.h',
- 'tools/shader_bench/window.cc',
- 'tools/shader_bench/window.h',
- ],
- 'conditions': [
- ['toolkit_uses_gtk == 1', {
- 'dependencies': [
- '../build/linux/system.gyp:gtk',
- ],
- 'sources': [
- 'tools/shader_bench/window_linux.cc',
- ],
- }],
- ['OS==\"win\"', {
- 'dependencies': [
- '../third_party/angle/src/build_angle.gyp:libEGL',
- '../third_party/angle/src/build_angle.gyp:libGLESv2',
- ],
- 'sources': [
- 'tools/shader_bench/window_win.cc',
- ],
- }],
- ],
- },
- ],
- }],
- ['OS == \"linux\" and target_arch != \"arm\"', {
- 'targets': [
- {
- 'target_name': 'tile_render_bench',
- 'type': 'executable',
- 'dependencies': [
- '../base/base.gyp:base',
- '../ui/gl/gl.gyp:gl',
- ],
- 'libraries': [
- '-lGL',
- '-ldl',
- ],
- 'sources': [
- 'tools/tile_render_bench/tile_render_bench.cc',
- ],
- },
- ],
- }],
- ['os_posix == 1 and OS != \"mac\" and OS != \"android\"', {
- 'targets': [
- {
- 'target_name': 'player_x11',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'yuv_convert',
- '../base/base.gyp:base',
- '../ui/gl/gl.gyp:gl',
- ],
- 'link_settings': {
- 'libraries': [
- '-ldl',
- '-lX11',
- '-lXrender',
- '-lXext',
- ],
- },
- 'sources': [
- 'tools/player_x11/data_source_logger.cc',
- 'tools/player_x11/data_source_logger.h',
- 'tools/player_x11/gl_video_renderer.cc',
- 'tools/player_x11/gl_video_renderer.h',
- 'tools/player_x11/player_x11.cc',
- 'tools/player_x11/x11_video_renderer.cc',
- 'tools/player_x11/x11_video_renderer.h',
- ],
- },
- ],
- }],
- ['OS == \"android\"', {
- 'targets': [
- {
- 'target_name': 'player_android',
- 'type': 'static_library',
- 'sources': [
- 'base/android/media_player_bridge.cc',
- 'base/android/media_player_bridge.h',
- ],
- 'dependencies': [
- '../base/base.gyp:base',
- ],
- 'include_dirs': [
- '<(SHARED_INTERMEDIATE_DIR)/media',
- ],
- 'actions': [
- {
- 'action_name': 'generate-jni-headers',
- 'inputs': [
- '../base/android/jni_generator/jni_generator.py',
- 'base/android/java/src/org/chromium/media/MediaPlayerListener.java',
- ],
- 'outputs': [
- '<(SHARED_INTERMEDIATE_DIR)/media/jni/media_player_listener_jni.h',
- ],
- 'action': [
- 'python',
- '<(DEPTH)/base/android/jni_generator/jni_generator.py',
- '-o',
- '<@(_inputs)',
- '<@(_outputs)',
- ],
- },
- ],
- },
- {
- 'target_name': 'media_java',
- 'type': 'none',
- 'dependencies': [ '../base/base.gyp:base_java' ],
- 'variables': {
- 'package_name': 'media',
- 'java_in_dir': 'base/android/java',
- },
- 'includes': [ '../build/java.gypi' ],
- },
-
- ],
- }, { # OS != \"android\"'
- # Android does not use ffmpeg, so disable the targets which require it.
- 'targets': [
- {
- 'target_name': 'ffmpeg_unittests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'media_test_support',
- '../base/base.gyp:base',
- '../base/base.gyp:base_i18n',
- '../base/base.gyp:test_support_base',
- '../base/base.gyp:test_support_perf',
- '../testing/gtest.gyp:gtest',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'ffmpeg/ffmpeg_unittest.cc',
- ],
- 'conditions': [
- ['toolkit_uses_gtk == 1', {
- 'dependencies': [
- # Needed for the following #include chain:
- # base/run_all_unittests.cc
- # ../base/test_suite.h
- # gtk/gtk.h
- '../build/linux/system.gyp:gtk',
- ],
- 'conditions': [
- ['linux_use_tcmalloc==1', {
- 'dependencies': [
- '../base/allocator/allocator.gyp:allocator',
- ],
- }],
- ],
- }],
- ],
- },
- {
- 'target_name': 'ffmpeg_regression_tests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- 'media_test_support',
- '../base/base.gyp:test_support_base',
- '../testing/gmock.gyp:gmock',
- '../testing/gtest.gyp:gtest',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'base/test_data_util.cc',
- 'base/run_all_unittests.cc',
- 'ffmpeg/ffmpeg_regression_tests.cc',
- 'filters/pipeline_integration_test_base.cc',
- ],
- 'conditions': [
- ['os_posix==1 and OS!=\"mac\"', {
- 'conditions': [
- ['linux_use_tcmalloc==1', {
- 'dependencies': [
- '../base/allocator/allocator.gyp:allocator',
- ],
- }],
- ],
- }],
- ],
- },
- {
- 'target_name': 'ffmpeg_tests',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'test/ffmpeg_tests/ffmpeg_tests.cc',
- ],
- },
- {
- 'target_name': 'media_bench',
- 'type': 'executable',
- 'dependencies': [
- 'media',
- '../base/base.gyp:base',
- '../third_party/ffmpeg/ffmpeg.gyp:ffmpeg',
- ],
- 'sources': [
- 'tools/media_bench/media_bench.cc',
- ],
- },
- ],
- }]
- ],
-}
-" 0 64 (face font-lock-comment-face) 64 137 (face font-lock-comment-face) 137 166 (face font-lock-comment-face) 166 171 nil 171 172 (face font-lock-string-face) 172 181 (face font-lock-keyword-face) 181 182 (face font-lock-string-face) 182 190 nil 190 191 (face font-lock-string-face) 191 204 (face font-lock-variable-name-face) 204 205 (face font-lock-string-face) 205 214 nil 214 269 (face font-lock-comment-face) 269 273 nil 273 274 (face font-lock-string-face) 274 289 (face font-lock-variable-name-face) 289 290 (face font-lock-string-face) 290 299 nil 299 365 (face font-lock-comment-face) 365 369 nil 369 370 (face font-lock-string-face) 370 379 (face font-lock-variable-name-face) 379 380 (face font-lock-string-face) 380 392 nil 392 393 (face font-lock-string-face) 393 400 (face font-lock-keyword-face) 400 401 (face font-lock-string-face) 401 417 nil 417 418 (face font-lock-string-face) 418 429 (face font-lock-keyword-face) 429 430 (face font-lock-string-face) 430 432 nil 432 433 (face font-lock-string-face) 433 438 (face font-lock-function-name-face) 438 439 (face font-lock-string-face) 439 447 nil 447 448 (face font-lock-string-face) 448 452 (face font-lock-keyword-face) 452 453 (face font-lock-string-face) 453 455 nil 455 458 (face font-lock-string-face) 458 467 (face font-lock-variable-name-face) 467 469 (face font-lock-string-face) 469 477 nil 477 478 (face font-lock-string-face) 478 490 (face font-lock-keyword-face) 490 491 (face font-lock-string-face) 491 503 nil 503 504 (face font-lock-string-face) 504 515 (face font-lock-function-name-face) 515 516 (face font-lock-string-face) 516 526 nil 526 527 (face font-lock-string-face) 527 548 (face font-lock-function-name-face) 548 549 (face font-lock-string-face) 549 559 nil 559 560 (face font-lock-string-face) 560 643 (face font-lock-function-name-face) 643 644 (face font-lock-string-face) 644 654 nil 654 655 (face font-lock-string-face) 655 696 (face font-lock-function-name-face) 696 697 (face font-lock-string-face) 697 707 nil 707 708 (face font-lock-string-face) 708 735 (face font-lock-function-name-face) 735 736 (face font-lock-string-face) 736 746 nil 746 747 (face font-lock-string-face) 747 784 (face font-lock-function-name-face) 784 785 (face font-lock-string-face) 785 795 nil 795 796 (face font-lock-string-face) 796 811 (face font-lock-function-name-face) 811 812 (face font-lock-string-face) 812 829 nil 829 830 (face font-lock-string-face) 830 837 (face font-lock-keyword-face) 837 838 (face font-lock-string-face) 838 850 nil 850 851 (face font-lock-string-face) 851 871 (face font-lock-preprocessor-face) 871 872 (face font-lock-string-face) 872 889 nil 889 890 (face font-lock-string-face) 890 902 (face font-lock-keyword-face) 902 903 (face font-lock-string-face) 903 915 nil 915 916 (face font-lock-string-face) 916 918 (face font-lock-constant-face) 918 919 (face font-lock-string-face) 919 936 nil 936 937 (face font-lock-string-face) 937 944 (face font-lock-keyword-face) 944 945 (face font-lock-string-face) 945 957 nil 957 958 (face font-lock-string-face) 958 996 (face font-lock-constant-face) 996 997 (face font-lock-string-face) 997 1007 nil 1007 1008 (face font-lock-string-face) 1008 1045 (face font-lock-constant-face) 1045 1046 (face font-lock-string-face) 1046 1056 nil 1056 1057 (face font-lock-string-face) 1057 1100 (face font-lock-constant-face) 1100 1101 (face font-lock-string-face) 1101 1111 nil 1111 1112 (face font-lock-string-face) 1112 1154 (face font-lock-constant-face) 1154 1155 (face font-lock-string-face) 1155 1165 
nil 1165 1166 (face font-lock-string-face) 1166 1197 (face font-lock-constant-face) 1197 1198 (face font-lock-string-face) 1198 1208 nil 1208 1209 (face font-lock-string-face) 1209 1239 (face font-lock-constant-face) 1239 1240 (face font-lock-string-face) 1240 1250 nil 1250 1251 (face font-lock-string-face) 1251 1283 (face font-lock-constant-face) 1283 1284 (face font-lock-string-face) 1284 1294 nil 1294 1295 (face font-lock-string-face) 1295 1326 (face font-lock-constant-face) 1326 1327 (face font-lock-string-face) 1327 1337 nil 1337 1338 (face font-lock-string-face) 1338 1369 (face font-lock-constant-face) 1369 1370 (face font-lock-string-face) 1370 1380 nil 1380 1381 (face font-lock-string-face) 1381 1419 (face font-lock-constant-face) 1419 1420 (face font-lock-string-face) 1420 1430 nil 1430 1431 (face font-lock-string-face) 1431 1467 (face font-lock-constant-face) 1467 1468 (face font-lock-string-face) 1468 1478 nil 1478 1479 (face font-lock-string-face) 1479 1507 (face font-lock-constant-face) 1507 1508 (face font-lock-string-face) 1508 1518 nil 1518 1519 (face font-lock-string-face) 1519 1546 (face font-lock-constant-face) 1546 1547 (face font-lock-string-face) 1547 1557 nil 1557 1558 (face font-lock-string-face) 1558 1574 (face font-lock-constant-face) 1574 1575 (face font-lock-string-face) 1575 1585 nil 1585 1586 (face font-lock-string-face) 1586 1617 (face font-lock-constant-face) 1617 1618 (face font-lock-string-face) 1618 1628 nil 1628 1629 (face font-lock-string-face) 1629 1659 (face font-lock-constant-face) 1659 1660 (face font-lock-string-face) 1660 1670 nil 1670 1671 (face font-lock-string-face) 1671 1703 (face font-lock-constant-face) 1703 1704 (face font-lock-string-face) 1704 1714 nil 1714 1715 (face font-lock-string-face) 1715 1746 (face font-lock-constant-face) 1746 1747 (face font-lock-string-face) 1747 1757 nil 1757 1758 (face font-lock-string-face) 1758 1784 (face font-lock-constant-face) 1784 1785 (face font-lock-string-face) 1785 1795 nil 1795 1796 (face font-lock-string-face) 1796 1821 (face font-lock-constant-face) 1821 1822 (face font-lock-string-face) 1822 1832 nil 1832 1833 (face font-lock-string-face) 1833 1855 (face font-lock-constant-face) 1855 1856 (face font-lock-string-face) 1856 1866 nil 1866 1867 (face font-lock-string-face) 1867 1888 (face font-lock-constant-face) 1888 1889 (face font-lock-string-face) 1889 1899 nil 1899 1900 (face font-lock-string-face) 1900 1927 (face font-lock-constant-face) 1927 1928 (face font-lock-string-face) 1928 1938 nil 1938 1939 (face font-lock-string-face) 1939 1965 (face font-lock-constant-face) 1965 1966 (face font-lock-string-face) 1966 1976 nil 1976 1977 (face font-lock-string-face) 1977 2009 (face font-lock-constant-face) 2009 2010 (face font-lock-string-face) 2010 2020 nil 2020 2021 (face font-lock-string-face) 2021 2052 (face font-lock-constant-face) 2052 2053 (face font-lock-string-face) 2053 2063 nil 2063 2064 (face font-lock-string-face) 2064 2096 (face font-lock-constant-face) 2096 2097 (face font-lock-string-face) 2097 2107 nil 2107 2108 (face font-lock-string-face) 2108 2139 (face font-lock-constant-face) 2139 2140 (face font-lock-string-face) 2140 2150 nil 2150 2151 (face font-lock-string-face) 2151 2188 (face font-lock-constant-face) 2188 2189 (face font-lock-string-face) 2189 2199 nil 2199 2200 (face font-lock-string-face) 2200 2236 (face font-lock-constant-face) 2236 2237 (face font-lock-string-face) 2237 2247 nil 2247 2248 (face font-lock-string-face) 2248 2275 (face font-lock-constant-face) 2275 2276 
(face font-lock-string-face) 2276 2286 nil 2286 2287 (face font-lock-string-face) 2287 2313 (face font-lock-constant-face) 2313 2314 (face font-lock-string-face) 2314 2324 nil 2324 2325 (face font-lock-string-face) 2325 2352 (face font-lock-constant-face) 2352 2353 (face font-lock-string-face) 2353 2363 nil 2363 2364 (face font-lock-string-face) 2364 2390 (face font-lock-constant-face) 2390 2391 (face font-lock-string-face) 2391 2401 nil 2401 2402 (face font-lock-string-face) 2402 2427 (face font-lock-constant-face) 2427 2428 (face font-lock-string-face) 2428 2438 nil 2438 2439 (face font-lock-string-face) 2439 2463 (face font-lock-constant-face) 2463 2464 (face font-lock-string-face) 2464 2474 nil 2474 2475 (face font-lock-string-face) 2475 2494 (face font-lock-constant-face) 2494 2495 (face font-lock-string-face) 2495 2505 nil 2505 2506 (face font-lock-string-face) 2506 2524 (face font-lock-constant-face) 2524 2525 (face font-lock-string-face) 2525 2535 nil 2535 2536 (face font-lock-string-face) 2536 2571 (face font-lock-constant-face) 2571 2572 (face font-lock-string-face) 2572 2582 nil 2582 2583 (face font-lock-string-face) 2583 2617 (face font-lock-constant-face) 2617 2618 (face font-lock-string-face) 2618 2628 nil 2628 2629 (face font-lock-string-face) 2629 2668 (face font-lock-constant-face) 2668 2669 (face font-lock-string-face) 2669 2679 nil 2679 2680 (face font-lock-string-face) 2680 2721 (face font-lock-constant-face) 2721 2722 (face font-lock-string-face) 2722 2732 nil 2732 2733 (face font-lock-string-face) 2733 2765 (face font-lock-constant-face) 2765 2766 (face font-lock-string-face) 2766 2776 nil 2776 2777 (face font-lock-string-face) 2777 2808 (face font-lock-constant-face) 2808 2809 (face font-lock-string-face) 2809 2819 nil 2819 2820 (face font-lock-string-face) 2820 2853 (face font-lock-constant-face) 2853 2854 (face font-lock-string-face) 2854 2864 nil 2864 2865 (face font-lock-string-face) 2865 2897 (face font-lock-constant-face) 2897 2898 (face font-lock-string-face) 2898 2908 nil 2908 2909 (face font-lock-string-face) 2909 2943 (face font-lock-constant-face) 2943 2944 (face font-lock-string-face) 2944 2954 nil 2954 2955 (face font-lock-string-face) 2955 2988 (face font-lock-constant-face) 2988 2989 (face font-lock-string-face) 2989 2999 nil 2999 3000 (face font-lock-string-face) 3000 3025 (face font-lock-constant-face) 3025 3026 (face font-lock-string-face) 3026 3036 nil 3036 3037 (face font-lock-string-face) 3037 3061 (face font-lock-constant-face) 3061 3062 (face font-lock-string-face) 3062 3072 nil 3072 3073 (face font-lock-string-face) 3073 3099 (face font-lock-constant-face) 3099 3100 (face font-lock-string-face) 3100 3110 nil 3110 3111 (face font-lock-string-face) 3111 3136 (face font-lock-constant-face) 3136 3137 (face font-lock-string-face) 3137 3147 nil 3147 3148 (face font-lock-string-face) 3148 3172 (face font-lock-constant-face) 3172 3173 (face font-lock-string-face) 3173 3183 nil 3183 3184 (face font-lock-string-face) 3184 3207 (face font-lock-constant-face) 3207 3208 (face font-lock-string-face) 3208 3218 nil 3218 3219 (face font-lock-string-face) 3219 3246 (face font-lock-constant-face) 3246 3247 (face font-lock-string-face) 3247 3257 nil 3257 3258 (face font-lock-string-face) 3258 3284 (face font-lock-constant-face) 3284 3285 (face font-lock-string-face) 3285 3295 nil 3295 3296 (face font-lock-string-face) 3296 3322 (face font-lock-constant-face) 3322 3323 (face font-lock-string-face) 3323 3333 nil 3333 3334 (face font-lock-string-face) 3334 3359 
(face font-lock-constant-face) 3359 3360 (face font-lock-string-face) 3360 3370 nil 3370 3371 (face font-lock-string-face) 3371 3409 (face font-lock-constant-face) 3409 3410 (face font-lock-string-face) 3410 3420 nil 3420 3421 (face font-lock-string-face) 3421 3458 (face font-lock-constant-face) 3458 3459 (face font-lock-string-face) 3459 3469 nil 3469 3470 (face font-lock-string-face) 3470 3498 (face font-lock-constant-face) 3498 3499 (face font-lock-string-face) 3499 3509 nil 3509 3510 (face font-lock-string-face) 3510 3537 (face font-lock-constant-face) 3537 3538 (face font-lock-string-face) 3538 3548 nil 3548 3549 (face font-lock-string-face) 3549 3589 (face font-lock-constant-face) 3589 3590 (face font-lock-string-face) 3590 3600 nil 3600 3601 (face font-lock-string-face) 3601 3640 (face font-lock-constant-face) 3640 3641 (face font-lock-string-face) 3641 3651 nil 3651 3652 (face font-lock-string-face) 3652 3693 (face font-lock-constant-face) 3693 3694 (face font-lock-string-face) 3694 3704 nil 3704 3705 (face font-lock-string-face) 3705 3745 (face font-lock-constant-face) 3745 3746 (face font-lock-string-face) 3746 3756 nil 3756 3757 (face font-lock-string-face) 3757 3787 (face font-lock-constant-face) 3787 3788 (face font-lock-string-face) 3788 3798 nil 3798 3799 (face font-lock-string-face) 3799 3828 (face font-lock-constant-face) 3828 3829 (face font-lock-string-face) 3829 3839 nil 3839 3840 (face font-lock-string-face) 3840 3869 (face font-lock-constant-face) 3869 3870 (face font-lock-string-face) 3870 3880 nil 3880 3881 (face font-lock-string-face) 3881 3909 (face font-lock-constant-face) 3909 3910 (face font-lock-string-face) 3910 3920 nil 3920 3921 (face font-lock-string-face) 3921 3945 (face font-lock-constant-face) 3945 3946 (face font-lock-string-face) 3946 3956 nil 3956 3957 (face font-lock-string-face) 3957 3980 (face font-lock-constant-face) 3980 3981 (face font-lock-string-face) 3981 3991 nil 3991 3992 (face font-lock-string-face) 3992 4019 (face font-lock-constant-face) 4019 4020 (face font-lock-string-face) 4020 4030 nil 4030 4031 (face font-lock-string-face) 4031 4057 (face font-lock-constant-face) 4057 4058 (face font-lock-string-face) 4058 4068 nil 4068 4069 (face font-lock-string-face) 4069 4090 (face font-lock-constant-face) 4090 4091 (face font-lock-string-face) 4091 4101 nil 4101 4102 (face font-lock-string-face) 4102 4122 (face font-lock-constant-face) 4122 4123 (face font-lock-string-face) 4123 4133 nil 4133 4134 (face font-lock-string-face) 4134 4157 (face font-lock-constant-face) 4157 4158 (face font-lock-string-face) 4158 4168 nil 4168 4169 (face font-lock-string-face) 4169 4191 (face font-lock-constant-face) 4191 4192 (face font-lock-string-face) 4192 4202 nil 4202 4203 (face font-lock-string-face) 4203 4243 (face font-lock-constant-face) 4243 4244 (face font-lock-string-face) 4244 4254 nil 4254 4255 (face font-lock-string-face) 4255 4294 (face font-lock-constant-face) 4294 4295 (face font-lock-string-face) 4295 4305 nil 4305 4306 (face font-lock-string-face) 4306 4347 (face font-lock-constant-face) 4347 4348 (face font-lock-string-face) 4348 4358 nil 4358 4359 (face font-lock-string-face) 4359 4399 (face font-lock-constant-face) 4399 4400 (face font-lock-string-face) 4400 4410 nil 4410 4411 (face font-lock-string-face) 4411 4441 (face font-lock-constant-face) 4441 4442 (face font-lock-string-face) 4442 4452 nil 4452 4453 (face font-lock-string-face) 4453 4482 (face font-lock-constant-face) 4482 4483 (face font-lock-string-face) 4483 4493 nil 4493 4494 
(face font-lock-string-face) 4494 4523 (face font-lock-constant-face) 4523 4524 (face font-lock-string-face) 4524 4534 nil 4534 4535 (face font-lock-string-face) 4535 4563 (face font-lock-constant-face) 4563 4564 (face font-lock-string-face) 4564 4574 nil 4574 4575 (face font-lock-string-face) 4575 4610 (face font-lock-constant-face) 4610 4611 (face font-lock-string-face) 4611 4621 nil 4621 4622 (face font-lock-string-face) 4622 4656 (face font-lock-constant-face) 4656 4657 (face font-lock-string-face) 4657 4667 nil 4667 4668 (face font-lock-string-face) 4668 4697 (face font-lock-constant-face) 4697 4698 (face font-lock-string-face) 4698 4708 nil 4708 4709 (face font-lock-string-face) 4709 4737 (face font-lock-constant-face) 4737 4738 (face font-lock-string-face) 4738 4748 nil 4748 4749 (face font-lock-string-face) 4749 4780 (face font-lock-constant-face) 4780 4781 (face font-lock-string-face) 4781 4791 nil 4791 4792 (face font-lock-string-face) 4792 4822 (face font-lock-constant-face) 4822 4823 (face font-lock-string-face) 4823 4833 nil 4833 4834 (face font-lock-string-face) 4834 4869 (face font-lock-constant-face) 4869 4870 (face font-lock-string-face) 4870 4880 nil 4880 4881 (face font-lock-string-face) 4881 4915 (face font-lock-constant-face) 4915 4916 (face font-lock-string-face) 4916 4926 nil 4926 4927 (face font-lock-string-face) 4927 4948 (face font-lock-constant-face) 4948 4949 (face font-lock-string-face) 4949 4959 nil 4959 4960 (face font-lock-string-face) 4960 4980 (face font-lock-constant-face) 4980 4981 (face font-lock-string-face) 4981 4991 nil 4991 4992 (face font-lock-string-face) 4992 5020 (face font-lock-constant-face) 5020 5021 (face font-lock-string-face) 5021 5031 nil 5031 5032 (face font-lock-string-face) 5032 5059 (face font-lock-constant-face) 5059 5060 (face font-lock-string-face) 5060 5070 nil 5070 5071 (face font-lock-string-face) 5071 5092 (face font-lock-constant-face) 5092 5093 (face font-lock-string-face) 5093 5103 nil 5103 5104 (face font-lock-string-face) 5104 5132 (face font-lock-constant-face) 5132 5133 (face font-lock-string-face) 5133 5143 nil 5143 5144 (face font-lock-string-face) 5144 5171 (face font-lock-constant-face) 5171 5172 (face font-lock-string-face) 5172 5182 nil 5182 5183 (face font-lock-string-face) 5183 5217 (face font-lock-constant-face) 5217 5218 (face font-lock-string-face) 5218 5228 nil 5228 5229 (face font-lock-string-face) 5229 5262 (face font-lock-constant-face) 5262 5263 (face font-lock-string-face) 5263 5273 nil 5273 5274 (face font-lock-string-face) 5274 5297 (face font-lock-constant-face) 5297 5298 (face font-lock-string-face) 5298 5308 nil 5308 5309 (face font-lock-string-face) 5309 5324 (face font-lock-constant-face) 5324 5325 (face font-lock-string-face) 5325 5335 nil 5335 5336 (face font-lock-string-face) 5336 5350 (face font-lock-constant-face) 5350 5351 (face font-lock-string-face) 5351 5361 nil 5361 5362 (face font-lock-string-face) 5362 5380 (face font-lock-constant-face) 5380 5381 (face font-lock-string-face) 5381 5391 nil 5391 5392 (face font-lock-string-face) 5392 5409 (face font-lock-constant-face) 5409 5410 (face font-lock-string-face) 5410 5420 nil 5420 5421 (face font-lock-string-face) 5421 5443 (face font-lock-constant-face) 5443 5444 (face font-lock-string-face) 5444 5454 nil 5454 5455 (face font-lock-string-face) 5455 5476 (face font-lock-constant-face) 5476 5477 (face font-lock-string-face) 5477 5487 nil 5487 5488 (face font-lock-string-face) 5488 5501 (face font-lock-constant-face) 5501 5502 (face 
font-lock-string-face) 5502 5512 nil 5512 5513 (face font-lock-string-face) 5513 5525 (face font-lock-constant-face) 5525 5526 (face font-lock-string-face) 5526 5536 nil 5536 5537 (face font-lock-string-face) 5537 5561 (face font-lock-constant-face) 5561 5562 (face font-lock-string-face) 5562 5572 nil 5572 5573 (face font-lock-string-face) 5573 5596 (face font-lock-constant-face) 5596 5597 (face font-lock-string-face) 5597 5607 nil 5607 5608 (face font-lock-string-face) 5608 5627 (face font-lock-constant-face) 5627 5628 (face font-lock-string-face) 5628 5638 nil 5638 5639 (face font-lock-string-face) 5639 5657 (face font-lock-constant-face) 5657 5658 (face font-lock-string-face) 5658 5668 nil 5668 5669 (face font-lock-string-face) 5669 5688 (face font-lock-constant-face) 5688 5689 (face font-lock-string-face) 5689 5699 nil 5699 5700 (face font-lock-string-face) 5700 5718 (face font-lock-constant-face) 5718 5719 (face font-lock-string-face) 5719 5729 nil 5729 5730 (face font-lock-string-face) 5730 5752 (face font-lock-constant-face) 5752 5753 (face font-lock-string-face) 5753 5763 nil 5763 5764 (face font-lock-string-face) 5764 5785 (face font-lock-constant-face) 5785 5786 (face font-lock-string-face) 5786 5796 nil 5796 5797 (face font-lock-string-face) 5797 5819 (face font-lock-constant-face) 5819 5820 (face font-lock-string-face) 5820 5830 nil 5830 5831 (face font-lock-string-face) 5831 5852 (face font-lock-constant-face) 5852 5853 (face font-lock-string-face) 5853 5863 nil 5863 5864 (face font-lock-string-face) 5864 5880 (face font-lock-constant-face) 5880 5881 (face font-lock-string-face) 5881 5891 nil 5891 5892 (face font-lock-string-face) 5892 5915 (face font-lock-constant-face) 5915 5916 (face font-lock-string-face) 5916 5926 nil 5926 5927 (face font-lock-string-face) 5927 5942 (face font-lock-constant-face) 5942 5943 (face font-lock-string-face) 5943 5953 nil 5953 5954 (face font-lock-string-face) 5954 5968 (face font-lock-constant-face) 5968 5969 (face font-lock-string-face) 5969 5979 nil 5979 5980 (face font-lock-string-face) 5980 6002 (face font-lock-constant-face) 6002 6003 (face font-lock-string-face) 6003 6013 nil 6013 6014 (face font-lock-string-face) 6014 6035 (face font-lock-constant-face) 6035 6036 (face font-lock-string-face) 6036 6046 nil 6046 6047 (face font-lock-string-face) 6047 6059 (face font-lock-constant-face) 6059 6060 (face font-lock-string-face) 6060 6070 nil 6070 6071 (face font-lock-string-face) 6071 6082 (face font-lock-constant-face) 6082 6083 (face font-lock-string-face) 6083 6093 nil 6093 6094 (face font-lock-string-face) 6094 6119 (face font-lock-constant-face) 6119 6120 (face font-lock-string-face) 6120 6130 nil 6130 6131 (face font-lock-string-face) 6131 6155 (face font-lock-constant-face) 6155 6156 (face font-lock-string-face) 6156 6166 nil 6166 6167 (face font-lock-string-face) 6167 6185 (face font-lock-constant-face) 6185 6186 (face font-lock-string-face) 6186 6196 nil 6196 6197 (face font-lock-string-face) 6197 6212 (face font-lock-constant-face) 6212 6213 (face font-lock-string-face) 6213 6223 nil 6223 6224 (face font-lock-string-face) 6224 6238 (face font-lock-constant-face) 6238 6239 (face font-lock-string-face) 6239 6249 nil 6249 6250 (face font-lock-string-face) 6250 6282 (face font-lock-constant-face) 6282 6283 (face font-lock-string-face) 6283 6293 nil 6293 6294 (face font-lock-string-face) 6294 6325 (face font-lock-constant-face) 6325 6326 (face font-lock-string-face) 6326 6336 nil 6336 6337 (face font-lock-string-face) 6337 6349 (face 
font-lock-constant-face) 6349 6350 (face font-lock-string-face) 6350 6360 nil 6360 6361 (face font-lock-string-face) 6361 6382 (face font-lock-constant-face) 6382 6383 (face font-lock-string-face) 6383 6393 nil 6393 6394 (face font-lock-string-face) 6394 6413 (face font-lock-constant-face) 6413 6414 (face font-lock-string-face) 6414 6424 nil 6424 6425 (face font-lock-string-face) 6425 6442 (face font-lock-constant-face) 6442 6443 (face font-lock-string-face) 6443 6453 nil 6453 6454 (face font-lock-string-face) 6454 6470 (face font-lock-constant-face) 6470 6471 (face font-lock-string-face) 6471 6481 nil 6481 6482 (face font-lock-string-face) 6482 6504 (face font-lock-constant-face) 6504 6505 (face font-lock-string-face) 6505 6515 nil 6515 6516 (face font-lock-string-face) 6516 6535 (face font-lock-constant-face) 6535 6536 (face font-lock-string-face) 6536 6546 nil 6546 6547 (face font-lock-string-face) 6547 6569 (face font-lock-constant-face) 6569 6570 (face font-lock-string-face) 6570 6580 nil 6580 6581 (face font-lock-string-face) 6581 6602 (face font-lock-constant-face) 6602 6603 (face font-lock-string-face) 6603 6613 nil 6613 6614 (face font-lock-string-face) 6614 6631 (face font-lock-constant-face) 6631 6632 (face font-lock-string-face) 6632 6642 nil 6642 6643 (face font-lock-string-face) 6643 6671 (face font-lock-constant-face) 6671 6672 (face font-lock-string-face) 6672 6682 nil 6682 6683 (face font-lock-string-face) 6683 6710 (face font-lock-constant-face) 6710 6711 (face font-lock-string-face) 6711 6721 nil 6721 6722 (face font-lock-string-face) 6722 6738 (face font-lock-constant-face) 6738 6739 (face font-lock-string-face) 6739 6749 nil 6749 6750 (face font-lock-string-face) 6750 6765 (face font-lock-constant-face) 6765 6766 (face font-lock-string-face) 6766 6776 nil 6776 6777 (face font-lock-string-face) 6777 6800 (face font-lock-constant-face) 6800 6801 (face font-lock-string-face) 6801 6811 nil 6811 6812 (face font-lock-string-face) 6812 6834 (face font-lock-constant-face) 6834 6835 (face font-lock-string-face) 6835 6845 nil 6845 6846 (face font-lock-string-face) 6846 6860 (face font-lock-constant-face) 6860 6861 (face font-lock-string-face) 6861 6871 nil 6871 6872 (face font-lock-string-face) 6872 6885 (face font-lock-constant-face) 6885 6886 (face font-lock-string-face) 6886 6896 nil 6896 6897 (face font-lock-string-face) 6897 6920 (face font-lock-constant-face) 6920 6921 (face font-lock-string-face) 6921 6931 nil 6931 6932 (face font-lock-string-face) 6932 6954 (face font-lock-constant-face) 6954 6955 (face font-lock-string-face) 6955 6965 nil 6965 6966 (face font-lock-string-face) 6966 6986 (face font-lock-constant-face) 6986 6987 (face font-lock-string-face) 6987 6997 nil 6997 6998 (face font-lock-string-face) 6998 7017 (face font-lock-constant-face) 7017 7018 (face font-lock-string-face) 7018 7028 nil 7028 7029 (face font-lock-string-face) 7029 7050 (face font-lock-constant-face) 7050 7051 (face font-lock-string-face) 7051 7061 nil 7061 7062 (face font-lock-string-face) 7062 7082 (face font-lock-constant-face) 7082 7083 (face font-lock-string-face) 7083 7093 nil 7093 7094 (face font-lock-string-face) 7094 7122 (face font-lock-constant-face) 7122 7123 (face font-lock-string-face) 7123 7133 nil 7133 7134 (face font-lock-string-face) 7134 7161 (face font-lock-constant-face) 7161 7162 (face font-lock-string-face) 7162 7172 nil 7172 7173 (face font-lock-string-face) 7173 7194 (face font-lock-constant-face) 7194 7195 (face font-lock-string-face) 7195 7205 nil 7205 7206 (face 
font-lock-string-face) 7206 7226 (face font-lock-constant-face) 7226 7227 (face font-lock-string-face) 7227 7237 nil 7237 7238 (face font-lock-string-face) 7238 7266 (face font-lock-constant-face) 7266 7267 (face font-lock-string-face) 7267 7277 nil 7277 7278 (face font-lock-string-face) 7278 7305 (face font-lock-constant-face) 7305 7306 (face font-lock-string-face) 7306 7316 nil 7316 7317 (face font-lock-string-face) 7317 7336 (face font-lock-constant-face) 7336 7337 (face font-lock-string-face) 7337 7347 nil 7347 7348 (face font-lock-string-face) 7348 7366 (face font-lock-constant-face) 7366 7367 (face font-lock-string-face) 7367 7377 nil 7377 7378 (face font-lock-string-face) 7378 7399 (face font-lock-constant-face) 7399 7400 (face font-lock-string-face) 7400 7410 nil 7410 7411 (face font-lock-string-face) 7411 7429 (face font-lock-constant-face) 7429 7430 (face font-lock-string-face) 7430 7440 nil 7440 7441 (face font-lock-string-face) 7441 7458 (face font-lock-constant-face) 7458 7459 (face font-lock-string-face) 7459 7469 nil 7469 7470 (face font-lock-string-face) 7470 7493 (face font-lock-constant-face) 7493 7494 (face font-lock-string-face) 7494 7504 nil 7504 7505 (face font-lock-string-face) 7505 7527 (face font-lock-constant-face) 7527 7528 (face font-lock-string-face) 7528 7538 nil 7538 7539 (face font-lock-string-face) 7539 7562 (face font-lock-constant-face) 7562 7563 (face font-lock-string-face) 7563 7573 nil 7573 7574 (face font-lock-string-face) 7574 7596 (face font-lock-constant-face) 7596 7597 (face font-lock-string-face) 7597 7607 nil 7607 7608 (face font-lock-string-face) 7608 7631 (face font-lock-constant-face) 7631 7632 (face font-lock-string-face) 7632 7642 nil 7642 7643 (face font-lock-string-face) 7643 7665 (face font-lock-constant-face) 7665 7666 (face font-lock-string-face) 7666 7676 nil 7676 7677 (face font-lock-string-face) 7677 7705 (face font-lock-constant-face) 7705 7706 (face font-lock-string-face) 7706 7716 nil 7716 7717 (face font-lock-string-face) 7717 7744 (face font-lock-constant-face) 7744 7745 (face font-lock-string-face) 7745 7755 nil 7755 7756 (face font-lock-string-face) 7756 7791 (face font-lock-constant-face) 7791 7792 (face font-lock-string-face) 7792 7802 nil 7802 7803 (face font-lock-string-face) 7803 7837 (face font-lock-constant-face) 7837 7838 (face font-lock-string-face) 7838 7848 nil 7848 7849 (face font-lock-string-face) 7849 7879 (face font-lock-constant-face) 7879 7880 (face font-lock-string-face) 7880 7890 nil 7890 7891 (face font-lock-string-face) 7891 7920 (face font-lock-constant-face) 7920 7921 (face font-lock-string-face) 7921 7931 nil 7931 7932 (face font-lock-string-face) 7932 7962 (face font-lock-constant-face) 7962 7963 (face font-lock-string-face) 7963 7973 nil 7973 7974 (face font-lock-string-face) 7974 8003 (face font-lock-constant-face) 8003 8004 (face font-lock-string-face) 8004 8014 nil 8014 8015 (face font-lock-string-face) 8015 8039 (face font-lock-constant-face) 8039 8040 (face font-lock-string-face) 8040 8050 nil 8050 8051 (face font-lock-string-face) 8051 8074 (face font-lock-constant-face) 8074 8075 (face font-lock-string-face) 8075 8085 nil 8085 8086 (face font-lock-string-face) 8086 8116 (face font-lock-constant-face) 8116 8117 (face font-lock-string-face) 8117 8127 nil 8127 8128 (face font-lock-string-face) 8128 8152 (face font-lock-constant-face) 8152 8153 (face font-lock-string-face) 8153 8163 nil 8163 8164 (face font-lock-string-face) 8164 8187 (face font-lock-constant-face) 8187 8188 (face 
font-lock-string-face) 8188 8198 nil 8198 8199 (face font-lock-string-face) 8199 8230 (face font-lock-constant-face) 8230 8231 (face font-lock-string-face) 8231 8241 nil 8241 8242 (face font-lock-string-face) 8242 8272 (face font-lock-constant-face) 8272 8273 (face font-lock-string-face) 8273 8283 nil 8283 8284 (face font-lock-string-face) 8284 8309 (face font-lock-constant-face) 8309 8310 (face font-lock-string-face) 8310 8320 nil 8320 8321 (face font-lock-string-face) 8321 8345 (face font-lock-constant-face) 8345 8346 (face font-lock-string-face) 8346 8356 nil 8356 8357 (face font-lock-string-face) 8357 8399 (face font-lock-constant-face) 8399 8400 (face font-lock-string-face) 8400 8410 nil 8410 8411 (face font-lock-string-face) 8411 8452 (face font-lock-constant-face) 8452 8453 (face font-lock-string-face) 8453 8463 nil 8463 8464 (face font-lock-string-face) 8464 8486 (face font-lock-constant-face) 8486 8487 (face font-lock-string-face) 8487 8497 nil 8497 8498 (face font-lock-string-face) 8498 8519 (face font-lock-constant-face) 8519 8520 (face font-lock-string-face) 8520 8530 nil 8530 8531 (face font-lock-string-face) 8531 8562 (face font-lock-constant-face) 8562 8563 (face font-lock-string-face) 8563 8573 nil 8573 8574 (face font-lock-string-face) 8574 8604 (face font-lock-constant-face) 8604 8605 (face font-lock-string-face) 8605 8615 nil 8615 8616 (face font-lock-string-face) 8616 8643 (face font-lock-constant-face) 8643 8644 (face font-lock-string-face) 8644 8654 nil 8654 8655 (face font-lock-string-face) 8655 8681 (face font-lock-constant-face) 8681 8682 (face font-lock-string-face) 8682 8692 nil 8692 8693 (face font-lock-string-face) 8693 8721 (face font-lock-constant-face) 8721 8722 (face font-lock-string-face) 8722 8732 nil 8732 8733 (face font-lock-string-face) 8733 8760 (face font-lock-constant-face) 8760 8761 (face font-lock-string-face) 8761 8771 nil 8771 8772 (face font-lock-string-face) 8772 8805 (face font-lock-constant-face) 8805 8806 (face font-lock-string-face) 8806 8816 nil 8816 8817 (face font-lock-string-face) 8817 8849 (face font-lock-constant-face) 8849 8850 (face font-lock-string-face) 8850 8860 nil 8860 8861 (face font-lock-string-face) 8861 8892 (face font-lock-constant-face) 8892 8893 (face font-lock-string-face) 8893 8903 nil 8903 8904 (face font-lock-string-face) 8904 8934 (face font-lock-constant-face) 8934 8935 (face font-lock-string-face) 8935 8945 nil 8945 8946 (face font-lock-string-face) 8946 8978 (face font-lock-constant-face) 8978 8979 (face font-lock-string-face) 8979 8989 nil 8989 8990 (face font-lock-string-face) 8990 9021 (face font-lock-constant-face) 9021 9022 (face font-lock-string-face) 9022 9032 nil 9032 9033 (face font-lock-string-face) 9033 9063 (face font-lock-constant-face) 9063 9064 (face font-lock-string-face) 9064 9074 nil 9074 9075 (face font-lock-string-face) 9075 9104 (face font-lock-constant-face) 9104 9105 (face font-lock-string-face) 9105 9115 nil 9115 9116 (face font-lock-string-face) 9116 9158 (face font-lock-constant-face) 9158 9159 (face font-lock-string-face) 9159 9169 nil 9169 9170 (face font-lock-string-face) 9170 9211 (face font-lock-constant-face) 9211 9212 (face font-lock-string-face) 9212 9222 nil 9222 9223 (face font-lock-string-face) 9223 9272 (face font-lock-constant-face) 9272 9273 (face font-lock-string-face) 9273 9283 nil 9283 9284 (face font-lock-string-face) 9284 9332 (face font-lock-constant-face) 9332 9333 (face font-lock-string-face) 9333 9343 nil 9343 9344 (face font-lock-string-face) 9344 9388 (face 
font-lock-constant-face) 9388 9389 (face font-lock-string-face) 9389 9399 nil 9399 9400 (face font-lock-string-face) 9400 9445 (face font-lock-constant-face) 9445 9446 (face font-lock-string-face) 9446 9456 nil 9456 9457 (face font-lock-string-face) 9457 9507 (face font-lock-constant-face) 9507 9508 (face font-lock-string-face) 9508 9518 nil 9518 9519 (face font-lock-string-face) 9519 9570 (face font-lock-constant-face) 9570 9571 (face font-lock-string-face) 9571 9581 nil 9581 9582 (face font-lock-string-face) 9582 9611 (face font-lock-constant-face) 9611 9612 (face font-lock-string-face) 9612 9622 nil 9622 9623 (face font-lock-string-face) 9623 9659 (face font-lock-constant-face) 9659 9660 (face font-lock-string-face) 9660 9670 nil 9670 9671 (face font-lock-string-face) 9671 9714 (face font-lock-constant-face) 9714 9715 (face font-lock-string-face) 9715 9725 nil 9725 9726 (face font-lock-string-face) 9726 9768 (face font-lock-constant-face) 9768 9769 (face font-lock-string-face) 9769 9779 nil 9779 9780 (face font-lock-string-face) 9780 9816 (face font-lock-constant-face) 9816 9817 (face font-lock-string-face) 9817 9827 nil 9827 9828 (face font-lock-string-face) 9828 9863 (face font-lock-constant-face) 9863 9864 (face font-lock-string-face) 9864 9874 nil 9874 9875 (face font-lock-string-face) 9875 9910 (face font-lock-constant-face) 9910 9911 (face font-lock-string-face) 9911 9921 nil 9921 9922 (face font-lock-string-face) 9922 9958 (face font-lock-constant-face) 9958 9959 (face font-lock-string-face) 9959 9969 nil 9969 9970 (face font-lock-string-face) 9970 10005 (face font-lock-constant-face) 10005 10006 (face font-lock-string-face) 10006 10016 nil 10016 10017 (face font-lock-string-face) 10017 10050 (face font-lock-constant-face) 10050 10051 (face font-lock-string-face) 10051 10061 nil 10061 10062 (face font-lock-string-face) 10062 10094 (face font-lock-constant-face) 10094 10095 (face font-lock-string-face) 10095 10105 nil 10105 10106 (face font-lock-string-face) 10106 10150 (face font-lock-constant-face) 10150 10151 (face font-lock-string-face) 10151 10161 nil 10161 10162 (face font-lock-string-face) 10162 10198 (face font-lock-constant-face) 10198 10199 (face font-lock-string-face) 10199 10209 nil 10209 10210 (face font-lock-string-face) 10210 10245 (face font-lock-constant-face) 10245 10246 (face font-lock-string-face) 10246 10256 nil 10256 10257 (face font-lock-string-face) 10257 10296 (face font-lock-constant-face) 10296 10297 (face font-lock-string-face) 10297 10307 nil 10307 10308 (face font-lock-string-face) 10308 10346 (face font-lock-constant-face) 10346 10347 (face font-lock-string-face) 10347 10357 nil 10357 10358 (face font-lock-string-face) 10358 10403 (face font-lock-constant-face) 10403 10404 (face font-lock-string-face) 10404 10414 nil 10414 10415 (face font-lock-string-face) 10415 10459 (face font-lock-constant-face) 10459 10460 (face font-lock-string-face) 10460 10470 nil 10470 10471 (face font-lock-string-face) 10471 10487 (face font-lock-constant-face) 10487 10488 (face font-lock-string-face) 10488 10498 nil 10498 10499 (face font-lock-string-face) 10499 10514 (face font-lock-constant-face) 10514 10515 (face font-lock-string-face) 10515 10525 nil 10525 10526 (face font-lock-string-face) 10526 10559 (face font-lock-constant-face) 10559 10560 (face font-lock-string-face) 10560 10570 nil 10570 10571 (face font-lock-string-face) 10571 10603 (face font-lock-constant-face) 10603 10604 (face font-lock-string-face) 10604 10614 nil 10614 10615 (face font-lock-string-face) 
10615 10636 (face font-lock-constant-face) 10636 10637 (face font-lock-string-face) 10637 10647 nil 10647 10648 (face font-lock-string-face) 10648 10675 (face font-lock-constant-face) 10675 10676 (face font-lock-string-face) 10676 10686 nil 10686 10687 (face font-lock-string-face) 10687 10713 (face font-lock-constant-face) 10713 10714 (face font-lock-string-face) 10714 10724 nil 10724 10725 (face font-lock-string-face) 10725 10755 (face font-lock-constant-face) 10755 10756 (face font-lock-string-face) 10756 10766 nil 10766 10767 (face font-lock-string-face) 10767 10796 (face font-lock-constant-face) 10796 10797 (face font-lock-string-face) 10797 10807 nil 10807 10808 (face font-lock-string-face) 10808 10845 (face font-lock-constant-face) 10845 10846 (face font-lock-string-face) 10846 10856 nil 10856 10857 (face font-lock-string-face) 10857 10893 (face font-lock-constant-face) 10893 10894 (face font-lock-string-face) 10894 10904 nil 10904 10905 (face font-lock-string-face) 10905 10929 (face font-lock-constant-face) 10929 10930 (face font-lock-string-face) 10930 10940 nil 10940 10941 (face font-lock-string-face) 10941 10964 (face font-lock-constant-face) 10964 10965 (face font-lock-string-face) 10965 10975 nil 10975 10976 (face font-lock-string-face) 10976 10995 (face font-lock-constant-face) 10995 10996 (face font-lock-string-face) 10996 11006 nil 11006 11007 (face font-lock-string-face) 11007 11025 (face font-lock-constant-face) 11025 11026 (face font-lock-string-face) 11026 11036 nil 11036 11037 (face font-lock-string-face) 11037 11063 (face font-lock-constant-face) 11063 11064 (face font-lock-string-face) 11064 11074 nil 11074 11075 (face font-lock-string-face) 11075 11100 (face font-lock-constant-face) 11100 11101 (face font-lock-string-face) 11101 11111 nil 11111 11112 (face font-lock-string-face) 11112 11138 (face font-lock-constant-face) 11138 11139 (face font-lock-string-face) 11139 11149 nil 11149 11150 (face font-lock-string-face) 11150 11175 (face font-lock-constant-face) 11175 11176 (face font-lock-string-face) 11176 11193 nil 11193 11194 (face font-lock-string-face) 11194 11219 (face font-lock-keyword-face) 11219 11220 (face font-lock-string-face) 11220 11232 nil 11232 11233 (face font-lock-string-face) 11233 11245 (face font-lock-keyword-face) 11245 11246 (face font-lock-string-face) 11246 11260 nil 11260 11261 (face font-lock-string-face) 11261 11263 (face font-lock-constant-face) 11263 11264 (face font-lock-string-face) 11264 11292 nil 11292 11293 (face font-lock-string-face) 11293 11303 (face font-lock-keyword-face) 11303 11304 (face font-lock-string-face) 11304 11316 nil 11316 11381 (face font-lock-comment-face) 11381 11389 nil 11389 11439 (face font-lock-comment-face) 11439 11448 nil 11448 11449 (face font-lock-string-face) 11449 11464 (face font-lock-variable-name-face) 11464 11465 (face font-lock-string-face) 11465 11479 nil 11479 11480 (face font-lock-string-face) 11480 11492 (face font-lock-keyword-face) 11492 11493 (face font-lock-string-face) 11493 11509 nil 11509 11510 (face font-lock-string-face) 11510 11549 (face font-lock-function-name-face) 11549 11550 (face font-lock-string-face) 11550 11586 nil 11586 11587 (face font-lock-string-face) 11587 11602 (face font-lock-variable-name-face) 11602 11603 (face font-lock-string-face) 11603 11617 nil 11617 11618 (face font-lock-string-face) 11618 11626 (face font-lock-keyword-face) 11626 11627 (face font-lock-string-face) 11627 11643 nil 11643 11644 (face font-lock-string-face) 11644 11663 (face font-lock-constant-face) 
11663 11664 (face font-lock-string-face) 11664 11678 nil 11678 11679 (face font-lock-string-face) 11679 11702 (face font-lock-constant-face) 11702 11703 (face font-lock-string-face) 11703 11717 nil 11717 11718 (face font-lock-string-face) 11718 11740 (face font-lock-constant-face) 11740 11741 (face font-lock-string-face) 11741 11755 nil 11755 11756 (face font-lock-string-face) 11756 11779 (face font-lock-constant-face) 11779 11780 (face font-lock-string-face) 11780 11794 nil 11794 11795 (face font-lock-string-face) 11795 11817 (face font-lock-constant-face) 11817 11818 (face font-lock-string-face) 11818 11832 nil 11832 11833 (face font-lock-string-face) 11833 11861 (face font-lock-constant-face) 11861 11862 (face font-lock-string-face) 11862 11876 nil 11876 11877 (face font-lock-string-face) 11877 11904 (face font-lock-constant-face) 11904 11905 (face font-lock-string-face) 11905 11919 nil 11919 11920 (face font-lock-string-face) 11920 11950 (face font-lock-constant-face) 11950 11951 (face font-lock-string-face) 11951 11965 nil 11965 11966 (face font-lock-string-face) 11966 11995 (face font-lock-constant-face) 11995 11996 (face font-lock-string-face) 11996 12010 nil 12010 12011 (face font-lock-string-face) 12011 12035 (face font-lock-constant-face) 12035 12036 (face font-lock-string-face) 12036 12050 nil 12050 12051 (face font-lock-string-face) 12051 12074 (face font-lock-constant-face) 12074 12075 (face font-lock-string-face) 12075 12089 nil 12089 12090 (face font-lock-string-face) 12090 12120 (face font-lock-constant-face) 12120 12121 (face font-lock-string-face) 12121 12135 nil 12135 12136 (face font-lock-string-face) 12136 12167 (face font-lock-constant-face) 12167 12168 (face font-lock-string-face) 12168 12182 nil 12182 12183 (face font-lock-string-face) 12183 12213 (face font-lock-constant-face) 12213 12214 (face font-lock-string-face) 12214 12228 nil 12228 12229 (face font-lock-string-face) 12229 12254 (face font-lock-constant-face) 12254 12255 (face font-lock-string-face) 12255 12269 nil 12269 12270 (face font-lock-string-face) 12270 12294 (face font-lock-constant-face) 12294 12295 (face font-lock-string-face) 12295 12309 nil 12309 12310 (face font-lock-string-face) 12310 12352 (face font-lock-constant-face) 12352 12353 (face font-lock-string-face) 12353 12367 nil 12367 12368 (face font-lock-string-face) 12368 12409 (face font-lock-constant-face) 12409 12410 (face font-lock-string-face) 12410 12424 nil 12424 12425 (face font-lock-string-face) 12425 12447 (face font-lock-constant-face) 12447 12448 (face font-lock-string-face) 12448 12462 nil 12462 12463 (face font-lock-string-face) 12463 12484 (face font-lock-constant-face) 12484 12485 (face font-lock-string-face) 12485 12499 nil 12499 12500 (face font-lock-string-face) 12500 12531 (face font-lock-constant-face) 12531 12532 (face font-lock-string-face) 12532 12546 nil 12546 12547 (face font-lock-string-face) 12547 12577 (face font-lock-constant-face) 12577 12578 (face font-lock-string-face) 12578 12592 nil 12592 12593 (face font-lock-string-face) 12593 12621 (face font-lock-constant-face) 12621 12622 (face font-lock-string-face) 12622 12636 nil 12636 12637 (face font-lock-string-face) 12637 12664 (face font-lock-constant-face) 12664 12665 (face font-lock-string-face) 12665 12679 nil 12679 12680 (face font-lock-string-face) 12680 12707 (face font-lock-constant-face) 12707 12708 (face font-lock-string-face) 12708 12722 nil 12722 12723 (face font-lock-string-face) 12723 12749 (face font-lock-constant-face) 12749 12750 (face 
font-lock-string-face) 12750 12764 nil 12764 12765 (face font-lock-string-face) 12765 12791 (face font-lock-constant-face) 12791 12792 (face font-lock-string-face) 12792 12806 nil 12806 12807 (face font-lock-string-face) 12807 12832 (face font-lock-constant-face) 12832 12833 (face font-lock-string-face) 12833 12868 nil 12868 12937 (face font-lock-comment-face) 12937 12945 nil 12945 13016 (face font-lock-comment-face) 13016 13024 nil 13024 13040 (face font-lock-comment-face) 13040 13049 nil 13049 13050 (face font-lock-string-face) 13050 13065 (face font-lock-variable-name-face) 13065 13066 (face font-lock-string-face) 13066 13080 nil 13080 13081 (face font-lock-string-face) 13081 13089 (face font-lock-keyword-face) 13089 13090 (face font-lock-string-face) 13090 13105 nil 13105 13106 (face font-lock-string-face) 13106 13149 (face font-lock-constant-face) 13149 13150 (face font-lock-string-face) 13150 13175 nil 13175 13176 (face font-lock-string-face) 13176 13183 (face font-lock-keyword-face) 13183 13184 (face font-lock-string-face) 13184 13199 nil 13199 13200 (face font-lock-string-face) 13200 13248 (face font-lock-constant-face) 13248 13249 (face font-lock-string-face) 13249 13274 nil 13274 13275 (face font-lock-string-face) 13275 13288 (face font-lock-keyword-face) 13288 13289 (face font-lock-string-face) 13289 13305 nil 13305 13306 (face font-lock-string-face) 13306 13315 (face font-lock-keyword-face) 13315 13316 (face font-lock-string-face) 13316 13334 nil 13334 13335 (face font-lock-string-face) 13335 13345 (face font-lock-constant-face) 13345 13346 (face font-lock-string-face) 13346 13397 nil 13397 13398 (face font-lock-string-face) 13398 13443 (face font-lock-variable-name-face) 13443 13444 (face font-lock-string-face) 13444 13458 nil 13458 13459 (face font-lock-string-face) 13459 13472 (face font-lock-keyword-face) 13472 13473 (face font-lock-string-face) 13473 13489 nil 13489 13490 (face font-lock-string-face) 13490 13499 (face font-lock-keyword-face) 13499 13500 (face font-lock-string-face) 13500 13518 nil 13518 13519 (face font-lock-string-face) 13519 13527 (face font-lock-constant-face) 13527 13528 (face font-lock-string-face) 13528 13579 nil 13579 13580 (face font-lock-string-face) 13580 13593 (face font-lock-variable-name-face) 13593 13594 (face font-lock-string-face) 13594 13608 nil 13608 13609 (face font-lock-string-face) 13609 13617 (face font-lock-keyword-face) 13617 13618 (face font-lock-string-face) 13618 13623 nil 13623 13624 (face font-lock-string-face) 13624 13631 (face font-lock-constant-face) 13631 13632 (face font-lock-string-face) 13632 13634 nil 13634 13635 (face font-lock-string-face) 13635 13641 (face font-lock-constant-face) 13641 13642 (face font-lock-string-face) 13642 13671 nil 13671 13672 (face font-lock-string-face) 13672 13679 (face font-lock-constant-face) 13679 13680 (face font-lock-string-face) 13680 13682 nil 13682 13683 (face font-lock-string-face) 13683 13703 (face font-lock-constant-face) 13703 13704 (face font-lock-string-face) 13704 13720 nil 13720 13721 (face font-lock-string-face) 13721 13734 (face font-lock-keyword-face) 13734 13735 (face font-lock-string-face) 13735 13751 nil 13751 13752 (face font-lock-string-face) 13752 13761 (face font-lock-keyword-face) 13761 13762 (face font-lock-string-face) 13762 13815 nil 13815 13816 (face font-lock-string-face) 13816 13829 (face font-lock-variable-name-face) 13829 13830 (face font-lock-string-face) 13830 13844 nil 13844 13845 (face font-lock-string-face) 13845 13853 (face font-lock-keyword-face) 
13853 13854 (face font-lock-string-face) 13854 13870 nil 13870 13871 (face font-lock-string-face) 13871 13909 (face font-lock-constant-face) 13909 13910 (face font-lock-string-face) 13910 13924 nil 13924 13925 (face font-lock-string-face) 13925 13962 (face font-lock-constant-face) 13962 13963 (face font-lock-string-face) 13963 13999 nil 13999 14000 (face font-lock-string-face) 14000 14011 (face font-lock-variable-name-face) 14011 14012 (face font-lock-string-face) 14012 14026 nil 14026 14027 (face font-lock-string-face) 14027 14036 (face font-lock-keyword-face) 14036 14037 (face font-lock-string-face) 14037 14053 nil 14053 14054 (face font-lock-string-face) 14054 14064 (face font-lock-keyword-face) 14064 14065 (face font-lock-string-face) 14065 14084 nil 14084 14085 (face font-lock-string-face) 14085 14096 (face font-lock-variable-name-face) 14096 14097 (face font-lock-string-face) 14097 14117 nil 14117 14129 (face font-lock-string-face) 14129 14131 nil 14131 14169 (face font-lock-string-face) 14169 14176 (face font-lock-variable-name-face) 14176 14182 (face font-lock-string-face) 14182 14193 (face font-lock-variable-name-face) 14193 14196 (face font-lock-string-face) 14196 14233 nil 14233 14245 (face font-lock-string-face) 14245 14247 nil 14247 14259 (face font-lock-string-face) 14259 14316 nil 14316 14317 (face font-lock-string-face) 14317 14327 (face font-lock-keyword-face) 14327 14328 (face font-lock-string-face) 14328 14345 nil 14345 14346 (face font-lock-string-face) 14346 14359 (face font-lock-variable-name-face) 14359 14360 (face font-lock-string-face) 14360 14378 nil 14378 14379 (face font-lock-string-face) 14379 14385 (face font-lock-keyword-face) 14385 14386 (face font-lock-string-face) 14386 14406 nil 14406 14411 (face font-lock-string-face) 14411 14413 (face font-lock-variable-name-face) 14413 14423 (face font-lock-variable-name-face) 14423 14443 (face font-lock-string-face) 14443 14476 nil 14476 14477 (face font-lock-string-face) 14477 14490 (face font-lock-keyword-face) 14490 14491 (face font-lock-string-face) 14491 14511 nil 14511 14512 (face font-lock-string-face) 14512 14521 (face font-lock-keyword-face) 14521 14522 (face font-lock-string-face) 14522 14544 nil 14544 14545 (face font-lock-string-face) 14545 14549 (face font-lock-constant-face) 14549 14551 (face font-lock-variable-name-face) 14551 14561 (face font-lock-variable-name-face) 14561 14578 (face font-lock-constant-face) 14578 14579 (face font-lock-string-face) 14579 14631 nil 14631 14632 (face font-lock-string-face) 14632 14639 (face font-lock-keyword-face) 14639 14640 (face font-lock-string-face) 14640 14660 nil 14660 14661 (face font-lock-string-face) 14661 14669 (face font-lock-preprocessor-face) 14669 14670 (face font-lock-string-face) 14670 14707 nil 14707 14729 (face font-lock-comment-face) 14729 14743 nil 14743 14744 (face font-lock-string-face) 14744 14752 (face font-lock-keyword-face) 14752 14753 (face font-lock-string-face) 14753 14773 nil 14773 14774 (face font-lock-string-face) 14774 14800 (face font-lock-constant-face) 14800 14801 (face font-lock-string-face) 14801 14819 nil 14819 14820 (face font-lock-string-face) 14820 14845 (face font-lock-constant-face) 14845 14846 (face font-lock-string-face) 14846 14915 nil 14915 14916 (face font-lock-string-face) 14916 14929 (face font-lock-variable-name-face) 14929 14930 (face font-lock-string-face) 14930 14944 nil 14944 14945 (face font-lock-string-face) 14945 14955 (face font-lock-keyword-face) 14955 14956 (face font-lock-string-face) 14956 14973 nil 14973 
14974 (face font-lock-string-face) 14974 14993 (face font-lock-variable-name-face) 14993 14994 (face font-lock-string-face) 14994 15012 nil 15012 15013 (face font-lock-string-face) 15013 15019 (face font-lock-keyword-face) 15019 15020 (face font-lock-string-face) 15020 15040 nil 15040 15075 (face font-lock-string-face) 15075 15108 nil 15108 15109 (face font-lock-string-face) 15109 15122 (face font-lock-keyword-face) 15122 15123 (face font-lock-string-face) 15123 15143 nil 15143 15144 (face font-lock-string-face) 15144 15153 (face font-lock-keyword-face) 15153 15154 (face font-lock-string-face) 15154 15176 nil 15176 15177 (face font-lock-string-face) 15177 15215 (face font-lock-constant-face) 15215 15216 (face font-lock-string-face) 15216 15268 nil 15268 15269 (face font-lock-string-face) 15269 15276 (face font-lock-keyword-face) 15276 15277 (face font-lock-string-face) 15277 15297 nil 15297 15298 (face font-lock-string-face) 15298 15312 (face font-lock-preprocessor-face) 15312 15313 (face font-lock-string-face) 15313 15350 nil 15350 15378 (face font-lock-comment-face) 15378 15392 nil 15392 15393 (face font-lock-string-face) 15393 15401 (face font-lock-keyword-face) 15401 15402 (face font-lock-string-face) 15402 15422 nil 15422 15423 (face font-lock-string-face) 15423 15450 (face font-lock-constant-face) 15450 15451 (face font-lock-string-face) 15451 15469 nil 15469 15470 (face font-lock-string-face) 15470 15496 (face font-lock-constant-face) 15496 15497 (face font-lock-string-face) 15497 15566 nil 15566 15567 (face font-lock-string-face) 15567 15600 (face font-lock-variable-name-face) 15600 15601 (face font-lock-string-face) 15601 15615 nil 15615 15663 (face font-lock-comment-face) 15663 15673 nil 15673 15674 (face font-lock-string-face) 15674 15682 (face font-lock-keyword-face) 15682 15683 (face font-lock-string-face) 15683 15699 nil 15699 15700 (face font-lock-string-face) 15700 15743 (face font-lock-constant-face) 15743 15744 (face font-lock-string-face) 15744 15758 nil 15758 15759 (face font-lock-string-face) 15759 15801 (face font-lock-constant-face) 15801 15802 (face font-lock-string-face) 15802 15838 nil 15838 15839 (face font-lock-string-face) 15839 15848 (face font-lock-variable-name-face) 15848 15849 (face font-lock-string-face) 15849 15863 nil 15863 15864 (face font-lock-string-face) 15864 15877 (face font-lock-keyword-face) 15877 15878 (face font-lock-string-face) 15878 15894 nil 15894 15895 (face font-lock-string-face) 15895 15904 (face font-lock-keyword-face) 15904 15905 (face font-lock-string-face) 15905 15923 nil 15923 15924 (face font-lock-string-face) 15924 15980 (face font-lock-constant-face) 15980 15981 (face font-lock-string-face) 15981 15997 nil 15997 15998 (face font-lock-string-face) 15998 16057 (face font-lock-constant-face) 16057 16058 (face font-lock-string-face) 16058 16074 nil 16074 16075 (face font-lock-string-face) 16075 16131 (face font-lock-constant-face) 16131 16132 (face font-lock-string-face) 16132 16148 nil 16148 16149 (face font-lock-string-face) 16149 16205 (face font-lock-constant-face) 16205 16206 (face font-lock-string-face) 16206 16222 nil 16222 16223 (face font-lock-string-face) 16223 16275 (face font-lock-constant-face) 16275 16276 (face font-lock-string-face) 16276 16327 nil 16327 16328 (face font-lock-string-face) 16328 16337 (face font-lock-variable-name-face) 16337 16338 (face font-lock-string-face) 16338 16352 nil 16352 16353 (face font-lock-string-face) 16353 16361 (face font-lock-keyword-face) 16361 16362 (face font-lock-string-face) 
16362 16378 nil 16378 16379 (face font-lock-string-face) 16379 16406 (face font-lock-constant-face) 16406 16407 (face font-lock-string-face) 16407 16421 nil 16421 16422 (face font-lock-string-face) 16422 16448 (face font-lock-constant-face) 16448 16449 (face font-lock-string-face) 16449 16463 nil 16463 16464 (face font-lock-string-face) 16464 16507 (face font-lock-constant-face) 16507 16508 (face font-lock-string-face) 16508 16522 nil 16522 16523 (face font-lock-string-face) 16523 16565 (face font-lock-constant-face) 16565 16566 (face font-lock-string-face) 16566 16602 nil 16602 16603 (face font-lock-string-face) 16603 16646 (face font-lock-variable-name-face) 16646 16647 (face font-lock-string-face) 16647 16661 nil 16661 16662 (face font-lock-string-face) 16662 16669 (face font-lock-keyword-face) 16669 16670 (face font-lock-string-face) 16670 16686 nil 16686 16687 (face font-lock-string-face) 16687 16697 (face font-lock-constant-face) 16697 16698 (face font-lock-string-face) 16698 16712 nil 16712 16713 (face font-lock-string-face) 16713 16722 (face font-lock-constant-face) 16722 16723 (face font-lock-string-face) 16723 16737 nil 16737 16738 (face font-lock-string-face) 16738 16760 (face font-lock-constant-face) 16760 16761 (face font-lock-string-face) 16761 16775 nil 16775 16776 (face font-lock-string-face) 16776 16797 (face font-lock-constant-face) 16797 16798 (face font-lock-string-face) 16798 16812 nil 16812 16813 (face font-lock-string-face) 16813 16830 (face font-lock-constant-face) 16830 16831 (face font-lock-string-face) 16831 16845 nil 16845 16846 (face font-lock-string-face) 16846 16862 (face font-lock-constant-face) 16862 16863 (face font-lock-string-face) 16863 16877 nil 16877 16878 (face font-lock-string-face) 16878 16889 (face font-lock-constant-face) 16889 16890 (face font-lock-string-face) 16890 16904 nil 16904 16905 (face font-lock-string-face) 16905 16915 (face font-lock-constant-face) 16915 16916 (face font-lock-string-face) 16916 16930 nil 16930 16931 (face font-lock-string-face) 16931 16955 (face font-lock-constant-face) 16955 16956 (face font-lock-string-face) 16956 16970 nil 16970 16971 (face font-lock-string-face) 16971 16994 (face font-lock-constant-face) 16994 16995 (face font-lock-string-face) 16995 17009 nil 17009 17010 (face font-lock-string-face) 17010 17034 (face font-lock-constant-face) 17034 17035 (face font-lock-string-face) 17035 17049 nil 17049 17050 (face font-lock-string-face) 17050 17073 (face font-lock-constant-face) 17073 17074 (face font-lock-string-face) 17074 17088 nil 17088 17089 (face font-lock-string-face) 17089 17114 (face font-lock-constant-face) 17114 17115 (face font-lock-string-face) 17115 17129 nil 17129 17130 (face font-lock-string-face) 17130 17154 (face font-lock-constant-face) 17154 17155 (face font-lock-string-face) 17155 17210 nil 17210 17211 (face font-lock-string-face) 17211 17222 (face font-lock-keyword-face) 17222 17223 (face font-lock-string-face) 17223 17225 nil 17225 17226 (face font-lock-string-face) 17226 17237 (face font-lock-function-name-face) 17237 17238 (face font-lock-string-face) 17238 17246 nil 17246 17247 (face font-lock-string-face) 17247 17251 (face font-lock-keyword-face) 17251 17252 (face font-lock-string-face) 17252 17254 nil 17254 17255 (face font-lock-string-face) 17255 17269 (face font-lock-type-face) 17269 17270 (face font-lock-string-face) 17270 17278 nil 17278 17279 (face font-lock-string-face) 17279 17291 (face font-lock-keyword-face) 17291 17292 (face font-lock-string-face) 17292 17304 nil 17304 17305 
(face font-lock-string-face) 17305 17307 (face font-lock-constant-face) 17307 17308 (face font-lock-string-face) 17308 17325 nil 17325 17326 (face font-lock-string-face) 17326 17336 (face font-lock-keyword-face) 17336 17337 (face font-lock-string-face) 17337 17350 nil 17350 17351 (face font-lock-string-face) 17351 17371 (face font-lock-variable-name-face) 17371 17372 (face font-lock-string-face) 17372 17386 nil 17386 17387 (face font-lock-string-face) 17387 17404 (face font-lock-keyword-face) 17404 17405 (face font-lock-string-face) 17405 17423 nil 17423 17424 (face font-lock-string-face) 17424 17442 (face font-lock-variable-name-face) 17442 17443 (face font-lock-string-face) 17443 17461 nil 17461 17462 (face font-lock-string-face) 17462 17469 (face font-lock-keyword-face) 17469 17470 (face font-lock-string-face) 17470 17474 nil 17474 17498 (face font-lock-string-face) 17498 17553 nil 17553 17554 (face font-lock-string-face) 17554 17599 (face font-lock-variable-name-face) 17599 17600 (face font-lock-string-face) 17600 17614 nil 17614 17615 (face font-lock-string-face) 17615 17627 (face font-lock-keyword-face) 17627 17628 (face font-lock-string-face) 17628 17644 nil 17644 17645 (face font-lock-string-face) 17645 17665 (face font-lock-function-name-face) 17665 17666 (face font-lock-string-face) 17666 17703 nil 17703 17704 (face font-lock-string-face) 17704 17724 (face font-lock-variable-name-face) 17724 17725 (face font-lock-string-face) 17725 17739 nil 17739 17740 (face font-lock-string-face) 17740 17752 (face font-lock-keyword-face) 17752 17753 (face font-lock-string-face) 17753 17769 nil 17769 17770 (face font-lock-string-face) 17770 17790 (face font-lock-function-name-face) 17790 17791 (face font-lock-string-face) 17791 17833 nil 17833 17834 (face font-lock-string-face) 17834 17841 (face font-lock-keyword-face) 17841 17842 (face font-lock-string-face) 17842 17854 nil 17854 17855 (face font-lock-string-face) 17855 17874 (face font-lock-constant-face) 17874 17875 (face font-lock-string-face) 17875 17885 nil 17885 17886 (face font-lock-string-face) 17886 17904 (face font-lock-constant-face) 17904 17905 (face font-lock-string-face) 17905 17935 nil 17935 17936 (face font-lock-string-face) 17936 17947 (face font-lock-keyword-face) 17947 17948 (face font-lock-string-face) 17948 17950 nil 17950 17951 (face font-lock-string-face) 17951 17971 (face font-lock-function-name-face) 17971 17972 (face font-lock-string-face) 17972 17980 nil 17980 17981 (face font-lock-string-face) 17981 17985 (face font-lock-keyword-face) 17985 17986 (face font-lock-string-face) 17986 17988 nil 17988 17989 (face font-lock-string-face) 17989 18003 (face font-lock-type-face) 18003 18004 (face font-lock-string-face) 18004 18012 nil 18012 18013 (face font-lock-string-face) 18013 18025 (face font-lock-keyword-face) 18025 18026 (face font-lock-string-face) 18026 18038 nil 18038 18039 (face font-lock-string-face) 18039 18041 (face font-lock-constant-face) 18041 18042 (face font-lock-string-face) 18042 18059 nil 18059 18060 (face font-lock-string-face) 18060 18067 (face font-lock-keyword-face) 18067 18068 (face font-lock-string-face) 18068 18080 nil 18080 18081 (face font-lock-string-face) 18081 18114 (face font-lock-constant-face) 18114 18115 (face font-lock-string-face) 18115 18125 nil 18125 18126 (face font-lock-string-face) 18126 18162 (face font-lock-constant-face) 18162 18163 (face font-lock-string-face) 18163 18173 nil 18173 18174 (face font-lock-string-face) 18174 18212 (face font-lock-constant-face) 18212 18213 (face 
font-lock-string-face) 18213 18223 nil 18223 18224 (face font-lock-string-face) 18224 18261 (face font-lock-constant-face) 18261 18262 (face font-lock-string-face) 18262 18272 nil 18272 18273 (face font-lock-string-face) 18273 18311 (face font-lock-constant-face) 18311 18312 (face font-lock-string-face) 18312 18322 nil 18322 18323 (face font-lock-string-face) 18323 18356 (face font-lock-constant-face) 18356 18357 (face font-lock-string-face) 18357 18367 nil 18367 18368 (face font-lock-string-face) 18368 18403 (face font-lock-constant-face) 18403 18404 (face font-lock-string-face) 18404 18414 nil 18414 18415 (face font-lock-string-face) 18415 18451 (face font-lock-constant-face) 18451 18452 (face font-lock-string-face) 18452 18462 nil 18462 18463 (face font-lock-string-face) 18463 18499 (face font-lock-constant-face) 18499 18500 (face font-lock-string-face) 18500 18510 nil 18510 18511 (face font-lock-string-face) 18511 18547 (face font-lock-constant-face) 18547 18548 (face font-lock-string-face) 18548 18558 nil 18558 18559 (face font-lock-string-face) 18559 18581 (face font-lock-constant-face) 18581 18582 (face font-lock-string-face) 18582 18592 nil 18592 18593 (face font-lock-string-face) 18593 18618 (face font-lock-constant-face) 18618 18619 (face font-lock-string-face) 18619 18629 nil 18629 18630 (face font-lock-string-face) 18630 18657 (face font-lock-constant-face) 18657 18658 (face font-lock-string-face) 18658 18668 nil 18668 18669 (face font-lock-string-face) 18669 18697 (face font-lock-constant-face) 18697 18698 (face font-lock-string-face) 18698 18708 nil 18708 18709 (face font-lock-string-face) 18709 18750 (face font-lock-constant-face) 18750 18751 (face font-lock-string-face) 18751 18761 nil 18761 18762 (face font-lock-string-face) 18762 18803 (face font-lock-constant-face) 18803 18804 (face font-lock-string-face) 18804 18814 nil 18814 18815 (face font-lock-string-face) 18815 18856 (face font-lock-constant-face) 18856 18857 (face font-lock-string-face) 18857 18867 nil 18867 18868 (face font-lock-string-face) 18868 18902 (face font-lock-constant-face) 18902 18903 (face font-lock-string-face) 18903 18913 nil 18913 18914 (face font-lock-string-face) 18914 18948 (face font-lock-constant-face) 18948 18949 (face font-lock-string-face) 18949 18959 nil 18959 18960 (face font-lock-string-face) 18960 18994 (face font-lock-constant-face) 18994 18995 (face font-lock-string-face) 18995 19005 nil 19005 19006 (face font-lock-string-face) 19006 19035 (face font-lock-constant-face) 19035 19036 (face font-lock-string-face) 19036 19046 nil 19046 19047 (face font-lock-string-face) 19047 19075 (face font-lock-constant-face) 19075 19076 (face font-lock-string-face) 19076 19093 nil 19093 19094 (face font-lock-string-face) 19094 19104 (face font-lock-keyword-face) 19104 19105 (face font-lock-string-face) 19105 19118 nil 19118 19119 (face font-lock-string-face) 19119 19139 (face font-lock-variable-name-face) 19139 19140 (face font-lock-string-face) 19140 19154 nil 19154 19155 (face font-lock-string-face) 19155 19172 (face font-lock-keyword-face) 19172 19173 (face font-lock-string-face) 19173 19191 nil 19191 19192 (face font-lock-string-face) 19192 19210 (face font-lock-variable-name-face) 19210 19211 (face font-lock-string-face) 19211 19229 nil 19229 19230 (face font-lock-string-face) 19230 19237 (face font-lock-keyword-face) 19237 19238 (face font-lock-string-face) 19238 19242 nil 19242 19266 (face font-lock-string-face) 19266 19321 nil 19321 19322 (face font-lock-string-face) 19322 19342 (face 
font-lock-variable-name-face) 19342 19343 (face font-lock-string-face) 19343 19357 nil 19357 19399 (face font-lock-comment-face) 19399 19409 nil 19409 19410 (face font-lock-string-face) 19410 19417 (face font-lock-keyword-face) 19417 19418 (face font-lock-string-face) 19418 19434 nil 19434 19435 (face font-lock-string-face) 19435 19480 (face font-lock-constant-face) 19480 19481 (face font-lock-string-face) 19481 19495 nil 19495 19496 (face font-lock-string-face) 19496 19535 (face font-lock-constant-face) 19535 19536 (face font-lock-string-face) 19536 19573 nil 19573 19574 (face font-lock-string-face) 19574 19623 (face font-lock-variable-name-face) 19623 19624 (face font-lock-string-face) 19624 19638 nil 19638 19639 (face font-lock-string-face) 19639 19645 (face font-lock-keyword-face) 19645 19646 (face font-lock-string-face) 19646 19662 nil 19662 19670 (face font-lock-string-face) 19670 19707 nil 19707 19708 (face font-lock-string-face) 19708 19719 (face font-lock-variable-name-face) 19719 19720 (face font-lock-string-face) 19720 19734 nil 19734 19735 (face font-lock-string-face) 19735 19749 (face font-lock-keyword-face) 19749 19750 (face font-lock-string-face) 19750 19766 nil 19766 19773 (face font-lock-string-face) 19773 19791 nil 19791 19792 (face font-lock-string-face) 19792 19806 (face font-lock-keyword-face) 19806 19807 (face font-lock-string-face) 19807 19827 nil 19827 19890 (face font-lock-comment-face) 19890 19906 nil 19906 19971 (face font-lock-comment-face) 19971 19987 nil 19987 20032 (face font-lock-comment-face) 20032 20048 nil 20048 20072 (face font-lock-string-face) 20072 20074 nil 20074 20077 (face font-lock-string-face) 20077 20080 nil 20080 20086 (face font-lock-comment-face) 20086 20155 nil 20155 20156 (face font-lock-string-face) 20156 20165 (face font-lock-variable-name-face) 20165 20166 (face font-lock-string-face) 20166 20180 nil 20180 20181 (face font-lock-string-face) 20181 20190 (face font-lock-keyword-face) 20190 20191 (face font-lock-string-face) 20191 20207 nil 20207 20208 (face font-lock-string-face) 20208 20218 (face font-lock-variable-name-face) 20218 20219 (face font-lock-string-face) 20219 20237 nil 20237 20246 (face font-lock-string-face) 20246 20262 nil 20262 20270 (face font-lock-string-face) 20270 20286 nil 20286 20298 (face font-lock-string-face) 20298 20314 nil 20314 20322 (face font-lock-string-face) 20322 20374 nil 20374 20375 (face font-lock-string-face) 20375 20384 (face font-lock-variable-name-face) 20384 20385 (face font-lock-string-face) 20385 20399 nil 20399 20400 (face font-lock-string-face) 20400 20409 (face font-lock-keyword-face) 20409 20410 (face font-lock-string-face) 20410 20426 nil 20426 20427 (face font-lock-string-face) 20427 20437 (face font-lock-variable-name-face) 20437 20438 (face font-lock-string-face) 20438 20456 nil 20456 20466 (face font-lock-string-face) 20466 20482 nil 20482 20491 (face font-lock-string-face) 20491 20507 nil 20507 20519 (face font-lock-string-face) 20519 20535 nil 20535 20543 (face font-lock-string-face) 20543 20595 nil 20595 20596 (face font-lock-string-face) 20596 20621 (face font-lock-variable-name-face) 20621 20622 (face font-lock-string-face) 20622 20636 nil 20636 20637 (face font-lock-string-face) 20637 20646 (face font-lock-keyword-face) 20646 20647 (face font-lock-string-face) 20647 20663 nil 20663 20664 (face font-lock-string-face) 20664 20674 (face font-lock-keyword-face) 20674 20675 (face font-lock-string-face) 20675 20695 nil 20695 20696 (face font-lock-string-face) 20696 20715 (face 
font-lock-variable-name-face) 20715 20716 (face font-lock-string-face) 20716 20736 nil 20736 20748 (face font-lock-string-face) 20748 20770 nil 20770 20780 (face font-lock-string-face) 20780 20800 nil 20800 20807 (face font-lock-string-face) 20807 20827 nil 20827 20839 (face font-lock-string-face) 20839 20859 nil 20859 20867 (face font-lock-string-face) 20867 20923 nil 20923 20935 (face font-lock-string-face) 20935 20957 nil 20957 20972 (face font-lock-string-face) 20972 20992 nil 20992 20999 (face font-lock-string-face) 20999 21019 nil 21019 21026 (face font-lock-string-face) 21026 21046 nil 21046 21058 (face font-lock-string-face) 21058 21078 nil 21078 21086 (face font-lock-string-face) 21086 21180 nil 21180 21181 (face font-lock-string-face) 21181 21190 (face font-lock-keyword-face) 21190 21191 (face font-lock-string-face) 21191 21203 nil 21203 21204 (face font-lock-string-face) 21204 21220 (face font-lock-variable-name-face) 21220 21221 (face font-lock-string-face) 21221 21223 nil 21223 21224 (face font-lock-string-face) 21224 21256 (face font-lock-variable-name-face) 21256 21257 (face font-lock-string-face) 21257 21274 nil 21274 21314 (face font-lock-string-face) 21314 21325 nil 21325 21326 (face font-lock-string-face) 21326 21334 (face font-lock-keyword-face) 21334 21335 (face font-lock-string-face) 21335 21347 nil 21347 21348 (face font-lock-string-face) 21348 21385 (face font-lock-constant-face) 21385 21386 (face font-lock-string-face) 21386 21416 nil 21416 21417 (face font-lock-string-face) 21417 21428 (face font-lock-keyword-face) 21428 21429 (face font-lock-string-face) 21429 21431 nil 21431 21432 (face font-lock-string-face) 21432 21452 (face font-lock-function-name-face) 21452 21453 (face font-lock-string-face) 21453 21461 nil 21461 21462 (face font-lock-string-face) 21462 21466 (face font-lock-keyword-face) 21466 21467 (face font-lock-string-face) 21467 21469 nil 21469 21470 (face font-lock-string-face) 21470 21484 (face font-lock-type-face) 21484 21485 (face font-lock-string-face) 21485 21493 nil 21493 21494 (face font-lock-string-face) 21494 21506 (face font-lock-keyword-face) 21506 21507 (face font-lock-string-face) 21507 21519 nil 21519 21520 (face font-lock-string-face) 21520 21522 (face font-lock-constant-face) 21522 21523 (face font-lock-string-face) 21523 21540 nil 21540 21541 (face font-lock-string-face) 21541 21548 (face font-lock-keyword-face) 21548 21549 (face font-lock-string-face) 21549 21561 nil 21561 21562 (face font-lock-string-face) 21562 21595 (face font-lock-constant-face) 21595 21596 (face font-lock-string-face) 21596 21606 nil 21606 21607 (face font-lock-string-face) 21607 21637 (face font-lock-constant-face) 21637 21638 (face font-lock-string-face) 21638 21648 nil 21648 21649 (face font-lock-string-face) 21649 21682 (face font-lock-constant-face) 21682 21683 (face font-lock-string-face) 21683 21693 nil 21693 21694 (face font-lock-string-face) 21694 21724 (face font-lock-constant-face) 21724 21725 (face font-lock-string-face) 21725 21735 nil 21735 21736 (face font-lock-string-face) 21736 21758 (face font-lock-constant-face) 21758 21759 (face font-lock-string-face) 21759 21769 nil 21769 21770 (face font-lock-string-face) 21770 21795 (face font-lock-constant-face) 21795 21796 (face font-lock-string-face) 21796 21806 nil 21806 21807 (face font-lock-string-face) 21807 21836 (face font-lock-constant-face) 21836 21837 (face font-lock-string-face) 21837 21847 nil 21847 21848 (face font-lock-string-face) 21848 21876 (face font-lock-constant-face) 21876 21877 
(face font-lock-string-face) 21877 21907 nil 21907 21908 (face font-lock-string-face) 21908 21919 (face font-lock-keyword-face) 21919 21920 (face font-lock-string-face) 21920 21922 nil 21922 21923 (face font-lock-string-face) 21923 21938 (face font-lock-function-name-face) 21938 21939 (face font-lock-string-face) 21939 21947 nil 21947 21948 (face font-lock-string-face) 21948 21952 (face font-lock-keyword-face) 21952 21953 (face font-lock-string-face) 21953 21955 nil 21955 21956 (face font-lock-string-face) 21956 21966 (face font-lock-type-face) 21966 21967 (face font-lock-string-face) 21967 21975 nil 21975 21976 (face font-lock-string-face) 21976 21988 (face font-lock-keyword-face) 21988 21989 (face font-lock-string-face) 21989 22001 nil 22001 22002 (face font-lock-string-face) 22002 22007 (face font-lock-function-name-face) 22007 22008 (face font-lock-string-face) 22008 22018 nil 22018 22019 (face font-lock-string-face) 22019 22037 (face font-lock-function-name-face) 22037 22038 (face font-lock-string-face) 22038 22048 nil 22048 22049 (face font-lock-string-face) 22049 22060 (face font-lock-function-name-face) 22060 22061 (face font-lock-string-face) 22061 22071 nil 22071 22072 (face font-lock-string-face) 22072 22093 (face font-lock-function-name-face) 22093 22094 (face font-lock-string-face) 22094 22104 nil 22104 22105 (face font-lock-string-face) 22105 22131 (face font-lock-function-name-face) 22131 22132 (face font-lock-string-face) 22132 22142 nil 22142 22143 (face font-lock-string-face) 22143 22177 (face font-lock-function-name-face) 22177 22178 (face font-lock-string-face) 22178 22188 nil 22188 22189 (face font-lock-string-face) 22189 22215 (face font-lock-function-name-face) 22215 22216 (face font-lock-string-face) 22216 22226 nil 22226 22227 (face font-lock-string-face) 22227 22253 (face font-lock-function-name-face) 22253 22254 (face font-lock-string-face) 22254 22264 nil 22264 22265 (face font-lock-string-face) 22265 22280 (face font-lock-function-name-face) 22280 22281 (face font-lock-string-face) 22281 22298 nil 22298 22299 (face font-lock-string-face) 22299 22306 (face font-lock-keyword-face) 22306 22307 (face font-lock-string-face) 22307 22319 nil 22319 22320 (face font-lock-string-face) 22320 22361 (face font-lock-constant-face) 22361 22362 (face font-lock-string-face) 22362 22372 nil 22372 22373 (face font-lock-string-face) 22373 22413 (face font-lock-constant-face) 22413 22414 (face font-lock-string-face) 22414 22424 nil 22424 22425 (face font-lock-string-face) 22425 22461 (face font-lock-constant-face) 22461 22462 (face font-lock-string-face) 22462 22472 nil 22472 22473 (face font-lock-string-face) 22473 22502 (face font-lock-constant-face) 22502 22503 (face font-lock-string-face) 22503 22513 nil 22513 22514 (face font-lock-string-face) 22514 22550 (face font-lock-constant-face) 22550 22551 (face font-lock-string-face) 22551 22561 nil 22561 22562 (face font-lock-string-face) 22562 22610 (face font-lock-constant-face) 22610 22611 (face font-lock-string-face) 22611 22621 nil 22621 22622 (face font-lock-string-face) 22622 22663 (face font-lock-constant-face) 22663 22664 (face font-lock-string-face) 22664 22674 nil 22674 22675 (face font-lock-string-face) 22675 22711 (face font-lock-constant-face) 22711 22712 (face font-lock-string-face) 22712 22722 nil 22722 22723 (face font-lock-string-face) 22723 22757 (face font-lock-constant-face) 22757 22758 (face font-lock-string-face) 22758 22768 nil 22768 22769 (face font-lock-string-face) 22769 22797 (face font-lock-constant-face) 
22797 22798 (face font-lock-string-face) 22798 22808 nil 22808 22809 (face font-lock-string-face) 22809 22853 (face font-lock-constant-face) 22853 22854 (face font-lock-string-face) 22854 22864 nil 22864 22865 (face font-lock-string-face) 22865 22900 (face font-lock-constant-face) 22900 22901 (face font-lock-string-face) 22901 22911 nil 22911 22912 (face font-lock-string-face) 22912 22961 (face font-lock-constant-face) 22961 22962 (face font-lock-string-face) 22962 22972 nil 22972 22973 (face font-lock-string-face) 22973 23011 (face font-lock-constant-face) 23011 23012 (face font-lock-string-face) 23012 23022 nil 23022 23023 (face font-lock-string-face) 23023 23055 (face font-lock-constant-face) 23055 23056 (face font-lock-string-face) 23056 23066 nil 23066 23067 (face font-lock-string-face) 23067 23116 (face font-lock-constant-face) 23116 23117 (face font-lock-string-face) 23117 23127 nil 23127 23128 (face font-lock-string-face) 23128 23178 (face font-lock-constant-face) 23178 23179 (face font-lock-string-face) 23179 23189 nil 23189 23190 (face font-lock-string-face) 23190 23228 (face font-lock-constant-face) 23228 23229 (face font-lock-string-face) 23229 23239 nil 23239 23240 (face font-lock-string-face) 23240 23277 (face font-lock-constant-face) 23277 23278 (face font-lock-string-face) 23278 23288 nil 23288 23289 (face font-lock-string-face) 23289 23332 (face font-lock-constant-face) 23332 23333 (face font-lock-string-face) 23333 23343 nil 23343 23344 (face font-lock-string-face) 23344 23368 (face font-lock-constant-face) 23368 23369 (face font-lock-string-face) 23369 23379 nil 23379 23380 (face font-lock-string-face) 23380 23402 (face font-lock-constant-face) 23402 23403 (face font-lock-string-face) 23403 23413 nil 23413 23414 (face font-lock-string-face) 23414 23447 (face font-lock-constant-face) 23447 23448 (face font-lock-string-face) 23448 23458 nil 23458 23459 (face font-lock-string-face) 23459 23487 (face font-lock-constant-face) 23487 23488 (face font-lock-string-face) 23488 23498 nil 23498 23499 (face font-lock-string-face) 23499 23530 (face font-lock-constant-face) 23530 23531 (face font-lock-string-face) 23531 23541 nil 23541 23542 (face font-lock-string-face) 23542 23563 (face font-lock-constant-face) 23563 23564 (face font-lock-string-face) 23564 23574 nil 23574 23575 (face font-lock-string-face) 23575 23609 (face font-lock-constant-face) 23609 23610 (face font-lock-string-face) 23610 23620 nil 23620 23621 (face font-lock-string-face) 23621 23654 (face font-lock-constant-face) 23654 23655 (face font-lock-string-face) 23655 23665 nil 23665 23666 (face font-lock-string-face) 23666 23700 (face font-lock-constant-face) 23700 23701 (face font-lock-string-face) 23701 23711 nil 23711 23712 (face font-lock-string-face) 23712 23753 (face font-lock-constant-face) 23753 23754 (face font-lock-string-face) 23754 23764 nil 23764 23765 (face font-lock-string-face) 23765 23790 (face font-lock-constant-face) 23790 23791 (face font-lock-string-face) 23791 23801 nil 23801 23802 (face font-lock-string-face) 23802 23825 (face font-lock-constant-face) 23825 23826 (face font-lock-string-face) 23826 23836 nil 23836 23837 (face font-lock-string-face) 23837 23862 (face font-lock-constant-face) 23862 23863 (face font-lock-string-face) 23863 23873 nil 23873 23874 (face font-lock-string-face) 23874 23906 (face font-lock-constant-face) 23906 23907 (face font-lock-string-face) 23907 23917 nil 23917 23918 (face font-lock-string-face) 23918 23947 (face font-lock-constant-face) 23947 23948 (face 
font-lock-string-face) 23948 23958 nil 23958 23959 (face font-lock-string-face) 23959 23981 (face font-lock-constant-face) 23981 23982 (face font-lock-string-face) 23982 23992 nil 23992 23993 (face font-lock-string-face) 23993 24014 (face font-lock-constant-face) 24014 24015 (face font-lock-string-face) 24015 24025 nil 24025 24026 (face font-lock-string-face) 24026 24054 (face font-lock-constant-face) 24054 24055 (face font-lock-string-face) 24055 24065 nil 24065 24066 (face font-lock-string-face) 24066 24093 (face font-lock-constant-face) 24093 24094 (face font-lock-string-face) 24094 24104 nil 24104 24105 (face font-lock-string-face) 24105 24133 (face font-lock-constant-face) 24133 24134 (face font-lock-string-face) 24134 24144 nil 24144 24145 (face font-lock-string-face) 24145 24177 (face font-lock-constant-face) 24177 24178 (face font-lock-string-face) 24178 24188 nil 24188 24189 (face font-lock-string-face) 24189 24221 (face font-lock-constant-face) 24221 24222 (face font-lock-string-face) 24222 24232 nil 24232 24233 (face font-lock-string-face) 24233 24277 (face font-lock-constant-face) 24277 24278 (face font-lock-string-face) 24278 24288 nil 24288 24289 (face font-lock-string-face) 24289 24328 (face font-lock-constant-face) 24328 24329 (face font-lock-string-face) 24329 24339 nil 24339 24340 (face font-lock-string-face) 24340 24379 (face font-lock-constant-face) 24379 24380 (face font-lock-string-face) 24380 24390 nil 24390 24391 (face font-lock-string-face) 24391 24424 (face font-lock-constant-face) 24424 24425 (face font-lock-string-face) 24425 24435 nil 24435 24436 (face font-lock-string-face) 24436 24476 (face font-lock-constant-face) 24476 24477 (face font-lock-string-face) 24477 24487 nil 24487 24488 (face font-lock-string-face) 24488 24521 (face font-lock-constant-face) 24521 24522 (face font-lock-string-face) 24522 24532 nil 24532 24533 (face font-lock-string-face) 24533 24567 (face font-lock-constant-face) 24567 24568 (face font-lock-string-face) 24568 24578 nil 24578 24579 (face font-lock-string-face) 24579 24610 (face font-lock-constant-face) 24610 24611 (face font-lock-string-face) 24611 24621 nil 24621 24622 (face font-lock-string-face) 24622 24673 (face font-lock-constant-face) 24673 24674 (face font-lock-string-face) 24674 24684 nil 24684 24685 (face font-lock-string-face) 24685 24725 (face font-lock-constant-face) 24725 24726 (face font-lock-string-face) 24726 24736 nil 24736 24737 (face font-lock-string-face) 24737 24773 (face font-lock-constant-face) 24773 24774 (face font-lock-string-face) 24774 24784 nil 24784 24785 (face font-lock-string-face) 24785 24821 (face font-lock-constant-face) 24821 24822 (face font-lock-string-face) 24822 24832 nil 24832 24833 (face font-lock-string-face) 24833 24874 (face font-lock-constant-face) 24874 24875 (face font-lock-string-face) 24875 24885 nil 24885 24886 (face font-lock-string-face) 24886 24926 (face font-lock-constant-face) 24926 24927 (face font-lock-string-face) 24927 24937 nil 24937 24938 (face font-lock-string-face) 24938 24977 (face font-lock-constant-face) 24977 24978 (face font-lock-string-face) 24978 24988 nil 24988 24989 (face font-lock-string-face) 24989 25035 (face font-lock-constant-face) 25035 25036 (face font-lock-string-face) 25036 25046 nil 25046 25047 (face font-lock-string-face) 25047 25070 (face font-lock-constant-face) 25070 25071 (face font-lock-string-face) 25071 25081 nil 25081 25082 (face font-lock-string-face) 25082 25104 (face font-lock-constant-face) 25104 25105 (face font-lock-string-face) 25105 
25115 nil 25115 25116 (face font-lock-string-face) 25116 25152 (face font-lock-constant-face) 25152 25153 (face font-lock-string-face) 25153 25163 nil 25163 25164 (face font-lock-string-face) 25164 25210 (face font-lock-constant-face) 25210 25211 (face font-lock-string-face) 25211 25221 nil 25221 25222 (face font-lock-string-face) 25222 25250 (face font-lock-constant-face) 25250 25251 (face font-lock-string-face) 25251 25268 nil 25268 25269 (face font-lock-string-face) 25269 25279 (face font-lock-keyword-face) 25279 25280 (face font-lock-string-face) 25280 25293 nil 25293 25294 (face font-lock-string-face) 25294 25319 (face font-lock-variable-name-face) 25319 25320 (face font-lock-string-face) 25320 25334 nil 25334 25335 (face font-lock-string-face) 25335 25345 (face font-lock-keyword-face) 25345 25346 (face font-lock-string-face) 25346 25363 nil 25363 25364 (face font-lock-string-face) 25364 25385 (face font-lock-variable-name-face) 25385 25386 (face font-lock-string-face) 25386 25404 nil 25404 25405 (face font-lock-string-face) 25405 25417 (face font-lock-keyword-face) 25417 25418 (face font-lock-string-face) 25418 25438 nil 25438 25439 (face font-lock-string-face) 25439 25480 (face font-lock-function-name-face) 25480 25481 (face font-lock-string-face) 25481 25550 nil 25550 25551 (face font-lock-string-face) 25551 25566 (face font-lock-variable-name-face) 25566 25567 (face font-lock-string-face) 25567 25581 nil 25581 25582 (face font-lock-string-face) 25582 25594 (face font-lock-keyword-face) 25594 25595 (face font-lock-string-face) 25595 25611 nil 25611 25612 (face font-lock-string-face) 25612 25651 (face font-lock-function-name-face) 25651 25652 (face font-lock-string-face) 25652 25688 nil 25688 25689 (face font-lock-string-face) 25689 25704 (face font-lock-variable-name-face) 25704 25705 (face font-lock-string-face) 25705 25719 nil 25719 25720 (face font-lock-string-face) 25720 25728 (face font-lock-keyword-face) 25728 25729 (face font-lock-string-face) 25729 25745 nil 25745 25746 (face font-lock-string-face) 25746 25782 (face font-lock-constant-face) 25782 25783 (face font-lock-string-face) 25783 25797 nil 25797 25798 (face font-lock-string-face) 25798 25820 (face font-lock-constant-face) 25820 25821 (face font-lock-string-face) 25821 25835 nil 25835 25836 (face font-lock-string-face) 25836 25857 (face font-lock-constant-face) 25857 25858 (face font-lock-string-face) 25858 25872 nil 25872 25873 (face font-lock-string-face) 25873 25905 (face font-lock-constant-face) 25905 25906 (face font-lock-string-face) 25906 25920 nil 25920 25921 (face font-lock-string-face) 25921 25961 (face font-lock-constant-face) 25961 25962 (face font-lock-string-face) 25962 25976 nil 25976 25977 (face font-lock-string-face) 25977 26016 (face font-lock-constant-face) 26016 26017 (face font-lock-string-face) 26017 26031 nil 26031 26032 (face font-lock-string-face) 26032 26065 (face font-lock-constant-face) 26065 26066 (face font-lock-string-face) 26066 26080 nil 26080 26081 (face font-lock-string-face) 26081 26115 (face font-lock-constant-face) 26115 26116 (face font-lock-string-face) 26116 26130 nil 26130 26131 (face font-lock-string-face) 26131 26162 (face font-lock-constant-face) 26162 26163 (face font-lock-string-face) 26163 26177 nil 26177 26178 (face font-lock-string-face) 26178 26229 (face font-lock-constant-face) 26229 26230 (face font-lock-string-face) 26230 26244 nil 26244 26245 (face font-lock-string-face) 26245 26285 (face font-lock-constant-face) 26285 26286 (face font-lock-string-face) 26286 
26300 nil 26300 26301 (face font-lock-string-face) 26301 26337 (face font-lock-constant-face) 26337 26338 (face font-lock-string-face) 26338 26352 nil 26352 26353 (face font-lock-string-face) 26353 26394 (face font-lock-constant-face) 26394 26395 (face font-lock-string-face) 26395 26409 nil 26409 26410 (face font-lock-string-face) 26410 26443 (face font-lock-constant-face) 26443 26444 (face font-lock-string-face) 26444 26458 nil 26458 26459 (face font-lock-string-face) 26459 26495 (face font-lock-constant-face) 26495 26496 (face font-lock-string-face) 26496 26532 nil 26532 26533 (face font-lock-string-face) 26533 26546 (face font-lock-variable-name-face) 26546 26547 (face font-lock-string-face) 26547 26561 nil 26561 26562 (face font-lock-string-face) 26562 26572 (face font-lock-keyword-face) 26572 26573 (face font-lock-string-face) 26573 26590 nil 26590 26591 (face font-lock-string-face) 26591 26604 (face font-lock-variable-name-face) 26604 26605 (face font-lock-string-face) 26605 26623 nil 26623 26624 (face font-lock-string-face) 26624 26631 (face font-lock-keyword-face) 26631 26632 (face font-lock-string-face) 26632 26652 nil 26652 26653 (face font-lock-string-face) 26653 26688 (face font-lock-constant-face) 26688 26689 (face font-lock-string-face) 26689 26722 nil 26722 26723 (face font-lock-string-face) 26723 26730 (face font-lock-keyword-face) 26730 26731 (face font-lock-string-face) 26731 26751 nil 26751 26752 (face font-lock-string-face) 26752 26760 (face font-lock-preprocessor-face) 26760 26761 (face font-lock-string-face) 26761 26831 nil 26831 26832 (face font-lock-string-face) 26832 26873 (face font-lock-variable-name-face) 26873 26874 (face font-lock-string-face) 26874 26888 nil 26888 26889 (face font-lock-string-face) 26889 26896 (face font-lock-keyword-face) 26896 26897 (face font-lock-string-face) 26897 26913 nil 26913 26914 (face font-lock-string-face) 26914 26954 (face font-lock-constant-face) 26954 26955 (face font-lock-string-face) 26955 26991 nil 26991 26992 (face font-lock-string-face) 26992 27035 (face font-lock-variable-name-face) 27035 27036 (face font-lock-string-face) 27036 27050 nil 27050 27051 (face font-lock-string-face) 27051 27058 (face font-lock-keyword-face) 27058 27059 (face font-lock-string-face) 27059 27075 nil 27075 27076 (face font-lock-string-face) 27076 27095 (face font-lock-constant-face) 27095 27096 (face font-lock-string-face) 27096 27110 nil 27110 27111 (face font-lock-string-face) 27111 27137 (face font-lock-constant-face) 27137 27138 (face font-lock-string-face) 27138 27152 nil 27152 27153 (face font-lock-string-face) 27153 27186 (face font-lock-constant-face) 27186 27187 (face font-lock-string-face) 27187 27201 nil 27201 27202 (face font-lock-string-face) 27202 27235 (face font-lock-constant-face) 27235 27236 (face font-lock-string-face) 27236 27291 nil 27291 27292 (face font-lock-string-face) 27292 27303 (face font-lock-keyword-face) 27303 27304 (face font-lock-string-face) 27304 27306 nil 27306 27307 (face font-lock-string-face) 27307 27325 (face font-lock-function-name-face) 27325 27326 (face font-lock-string-face) 27326 27334 nil 27334 27335 (face font-lock-string-face) 27335 27339 (face font-lock-keyword-face) 27339 27340 (face font-lock-string-face) 27340 27342 nil 27342 27343 (face font-lock-string-face) 27343 27357 (face font-lock-type-face) 27357 27358 (face font-lock-string-face) 27358 27366 nil 27366 27367 (face font-lock-string-face) 27367 27379 (face font-lock-keyword-face) 27379 27380 (face font-lock-string-face) 27380 27392 nil 
27392 27393 (face font-lock-string-face) 27393 27398 (face font-lock-function-name-face) 27398 27399 (face font-lock-string-face) 27399 27409 nil 27409 27410 (face font-lock-string-face) 27410 27431 (face font-lock-function-name-face) 27431 27432 (face font-lock-string-face) 27432 27442 nil 27442 27443 (face font-lock-string-face) 27443 27469 (face font-lock-function-name-face) 27469 27470 (face font-lock-string-face) 27470 27480 nil 27480 27481 (face font-lock-string-face) 27481 27507 (face font-lock-function-name-face) 27507 27508 (face font-lock-string-face) 27508 27525 nil 27525 27526 (face font-lock-string-face) 27526 27533 (face font-lock-keyword-face) 27533 27534 (face font-lock-string-face) 27534 27546 nil 27546 27547 (face font-lock-string-face) 27547 27591 (face font-lock-constant-face) 27591 27592 (face font-lock-string-face) 27592 27602 nil 27602 27603 (face font-lock-string-face) 27603 27646 (face font-lock-constant-face) 27646 27647 (face font-lock-string-face) 27647 27657 nil 27657 27658 (face font-lock-string-face) 27658 27679 (face font-lock-constant-face) 27679 27680 (face font-lock-string-face) 27680 27690 nil 27690 27691 (face font-lock-string-face) 27691 27711 (face font-lock-constant-face) 27711 27712 (face font-lock-string-face) 27712 27722 nil 27722 27723 (face font-lock-string-face) 27723 27752 (face font-lock-constant-face) 27752 27753 (face font-lock-string-face) 27753 27763 nil 27763 27764 (face font-lock-string-face) 27764 27792 (face font-lock-constant-face) 27792 27793 (face font-lock-string-face) 27793 27803 nil 27803 27804 (face font-lock-string-face) 27804 27829 (face font-lock-constant-face) 27829 27830 (face font-lock-string-face) 27830 27840 nil 27840 27841 (face font-lock-string-face) 27841 27865 (face font-lock-constant-face) 27865 27866 (face font-lock-string-face) 27866 27876 nil 27876 27877 (face font-lock-string-face) 27877 27901 (face font-lock-constant-face) 27901 27902 (face font-lock-string-face) 27902 27912 nil 27912 27913 (face font-lock-string-face) 27913 27936 (face font-lock-constant-face) 27936 27937 (face font-lock-string-face) 27937 27947 nil 27947 27948 (face font-lock-string-face) 27948 27968 (face font-lock-constant-face) 27968 27969 (face font-lock-string-face) 27969 27979 nil 27979 27980 (face font-lock-string-face) 27980 27999 (face font-lock-constant-face) 27999 28000 (face font-lock-string-face) 28000 28030 nil 28030 28031 (face font-lock-string-face) 28031 28042 (face font-lock-keyword-face) 28042 28043 (face font-lock-string-face) 28043 28045 nil 28045 28046 (face font-lock-string-face) 28046 28058 (face font-lock-function-name-face) 28058 28059 (face font-lock-string-face) 28059 28067 nil 28067 28068 (face font-lock-string-face) 28068 28072 (face font-lock-keyword-face) 28072 28073 (face font-lock-string-face) 28073 28075 nil 28075 28076 (face font-lock-string-face) 28076 28086 (face font-lock-type-face) 28086 28087 (face font-lock-string-face) 28087 28095 nil 28095 28096 (face font-lock-string-face) 28096 28108 (face font-lock-keyword-face) 28108 28109 (face font-lock-string-face) 28109 28121 nil 28121 28122 (face font-lock-string-face) 28122 28127 (face font-lock-function-name-face) 28127 28128 (face font-lock-string-face) 28128 28138 nil 28138 28139 (face font-lock-string-face) 28139 28150 (face font-lock-function-name-face) 28150 28151 (face font-lock-string-face) 28151 28161 nil 28161 28162 (face font-lock-string-face) 28162 28183 (face font-lock-function-name-face) 28183 28184 (face font-lock-string-face) 28184 28194 
nil 28194 28195 (face font-lock-string-face) 28195 28216 (face font-lock-function-name-face) 28216 28217 (face font-lock-string-face) 28217 28234 nil 28234 28235 (face font-lock-string-face) 28235 28242 (face font-lock-keyword-face) 28242 28243 (face font-lock-string-face) 28243 28255 nil 28255 28256 (face font-lock-string-face) 28256 28290 (face font-lock-constant-face) 28290 28291 (face font-lock-string-face) 28291 28321 nil 28321 28322 (face font-lock-string-face) 28322 28333 (face font-lock-keyword-face) 28333 28334 (face font-lock-string-face) 28334 28336 nil 28336 28337 (face font-lock-string-face) 28337 28349 (face font-lock-function-name-face) 28349 28350 (face font-lock-string-face) 28350 28358 nil 28358 28359 (face font-lock-string-face) 28359 28363 (face font-lock-keyword-face) 28363 28364 (face font-lock-string-face) 28364 28366 nil 28366 28367 (face font-lock-string-face) 28367 28377 (face font-lock-type-face) 28377 28378 (face font-lock-string-face) 28378 28386 nil 28386 28387 (face font-lock-string-face) 28387 28394 (face font-lock-keyword-face) 28394 28395 (face font-lock-string-face) 28395 28407 nil 28407 28408 (face font-lock-string-face) 28408 28441 (face font-lock-constant-face) 28441 28442 (face font-lock-string-face) 28442 28471 nil 28471 28472 (face font-lock-string-face) 28472 28483 (face font-lock-keyword-face) 28483 28484 (face font-lock-string-face) 28484 28486 nil 28486 28487 (face font-lock-string-face) 28487 28498 (face font-lock-function-name-face) 28498 28499 (face font-lock-string-face) 28499 28507 nil 28507 28508 (face font-lock-string-face) 28508 28512 (face font-lock-keyword-face) 28512 28513 (face font-lock-string-face) 28513 28515 nil 28515 28516 (face font-lock-string-face) 28516 28526 (face font-lock-type-face) 28526 28527 (face font-lock-string-face) 28527 28535 nil 28535 28536 (face font-lock-string-face) 28536 28548 (face font-lock-keyword-face) 28548 28549 (face font-lock-string-face) 28549 28561 nil 28561 28562 (face font-lock-string-face) 28562 28567 (face font-lock-function-name-face) 28567 28568 (face font-lock-string-face) 28568 28578 nil 28578 28579 (face font-lock-string-face) 28579 28600 (face font-lock-function-name-face) 28600 28601 (face font-lock-string-face) 28601 28618 nil 28618 28619 (face font-lock-string-face) 28619 28626 (face font-lock-keyword-face) 28626 28627 (face font-lock-string-face) 28627 28639 nil 28639 28640 (face font-lock-string-face) 28640 28672 (face font-lock-constant-face) 28672 28673 (face font-lock-string-face) 28673 28698 nil 28698 28699 (face font-lock-string-face) 28699 28709 (face font-lock-keyword-face) 28709 28710 (face font-lock-string-face) 28710 28719 nil 28719 28720 (face font-lock-string-face) 28720 28729 (face font-lock-variable-name-face) 28729 28730 (face font-lock-string-face) 28730 28740 nil 28740 28741 (face font-lock-string-face) 28741 28748 (face font-lock-keyword-face) 28748 28749 (face font-lock-string-face) 28749 28773 nil 28773 28774 (face font-lock-string-face) 28774 28785 (face font-lock-keyword-face) 28785 28786 (face font-lock-string-face) 28786 28788 nil 28788 28789 (face font-lock-string-face) 28789 28799 (face font-lock-function-name-face) 28799 28800 (face font-lock-string-face) 28800 28812 nil 28812 28813 (face font-lock-string-face) 28813 28817 (face font-lock-keyword-face) 28817 28818 (face font-lock-string-face) 28818 28820 nil 28820 28821 (face font-lock-string-face) 28821 28831 (face font-lock-type-face) 28831 28832 (face font-lock-string-face) 28832 28844 nil 28844 28845 
(face font-lock-string-face) 28845 28857 (face font-lock-keyword-face) 28857 28858 (face font-lock-string-face) 28858 28874 nil 28874 28875 (face font-lock-string-face) 28875 28880 (face font-lock-function-name-face) 28880 28881 (face font-lock-string-face) 28881 28895 nil 28895 28896 (face font-lock-string-face) 28896 28907 (face font-lock-function-name-face) 28907 28908 (face font-lock-string-face) 28908 28922 nil 28922 28923 (face font-lock-string-face) 28923 28944 (face font-lock-function-name-face) 28944 28945 (face font-lock-string-face) 28945 28959 nil 28959 28960 (face font-lock-string-face) 28960 29043 (face font-lock-function-name-face) 29043 29044 (face font-lock-string-face) 29044 29058 nil 29058 29059 (face font-lock-string-face) 29059 29074 (face font-lock-function-name-face) 29074 29075 (face font-lock-string-face) 29075 29100 nil 29100 29101 (face font-lock-string-face) 29101 29113 (face font-lock-keyword-face) 29113 29114 (face font-lock-string-face) 29114 29130 nil 29130 29131 (face font-lock-string-face) 29131 29133 (face font-lock-constant-face) 29133 29138 (face font-lock-variable-name-face) 29138 29163 (face font-lock-constant-face) 29163 29164 (face font-lock-string-face) 29164 29189 nil 29189 29190 (face font-lock-string-face) 29190 29197 (face font-lock-keyword-face) 29197 29198 (face font-lock-string-face) 29198 29214 nil 29214 29215 (face font-lock-string-face) 29215 29238 (face font-lock-constant-face) 29238 29239 (face font-lock-string-face) 29239 29253 nil 29253 29254 (face font-lock-string-face) 29254 29280 (face font-lock-constant-face) 29280 29281 (face font-lock-string-face) 29281 29295 nil 29295 29296 (face font-lock-string-face) 29296 29321 (face font-lock-constant-face) 29321 29322 (face font-lock-string-face) 29322 29336 nil 29336 29337 (face font-lock-string-face) 29337 29361 (face font-lock-constant-face) 29361 29362 (face font-lock-string-face) 29362 29376 nil 29376 29377 (face font-lock-string-face) 29377 29407 (face font-lock-constant-face) 29407 29408 (face font-lock-string-face) 29408 29422 nil 29422 29423 (face font-lock-string-face) 29423 29453 (face font-lock-constant-face) 29453 29454 (face font-lock-string-face) 29454 29468 nil 29468 29469 (face font-lock-string-face) 29469 29493 (face font-lock-constant-face) 29493 29494 (face font-lock-string-face) 29494 29508 nil 29508 29509 (face font-lock-string-face) 29509 29532 (face font-lock-constant-face) 29532 29533 (face font-lock-string-face) 29533 29547 nil 29547 29548 (face font-lock-string-face) 29548 29575 (face font-lock-constant-face) 29575 29576 (face font-lock-string-face) 29576 29590 nil 29590 29591 (face font-lock-string-face) 29591 29614 (face font-lock-constant-face) 29614 29615 (face font-lock-string-face) 29615 29640 nil 29640 29655 (face font-lock-string-face) 29655 29671 nil 29671 29685 (face font-lock-string-face) 29685 29703 nil 29703 29714 (face font-lock-string-face) 29714 29716 nil 29716 29719 (face font-lock-string-face) 29719 29729 nil 29729 29754 (face font-lock-comment-face) 29754 29792 nil 29792 29793 (face font-lock-string-face) 29793 29800 (face font-lock-keyword-face) 29800 29801 (face font-lock-string-face) 29801 29817 nil 29817 29818 (face font-lock-string-face) 29818 29843 (face font-lock-preprocessor-face) 29843 29844 (face font-lock-string-face) 29844 29892 nil 29892 29893 (face font-lock-string-face) 29893 29929 (face font-lock-variable-name-face) 29929 29930 (face font-lock-string-face) 29930 29940 nil 29940 29941 (face font-lock-string-face) 29941 29948 
(face font-lock-keyword-face) 29948 29949 (face font-lock-string-face) 29949 29973 nil 29973 29974 (face font-lock-string-face) 29974 29985 (face font-lock-keyword-face) 29985 29986 (face font-lock-string-face) 29986 29988 nil 29988 29989 (face font-lock-string-face) 29989 30001 (face font-lock-function-name-face) 30001 30002 (face font-lock-string-face) 30002 30014 nil 30014 30015 (face font-lock-string-face) 30015 30019 (face font-lock-keyword-face) 30019 30020 (face font-lock-string-face) 30020 30022 nil 30022 30023 (face font-lock-string-face) 30023 30033 (face font-lock-type-face) 30033 30034 (face font-lock-string-face) 30034 30046 nil 30046 30047 (face font-lock-string-face) 30047 30059 (face font-lock-keyword-face) 30059 30060 (face font-lock-string-face) 30060 30076 nil 30076 30077 (face font-lock-string-face) 30077 30082 (face font-lock-function-name-face) 30082 30083 (face font-lock-string-face) 30083 30097 nil 30097 30098 (face font-lock-string-face) 30098 30109 (face font-lock-function-name-face) 30109 30110 (face font-lock-string-face) 30110 30124 nil 30124 30125 (face font-lock-string-face) 30125 30146 (face font-lock-function-name-face) 30146 30147 (face font-lock-string-face) 30147 30161 nil 30161 30162 (face font-lock-string-face) 30162 30180 (face font-lock-function-name-face) 30180 30181 (face font-lock-string-face) 30181 30206 nil 30206 30207 (face font-lock-string-face) 30207 30214 (face font-lock-keyword-face) 30214 30215 (face font-lock-string-face) 30215 30231 nil 30231 30232 (face font-lock-string-face) 30232 30266 (face font-lock-constant-face) 30266 30267 (face font-lock-string-face) 30267 30281 nil 30281 30282 (face font-lock-string-face) 30282 30321 (face font-lock-constant-face) 30321 30322 (face font-lock-string-face) 30322 30336 nil 30336 30337 (face font-lock-string-face) 30337 30375 (face font-lock-constant-face) 30375 30376 (face font-lock-string-face) 30376 30390 nil 30390 30391 (face font-lock-string-face) 30391 30430 (face font-lock-constant-face) 30430 30431 (face font-lock-string-face) 30431 30445 nil 30445 30446 (face font-lock-string-face) 30446 30484 (face font-lock-constant-face) 30484 30485 (face font-lock-string-face) 30485 30499 nil 30499 30500 (face font-lock-string-face) 30500 30533 (face font-lock-constant-face) 30533 30534 (face font-lock-string-face) 30534 30548 nil 30548 30549 (face font-lock-string-face) 30549 30581 (face font-lock-constant-face) 30581 30582 (face font-lock-string-face) 30582 30596 nil 30596 30597 (face font-lock-string-face) 30597 30626 (face font-lock-constant-face) 30626 30627 (face font-lock-string-face) 30627 30641 nil 30641 30642 (face font-lock-string-face) 30642 30670 (face font-lock-constant-face) 30670 30671 (face font-lock-string-face) 30671 30685 nil 30685 30686 (face font-lock-string-face) 30686 30714 (face font-lock-constant-face) 30714 30715 (face font-lock-string-face) 30715 30729 nil 30729 30730 (face font-lock-string-face) 30730 30757 (face font-lock-constant-face) 30757 30758 (face font-lock-string-face) 30758 30783 nil 30783 30784 (face font-lock-string-face) 30784 30794 (face font-lock-keyword-face) 30794 30795 (face font-lock-string-face) 30795 30812 nil 30812 30813 (face font-lock-string-face) 30813 30834 (face font-lock-variable-name-face) 30834 30835 (face font-lock-string-face) 30835 30853 nil 30853 30854 (face font-lock-string-face) 30854 30866 (face font-lock-keyword-face) 30866 30867 (face font-lock-string-face) 30867 30887 nil 30887 30888 (face font-lock-string-face) 30888 30917 (face 
font-lock-function-name-face) 30917 30918 (face font-lock-string-face) 30918 30951 nil 30951 30952 (face font-lock-string-face) 30952 30959 (face font-lock-keyword-face) 30959 30960 (face font-lock-string-face) 30960 30980 nil 30980 30981 (face font-lock-string-face) 30981 31015 (face font-lock-constant-face) 31015 31016 (face font-lock-string-face) 31016 31064 nil 31064 31065 (face font-lock-string-face) 31065 31074 (face font-lock-variable-name-face) 31074 31075 (face font-lock-string-face) 31075 31093 nil 31093 31094 (face font-lock-string-face) 31094 31106 (face font-lock-keyword-face) 31106 31107 (face font-lock-string-face) 31107 31127 nil 31127 31128 (face font-lock-string-face) 31128 31175 (face font-lock-function-name-face) 31175 31176 (face font-lock-string-face) 31176 31194 nil 31194 31195 (face font-lock-string-face) 31195 31245 (face font-lock-function-name-face) 31245 31246 (face font-lock-string-face) 31246 31279 nil 31279 31280 (face font-lock-string-face) 31280 31287 (face font-lock-keyword-face) 31287 31288 (face font-lock-string-face) 31288 31308 nil 31308 31309 (face font-lock-string-face) 31309 31341 (face font-lock-constant-face) 31341 31342 (face font-lock-string-face) 31342 31423 nil 31423 31424 (face font-lock-string-face) 31424 31462 (face font-lock-variable-name-face) 31462 31463 (face font-lock-string-face) 31463 31473 nil 31473 31474 (face font-lock-string-face) 31474 31481 (face font-lock-keyword-face) 31481 31482 (face font-lock-string-face) 31482 31506 nil 31506 31507 (face font-lock-string-face) 31507 31518 (face font-lock-keyword-face) 31518 31519 (face font-lock-string-face) 31519 31521 nil 31521 31522 (face font-lock-string-face) 31522 31539 (face font-lock-function-name-face) 31539 31540 (face font-lock-string-face) 31540 31552 nil 31552 31553 (face font-lock-string-face) 31553 31557 (face font-lock-keyword-face) 31557 31558 (face font-lock-string-face) 31558 31560 nil 31560 31561 (face font-lock-string-face) 31561 31571 (face font-lock-type-face) 31571 31572 (face font-lock-string-face) 31572 31584 nil 31584 31585 (face font-lock-string-face) 31585 31597 (face font-lock-keyword-face) 31597 31598 (face font-lock-string-face) 31598 31614 nil 31614 31615 (face font-lock-string-face) 31615 31636 (face font-lock-function-name-face) 31636 31637 (face font-lock-string-face) 31637 31651 nil 31651 31652 (face font-lock-string-face) 31652 31670 (face font-lock-function-name-face) 31670 31671 (face font-lock-string-face) 31671 31696 nil 31696 31697 (face font-lock-string-face) 31697 31706 (face font-lock-keyword-face) 31706 31707 (face font-lock-string-face) 31707 31723 nil 31723 31724 (face font-lock-string-face) 31724 31728 (face font-lock-constant-face) 31728 31729 (face font-lock-string-face) 31729 31743 nil 31743 31744 (face font-lock-string-face) 31744 31748 (face font-lock-constant-face) 31748 31749 (face font-lock-string-face) 31749 31774 nil 31774 31775 (face font-lock-string-face) 31775 31782 (face font-lock-keyword-face) 31782 31783 (face font-lock-string-face) 31783 31799 nil 31799 31800 (face font-lock-string-face) 31800 31844 (face font-lock-constant-face) 31844 31845 (face font-lock-string-face) 31845 31893 nil 31893 31894 (face font-lock-string-face) 31894 31943 (face font-lock-variable-name-face) 31943 31944 (face font-lock-string-face) 31944 31954 nil 31954 31955 (face font-lock-string-face) 31955 31962 (face font-lock-keyword-face) 31962 31963 (face font-lock-string-face) 31963 31987 nil 31987 31988 (face font-lock-string-face) 31988 31999 
(face font-lock-keyword-face) 31999 32000 (face font-lock-string-face) 32000 32002 nil 32002 32003 (face font-lock-string-face) 32003 32013 (face font-lock-function-name-face) 32013 32014 (face font-lock-string-face) 32014 32026 nil 32026 32027 (face font-lock-string-face) 32027 32031 (face font-lock-keyword-face) 32031 32032 (face font-lock-string-face) 32032 32034 nil 32034 32035 (face font-lock-string-face) 32035 32045 (face font-lock-type-face) 32045 32046 (face font-lock-string-face) 32046 32058 nil 32058 32059 (face font-lock-string-face) 32059 32071 (face font-lock-keyword-face) 32071 32072 (face font-lock-string-face) 32072 32088 nil 32088 32089 (face font-lock-string-face) 32089 32094 (face font-lock-function-name-face) 32094 32095 (face font-lock-string-face) 32095 32109 nil 32109 32110 (face font-lock-string-face) 32110 32121 (face font-lock-function-name-face) 32121 32122 (face font-lock-string-face) 32122 32136 nil 32136 32137 (face font-lock-string-face) 32137 32158 (face font-lock-function-name-face) 32158 32159 (face font-lock-string-face) 32159 32173 nil 32173 32174 (face font-lock-string-face) 32174 32192 (face font-lock-function-name-face) 32192 32193 (face font-lock-string-face) 32193 32218 nil 32218 32219 (face font-lock-string-face) 32219 32232 (face font-lock-keyword-face) 32232 32233 (face font-lock-string-face) 32233 32249 nil 32249 32250 (face font-lock-string-face) 32250 32259 (face font-lock-keyword-face) 32259 32260 (face font-lock-string-face) 32260 32278 nil 32278 32279 (face font-lock-string-face) 32279 32283 (face font-lock-constant-face) 32283 32284 (face font-lock-string-face) 32284 32300 nil 32300 32301 (face font-lock-string-face) 32301 32306 (face font-lock-constant-face) 32306 32307 (face font-lock-string-face) 32307 32323 nil 32323 32324 (face font-lock-string-face) 32324 32333 (face font-lock-constant-face) 32333 32334 (face font-lock-string-face) 32334 32350 nil 32350 32351 (face font-lock-string-face) 32351 32357 (face font-lock-constant-face) 32357 32358 (face font-lock-string-face) 32358 32398 nil 32398 32399 (face font-lock-string-face) 32399 32406 (face font-lock-keyword-face) 32406 32407 (face font-lock-string-face) 32407 32423 nil 32423 32424 (face font-lock-string-face) 32424 32462 (face font-lock-constant-face) 32462 32463 (face font-lock-string-face) 32463 32477 nil 32477 32478 (face font-lock-string-face) 32478 32515 (face font-lock-constant-face) 32515 32516 (face font-lock-string-face) 32516 32530 nil 32530 32531 (face font-lock-string-face) 32531 32568 (face font-lock-constant-face) 32568 32569 (face font-lock-string-face) 32569 32583 nil 32583 32584 (face font-lock-string-face) 32584 32620 (face font-lock-constant-face) 32620 32621 (face font-lock-string-face) 32621 32635 nil 32635 32636 (face font-lock-string-face) 32636 32666 (face font-lock-constant-face) 32666 32667 (face font-lock-string-face) 32667 32681 nil 32681 32682 (face font-lock-string-face) 32682 32720 (face font-lock-constant-face) 32720 32721 (face font-lock-string-face) 32721 32735 nil 32735 32736 (face font-lock-string-face) 32736 32773 (face font-lock-constant-face) 32773 32774 (face font-lock-string-face) 32774 32822 nil 32822 32823 (face font-lock-string-face) 32823 32838 (face font-lock-variable-name-face) 32838 32839 (face font-lock-string-face) 32839 32849 nil 32849 32850 (face font-lock-string-face) 32850 32857 (face font-lock-keyword-face) 32857 32858 (face font-lock-string-face) 32858 32882 nil 32882 32883 (face font-lock-string-face) 32883 32894 (face 
[Remainder of a deleted Emacs font-lock test fixture elided: machine-generated face/position data (apparently from gyp/tools/emacs/testdata), soft-wrapped by extraction and not human-readable.]
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/graphviz.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/graphviz.py
deleted file mode 100755
index 326ae221cf..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/graphviz.py
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2011 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Using the JSON dumped by the dump-dependency-json generator,
-generate input suitable for graphviz to render a dependency graph of
-targets."""
-
-import collections
-import json
-import sys
-
-
-def ParseTarget(target):
- target, _, suffix = target.partition('#')
- filename, _, target = target.partition(':')
- return filename, target, suffix
-
-
-def LoadEdges(filename, targets):
- """Load the edges map from the dump file, and filter it to only
- show targets in |targets| and their dependencies."""
-
- file = open(filename)
- edges = json.load(file)
- file.close()
-
- # Copy out only the edges we're interested in from the full edge list.
- target_edges = {}
- to_visit = targets[:]
- while to_visit:
- src = to_visit.pop()
- if src in target_edges:
- continue
- target_edges[src] = edges[src]
- to_visit.extend(edges[src])
-
- return target_edges
-
-
-def WriteGraph(edges):
- """Print a graphviz graph to stdout.
- |edges| is a map of target to a list of other targets it depends on."""
-
- # Bucket targets by file.
- files = collections.defaultdict(list)
- for src, dst in edges.items():
- build_file, target_name, toolset = ParseTarget(src)
- files[build_file].append(src)
-
- print 'digraph D {'
- print ' fontsize=8' # Used by subgraphs.
- print ' node [fontsize=8]'
-
- # Output nodes by file. We must first write out each node within
- # its file grouping before writing out any edges that may refer
- # to those nodes.
- for filename, targets in files.items():
- if len(targets) == 1:
- # If there's only one node for this file, simplify
- # the display by making it a box without an internal node.
- target = targets[0]
- build_file, target_name, toolset = ParseTarget(target)
- print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename,
- target_name)
- else:
- # Group multiple nodes together in a subgraph.
- print ' subgraph "cluster_%s" {' % filename
- print ' label = "%s"' % filename
- for target in targets:
- build_file, target_name, toolset = ParseTarget(target)
- print ' "%s" [label="%s"]' % (target, target_name)
- print ' }'
-
- # Now that we've placed all the nodes within subgraphs, output all
- # the edges between nodes.
- for src, dsts in edges.items():
- for dst in dsts:
- print ' "%s" -> "%s"' % (src, dst)
-
- print '}'
-
-
-def main():
- if len(sys.argv) < 2:
- print >>sys.stderr, __doc__
- print >>sys.stderr
- print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0])
- return 1
-
- edges = LoadEdges('dump.json', sys.argv[1:])
-
- WriteGraph(edges)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
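
The tool deleted above turned the edge map from gyp's dump-dependency-json generator into DOT input. As a minimal sketch of that emission step, assuming an already-loaded edges map (the sample targets below are made up, and the helper name write_dot is not part of the original tool):

    import collections

    def write_dot(edges):
        # |edges| maps "build_file:target#toolset" -> targets it depends on.
        files = collections.defaultdict(list)
        for src in edges:
            files[src.partition(':')[0]].append(src)
        print('digraph D {')
        # Group nodes per build file, as the original did with subgraphs.
        for build_file, targets in files.items():
            print('  subgraph "cluster_%s" {' % build_file)
            print('    label = "%s"' % build_file)
            for target in targets:
                print('    "%s"' % target)
            print('  }')
        # Emit edges only after every node has been placed in its cluster.
        for src, dsts in edges.items():
            for dst in dsts:
                print('  "%s" -> "%s"' % (src, dst))
        print('}')

    write_dot({'a.gyp:app#target': ['a.gyp:lib#target'],
               'a.gyp:lib#target': []})

Piping that output through dot -Tpng should yield the dependency picture the original script was written for.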
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_gyp.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_gyp.py
deleted file mode 100755
index c51d35872c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_gyp.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Pretty-prints the contents of a GYP file."""
-
-import sys
-import re
-
-
-# Regex to remove comments when we're counting braces.
-COMMENT_RE = re.compile(r'\s*#.*')
-
-# Regex to remove quoted strings when we're counting braces.
-# It takes into account quoted quotes, and makes sure that the quotes match.
-# NOTE: It does not handle quotes that span more than one line, or
-# cases where an escaped quote is preceded by an escaped backslash.
-QUOTE_RE_STR = r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)'
-QUOTE_RE = re.compile(QUOTE_RE_STR)
-
-
-def comment_replace(matchobj):
- return matchobj.group(1) + matchobj.group(2) + '#' * len(matchobj.group(3))
-
-
-def mask_comments(input):
- """Mask the quoted strings so we skip braces inside quoted strings."""
- search_re = re.compile(r'(.*?)(#)(.*)')
- return [search_re.sub(comment_replace, line) for line in input]
-
-
-def quote_replace(matchobj):
- return "%s%s%s%s" % (matchobj.group(1),
- matchobj.group(2),
- 'x'*len(matchobj.group(3)),
- matchobj.group(2))
-
-
-def mask_quotes(input):
- """Mask the quoted strings so we skip braces inside quoted strings."""
- search_re = re.compile(r'(.*?)' + QUOTE_RE_STR)
- return [search_re.sub(quote_replace, line) for line in input]
-
-
-def do_split(input, masked_input, search_re):
- output = []
- mask_output = []
- for (line, masked_line) in zip(input, masked_input):
- m = search_re.match(masked_line)
- while m:
- split = len(m.group(1))
- line = line[:split] + r'\n' + line[split:]
- masked_line = masked_line[:split] + r'\n' + masked_line[split:]
- m = search_re.match(masked_line)
- output.extend(line.split(r'\n'))
- mask_output.extend(masked_line.split(r'\n'))
- return (output, mask_output)
-
-
-def split_double_braces(input):
- """Masks out the quotes and comments, and then splits appropriate
- lines (lines that match the double_*_brace re's above) before
- indenting them below.
-
- These are used to split lines which have multiple braces on them, so
- that the indentation looks prettier when all laid out (e.g. closing
- braces make a nice diagonal line).
- """
- double_open_brace_re = re.compile(r'(.*?[\[\{\(,])(\s*)([\[\{\(])')
- double_close_brace_re = re.compile(r'(.*?[\]\}\)],?)(\s*)([\]\}\)])')
-
- masked_input = mask_quotes(input)
- masked_input = mask_comments(masked_input)
-
- (output, mask_output) = do_split(input, masked_input, double_open_brace_re)
- (output, mask_output) = do_split(output, mask_output, double_close_brace_re)
-
- return output
-
-
-def count_braces(line):
- """keeps track of the number of braces on a given line and returns the result.
-
- It starts at zero and subtracts for closed braces, and adds for open braces.
- """
- open_braces = ['[', '(', '{']
- close_braces = [']', ')', '}']
- closing_prefix_re = re.compile(r'(.*?[^\s\]\}\)]+.*?)([\]\}\)],?)\s*$')
- cnt = 0
- stripline = COMMENT_RE.sub(r'', line)
- stripline = QUOTE_RE.sub(r"''", stripline)
- for char in stripline:
- for brace in open_braces:
- if char == brace:
- cnt += 1
- for brace in close_braces:
- if char == brace:
- cnt -= 1
-
- after = False
- if cnt > 0:
- after = True
-
- # This catches the special case of a closing brace having something
- # other than just whitespace ahead of it -- we don't want to
- # unindent that until after this line is printed so it stays with
- # the previous indentation level.
- if cnt < 0 and closing_prefix_re.match(stripline):
- after = True
- return (cnt, after)
-
-
-def prettyprint_input(lines):
- """Does the main work of indenting the input based on the brace counts."""
- indent = 0
- basic_offset = 2
- last_line = ""
- for line in lines:
- if COMMENT_RE.match(line):
- print line
- else:
- line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
- if len(line) > 0:
- (brace_diff, after) = count_braces(line)
- if brace_diff != 0:
- if after:
- print " " * (basic_offset * indent) + line
- indent += brace_diff
- else:
- indent += brace_diff
- print " " * (basic_offset * indent) + line
- else:
- print " " * (basic_offset * indent) + line
- else:
- print ""
- last_line = line
-
-
-def main():
- if len(sys.argv) > 1:
- data = open(sys.argv[1]).read().splitlines()
- else:
- data = sys.stdin.read().splitlines()
- # Split up the double braces.
- lines = split_double_braces(data)
-
- # Indent and print the output.
- prettyprint_input(lines)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
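
The masking trick above is the heart of the indenter: braces inside strings and comments must not affect the count. A small sketch reusing the exact COMMENT_RE and QUOTE_RE patterns from the deleted file (the sample line is invented):

    import re

    COMMENT_RE = re.compile(r'\s*#.*')
    QUOTE_RE = re.compile(r'(?P<q>[\'"])(.*?)(?<![^\\][\\])(?P=q)')

    def net_braces(line):
        # Drop comments, blank out string bodies, then count braces.
        stripped = COMMENT_RE.sub('', line)
        stripped = QUOTE_RE.sub("''", stripped)
        return (sum(stripped.count(c) for c in '[({') -
                sum(stripped.count(c) for c in '])}'))

    # The '{' inside the string and the '}' in the comment are both ignored.
    print(net_braces("'targets': [ { 'sources': [ 'a{.cc' ], # }"))  # -> 2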
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_sln.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_sln.py
deleted file mode 100755
index ca8cf4ad3f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_sln.py
+++ /dev/null
@@ -1,169 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Prints the information in a sln file in a diffable way.
-
- It first outputs each project in alphabetical order with its
- dependencies.
-
- Then it outputs a possible build order.
-"""
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
-
-import os
-import re
-import sys
-import pretty_vcproj
-
-def BuildProject(project, built, projects, deps):
- # If all dependencies are already built, we can build this project;
- # otherwise, build each missing dependency first.
- # This is not infinite-recursion proof.
- for dep in deps[project]:
- if dep not in built:
- BuildProject(dep, built, projects, deps)
- print project
- built.append(project)
-
-def ParseSolution(solution_file):
- # All projects, their clsid and paths.
- projects = dict()
-
- # A list of dependencies associated with a project.
- dependencies = dict()
-
- # Regular expressions that match the SLN format.
- # The first line of a project definition.
- begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
- r'}"\) = "(.*)", "(.*)", "(.*)"$')
- # The last line of a project definition.
- end_project = re.compile('^EndProject$')
- # The first line of a dependency list.
- begin_dep = re.compile(
- r'ProjectSection\(ProjectDependencies\) = postProject$')
- # The last line of a dependency list.
- end_dep = re.compile('EndProjectSection$')
- # A line describing a dependency.
- dep_line = re.compile(' *({.*}) = ({.*})$')
-
- in_deps = False
- solution = open(solution_file)
- for line in solution:
- results = begin_project.search(line)
- if results:
- # Hack to remove icu because the diff is too different.
- if results.group(1).find('icu') != -1:
- continue
- # We remove "_gyp" from the names because it helps to diff them.
- current_project = results.group(1).replace('_gyp', '')
- projects[current_project] = [results.group(2).replace('_gyp', ''),
- results.group(3),
- results.group(2)]
- dependencies[current_project] = []
- continue
-
- results = end_project.search(line)
- if results:
- current_project = None
- continue
-
- results = begin_dep.search(line)
- if results:
- in_deps = True
- continue
-
- results = end_dep.search(line)
- if results:
- in_deps = False
- continue
-
- results = dep_line.search(line)
- if results and in_deps and current_project:
- dependencies[current_project].append(results.group(1))
- continue
-
- # Change all dependency clsids to project names instead.
- for project in dependencies:
- # For each dependency in this project
- new_dep_array = []
- for dep in dependencies[project]:
- # Look for the project name matching this clsid
- for project_info in projects:
- if projects[project_info][1] == dep:
- new_dep_array.append(project_info)
- dependencies[project] = sorted(new_dep_array)
-
- return (projects, dependencies)
-
-def PrintDependencies(projects, deps):
- print "---------------------------------------"
- print "Dependencies for all projects"
- print "---------------------------------------"
- print "-- --"
-
- for (project, dep_list) in sorted(deps.items()):
- print "Project : %s" % project
- print "Path : %s" % projects[project][0]
- if dep_list:
- for dep in dep_list:
- print " - %s" % dep
- print ""
-
- print "-- --"
-
-def PrintBuildOrder(projects, deps):
- print "---------------------------------------"
- print "Build order "
- print "---------------------------------------"
- print "-- --"
-
- built = []
- for (project, _) in sorted(deps.items()):
- if project not in built:
- BuildProject(project, built, projects, deps)
-
- print "-- --"
-
-def PrintVCProj(projects):
-
- for project in projects:
- print "-------------------------------------"
- print "-------------------------------------"
- print project
- print project
- print project
- print "-------------------------------------"
- print "-------------------------------------"
-
- project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
- projects[project][2]))
-
- pretty = pretty_vcproj
- argv = [ '',
- project_path,
- '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
- ]
- argv.extend(sys.argv[3:])
- pretty.main(argv)
-
-def main():
- # Check that we got at least one parameter (the .sln path).
- if len(sys.argv) < 2:
- print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
- return 1
-
- (projects, deps) = ParseSolution(sys.argv[1])
- PrintDependencies(projects, deps)
- PrintBuildOrder(projects, deps)
-
- if '--recursive' in sys.argv:
- PrintVCProj(projects)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main())
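
For reference, the begin_project pattern above can be exercised directly; the project name, path, and GUID in this sample line are fabricated:

    import re

    begin_project = re.compile(
        r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
        r'}"\) = "(.*)", "(.*)", "(.*)"$')

    line = ('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = '
            '"foo_gyp", "foo.vcproj", "{DEADBEEF-0000-0000-0000-000000000000}"')
    m = begin_project.search(line)
    # group(1) = name, group(2) = path, group(3) = project GUID; the script
    # also strips the "_gyp" suffix to make solutions easier to diff.
    print(m.group(1).replace('_gyp', ''), m.group(2), m.group(3))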
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_vcproj.py b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
deleted file mode 100755
index 6099bd7cc4..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/gyp/tools/pretty_vcproj.py
+++ /dev/null
@@ -1,329 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Make the format of a vcproj really pretty.
-
- This script normalizes and sorts an XML file. It also fetches all the
- properties inside linked vsprops files and includes them explicitly in the vcproj.
-
- It outputs the resulting xml to stdout.
-"""
-
-__author__ = 'nsylvain (Nicolas Sylvain)'
-
-import os
-import sys
-
-from xml.dom.minidom import parse
-from xml.dom.minidom import Node
-
-REPLACEMENTS = dict()
-ARGUMENTS = None
-
-
-class CmpTuple(object):
- """Compare function between 2 tuple."""
- def __call__(self, x, y):
- return cmp(x[0], y[0])
-
-
-class CmpNode(object):
- """Compare function between 2 xml nodes."""
-
- def __call__(self, x, y):
- def get_string(node):
- node_string = "node"
- node_string += node.nodeName
- if node.nodeValue:
- node_string += node.nodeValue
-
- if node.attributes:
- # We first sort by name, if present.
- node_string += node.getAttribute("Name")
-
- all_nodes = []
- for (name, value) in node.attributes.items():
- all_nodes.append((name, value))
-
- all_nodes.sort(CmpTuple())
- for (name, value) in all_nodes:
- node_string += name
- node_string += value
-
- return node_string
-
- return cmp(get_string(x), get_string(y))
-
-
-def PrettyPrintNode(node, indent=0):
- if node.nodeType == Node.TEXT_NODE:
- if node.data.strip():
- print '%s%s' % (' '*indent, node.data.strip())
- return
-
- if node.childNodes:
- node.normalize()
- # Get the number of attributes
- attr_count = 0
- if node.attributes:
- attr_count = node.attributes.length
-
- # Print the main tag
- if attr_count == 0:
- print '%s<%s>' % (' '*indent, node.nodeName)
- else:
- print '%s<%s' % (' '*indent, node.nodeName)
-
- all_attributes = []
- for (name, value) in node.attributes.items():
- all_attributes.append((name, value))
- all_attributes.sort(CmpTuple())
- for (name, value) in all_attributes:
- print '%s %s="%s"' % (' '*indent, name, value)
- print '%s>' % (' '*indent)
- if node.nodeValue:
- print '%s %s' % (' '*indent, node.nodeValue)
-
- for sub_node in node.childNodes:
- PrettyPrintNode(sub_node, indent=indent+2)
- print '%s</%s>' % (' '*indent, node.nodeName)
-
-
-def FlattenFilter(node):
- """Returns a list of all the node and sub nodes."""
- node_list = []
-
- if (node.attributes and
- node.getAttribute('Name') == '_excluded_files'):
- # We don't add the "_excluded_files" filter.
- return []
-
- for current in node.childNodes:
- if current.nodeName == 'Filter':
- node_list.extend(FlattenFilter(current))
- else:
- node_list.append(current)
-
- return node_list
-
-
-def FixFilenames(filenames, current_directory):
- new_list = []
- for filename in filenames:
- if filename:
- for key in REPLACEMENTS:
- filename = filename.replace(key, REPLACEMENTS[key])
- os.chdir(current_directory)
- filename = filename.strip('"\' ')
- if filename.startswith('$'):
- new_list.append(filename)
- else:
- new_list.append(os.path.abspath(filename))
- return new_list
-
-
-def AbsoluteNode(node):
- """Makes all the properties we know about in this node absolute."""
- if node.attributes:
- for (name, value) in node.attributes.items():
- if name in ['InheritedPropertySheets', 'RelativePath',
- 'AdditionalIncludeDirectories',
- 'IntermediateDirectory', 'OutputDirectory',
- 'AdditionalLibraryDirectories']:
- # We want to fix up these paths
- path_list = value.split(';')
- new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
- node.setAttribute(name, ';'.join(new_list))
- if not value:
- node.removeAttribute(name)
-
-
-def CleanupVcproj(node):
- """For each sub node, we call recursively this function."""
- for sub_node in node.childNodes:
- AbsoluteNode(sub_node)
- CleanupVcproj(sub_node)
-
- # Normalize the node, and remove all extraneous whitespace.
- for sub_node in node.childNodes:
- if sub_node.nodeType == Node.TEXT_NODE:
- sub_node.data = sub_node.data.replace("\r", "")
- sub_node.data = sub_node.data.replace("\n", "")
- sub_node.data = sub_node.data.rstrip()
-
- # Sort all the semicolon-separated attribute value lists, and also
- # remove the duplicates.
- if node.attributes:
- for (name, value) in node.attributes.items():
- sorted_list = sorted(value.split(';'))
- unique_list = []
- for i in sorted_list:
- if not unique_list.count(i):
- unique_list.append(i)
- node.setAttribute(name, ';'.join(unique_list))
- if not value:
- node.removeAttribute(name)
-
- if node.childNodes:
- node.normalize()
-
- # For each node, take a copy, and remove it from the list.
- node_array = []
- while node.childNodes and node.childNodes[0]:
- # Take a copy of the node and remove it from the list.
- current = node.childNodes[0]
- node.removeChild(current)
-
- # If the child is a filter, we want to append all its children
- # to this same list.
- if current.nodeName == 'Filter':
- node_array.extend(FlattenFilter(current))
- else:
- node_array.append(current)
-
-
- # Sort the list.
- node_array.sort(CmpNode())
-
- # Insert the nodes in the correct order.
- for new_node in node_array:
- # But don't append empty Tool nodes.
- if new_node.nodeName == 'Tool':
- if new_node.attributes and new_node.attributes.length == 1:
- # This one was empty.
- continue
- if new_node.nodeName == 'UserMacro':
- continue
- node.appendChild(new_node)
-
-
-def GetConfiguationNodes(vcproj):
- #TODO(nsylvain): Find a better way to navigate the xml.
- nodes = []
- for node in vcproj.childNodes:
- if node.nodeName == "Configurations":
- for sub_node in node.childNodes:
- if sub_node.nodeName == "Configuration":
- nodes.append(sub_node)
-
- return nodes
-
-
-def GetChildrenVsprops(filename):
- dom = parse(filename)
- if dom.documentElement.attributes:
- vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
- return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
- return []
-
-def SeekToNode(node1, child2):
- # A text node does not have properties.
- if child2.nodeType == Node.TEXT_NODE:
- return None
-
- # Get the name of the current node.
- current_name = child2.getAttribute("Name")
- if not current_name:
- # There is no name. We don't know how to merge.
- return None
-
- # Look through all the nodes to find a match.
- for sub_node in node1.childNodes:
- if sub_node.nodeName == child2.nodeName:
- name = sub_node.getAttribute("Name")
- if name == current_name:
- return sub_node
-
- # No match. We give up.
- return None
-
-
-def MergeAttributes(node1, node2):
- # No attributes to merge?
- if not node2.attributes:
- return
-
- for (name, value2) in node2.attributes.items():
- # Don't merge the 'Name' attribute.
- if name == 'Name':
- continue
- value1 = node1.getAttribute(name)
- if value1:
- # The attribute exists in the main node. If it's equal, we leave it
- # untouched; otherwise we concatenate the two values.
- if value1 != value2:
- node1.setAttribute(name, ';'.join([value1, value2]))
- else:
- # The attribute does not exist in the main node. We append this one.
- node1.setAttribute(name, value2)
-
- # If the attribute was a property sheet reference, we remove it, since
- # it is useless once the properties have been merged.
- if name == 'InheritedPropertySheets':
- node1.removeAttribute(name)
-
-
-def MergeProperties(node1, node2):
- MergeAttributes(node1, node2)
- for child2 in node2.childNodes:
- child1 = SeekToNode(node1, child2)
- if child1:
- MergeProperties(child1, child2)
- else:
- node1.appendChild(child2.cloneNode(True))
-
-
-def main(argv):
- """Main function of this vcproj prettifier."""
- global ARGUMENTS
- ARGUMENTS = argv
-
- # Check that we got at least one parameter (the .vcproj path).
- if len(argv) < 2:
- print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
- '[key2=value2]' % argv[0])
- return 1
-
- # Parse the keys
- for i in range(2, len(argv)):
- (key, value) = argv[i].split('=')
- REPLACEMENTS[key] = value
-
- # Open the vcproj and parse the xml.
- dom = parse(argv[1])
-
- # First thing we need to do is find the Configuration nodes and merge them
- # with the vsprops they include.
- for configuration_node in GetConfiguationNodes(dom.documentElement):
- # Get the property sheets associated with this configuration.
- vsprops = configuration_node.getAttribute('InheritedPropertySheets')
-
- # Fix the filenames to be absolute.
- vsprops_list = FixFilenames(vsprops.strip().split(';'),
- os.path.dirname(argv[1]))
-
- # Extend the list of vsprops with all vsprops contained in the current
- # vsprops.
- for current_vsprops in vsprops_list:
- vsprops_list.extend(GetChildrenVsprops(current_vsprops))
-
- # Now that we have all the vsprops, we need to merge them.
- for current_vsprops in vsprops_list:
- MergeProperties(configuration_node,
- parse(current_vsprops).documentElement)
-
- # Now that everything is merged, we need to cleanup the xml.
- CleanupVcproj(dom.documentElement)
-
- # Finally, we use the pretty xml function to print the vcproj back to the
- # user.
- #print dom.toprettyxml(newl="\n")
- PrettyPrintNode(dom.documentElement)
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv))
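
The merge-then-cleanup behaviour of MergeAttributes and CleanupVcproj reduces to a couple of list operations on ';'-separated values. A sketch under that reading (the function names are mine, not the script's, and the header values are invented):

    def merge_attr(value1, value2):
        # Missing or equal: keep one copy; differing: concatenate with ';'.
        if not value1:
            return value2
        if value1 == value2:
            return value1
        return ';'.join([value1, value2])

    def cleanup_attr(value):
        # The cleanup pass then sorts the ';' list and drops duplicates.
        return ';'.join(sorted(set(value.split(';'))))

    merged = merge_attr('a.h;b.h', 'b.h;c.h')
    print(cleanup_attr(merged))  # -> a.h;b.h;c.h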
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/Find-VS2017.cs b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/Find-VS2017.cs
deleted file mode 100644
index 87e0a9c9bb..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/Find-VS2017.cs
+++ /dev/null
@@ -1,271 +0,0 @@
-// Copyright 2017 - Refael Ackermann
-// Distributed under MIT style license
-// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf
-
-// Usage:
-// powershell -ExecutionPolicy Unrestricted -Version "2.0" -Command "&{Add-Type -Path Find-VS2017.cs; [VisualStudioConfiguration.Main]::Query()}"
-using System;
-using System.Text;
-using System.Runtime.InteropServices;
-
-namespace VisualStudioConfiguration
-{
- [Flags]
- public enum InstanceState : uint
- {
- None = 0,
- Local = 1,
- Registered = 2,
- NoRebootRequired = 4,
- NoErrors = 8,
- Complete = 4294967295,
- }
-
- [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface IEnumSetupInstances
- {
-
- void Next([MarshalAs(UnmanagedType.U4), In] int celt,
- [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt,
- [MarshalAs(UnmanagedType.U4)] out int pceltFetched);
-
- void Skip([MarshalAs(UnmanagedType.U4), In] int celt);
-
- void Reset();
-
- [return: MarshalAs(UnmanagedType.Interface)]
- IEnumSetupInstances Clone();
- }
-
- [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface ISetupConfiguration
- {
- }
-
- [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface ISetupConfiguration2 : ISetupConfiguration
- {
-
- [return: MarshalAs(UnmanagedType.Interface)]
- IEnumSetupInstances EnumInstances();
-
- [return: MarshalAs(UnmanagedType.Interface)]
- ISetupInstance GetInstanceForCurrentProcess();
-
- [return: MarshalAs(UnmanagedType.Interface)]
- ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path);
-
- [return: MarshalAs(UnmanagedType.Interface)]
- IEnumSetupInstances EnumAllInstances();
- }
-
- [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface ISetupInstance
- {
- }
-
- [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface ISetupInstance2 : ISetupInstance
- {
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetInstanceId();
-
- [return: MarshalAs(UnmanagedType.Struct)]
- System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetInstallationName();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetInstallationPath();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetInstallationVersion();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid);
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid);
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath);
-
- [return: MarshalAs(UnmanagedType.U4)]
- InstanceState GetState();
-
- [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
- ISetupPackageReference[] GetPackages();
-
- ISetupPackageReference GetProduct();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetProductPath();
-
- [return: MarshalAs(UnmanagedType.VariantBool)]
- bool IsLaunchable();
-
- [return: MarshalAs(UnmanagedType.VariantBool)]
- bool IsComplete();
-
- [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)]
- ISetupPropertyStore GetProperties();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetEnginePath();
- }
-
- [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface ISetupPackageReference
- {
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetId();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetVersion();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetChip();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetLanguage();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetBranch();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetType();
-
- [return: MarshalAs(UnmanagedType.BStr)]
- string GetUniqueId();
-
- [return: MarshalAs(UnmanagedType.VariantBool)]
- bool GetIsExtension();
- }
-
- [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")]
- [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
- [ComImport]
- public interface ISetupPropertyStore
- {
-
- [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)]
- string[] GetNames();
-
- object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName);
- }
-
- [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")]
- [CoClass(typeof(SetupConfigurationClass))]
- [ComImport]
- public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration
- {
- }
-
- [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")]
- [ClassInterface(ClassInterfaceType.None)]
- [ComImport]
- public class SetupConfigurationClass
- {
- }
-
- public static class Main
- {
- public static void Query()
- {
- ISetupConfiguration query = new SetupConfiguration();
- ISetupConfiguration2 query2 = (ISetupConfiguration2)query;
- IEnumSetupInstances e = query2.EnumAllInstances();
-
- int pceltFetched;
- ISetupInstance2[] rgelt = new ISetupInstance2[1];
- StringBuilder log = new StringBuilder();
- while (true)
- {
- e.Next(1, rgelt, out pceltFetched);
- if (pceltFetched <= 0)
- {
- Console.WriteLine(String.Format("{{\"log\":\"{0}\"}}", log.ToString()));
- return;
- }
- if (CheckInstance(rgelt[0], ref log))
- return;
- }
- }
-
- private static bool CheckInstance(ISetupInstance2 setupInstance2, ref StringBuilder log)
- {
- // Visual Studio Community 2017 component directory:
- // https://www.visualstudio.com/en-us/productinfo/vs2017-install-product-Community.workloads
-
- string path = setupInstance2.GetInstallationPath().Replace("\\", "\\\\");
- log.Append(String.Format("Found installation at: {0}\\n", path));
-
- bool hasMSBuild = false;
- bool hasVCTools = false;
- uint Win10SDKVer = 0;
- bool hasWin8SDK = false;
-
- foreach (ISetupPackageReference package in setupInstance2.GetPackages())
- {
- const string Win10SDKPrefix = "Microsoft.VisualStudio.Component.Windows10SDK.";
-
- string id = package.GetId();
- if (id == "Microsoft.VisualStudio.VC.MSBuild.Base")
- hasMSBuild = true;
- else if (id == "Microsoft.VisualStudio.Component.VC.Tools.x86.x64")
- hasVCTools = true;
- else if (id.StartsWith(Win10SDKPrefix)) {
- string[] parts = id.Substring(Win10SDKPrefix.Length).Split('.');
- if (parts.Length > 1 && parts[1] != "Desktop")
- continue;
- Win10SDKVer = Math.Max(Win10SDKVer, UInt32.Parse(parts[0]));
- } else if (id == "Microsoft.VisualStudio.Component.Windows81SDK")
- hasWin8SDK = true;
- else
- continue;
-
- log.Append(String.Format(" - Found {0}\\n", id));
- }
-
- if (!hasMSBuild)
- log.Append(" - Missing Visual Studio C++ core features (Microsoft.VisualStudio.VC.MSBuild.Base)\\n");
- if (!hasVCTools)
- log.Append(" - Missing VC++ 2017 v141 toolset (x86,x64) (Microsoft.VisualStudio.Component.VC.Tools.x86.x64)\\n");
- if ((Win10SDKVer == 0) && (!hasWin8SDK))
- log.Append(" - Missing a Windows SDK (Microsoft.VisualStudio.Component.Windows10SDK.* or Microsoft.VisualStudio.Component.Windows81SDK)\\n");
-
- if (hasMSBuild && hasVCTools)
- {
- if (Win10SDKVer > 0)
- {
- log.Append(" - Using this installation with Windows 10 SDK"/*\\n*/);
- Console.WriteLine(String.Format("{{\"log\":\"{0}\",\"path\":\"{1}\",\"sdk\":\"10.0.{2}.0\"}}", log.ToString(), path, Win10SDKVer));
- return true;
- }
- else if (hasWin8SDK)
- {
- log.Append(" - Using this installation with Windows 8.1 SDK"/*\\n*/);
- Console.WriteLine(String.Format("{{\"log\":\"{0}\",\"path\":\"{1}\",\"sdk\":\"8.1\"}}", log.ToString(), path));
- return true;
- }
- }
-
- log.Append(" - Some required components are missing, not using this installation\\n");
- return false;
- }
- }
-}
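
On success the class above writes a single JSON object to stdout (a log field plus, when a usable install is found, path and sdk), which the JavaScript side of node-gyp then parses. A hedged sketch of the consuming end in Python (the payload below is fabricated):

    import json

    payload = ('{"log":"Found installation at: C:\\\\VS\\n",'
               '"path":"C:\\\\VS","sdk":"10.0.17763.0"}')
    info = json.loads(payload)
    if 'path' in info:
        print('Using VS2017 at %s with Windows SDK %s'
              % (info['path'], info.get('sdk')))
    else:
        print('No usable VS2017 installation found; log was:\n' + info['log'])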
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/build.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/build.js
deleted file mode 100644
index 0445fb6452..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/build.js
+++ /dev/null
@@ -1,266 +0,0 @@
-
-module.exports = exports = build
-
-/**
- * Module dependencies.
- */
-
-var fs = require('graceful-fs')
- , rm = require('rimraf')
- , path = require('path')
- , glob = require('glob')
- , log = require('npmlog')
- , which = require('which')
- , exec = require('child_process').exec
- , processRelease = require('./process-release')
- , win = process.platform === 'win32'
-
-exports.usage = 'Invokes `' + (win ? 'msbuild' : 'make') + '` and builds the module'
-
-function build (gyp, argv, callback) {
- var platformMake = 'make'
- if (process.platform === 'aix') {
- platformMake = 'gmake'
- } else if (process.platform.indexOf('bsd') !== -1) {
- platformMake = 'gmake'
- }
-
- var release = processRelease(argv, gyp, process.version, process.release)
- , makeCommand = gyp.opts.make || process.env.MAKE || platformMake
- , command = win ? 'msbuild' : makeCommand
- , buildDir = path.resolve('build')
- , configPath = path.resolve(buildDir, 'config.gypi')
- , jobs = gyp.opts.jobs || process.env.JOBS
- , buildType
- , config
- , arch
- , nodeDir
-
- loadConfigGypi()
-
- /**
- * Load the "config.gypi" file that was generated during "configure".
- */
-
- function loadConfigGypi () {
- fs.readFile(configPath, 'utf8', function (err, data) {
- if (err) {
- if (err.code == 'ENOENT') {
- callback(new Error('You must run `node-gyp configure` first!'))
- } else {
- callback(err)
- }
- return
- }
- config = JSON.parse(data.replace(/\#.+\n/, ''))
-
- // get the 'arch', 'buildType', and 'nodeDir' vars from the config
- buildType = config.target_defaults.default_configuration
- arch = config.variables.target_arch
- nodeDir = config.variables.nodedir
-
- if ('debug' in gyp.opts) {
- buildType = gyp.opts.debug ? 'Debug' : 'Release'
- }
- if (!buildType) {
- buildType = 'Release'
- }
-
- log.verbose('build type', buildType)
- log.verbose('architecture', arch)
- log.verbose('node dev dir', nodeDir)
-
- if (win) {
- findSolutionFile()
- } else {
- doWhich()
- }
- })
- }
-
- /**
- * On Windows, find the first build/*.sln file.
- */
-
- function findSolutionFile () {
- glob('build/*.sln', function (err, files) {
- if (err) return callback(err)
- if (files.length === 0) {
- return callback(new Error('Could not find *.sln file. Did you run "configure"?'))
- }
- guessedSolution = files[0]
- log.verbose('found first Solution file', guessedSolution)
- doWhich()
- })
- }
-
- /**
- * Uses node-which to locate the msbuild / make executable.
- */
-
- function doWhich () {
- // First make sure we have the build command in the PATH
- which(command, function (err, execPath) {
- if (err) {
- if (win && /not found/.test(err.message)) {
- // On Windows and 'msbuild' was not found. Let's guess where it is.
- findMsbuild()
- } else {
- // Some other error or 'make' not found on Unix, report that to the user
- callback(err)
- }
- return
- }
- log.verbose('`which` succeeded for `' + command + '`', execPath)
- doBuild()
- })
- }
-
- /**
- * Search for the location of "msbuild.exe" file on Windows.
- */
-
- function findMsbuild () {
- if (config.variables.msbuild_path) {
- command = config.variables.msbuild_path
- log.verbose('using MSBuild:', command)
- doBuild()
- return
- }
-
- log.verbose('could not find "msbuild.exe" in PATH - finding location in registry')
- var notfoundErr = 'Can\'t find "msbuild.exe". Do you have Microsoft Visual Studio C++ 2008+ installed?'
- var cmd = 'reg query "HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions" /s'
- if (process.arch !== 'ia32')
- cmd += ' /reg:32'
- exec(cmd, function (err, stdout, stderr) {
- if (err) {
- return callback(new Error(err.message + '\n' + notfoundErr))
- }
- var reVers = /ToolsVersions\\([^\\]+)$/i
- , rePath = /\r\n[ \t]+MSBuildToolsPath[ \t]+REG_SZ[ \t]+([^\r]+)/i
- , msbuilds = []
- , r
- , msbuildPath
- stdout.split('\r\n\r\n').forEach(function(l) {
- if (!l) return
- l = l.trim()
- if (r = reVers.exec(l.substring(0, l.indexOf('\r\n')))) {
- var ver = parseFloat(r[1], 10)
- if (ver >= 3.5) {
- if (r = rePath.exec(l)) {
- msbuilds.push({
- version: ver,
- path: r[1]
- })
- }
- }
- }
- })
- msbuilds.sort(function (x, y) {
- return (x.version < y.version ? -1 : 1)
- })
- ;(function verifyMsbuild () {
- if (!msbuilds.length) return callback(new Error(notfoundErr))
- msbuildPath = path.resolve(msbuilds.pop().path, 'msbuild.exe')
- fs.stat(msbuildPath, function (err, stat) {
- if (err) {
- if (err.code == 'ENOENT') {
- if (msbuilds.length) {
- return verifyMsbuild()
- } else {
- callback(new Error(notfoundErr))
- }
- } else {
- callback(err)
- }
- return
- }
- command = msbuildPath
- doBuild()
- })
- })()
- })
- }
-
-
- /**
- * Actually spawn the process and compile the module.
- */
-
- function doBuild () {
-
- // Enable Verbose build
- var verbose = log.levels[log.level] <= log.levels.verbose
- if (!win && verbose) {
- argv.push('V=1')
- }
- if (win && !verbose) {
- argv.push('/clp:Verbosity=minimal')
- }
-
- if (win) {
- // Turn off the Microsoft logo on Windows
- argv.push('/nologo')
- }
-
- // Specify the build type, Release by default
- if (win) {
- var archLower = arch.toLowerCase()
- var p = archLower === 'x64' ? 'x64' :
- (archLower === 'arm' ? 'ARM' : 'Win32')
- argv.push('/p:Configuration=' + buildType + ';Platform=' + p)
- if (jobs) {
- var j = parseInt(jobs, 10)
- if (!isNaN(j) && j > 0) {
- argv.push('/m:' + j)
- } else if (jobs.toUpperCase() === 'MAX') {
- argv.push('/m:' + require('os').cpus().length)
- }
- }
- } else {
- argv.push('BUILDTYPE=' + buildType)
- // Invoke the Makefile in the 'build' dir.
- argv.push('-C')
- argv.push('build')
- if (jobs) {
- var j = parseInt(jobs, 10)
- if (!isNaN(j) && j > 0) {
- argv.push('--jobs')
- argv.push(j)
- } else if (jobs.toUpperCase() === 'MAX') {
- argv.push('--jobs')
- argv.push(require('os').cpus().length)
- }
- }
- }
-
- if (win) {
- // did the user specify their own .sln file?
- var hasSln = argv.some(function (arg) {
- return path.extname(arg) == '.sln'
- })
- if (!hasSln) {
- argv.unshift(gyp.opts.solution || guessedSolution)
- }
- }
-
- var proc = gyp.spawn(command, argv)
- proc.on('exit', onExit)
- }
-
- /**
- * Invoked after the make/msbuild command exits.
- */
-
- function onExit (code, signal) {
- if (code !== 0) {
- return callback(new Error('`' + command + '` failed with exit code: ' + code))
- }
- if (signal) {
- return callback(new Error('`' + command + '` got signal: ' + signal))
- }
- callback()
- }
-
-}
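
findMsbuild's registry walk above reduces to: keep ToolsVersions entries of 3.5 or newer, sort them by version, and probe candidates from the newest down until an msbuild.exe actually exists. A sketch of that selection order (the paths are invented and the existence check is stubbed out):

    msbuilds = [{'version': 4.0, 'path': r'C:\old'},
                {'version': 14.0, 'path': r'C:\new'},
                {'version': 3.0, 'path': r'C:\too-old'}]

    candidates = sorted((m for m in msbuilds if m['version'] >= 3.5),
                        key=lambda m: m['version'])
    while candidates:
        best = candidates.pop()  # newest remaining version first
        print('probing %s' % (best['path'] + r'\msbuild.exe'))
        # The real code stat()s the file and falls back to the next-newest
        # candidate on ENOENT; here we just take the first.
        break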
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/clean.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/clean.js
deleted file mode 100644
index e69164d45a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/clean.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-module.exports = exports = clean
-
-exports.usage = 'Removes any generated build files and the "out" dir'
-
-/**
- * Module dependencies.
- */
-
-var rm = require('rimraf')
-var log = require('npmlog')
-
-
-function clean (gyp, argv, callback) {
-
- // Remove the 'build' dir
- var buildDir = 'build'
-
- log.verbose('clean', 'removing "%s" directory', buildDir)
- rm(buildDir, callback)
-
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/configure.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/configure.js
deleted file mode 100644
index 1351576d12..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/configure.js
+++ /dev/null
@@ -1,523 +0,0 @@
-module.exports = exports = configure
-module.exports.test = {
- PythonFinder: PythonFinder,
- findAccessibleSync: findAccessibleSync,
- findPython: findPython,
-}
-
-/**
- * Module dependencies.
- */
-
-var fs = require('graceful-fs')
- , path = require('path')
- , log = require('npmlog')
- , osenv = require('osenv')
- , which = require('which')
- , semver = require('semver')
- , mkdirp = require('mkdirp')
- , cp = require('child_process')
- , extend = require('util')._extend
- , processRelease = require('./process-release')
- , win = process.platform === 'win32'
- , findNodeDirectory = require('./find-node-directory')
- , msgFormat = require('util').format
-if (win)
- var findVS2017 = require('./find-vs2017')
-
-exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module'
-
-function configure (gyp, argv, callback) {
-
- var python = gyp.opts.python || process.env.PYTHON || 'python2'
- , buildDir = path.resolve('build')
- , configNames = [ 'config.gypi', 'common.gypi' ]
- , configs = []
- , nodeDir
- , release = processRelease(argv, gyp, process.version, process.release)
-
- findPython(python, function (err, found) {
- if (err) {
- callback(err)
- } else {
- python = found
- getNodeDir()
- }
- })
-
- function getNodeDir () {
-
- // 'python' should be set by now
- process.env.PYTHON = python
-
- if (gyp.opts.nodedir) {
- // --nodedir was specified. use that for the dev files
- nodeDir = gyp.opts.nodedir.replace(/^~/, osenv.home())
-
- log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir)
- createBuildDir()
-
- } else {
- // if no --nodedir specified, ensure node dependencies are installed
- if ('v' + release.version !== process.version) {
- // if --target was given, then determine a target version to compile for
- log.verbose('get node dir', 'compiling against --target node version: %s', release.version)
- } else {
- // if no --target was specified then use the current host node version
- log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', release.version)
- }
-
- if (!release.semver) {
- // could not parse the version string with semver
- return callback(new Error('Invalid version number: ' + release.version))
- }
-
- // ensure that the target node version's dev files are installed
- gyp.opts.ensure = true
- gyp.commands.install([ release.version ], function (err, version) {
- if (err) return callback(err)
- log.verbose('get node dir', 'target node version installed:', release.versionDir)
- nodeDir = path.resolve(gyp.devDir, release.versionDir)
- createBuildDir()
- })
- }
- }
-
- function createBuildDir () {
- log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir)
- mkdirp(buildDir, function (err, isNew) {
- if (err) return callback(err)
- log.verbose('build dir', '"build" dir needed to be created?', isNew)
- if (win && (!gyp.opts.msvs_version || gyp.opts.msvs_version === '2017')) {
- findVS2017(function (err, vsSetup) {
- if (err) {
- log.verbose('Not using VS2017:', err.message)
- createConfigFile()
- } else {
- createConfigFile(null, vsSetup)
- }
- })
- } else {
- createConfigFile()
- }
- })
- }
-
- function createConfigFile (err, vsSetup) {
- if (err) return callback(err)
-
- var configFilename = 'config.gypi'
- var configPath = path.resolve(buildDir, configFilename)
-
- log.verbose('build/' + configFilename, 'creating config file')
-
- var config = process.config || {}
- , defaults = config.target_defaults
- , variables = config.variables
-
- // default "config.variables"
- if (!variables) variables = config.variables = {}
-
- // default "config.defaults"
- if (!defaults) defaults = config.target_defaults = {}
-
- // don't inherit the "defaults" from node's `process.config` object.
- // doing so could cause problems in cases where the `node` executable was
- // compiled on a different machine (with different lib/include paths) than
- // the machine where the addon is being built
- defaults.cflags = []
- defaults.defines = []
- defaults.include_dirs = []
- defaults.libraries = []
-
- // set the default_configuration prop
- if ('debug' in gyp.opts) {
- defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release'
- }
- if (!defaults.default_configuration) {
- defaults.default_configuration = 'Release'
- }
-
- // set the target_arch variable
- variables.target_arch = gyp.opts.arch || process.arch || 'ia32'
-
- // set the node development directory
- variables.nodedir = nodeDir
-
- // disable -T "thin" static archives by default
- variables.standalone_static_library = gyp.opts.thin ? 0 : 1
-
- if (vsSetup) {
- // GYP doesn't (yet) have support for VS2017, so we force it to VS2015
- // to avoid pulling a floating patch that has not landed upstream.
- // Ref: https://chromium-review.googlesource.com/#/c/433540/
- gyp.opts.msvs_version = '2015'
- process.env['GYP_MSVS_VERSION'] = 2015
- process.env['GYP_MSVS_OVERRIDE_PATH'] = vsSetup.path
- defaults['msbuild_toolset'] = 'v141'
- defaults['msvs_windows_target_platform_version'] = vsSetup.sdk
- variables['msbuild_path'] = path.join(vsSetup.path, 'MSBuild', '15.0',
- 'Bin', 'MSBuild.exe')
- }
-
- // loop through the rest of the opts and add the unknown ones as variables.
- // this allows for module-specific configure flags like:
- //
- // $ node-gyp configure --shared-libxml2
- Object.keys(gyp.opts).forEach(function (opt) {
- if (opt === 'argv') return
- if (opt in gyp.configDefs) return
- variables[opt.replace(/-/g, '_')] = gyp.opts[opt]
- })
-
- // ensures that any boolean values from `process.config` get stringified
- function boolsToString (k, v) {
- if (typeof v === 'boolean')
- return String(v)
- return v
- }
-
- log.silly('build/' + configFilename, config)
-
- // now write out the config.gypi file to the build/ dir
- var prefix = '# Do not edit. File was generated by node-gyp\'s "configure" step'
- , json = JSON.stringify(config, boolsToString, 2)
- log.verbose('build/' + configFilename, 'writing out config file: %s', configPath)
- configs.push(configPath)
- fs.writeFile(configPath, [prefix, json, ''].join('\n'), findConfigs)
- }
-
- function findConfigs (err) {
- if (err) return callback(err)
- var name = configNames.shift()
- if (!name) return runGyp()
- var fullPath = path.resolve(name)
- log.verbose(name, 'checking for gypi file: %s', fullPath)
- fs.stat(fullPath, function (err, stat) {
- if (err) {
- if (err.code == 'ENOENT') {
- findConfigs() // check next gypi filename
- } else {
- callback(err)
- }
- } else {
- log.verbose(name, 'found gypi file')
- configs.push(fullPath)
- findConfigs()
- }
- })
- }
-
- function runGyp (err) {
- if (err) return callback(err)
-
- if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) {
- if (win) {
- log.verbose('gyp', 'gyp format was not specified; forcing "msvs"')
- // force the 'msvs' format on Windows
- argv.push('-f', 'msvs')
- } else {
- log.verbose('gyp', 'gyp format was not specified; forcing "make"')
- // force the 'make' target for non-Windows
- argv.push('-f', 'make')
- }
- }
-
- function hasMsvsVersion () {
- return argv.some(function (arg) {
- return arg.indexOf('msvs_version') === 0
- })
- }
-
- if (win && !hasMsvsVersion()) {
- if ('msvs_version' in gyp.opts) {
- argv.push('-G', 'msvs_version=' + gyp.opts.msvs_version)
- } else {
- argv.push('-G', 'msvs_version=auto')
- }
- }
-
- // include all the ".gypi" files that were found
- configs.forEach(function (config) {
- argv.push('-I', config)
- })
-
- // for AIX we need to set up the path to the exp file
- // which contains the symbols needed for linking.
- // The file will either be in one of the following
- // depending on whether it is an installed or
- // development environment:
- // - the include/node directory
- // - the out/Release directory
- // - the out/Debug directory
- // - the root directory
- var node_exp_file = undefined
- if (process.platform === 'aix') {
- var node_root_dir = findNodeDirectory()
- var candidates = ['include/node/node.exp',
- 'out/Release/node.exp',
- 'out/Debug/node.exp',
- 'node.exp']
- var logprefix = 'find exports file'
- node_exp_file = findAccessibleSync(logprefix, node_root_dir, candidates)
- if (node_exp_file !== undefined) {
- log.verbose(logprefix, 'Found exports file: %s', node_exp_file)
- } else {
- var msg = msgFormat('Could not find node.exp file in %s', node_root_dir)
- log.error(logprefix, 'Could not find exports file')
- return callback(new Error(msg))
- }
- }
-
- // this logic was ported from the old `gyp_addon` python file
- var gyp_script = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py')
- var addon_gypi = path.resolve(__dirname, '..', 'addon.gypi')
- var common_gypi = path.resolve(nodeDir, 'include/node/common.gypi')
- fs.stat(common_gypi, function (err, stat) {
- if (err)
- common_gypi = path.resolve(nodeDir, 'common.gypi')
-
- var output_dir = 'build'
- if (win) {
- // Windows expects an absolute path
- output_dir = buildDir
- }
- var nodeGypDir = path.resolve(__dirname, '..')
- var nodeLibFile = path.join(nodeDir,
- !gyp.opts.nodedir ? '<(target_arch)' : '$(Configuration)',
- release.name + '.lib')
-
- argv.push('-I', addon_gypi)
- argv.push('-I', common_gypi)
- argv.push('-Dlibrary=shared_library')
- argv.push('-Dvisibility=default')
- argv.push('-Dnode_root_dir=' + nodeDir)
- if (process.platform === 'aix') {
- argv.push('-Dnode_exp_file=' + node_exp_file)
- }
- argv.push('-Dnode_gyp_dir=' + nodeGypDir)
- argv.push('-Dnode_lib_file=' + nodeLibFile)
- argv.push('-Dmodule_root_dir=' + process.cwd())
- argv.push('-Dnode_engine=' +
- (gyp.opts.node_engine || process.jsEngine || 'v8'))
- argv.push('--depth=.')
- argv.push('--no-parallel')
-
- // tell gyp to write the Makefile/Solution files into output_dir
- argv.push('--generator-output', output_dir)
-
- // tell make to write its output into the same dir
- argv.push('-Goutput_dir=.')
-
- // enforce use of the "binding.gyp" file
- argv.unshift('binding.gyp')
-
- // execute `gyp` from the current target nodedir
- argv.unshift(gyp_script)
-
- // make sure python uses files that came with this particular node package
- var pypath = [path.join(__dirname, '..', 'gyp', 'pylib')]
- if (process.env.PYTHONPATH) {
- pypath.push(process.env.PYTHONPATH)
- }
- process.env.PYTHONPATH = pypath.join(win ? ';' : ':')
-
- var cp = gyp.spawn(python, argv)
- cp.on('exit', onCpExit)
- })
- }
-
- /**
- * Called when the `gyp` child process exits.
- */
-
- function onCpExit (code, signal) {
- if (code !== 0) {
- callback(new Error('`gyp` failed with exit code: ' + code))
- } else {
- // we're done
- callback()
- }
- }
-
-}
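
As a rough illustration (not part of the original file), the `runGyp()` logic above ends up spawning `gyp_main.py` with an argv along these lines on a Linux host; every path and value here is a hypothetical example:

```javascript
// Approximate reconstruction of the argv handed to gyp.spawn(python, argv)
// by runGyp() above, for a non-Windows host. All paths are hypothetical.
var path = require('path')

var nodeDir = '/home/user/.node-gyp/8.9.4' // hypothetical devdir install
var argv = [
  path.resolve(__dirname, '..', 'gyp', 'gyp_main.py'), // unshifted last
  'binding.gyp',                                       // unshifted first
  '-f', 'make',                                        // format forced when -f is absent
  '-I', path.resolve(__dirname, '..', 'addon.gypi'),
  '-I', path.resolve(nodeDir, 'include/node/common.gypi'),
  '-Dlibrary=shared_library',
  '-Dvisibility=default',
  '-Dnode_root_dir=' + nodeDir,
  '-Dnode_gyp_dir=' + path.resolve(__dirname, '..'),
  '-Dnode_lib_file=' + path.join(nodeDir, '<(target_arch)', 'node.lib'),
  '-Dmodule_root_dir=' + process.cwd(),
  '-Dnode_engine=v8',
  '--depth=.',
  '--no-parallel',
  '--generator-output', 'build',
  '-Goutput_dir=.'
]
console.log(argv.join(' '))
```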
-
-/**
- * Returns the first file or directory from an array of candidates that is
- * readable by the current user, or undefined if none of the candidates are
- * readable.
- */
-function findAccessibleSync (logprefix, dir, candidates) {
- for (var next = 0; next < candidates.length; next++) {
- var candidate = path.resolve(dir, candidates[next])
- try {
- var fd = fs.openSync(candidate, 'r')
- } catch (e) {
- // this candidate was not found or not readable, do nothing
- log.silly(logprefix, 'Could not open %s: %s', candidate, e.message)
- continue
- }
- fs.closeSync(fd)
- log.silly(logprefix, 'Found readable %s', candidate)
- return candidate
- }
-
- return undefined
-}
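
A minimal usage sketch for `findAccessibleSync()`, mirroring the AIX branch of `configure()` above; the root directory is a hypothetical example:

```javascript
// Assumes findAccessibleSync() from above is in scope; the directory is made up.
var exp = findAccessibleSync('find exports file', '/opt/nodejs', [
  'include/node/node.exp',
  'out/Release/node.exp',
  'out/Debug/node.exp',
  'node.exp'
])
if (exp === undefined) {
  console.error('no readable node.exp candidate found')
} else {
  console.log('using exports file:', exp)
}
```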
-
-function PythonFinder(python, callback) {
- this.callback = callback
- this.python = python
-}
-
-PythonFinder.prototype = {
- checkPythonLauncherDepth: 0,
- env: process.env,
- execFile: cp.execFile,
- log: log,
- resolve: path.win32 && path.win32.resolve || path.resolve,
- stat: fs.stat,
- which: which,
- win: win,
-
- checkPython: function checkPython () {
- this.log.verbose('check python',
- 'checking for Python executable "%s" in the PATH',
- this.python)
- this.which(this.python, function (err, execPath) {
- if (err) {
- this.log.verbose('`which` failed', this.python, err)
- if (this.python === 'python2') {
- this.python = 'python'
- return this.checkPython()
- }
- if (this.win) {
- this.checkPythonLauncher()
- } else {
- this.failNoPython()
- }
- } else {
- this.log.verbose('`which` succeeded', this.python, execPath)
- // Found the `python` executable, and from now on we use it explicitly.
-        // This solves #667 and #750 (`execFile` won't run batch files
-        // such as *.cmd and *.bat)
- this.python = execPath
- this.checkPythonVersion()
- }
- }.bind(this))
- },
-
- // Distributions of Python on Windows by default install with the "py.exe"
- // Python launcher which is more likely to exist than the Python executable
- // being in the $PATH.
- // Because the Python launcher supports all versions of Python, we have to
- // explicitly request a Python 2 version. This is done by supplying "-2" as
- // the first command line argument. Since "py.exe -2" would be an invalid
- // executable for "execFile", we have to use the launcher to figure out
- // where the actual "python.exe" executable is located.
- checkPythonLauncher: function checkPythonLauncher () {
- this.checkPythonLauncherDepth += 1
-
- this.log.verbose(
- 'could not find "' + this.python + '". checking python launcher')
- var env = extend({}, this.env)
- env.TERM = 'dumb'
-
- var launcherArgs = ['-2', '-c', 'import sys; print sys.executable']
- this.execFile('py.exe', launcherArgs, { env: env }, function (err, stdout) {
- if (err) {
- this.guessPython()
- } else {
- this.python = stdout.trim()
- this.log.verbose('check python launcher',
- 'python executable found: %j',
- this.python)
- this.checkPythonVersion()
- }
- this.checkPythonLauncherDepth -= 1
- }.bind(this))
- },
-
- checkPythonVersion: function checkPythonVersion () {
- var args = ['-c', 'import platform; print(platform.python_version());']
- var env = extend({}, this.env)
- env.TERM = 'dumb'
-
- this.execFile(this.python, args, { env: env }, function (err, stdout) {
- if (err) {
- return this.callback(err)
- }
- this.log.verbose('check python version',
- '`%s -c "' + args[1] + '"` returned: %j',
- this.python, stdout)
- var version = stdout.trim()
- if (~version.indexOf('+')) {
- this.log.silly('stripping "+" sign(s) from version')
- version = version.replace(/\+/g, '')
- }
- if (~version.indexOf('rc')) {
- this.log.silly('stripping "rc" identifier from version')
- version = version.replace(/rc(.*)$/ig, '')
- }
- var range = semver.Range('>=2.5.0 <3.0.0')
- var valid = false
- try {
- valid = range.test(version)
- } catch (e) {
- this.log.silly('range.test() error', e)
- }
- if (valid) {
- this.callback(null, this.python)
- } else if (this.win && this.checkPythonLauncherDepth === 0) {
- this.checkPythonLauncher()
- } else {
- this.failPythonVersion(version)
- }
- }.bind(this))
- },
-
- failNoPython: function failNoPython () {
- var errmsg =
-      'Can\'t find Python executable "' + this.python +
-      '". You can set the PYTHON env variable.'
- this.callback(new Error(errmsg))
- },
-
- failPythonVersion: function failPythonVersion (badVersion) {
- var errmsg =
- 'Python executable "' + this.python +
- '" is v' + badVersion + ', which is not supported by gyp.\n' +
- 'You can pass the --python switch to point to ' +
- 'Python >= v2.5.0 & < 3.0.0.'
- this.callback(new Error(errmsg))
- },
-
- // Called on Windows when "python" isn't available in the current $PATH.
- // We are going to check if "%SystemDrive%\python27\python.exe" exists.
- guessPython: function guessPython () {
- this.log.verbose('could not find "' + this.python + '". guessing location')
- var rootDir = this.env.SystemDrive || 'C:\\'
- if (rootDir[rootDir.length - 1] !== '\\') {
- rootDir += '\\'
- }
- var pythonPath = this.resolve(rootDir, 'Python27', 'python.exe')
- this.log.verbose('ensuring that file exists:', pythonPath)
- this.stat(pythonPath, function (err, stat) {
- if (err) {
- if (err.code == 'ENOENT') {
- this.failNoPython()
- } else {
- this.callback(err)
- }
- return
- }
- this.python = pythonPath
- this.checkPythonVersion()
- }.bind(this))
- },
-}
-
-function findPython (python, callback) {
- var finder = new PythonFinder(python, callback)
- finder.checkPython()
-}
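
One design point worth noting: `env`, `execFile`, `stat`, and `which` live on the `PythonFinder` prototype so that tests can replace them per instance. A hedged sketch of that, with stubbed lookups so no real Python is needed:

```javascript
// Hypothetical test-style usage of PythonFinder above: stub the PATH
// lookup and the version probe, then run the normal checks.
var finder = new PythonFinder('python2', function (err, python) {
  if (err) throw err
  console.log('would use:', python) // -> '/usr/bin/python2.7'
})
finder.which = function (name, cb) { cb(null, '/usr/bin/python2.7') }
finder.execFile = function (file, args, opts, cb) { cb(null, '2.7.15\n') }
finder.checkPython()
```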
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-node-directory.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-node-directory.js
deleted file mode 100644
index 3aee8a109a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-node-directory.js
+++ /dev/null
@@ -1,61 +0,0 @@
-var path = require('path')
- , log = require('npmlog')
-
-function findNodeDirectory(scriptLocation, processObj) {
- // set dirname and process if not passed in
- // this facilitates regression tests
- if (scriptLocation === undefined) {
- scriptLocation = __dirname
- }
- if (processObj === undefined) {
- processObj = process
- }
-
- // Have a look to see what is above us, to try and work out where we are
-  var npm_parent_directory = path.join(scriptLocation, '../../../..')
- log.verbose('node-gyp root', 'npm_parent_directory is '
- + path.basename(npm_parent_directory))
-  var node_root_dir = ''
-
- log.verbose('node-gyp root', 'Finding node root directory')
- if (path.basename(npm_parent_directory) === 'deps') {
- // We are in a build directory where this script lives in
- // deps/npm/node_modules/node-gyp/lib
- node_root_dir = path.join(npm_parent_directory, '..')
- log.verbose('node-gyp root', 'in build directory, root = '
- + node_root_dir)
- } else if (path.basename(npm_parent_directory) === 'node_modules') {
- // We are in a node install directory where this script lives in
- // lib/node_modules/npm/node_modules/node-gyp/lib or
- // node_modules/npm/node_modules/node-gyp/lib depending on the
- // platform
- if (processObj.platform === 'win32') {
- node_root_dir = path.join(npm_parent_directory, '..')
- } else {
- node_root_dir = path.join(npm_parent_directory, '../..')
- }
- log.verbose('node-gyp root', 'in install directory, root = '
- + node_root_dir)
- } else {
- // We don't know where we are, try working it out from the location
- // of the node binary
- var node_dir = path.dirname(processObj.execPath)
- var directory_up = path.basename(node_dir)
- if (directory_up === 'bin') {
- node_root_dir = path.join(node_dir, '..')
- } else if (directory_up === 'Release' || directory_up === 'Debug') {
-      // We are in a recently built node whose directory structure is
-      // that of a repository. On Windows we only need to go one level
-      // up; everywhere else, two
- if (processObj.platform === 'win32') {
- node_root_dir = path.join(node_dir, '..')
- } else {
- node_root_dir = path.join(node_dir, '../..')
- }
- }
- // Else return the default blank, "".
- }
- return node_root_dir
-}
-
-module.exports = findNodeDirectory
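
A quick worked example of the layout detection above, using the injectable `scriptLocation`/`processObj` parameters the comment mentions (paths hypothetical):

```javascript
// In a source checkout this script lives under deps/, so four directories up
// is "deps" and the node root is one level above it.
var root = findNodeDirectory(
  '/tmp/node/deps/npm/node_modules/node-gyp/lib',
  { platform: 'linux', execPath: '/usr/bin/node' }
)
console.log(root) // -> '/tmp/node'
```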
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-vs2017.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-vs2017.js
deleted file mode 100644
index 8c79e9ec9b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/find-vs2017.js
+++ /dev/null
@@ -1,46 +0,0 @@
-var log = require('npmlog')
- , execFile = require('child_process').execFile
- , path = require('path')
-
-function findVS2017(callback) {
- var ps = path.join(process.env.SystemRoot, 'System32', 'WindowsPowerShell',
- 'v1.0', 'powershell.exe')
- var csFile = path.join(__dirname, 'Find-VS2017.cs')
- var psArgs = ['-ExecutionPolicy', 'Unrestricted', '-Command',
- '&{Add-Type -Path \'' + csFile +
- '\'; [VisualStudioConfiguration.Main]::Query()}']
-
- log.silly('find vs2017', 'Running', ps, psArgs)
- var child = execFile(ps, psArgs, { encoding: 'utf8' },
- function (err, stdout, stderr) {
- log.silly('find vs2017', 'PS err:', err)
- log.silly('find vs2017', 'PS stdout:', stdout)
- log.silly('find vs2017', 'PS stderr:', stderr)
-
- if (err)
- return callback(new Error('Could not use PowerShell to find VS2017'))
-
- var vsSetup
- try {
- vsSetup = JSON.parse(stdout)
- } catch (e) {
- log.silly('find vs2017', e)
- return callback(new Error('Could not use PowerShell to find VS2017'))
- }
- log.silly('find vs2017', 'vsSetup:', vsSetup)
-
- if (vsSetup && vsSetup.log)
- log.verbose('find vs2017', vsSetup.log.trimRight())
-
- if (!vsSetup || !vsSetup.path || !vsSetup.sdk) {
- return callback(new Error('No usable installation of VS2017 found'))
- }
-
- log.verbose('find vs2017', 'using installation:', vsSetup.path)
- callback(null, { "path": vsSetup.path, "sdk": vsSetup.sdk })
- })
-
- child.stdin.end()
-}
-
-module.exports = findVS2017
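
Judging only from the parsing above, the PowerShell helper must print a single JSON object on stdout, with `path` and `sdk` required and `log` optional. A hypothetical successful payload:

```javascript
// Shape inferred from the checks in findVS2017(); the values are made up.
var examplePayload = {
  log: 'Found installation at C:\\VS2017\\Community', // optional, logged verbosely
  path: 'C:\\VS2017\\Community',                      // required
  sdk: '10.0.16299.0'                                 // required
}
console.log(JSON.stringify(examplePayload))
```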
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/install.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/install.js
deleted file mode 100644
index fa2e1c5430..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/install.js
+++ /dev/null
@@ -1,469 +0,0 @@
-
-module.exports = exports = install
-
-module.exports.test = { download: download, readCAFile: readCAFile }
-
-exports.usage = 'Install node development files for the specified node version.'
-
-/**
- * Module dependencies.
- */
-
-var fs = require('graceful-fs')
- , osenv = require('osenv')
- , tar = require('tar')
- , rm = require('rimraf')
- , path = require('path')
- , crypto = require('crypto')
- , zlib = require('zlib')
- , log = require('npmlog')
- , semver = require('semver')
- , fstream = require('fstream')
- , request = require('request')
- , minimatch = require('minimatch')
- , mkdir = require('mkdirp')
- , processRelease = require('./process-release')
- , win = process.platform == 'win32'
-
-function install (gyp, argv, callback) {
-
- var release = processRelease(argv, gyp, process.version, process.release)
-
- // ensure no double-callbacks happen
- function cb (err) {
- if (cb.done) return
- cb.done = true
- if (err) {
- log.warn('install', 'got an error, rolling back install')
- // roll-back the install if anything went wrong
- gyp.commands.remove([ release.versionDir ], function (err2) {
- callback(err)
- })
- } else {
- callback(null, release.version)
- }
- }
-
- // Determine which node dev files version we are installing
- log.verbose('install', 'input version string %j', release.version)
-
- if (!release.semver) {
- // could not parse the version string with semver
- return callback(new Error('Invalid version number: ' + release.version))
- }
-
- if (semver.lt(release.version, '0.8.0')) {
-    return callback(new Error('Minimum target version is `0.8.0`. Got: ' + release.version))
- }
-
- // 0.x.y-pre versions are not published yet and cannot be installed. Bail.
- if (release.semver.prerelease[0] === 'pre') {
- log.verbose('detected "pre" node version', release.version)
- if (gyp.opts.nodedir) {
- log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir)
- callback()
- } else {
- callback(new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead'))
- }
- return
- }
-
- // flatten version into String
- log.verbose('install', 'installing version: %s', release.versionDir)
-
- // the directory where the dev files will be installed
- var devDir = path.resolve(gyp.devDir, release.versionDir)
-
- // If '--ensure' was passed, then don't *always* install the version;
- // check if it is already installed, and only install when needed
- if (gyp.opts.ensure) {
- log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed')
- fs.stat(devDir, function (err, stat) {
- if (err) {
- if (err.code == 'ENOENT') {
- log.verbose('install', 'version not already installed, continuing with install', release.version)
- go()
- } else if (err.code == 'EACCES') {
- eaccesFallback()
- } else {
- cb(err)
- }
- return
- }
- log.verbose('install', 'version is already installed, need to check "installVersion"')
- var installVersionFile = path.resolve(devDir, 'installVersion')
- fs.readFile(installVersionFile, 'ascii', function (err, ver) {
- if (err && err.code != 'ENOENT') {
- return cb(err)
- }
- var installVersion = parseInt(ver, 10) || 0
- log.verbose('got "installVersion"', installVersion)
- log.verbose('needs "installVersion"', gyp.package.installVersion)
- if (installVersion < gyp.package.installVersion) {
- log.verbose('install', 'version is no good; reinstalling')
- go()
- } else {
- log.verbose('install', 'version is good')
- cb()
- }
- })
- })
- } else {
- go()
- }
-
- function getContentSha(res, callback) {
- var shasum = crypto.createHash('sha256')
- res.on('data', function (chunk) {
- shasum.update(chunk)
- }).on('end', function () {
- callback(null, shasum.digest('hex'))
- })
- }
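
`getContentSha()` is just a streaming sha256; here is a self-contained sketch of the same technique, hashing this very file:

```javascript
// Standalone equivalent of getContentSha() above, with error handling added.
var crypto = require('crypto')
var fs = require('fs')

function sha256OfStream (stream, callback) {
  var shasum = crypto.createHash('sha256')
  stream.on('data', function (chunk) { shasum.update(chunk) })
  stream.on('end', function () { callback(null, shasum.digest('hex')) })
  stream.on('error', callback)
}

sha256OfStream(fs.createReadStream(__filename), function (err, digest) {
  if (err) throw err
  console.log('sha256:', digest)
})
```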
-
- function go () {
-
- log.verbose('ensuring nodedir is created', devDir)
-
- // first create the dir for the node dev files
- mkdir(devDir, function (err, created) {
- if (err) {
- if (err.code == 'EACCES') {
- eaccesFallback()
- } else {
- cb(err)
- }
- return
- }
-
- if (created) {
- log.verbose('created nodedir', created)
- }
-
- // now download the node tarball
- var tarPath = gyp.opts.tarball
- var badDownload = false
- , extractCount = 0
- , gunzip = zlib.createGunzip()
- , extracter = tar.Extract({ path: devDir, strip: 1, filter: isValid })
-
- var contentShasums = {}
- var expectShasums = {}
-
- // checks if a file to be extracted from the tarball is valid.
- // only .h header files and the gyp files get extracted
- function isValid () {
- var name = this.path.substring(devDir.length + 1)
- var isValid = valid(name)
- if (name === '' && this.type === 'Directory') {
- // the first directory entry is ok
- return true
- }
- if (isValid) {
- log.verbose('extracted file from tarball', name)
- extractCount++
- } else {
- // invalid
- log.silly('ignoring from tarball', name)
- }
- return isValid
- }
-
- gunzip.on('error', cb)
- extracter.on('error', cb)
- extracter.on('end', afterTarball)
-
- // download the tarball, gunzip and extract!
-
- if (tarPath) {
- var input = fs.createReadStream(tarPath)
- input.pipe(gunzip).pipe(extracter)
- return
- }
-
- try {
- var req = download(gyp, process.env, release.tarballUrl)
- } catch (e) {
- return cb(e)
- }
-
- // something went wrong downloading the tarball?
- req.on('error', function (err) {
- if (err.code === 'ENOTFOUND') {
- return cb(new Error('This is most likely not a problem with node-gyp or the package itself and\n' +
-        'is related to network connectivity. In most cases you are behind a proxy or have bad\n' +
- 'network settings.'))
- }
- badDownload = true
- cb(err)
- })
-
- req.on('close', function () {
- if (extractCount === 0) {
- cb(new Error('Connection closed while downloading tarball file'))
- }
- })
-
- req.on('response', function (res) {
- if (res.statusCode !== 200) {
- badDownload = true
- cb(new Error(res.statusCode + ' response downloading ' + release.tarballUrl))
- return
- }
- // content checksum
- getContentSha(res, function (_, checksum) {
- var filename = path.basename(release.tarballUrl).trim()
- contentShasums[filename] = checksum
- log.verbose('content checksum', filename, checksum)
- })
-
- // start unzipping and untaring
- req.pipe(gunzip).pipe(extracter)
- })
-
- // invoked after the tarball has finished being extracted
- function afterTarball () {
- if (badDownload) return
- if (extractCount === 0) {
- return cb(new Error('There was a fatal problem while downloading/extracting the tarball'))
- }
- log.verbose('tarball', 'done parsing tarball')
- var async = 0
-
- if (win) {
- // need to download node.lib
- async++
- downloadNodeLib(deref)
- }
-
- // write the "installVersion" file
- async++
- var installVersionPath = path.resolve(devDir, 'installVersion')
- fs.writeFile(installVersionPath, gyp.package.installVersion + '\n', deref)
-
-      // Only download SHASUMS256.txt if not using tarPath override
-      if (!tarPath) {
-        // download SHASUMS256.txt
- async++
- downloadShasums(deref)
- }
-
- if (async === 0) {
- // no async tasks required
- cb()
- }
-
- function deref (err) {
- if (err) return cb(err)
-
- async--
- if (!async) {
- log.verbose('download contents checksum', JSON.stringify(contentShasums))
- // check content shasums
- for (var k in contentShasums) {
- log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k])
- if (contentShasums[k] !== expectShasums[k]) {
-            cb(new Error(k + ' local checksum ' + contentShasums[k] + ' does not match remote ' + expectShasums[k]))
- return
- }
- }
- cb()
- }
- }
- }
-
- function downloadShasums(done) {
- log.verbose('check download content checksum, need to download `SHASUMS256.txt`...')
- var shasumsPath = path.resolve(devDir, 'SHASUMS256.txt')
-
- log.verbose('checksum url', release.shasumsUrl)
- try {
- var req = download(gyp, process.env, release.shasumsUrl)
- } catch (e) {
- return cb(e)
- }
-
- req.on('error', done)
- req.on('response', function (res) {
- if (res.statusCode !== 200) {
- done(new Error(res.statusCode + ' status code downloading checksum'))
- return
- }
-
- var chunks = []
- res.on('data', function (chunk) {
- chunks.push(chunk)
- })
- res.on('end', function () {
- var lines = Buffer.concat(chunks).toString().trim().split('\n')
- lines.forEach(function (line) {
- var items = line.trim().split(/\s+/)
- if (items.length !== 2) return
-
-          // e.g. "<sha256 checksum>  ./node-v0.11.4.tar.gz"
- var name = items[1].replace(/^\.\//, '')
- expectShasums[name] = items[0]
- })
-
- log.verbose('checksum data', JSON.stringify(expectShasums))
- done()
- })
- })
- }
-
- function downloadNodeLib (done) {
- log.verbose('on Windows; need to download `' + release.name + '.lib`...')
- var dir32 = path.resolve(devDir, 'ia32')
- , dir64 = path.resolve(devDir, 'x64')
- , libPath32 = path.resolve(dir32, release.name + '.lib')
- , libPath64 = path.resolve(dir64, release.name + '.lib')
-
- log.verbose('32-bit ' + release.name + '.lib dir', dir32)
- log.verbose('64-bit ' + release.name + '.lib dir', dir64)
- log.verbose('`' + release.name + '.lib` 32-bit url', release.libUrl32)
- log.verbose('`' + release.name + '.lib` 64-bit url', release.libUrl64)
-
- var async = 2
- mkdir(dir32, function (err) {
- if (err) return done(err)
- log.verbose('streaming 32-bit ' + release.name + '.lib to:', libPath32)
-
- try {
- var req = download(gyp, process.env, release.libUrl32, cb)
- } catch (e) {
- return cb(e)
- }
-
- req.on('error', done)
- req.on('response', function (res) {
- if (res.statusCode !== 200) {
- done(new Error(res.statusCode + ' status code downloading 32-bit ' + release.name + '.lib'))
- return
- }
-
- getContentSha(res, function (_, checksum) {
- contentShasums[release.libPath32] = checksum
- log.verbose('content checksum', release.libPath32, checksum)
- })
-
- var ws = fs.createWriteStream(libPath32)
- ws.on('error', cb)
- req.pipe(ws)
- })
- req.on('end', function () {
- --async || done()
- })
- })
- mkdir(dir64, function (err) {
- if (err) return done(err)
- log.verbose('streaming 64-bit ' + release.name + '.lib to:', libPath64)
-
- try {
- var req = download(gyp, process.env, release.libUrl64, cb)
- } catch (e) {
- return cb(e)
- }
-
- req.on('error', done)
- req.on('response', function (res) {
- if (res.statusCode !== 200) {
- done(new Error(res.statusCode + ' status code downloading 64-bit ' + release.name + '.lib'))
- return
- }
-
- getContentSha(res, function (_, checksum) {
- contentShasums[release.libPath64] = checksum
- log.verbose('content checksum', release.libPath64, checksum)
- })
-
- var ws = fs.createWriteStream(libPath64)
- ws.on('error', cb)
- req.pipe(ws)
- })
- req.on('end', function () {
- --async || done()
- })
- })
- } // downloadNodeLib()
-
- }) // mkdir()
-
- } // go()
-
- /**
- * Checks if a given filename is "valid" for this installation.
- */
-
- function valid (file) {
- // header files
- return minimatch(file, '*.h', { matchBase: true }) ||
- minimatch(file, '*.gypi', { matchBase: true })
- }
-
- /**
- * The EACCES fallback is a workaround for npm's `sudo` behavior, where
- * it drops the permissions before invoking any child processes (like
- * node-gyp). The result is that the "nobody" user doesn't have
- * permission to create the dev dir. As a fallback, use tmpdir() as
- * the dev dir for this installation. This is not ideal, but at least
- * the compilation will succeed...
- */
-
- function eaccesFallback () {
- var tmpdir = osenv.tmpdir()
- gyp.devDir = path.resolve(tmpdir, '.node-gyp')
- log.warn('EACCES', 'user "%s" does not have permission to access the dev dir "%s"', osenv.user(), devDir)
- log.warn('EACCES', 'attempting to reinstall using temporary dev dir "%s"', gyp.devDir)
- if (process.cwd() == tmpdir) {
-      log.verbose('tmpdir == cwd', 'will automatically remove dev files afterwards to save disk space')
- gyp.todo.push({ name: 'remove', args: argv })
- }
- gyp.commands.install(argv, cb)
- }
-
-}
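
For reference, a standalone re-run of the `SHASUMS256.txt` parsing done inside `downloadShasums()` above; the file body and checksums here are hypothetical:

```javascript
// Each line is "<sha256>  <filename>", optionally with a leading "./".
var body =
  'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855  ./node-v4.4.0.tar.gz\n' +
  'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855  win-x64/node.lib\n'

var expectShasums = {}
body.trim().split('\n').forEach(function (line) {
  var items = line.trim().split(/\s+/)
  if (items.length !== 2) return
  expectShasums[items[1].replace(/^\.\//, '')] = items[0]
})
console.log(expectShasums)
// -> { 'node-v4.4.0.tar.gz': 'e3b0...b855', 'win-x64/node.lib': 'e3b0...b855' }
```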
-
-function download (gyp, env, url) {
- log.http('GET', url)
-
- var requestOpts = {
- uri: url
- , headers: {
- 'User-Agent': 'node-gyp v' + gyp.version + ' (node ' + process.version + ')'
- }
- }
-
- var cafile = gyp.opts.cafile
- if (cafile) {
- requestOpts.ca = readCAFile(cafile)
- }
-
- // basic support for a proxy server
- var proxyUrl = gyp.opts.proxy
- || env.http_proxy
- || env.HTTP_PROXY
- || env.npm_config_proxy
- if (proxyUrl) {
- if (/^https?:\/\//i.test(proxyUrl)) {
- log.verbose('download', 'using proxy url: "%s"', proxyUrl)
- requestOpts.proxy = proxyUrl
- } else {
- log.warn('download', 'ignoring invalid "proxy" config setting: "%s"', proxyUrl)
- }
- }
-
- var req = request(requestOpts)
- req.on('response', function (res) {
- log.http(res.statusCode, url)
- })
-
- return req
-}
-
-function readCAFile (filename) {
- // The CA file can contain multiple certificates so split on certificate
- // boundaries. [\S\s]*? is used to match everything including newlines.
- var ca = fs.readFileSync(filename, 'utf8')
- var re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g
- return ca.match(re)
-}
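
A self-contained demonstration of the certificate-splitting regex in `readCAFile()`; the PEM bodies are placeholders:

```javascript
// Two certificates separated by a comment line: the lazy [\S\s]*? quantifier
// keeps each match confined to one BEGIN/END pair, so we get two entries.
var bundle =
  '-----BEGIN CERTIFICATE-----\nAAA\n-----END CERTIFICATE-----\n' +
  '# comment between certs\n' +
  '-----BEGIN CERTIFICATE-----\nBBB\n-----END CERTIFICATE-----\n'
var re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g
console.log(bundle.match(re).length) // -> 2
```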
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/list.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/list.js
deleted file mode 100644
index 9d680a56a4..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/list.js
+++ /dev/null
@@ -1,33 +0,0 @@
-
-module.exports = exports = list
-
-exports.usage = 'Prints a listing of the currently installed node development files'
-
-/**
- * Module dependencies.
- */
-
-var fs = require('graceful-fs')
- , path = require('path')
- , log = require('npmlog')
-
-function list (gyp, args, callback) {
-
- var devDir = gyp.devDir
- log.verbose('list', 'using node-gyp dir:', devDir)
-
- // readdir() the node-gyp dir
- fs.readdir(devDir, onreaddir)
-
- function onreaddir (err, versions) {
- if (err && err.code != 'ENOENT') {
- return callback(err)
- }
- if (Array.isArray(versions)) {
- versions = versions.filter(function (v) { return v != 'current' })
- } else {
- versions = []
- }
- callback(null, versions)
- }
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/node-gyp.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/node-gyp.js
deleted file mode 100644
index 0dcea7298f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/node-gyp.js
+++ /dev/null
@@ -1,215 +0,0 @@
-
-/**
- * Module exports.
- */
-
-module.exports = exports = gyp
-
-/**
- * Module dependencies.
- */
-
-var fs = require('graceful-fs')
- , path = require('path')
- , nopt = require('nopt')
- , log = require('npmlog')
- , child_process = require('child_process')
- , EE = require('events').EventEmitter
- , inherits = require('util').inherits
- , commands = [
- // Module build commands
- 'build'
- , 'clean'
- , 'configure'
- , 'rebuild'
- // Development Header File management commands
- , 'install'
- , 'list'
- , 'remove'
- ]
- , aliases = {
- 'ls': 'list'
- , 'rm': 'remove'
- }
-
-// differentiate node-gyp's logs from npm's
-log.heading = 'gyp'
-
-/**
- * The `gyp` function.
- */
-
-function gyp () {
- return new Gyp()
-}
-
-function Gyp () {
- var self = this
-
- this.devDir = ''
- this.commands = {}
-
- commands.forEach(function (command) {
- self.commands[command] = function (argv, callback) {
- log.verbose('command', command, argv)
- return require('./' + command)(self, argv, callback)
- }
- })
-}
-inherits(Gyp, EE)
-exports.Gyp = Gyp
-var proto = Gyp.prototype
-
-/**
- * Export the contents of the package.json.
- */
-
-proto.package = require('../package')
-
-/**
- * nopt configuration definitions
- */
-
-proto.configDefs = {
- help: Boolean // everywhere
- , arch: String // 'configure'
- , cafile: String // 'install'
- , debug: Boolean // 'build'
- , directory: String // bin
- , make: String // 'build'
- , msvs_version: String // 'configure'
- , ensure: Boolean // 'install'
- , solution: String // 'build' (windows only)
- , proxy: String // 'install'
- , devdir: String // everywhere
- , nodedir: String // 'configure'
- , loglevel: String // everywhere
- , python: String // 'configure'
- , 'dist-url': String // 'install'
- , 'tarball': String // 'install'
- , jobs: String // 'build'
- , thin: String // 'configure'
-}
-
-/**
- * nopt shorthands
- */
-
-proto.shorthands = {
- release: '--no-debug'
- , C: '--directory'
- , debug: '--debug'
- , j: '--jobs'
- , silly: '--loglevel=silly'
- , verbose: '--loglevel=verbose'
- , silent: '--loglevel=silent'
-}
-
-/**
- * expose the command aliases for the bin file to use.
- */
-
-proto.aliases = aliases
-
-/**
- * Parses the given argv array and sets the 'opts',
- * 'argv' and 'command' properties.
- */
-
-proto.parseArgv = function parseOpts (argv) {
- this.opts = nopt(this.configDefs, this.shorthands, argv)
- this.argv = this.opts.argv.remain.slice()
-
- var commands = this.todo = []
-
- // create a copy of the argv array with aliases mapped
- argv = this.argv.map(function (arg) {
- // is this an alias?
- if (arg in this.aliases) {
- arg = this.aliases[arg]
- }
- return arg
- }, this)
-
- // process the mapped args into "command" objects ("name" and "args" props)
- argv.slice().forEach(function (arg) {
- if (arg in this.commands) {
- var args = argv.splice(0, argv.indexOf(arg))
- argv.shift()
- if (commands.length > 0) {
- commands[commands.length - 1].args = args
- }
- commands.push({ name: arg, args: [] })
- }
- }, this)
- if (commands.length > 0) {
- commands[commands.length - 1].args = argv.splice(0)
- }
-
- // support for inheriting config env variables from npm
- var npm_config_prefix = 'npm_config_'
- Object.keys(process.env).forEach(function (name) {
- if (name.indexOf(npm_config_prefix) !== 0) return
- var val = process.env[name]
- if (name === npm_config_prefix + 'loglevel') {
- log.level = val
- } else {
- // add the user-defined options to the config
- name = name.substring(npm_config_prefix.length)
- // gyp@741b7f1 enters an infinite loop when it encounters
- // zero-length options so ensure those don't get through.
- if (name) this.opts[name] = val
- }
- }, this)
-
- if (this.opts.loglevel) {
- log.level = this.opts.loglevel
- }
- log.resume()
-}
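
The splice-based loop above is easy to misread, so here is a standalone re-run of it with a hypothetical remaining-args array; arguments sitting between two command names are attached to the earlier command:

```javascript
var commands = ['build', 'clean', 'configure', 'rebuild', 'install', 'list', 'remove']
var argv = ['configure', 'foo=bar', 'build'] // hypothetical opts.argv.remain
var todo = []

argv.slice().forEach(function (arg) {
  if (commands.indexOf(arg) !== -1) {
    var args = argv.splice(0, argv.indexOf(arg))
    argv.shift()
    if (todo.length > 0) todo[todo.length - 1].args = args
    todo.push({ name: arg, args: [] })
  }
})
if (todo.length > 0) todo[todo.length - 1].args = argv.splice(0)

console.log(JSON.stringify(todo))
// -> [{"name":"configure","args":["foo=bar"]},{"name":"build","args":[]}]
```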
-
-/**
- * Spawns a child process and emits a 'spawn' event.
- */
-
-proto.spawn = function spawn (command, args, opts) {
- if (!opts) opts = {}
- if (!opts.silent && !opts.stdio) {
- opts.stdio = [ 0, 1, 2 ]
- }
- var cp = child_process.spawn(command, args, opts)
- log.info('spawn', command)
- log.info('spawn args', args)
- return cp
-}
-
-/**
- * Returns the usage instructions for node-gyp.
- */
-
-proto.usage = function usage () {
- var str = [
- ''
- , ' Usage: node-gyp <command> [options]'
- , ''
- , ' where <command> is one of:'
- , commands.map(function (c) {
- return ' - ' + c + ' - ' + require('./' + c).usage
- }).join('\n')
- , ''
- , 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..')
- , 'node@' + process.versions.node
- ].join('\n')
- return str
-}
-
-/**
- * Version number getter.
- */
-
-Object.defineProperty(proto, 'version', {
- get: function () {
- return this.package.version
- }
- , enumerable: true
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/process-release.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/process-release.js
deleted file mode 100644
index f9ba98f199..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/process-release.js
+++ /dev/null
@@ -1,155 +0,0 @@
-var semver = require('semver')
- , url = require('url')
- , path = require('path')
- , log = require('npmlog')
-
- // versions where -headers.tar.gz started shipping
- , headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42'
- , bitsre = /\/win-(x86|x64)\//
- , bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should
- // have been "x86"
-
-// Captures all the logic required to determine download URLs, local directory and
-// file names. Inputs come from command-line switches (--target, --dist-url),
-// `process.version` and `process.release` where it exists.
-function processRelease (argv, gyp, defaultVersion, defaultRelease) {
- var version = (semver.valid(argv[0]) && argv[0]) || gyp.opts.target || defaultVersion
- , versionSemver = semver.parse(version)
- , overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl
- , isDefaultVersion
- , isIojs
- , name
- , distBaseUrl
- , baseUrl
- , libUrl32
- , libUrl64
- , tarballUrl
- , canGetHeaders
-
- if (!versionSemver) {
- // not a valid semver string, nothing we can do
- return { version: version }
- }
- // flatten version into String
- version = versionSemver.version
-
- // defaultVersion should come from process.version so ought to be valid semver
- isDefaultVersion = version === semver.parse(defaultVersion).version
-
- // can't use process.release if we're using --target=x.y.z
- if (!isDefaultVersion)
- defaultRelease = null
-
- if (defaultRelease) {
- // v3 onward, has process.release
- name = defaultRelease.name.replace(/io\.js/, 'iojs') // remove the '.' for directory naming purposes
- isIojs = name === 'iojs'
- } else {
- // old node or alternative --target=
- // semver.satisfies() doesn't like prerelease tags so test major directly
- isIojs = versionSemver.major >= 1 && versionSemver.major < 4
- name = isIojs ? 'iojs' : 'node'
- }
-
- // check for the nvm.sh standard mirror env variables
- if (!overrideDistUrl) {
- if (isIojs) {
- if (process.env.IOJS_ORG_MIRROR) {
- overrideDistUrl = process.env.IOJS_ORG_MIRROR
- } else if (process.env.NVM_IOJS_ORG_MIRROR) {// remove on next semver-major
- overrideDistUrl = process.env.NVM_IOJS_ORG_MIRROR
- log.warn('download',
- 'NVM_IOJS_ORG_MIRROR is deprecated and will be removed in node-gyp v4, ' +
- 'please use IOJS_ORG_MIRROR')
- }
- } else {
- if (process.env.NODEJS_ORG_MIRROR) {
- overrideDistUrl = process.env.NODEJS_ORG_MIRROR
- } else if (process.env.NVM_NODEJS_ORG_MIRROR) {// remove on next semver-major
- overrideDistUrl = process.env.NVM_NODEJS_ORG_MIRROR
- log.warn('download',
- 'NVM_NODEJS_ORG_MIRROR is deprecated and will be removed in node-gyp v4, ' +
- 'please use NODEJS_ORG_MIRROR')
- }
- }
- }
-
- if (overrideDistUrl)
- log.verbose('download', 'using dist-url', overrideDistUrl)
-
- if (overrideDistUrl)
- distBaseUrl = overrideDistUrl.replace(/\/+$/, '')
- else
- distBaseUrl = isIojs ? 'https://iojs.org/download/release' : 'https://nodejs.org/dist'
- distBaseUrl += '/v' + version + '/'
-
- // new style, based on process.release so we have a lot of the data we need
- if (defaultRelease && defaultRelease.headersUrl && !overrideDistUrl) {
- baseUrl = url.resolve(defaultRelease.headersUrl, './')
- libUrl32 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x86', versionSemver.major)
- libUrl64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x64', versionSemver.major)
-
- return {
- version: version,
- semver: versionSemver,
- name: name,
- baseUrl: baseUrl,
- tarballUrl: defaultRelease.headersUrl,
- shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'),
- versionDir: (name !== 'node' ? name + '-' : '') + version,
- libUrl32: libUrl32,
- libUrl64: libUrl64,
- libPath32: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)),
- libPath64: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path))
- }
- }
-
- // older versions without process.release are captured here and we have to make
- // a lot of assumptions, additionally if you --target=x.y.z then we can't use the
- // current process.release
-
- baseUrl = distBaseUrl
- libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major)
- libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major)
- // making the bold assumption that anything with a version number >3.0.0 will
- // have a *-headers.tar.gz file in its dist location, even some frankenstein
- // custom version
- canGetHeaders = semver.satisfies(versionSemver, headersTarballRange)
- tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? '-headers' : '') + '.tar.gz')
-
- return {
- version: version,
- semver: versionSemver,
- name: name,
- baseUrl: baseUrl,
- tarballUrl: tarballUrl,
- shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'),
- versionDir: (name !== 'node' ? name + '-' : '') + version,
- libUrl32: libUrl32,
- libUrl64: libUrl64,
- libPath32: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)),
- libPath64: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path))
- }
-}
-
-function normalizePath (p) {
- return path.normalize(p).replace(/\\/g, '/')
-}
-
-function resolveLibUrl (name, defaultUrl, arch, versionMajor) {
- var base = url.resolve(defaultUrl, './')
- , hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl))
-
- if (!hasLibUrl) {
- // let's assume it's a baseUrl then
- if (versionMajor >= 1)
-      return url.resolve(base, 'win-' + arch + '/' + name + '.lib')
- // prior to io.js@1.0.0 32-bit node.lib lives in /, 64-bit lives in /x64/
- return url.resolve(base, (arch === 'x64' ? 'x64/' : '') + name + '.lib')
- }
-
- // else we have a proper url to a .lib, just make sure it's the right arch
- return defaultUrl.replace(versionMajor === 3 ? bitsreV3 : bitsre, '/win-' + arch + '/')
-}
-
-module.exports = processRelease
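
To make the URL logic concrete, a hedged example of what `processRelease()` computes for an old-style target with no `process.release` data, assuming none of the `*_ORG_MIRROR` environment variables are set:

```javascript
// Hypothetical invocation, e.g. for `node-gyp install --target=4.4.0`.
var release = processRelease(['4.4.0'], { opts: {} }, '4.4.0', null)
console.log(release.name)       // 'node' (major 4 is past the io.js 1.x-3.x window)
console.log(release.tarballUrl) // 'https://nodejs.org/dist/v4.4.0/node-v4.4.0-headers.tar.gz'
console.log(release.libUrl64)   // 'https://nodejs.org/dist/v4.4.0/win-x64/node.lib'
console.log(release.versionDir) // '4.4.0'
```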
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/rebuild.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/rebuild.js
deleted file mode 100644
index 4c6f472aa7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/rebuild.js
+++ /dev/null
@@ -1,14 +0,0 @@
-
-module.exports = exports = rebuild
-
-exports.usage = 'Runs "clean", "configure" and "build" all at once'
-
-function rebuild (gyp, argv, callback) {
-
- gyp.todo.push(
- { name: 'clean', args: [] }
- , { name: 'configure', args: argv }
- , { name: 'build', args: [] }
- )
- process.nextTick(callback)
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/remove.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/remove.js
deleted file mode 100644
index eb80981b88..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/lib/remove.js
+++ /dev/null
@@ -1,52 +0,0 @@
-
-module.exports = exports = remove
-
-exports.usage = 'Removes the node development files for the specified version'
-
-/**
- * Module dependencies.
- */
-
-var fs = require('fs')
- , rm = require('rimraf')
- , path = require('path')
- , log = require('npmlog')
- , semver = require('semver')
-
-function remove (gyp, argv, callback) {
-
- var devDir = gyp.devDir
- log.verbose('remove', 'using node-gyp dir:', devDir)
-
- // get the user-specified version to remove
- var version = argv[0] || gyp.opts.target
- log.verbose('remove', 'removing target version:', version)
-
- if (!version) {
- return callback(new Error('You must specify a version number to remove. Ex: "' + process.version + '"'))
- }
-
- var versionSemver = semver.parse(version)
- if (versionSemver) {
- // flatten the version Array into a String
- version = versionSemver.version
- }
-
- var versionPath = path.resolve(gyp.devDir, version)
- log.verbose('remove', 'removing development files for version:', version)
-
-  // first check if it's even installed
- fs.stat(versionPath, function (err, stat) {
- if (err) {
- if (err.code == 'ENOENT') {
- callback(null, 'version was already uninstalled: ' + version)
- } else {
- callback(err)
- }
- return
- }
- // Go ahead and delete the dir
- rm(versionPath, callback)
- })
-
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.npmignore b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.npmignore
deleted file mode 100644
index 494272a81a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.npmignore
+++ /dev/null
@@ -1,5 +0,0 @@
-.*.swp
-node_modules/
-examples/deep-copy/
-examples/path/
-examples/filter-copy/
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.travis.yml b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.travis.yml
deleted file mode 100644
index 9f5972ab5a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/.travis.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-language: node_js
-node_js:
- - "6"
- - "4"
- - "0.10"
- - "0.12"
-before_install:
- - "npm config set spin false"
- - "npm install -g npm/npm"
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/README.md
deleted file mode 100644
index 9d8cb77e5c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/README.md
+++ /dev/null
@@ -1,76 +0,0 @@
-Like FS streams, but with stat on them, and supporting directories and
-symbolic links, as well as normal files. Also, you can use this to set
-the stats on a file, even if you don't change its contents, or to create
-a symlink, etc.
-
-So, for example, you can "write" a directory, and it'll call `mkdir`. You
-can specify a uid and gid, and it'll call `chown`. You can specify a
-`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink
-and provide a `linkpath` and it'll call `symlink`.
-
-Note that it won't automatically resolve symbolic links. So, if you
-call `fstream.Reader('/some/symlink')` then you'll get an object
-that stats and then ends immediately (since it has no data). To follow
-symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:
-true })`.
-
-There are various checks to make sure that the bytes emitted are the
-same as the intended size, if the size is set.
-
-## Examples
-
-```javascript
-fstream
- .Writer({ path: "path/to/file"
- , mode: 0755
- , size: 6
- })
- .write("hello\n")
- .end()
-```
-
-This will create the directories if they're missing, and then write
-`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have
-been written when it's done.
-
-```javascript
-fstream
- .Writer({ path: "path/to/file"
- , mode: 0755
- , size: 6
- , flags: "a"
- })
- .write("hello\n")
- .end()
-```
-
-You can pass flags in, if you want to append to a file.
-
-```javascript
-fstream
- .Writer({ path: "path/to/symlink"
- , linkpath: "./file"
-          , isSymbolicLink: true
- , mode: "0755" // octal strings supported
- })
- .end()
-```
-
-If isSymbolicLink is a function, it'll be called, and if it returns
-true, then it'll treat it as a symlink. If it's not a function, then
-any truthy value will make a symlink, or you can set `type:
-'SymbolicLink'`, which does the same thing.
-
-Note that the linkpath is relative to the symbolic link location, not
-the parent dir or cwd.
-
-```javascript
-fstream
- .Reader("path/to/dir")
- .pipe(fstream.Writer("path/to/other/dir"))
-```
-
-This will do the equivalent of `cp -Rp path/to/dir path/to/other/dir`. If the other
-dir exists and isn't a directory, then it'll emit an error. It'll also
-set the uid, gid, mode, etc. to be identical. In this way, it's more
-like `rsync -a` than simply a copy.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js
deleted file mode 100644
index 83dadef8a6..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js
+++ /dev/null
@@ -1,134 +0,0 @@
-var fstream = require('../fstream.js')
-var path = require('path')
-
-var r = fstream.Reader({
- path: path.dirname(__dirname),
- filter: function () {
- return !this.basename.match(/^\./) &&
- !this.basename.match(/^node_modules$/) &&
- !this.basename.match(/^deep-copy$/) &&
- !this.basename.match(/^filter-copy$/)
- }
-})
-
-// this writer will only write directories
-var w = fstream.Writer({
- path: path.resolve(__dirname, 'filter-copy'),
- type: 'Directory',
- filter: function () {
- return this.type === 'Directory'
- }
-})
-
-var indent = ''
-
-r.on('entry', appears)
-r.on('ready', function () {
- console.error('ready to begin!', r.path)
-})
-
-function appears (entry) {
- console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename)
- if (foggy) {
- console.error('FOGGY!')
- var p = entry
- do {
- console.error(p.depth, p.path, p._paused)
- p = p.parent
- } while (p)
-
- throw new Error('\u001b[mshould not have entries while foggy')
- }
- indent += '\t'
- entry.on('data', missile(entry))
- entry.on('end', runaway(entry))
- entry.on('entry', appears)
-}
-
-var foggy
-function missile (entry) {
- function liftFog (who) {
- if (!foggy) return
- if (who) {
- console.error('%s breaks the spell!', who && who.path)
- } else {
- console.error('the spell expires!')
- }
- console.error('\u001b[mthe fog lifts!\n')
- clearTimeout(foggy)
- foggy = null
- if (entry._paused) entry.resume()
- }
-
- if (entry.type === 'Directory') {
- var ended = false
- entry.once('end', function () { ended = true })
- return function (c) {
- // throw in some pathological pause()/resume() behavior
- // just for extra fun.
- process.nextTick(function () {
- if (!foggy && !ended) { // && Math.random() < 0.3) {
- console.error(indent + '%s casts a spell', entry.basename)
- console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
- entry.pause()
- entry.once('resume', liftFog)
- foggy = setTimeout(liftFog, 1000)
- }
- })
- }
- }
-
- return function (c) {
- var e = Math.random() < 0.5
- console.error(indent + '%s %s for %d damage!',
- entry.basename,
- e ? 'is struck' : 'fires a chunk',
- c.length)
- }
-}
-
-function runaway (entry) {
- return function () {
- var e = Math.random() < 0.5
- console.error(indent + '%s %s',
- entry.basename,
- e ? 'turns to flee' : 'is vanquished!')
- indent = indent.slice(0, -1)
- }
-}
-
-w.on('entry', attacks)
-// w.on('ready', function () { attacks(w) })
-function attacks (entry) {
- console.error(indent + '%s %s!', entry.basename,
- entry.type === 'Directory' ? 'calls for backup' : 'attacks')
- entry.on('entry', attacks)
-}
-
-var ended = false
-var i = 1
-r.on('end', function () {
- if (foggy) clearTimeout(foggy)
- console.error("\u001b[mIT'S OVER!!")
- console.error('A WINNAR IS YOU!')
-
- console.log('ok ' + (i++) + ' A WINNAR IS YOU')
- ended = true
- // now go through and verify that everything in there is a dir.
- var p = path.resolve(__dirname, 'filter-copy')
- var checker = fstream.Reader({ path: p })
- checker.checker = true
- checker.on('child', function (e) {
- var ok = e.type === 'Directory'
- console.log((ok ? '' : 'not ') + 'ok ' + (i++) +
- ' should be a dir: ' +
- e.path.substr(checker.path.length + 1))
- })
-})
-
-process.on('exit', function () {
- console.log((ended ? '' : 'not ') + 'ok ' + (i) + ' ended')
- console.log('1..' + i)
-})
-
-r.pipe(w)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/pipe.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/pipe.js
deleted file mode 100644
index 3de42ef32b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/pipe.js
+++ /dev/null
@@ -1,118 +0,0 @@
-var fstream = require('../fstream.js')
-var path = require('path')
-
-var r = fstream.Reader({
- path: path.dirname(__dirname),
- filter: function () {
- return !this.basename.match(/^\./) &&
- !this.basename.match(/^node_modules$/) &&
- !this.basename.match(/^deep-copy$/)
- }
-})
-
-var w = fstream.Writer({
- path: path.resolve(__dirname, 'deep-copy'),
- type: 'Directory'
-})
-
-var indent = ''
-
-r.on('entry', appears)
-r.on('ready', function () {
- console.error('ready to begin!', r.path)
-})
-
-function appears (entry) {
- console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry)
- if (foggy) {
- console.error('FOGGY!')
- var p = entry
- do {
- console.error(p.depth, p.path, p._paused)
- p = p.parent
- } while (p)
-
- throw new Error('\u001b[mshould not have entries while foggy')
- }
- indent += '\t'
- entry.on('data', missile(entry))
- entry.on('end', runaway(entry))
- entry.on('entry', appears)
-}
-
-var foggy
-function missile (entry) {
- function liftFog (who) {
- if (!foggy) return
- if (who) {
- console.error('%s breaks the spell!', who && who.path)
- } else {
- console.error('the spell expires!')
- }
- console.error('\u001b[mthe fog lifts!\n')
- clearTimeout(foggy)
- foggy = null
- if (entry._paused) entry.resume()
- }
-
- if (entry.type === 'Directory') {
- var ended = false
- entry.once('end', function () { ended = true })
- return function (c) {
- // throw in some pathological pause()/resume() behavior
- // just for extra fun.
- process.nextTick(function () {
- if (!foggy && !ended) { // && Math.random() < 0.3) {
- console.error(indent + '%s casts a spell', entry.basename)
- console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
- entry.pause()
- entry.once('resume', liftFog)
- foggy = setTimeout(liftFog, 10)
- }
- })
- }
- }
-
- return function (c) {
- var e = Math.random() < 0.5
- console.error(indent + '%s %s for %d damage!',
- entry.basename,
- e ? 'is struck' : 'fires a chunk',
- c.length)
- }
-}
-
-function runaway (entry) {
- return function () {
- var e = Math.random() < 0.5
- console.error(indent + '%s %s',
- entry.basename,
- e ? 'turns to flee' : 'is vanquished!')
- indent = indent.slice(0, -1)
- }
-}
-
-w.on('entry', attacks)
-// w.on('ready', function () { attacks(w) })
-function attacks (entry) {
- console.error(indent + '%s %s!', entry.basename,
- entry.type === 'Directory' ? 'calls for backup' : 'attacks')
- entry.on('entry', attacks)
-}
-
-var ended = false
-r.on('end', function () {
- if (foggy) clearTimeout(foggy)
- console.error("\u001b[mIT'S OVER!!")
- console.error('A WINNAR IS YOU!')
-
- console.log('ok 1 A WINNAR IS YOU')
- ended = true
-})
-
-process.on('exit', function () {
- console.log((ended ? '' : 'not ') + 'ok 2 ended')
- console.log('1..2')
-})
-
-r.pipe(w)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/reader.js
deleted file mode 100644
index 19affbe7e6..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/reader.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var fstream = require('../fstream.js')
-var tap = require('tap')
-var fs = require('fs')
-var path = require('path')
-var dir = path.dirname(__dirname)
-
-tap.test('reader test', function (t) {
- var children = -1
- var gotReady = false
- var ended = false
-
- var r = fstream.Reader({
- path: dir,
- filter: function () {
- // return this.parent === r
- return this.parent === r || this === r
- }
- })
-
- r.on('ready', function () {
- gotReady = true
- children = fs.readdirSync(dir).length
- console.error('Setting expected children to ' + children)
- t.equal(r.type, 'Directory', 'should be a directory')
- })
-
- r.on('entry', function (entry) {
- children--
- if (!gotReady) {
- t.fail('children before ready!')
- }
- t.equal(entry.dirname, r.path, 'basename is parent dir')
- })
-
- r.on('error', function (er) {
- t.fail(er)
- t.end()
- process.exit(1)
- })
-
- r.on('end', function () {
- t.equal(children, 0, 'should have seen all children')
- ended = true
- })
-
- var closed = false
- r.on('close', function () {
- t.ok(ended, 'saw end before close')
- t.notOk(closed, 'close should only happen once')
- closed = true
- t.end()
- })
-})
-
-tap.test('reader error test', function (t) {
- // assumes non-root on a *nix system
- var r = fstream.Reader({ path: '/etc/shadow' })
-
- r.once('error', function (er) {
- t.ok(true)
- t.end()
- })
-
- r.on('end', function () {
- t.fail('reader ended without error')
- t.end()
- })
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js
deleted file mode 100644
index 19e81eea9f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js
+++ /dev/null
@@ -1,27 +0,0 @@
-var fstream = require('../fstream.js')
-var notOpen = false
-process.chdir(__dirname)
-
-fstream
- .Writer({
- path: 'path/to/symlink',
- linkpath: './file',
- isSymbolicLink: true,
- mode: '0755' // octal strings supported
- })
- .on('close', function () {
- notOpen = true
- var fs = require('fs')
- var s = fs.lstatSync('path/to/symlink')
- var isSym = s.isSymbolicLink()
- console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink')
- var t = fs.readlinkSync('path/to/symlink')
- var isTarget = t === './file'
- console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file')
- })
- .end()
-
-process.on('exit', function () {
- console.log((notOpen ? '' : 'not ') + 'ok 3 should be closed')
- console.log('1..3')
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/fstream.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/fstream.js
deleted file mode 100644
index c0eb3bea78..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/fstream.js
+++ /dev/null
@@ -1,35 +0,0 @@
-exports.Abstract = require('./lib/abstract.js')
-exports.Reader = require('./lib/reader.js')
-exports.Writer = require('./lib/writer.js')
-
-exports.File = {
- Reader: require('./lib/file-reader.js'),
- Writer: require('./lib/file-writer.js')
-}
-
-exports.Dir = {
- Reader: require('./lib/dir-reader.js'),
- Writer: require('./lib/dir-writer.js')
-}
-
-exports.Link = {
- Reader: require('./lib/link-reader.js'),
- Writer: require('./lib/link-writer.js')
-}
-
-exports.Proxy = {
- Reader: require('./lib/proxy-reader.js'),
- Writer: require('./lib/proxy-writer.js')
-}
-
-exports.Reader.Dir = exports.DirReader = exports.Dir.Reader
-exports.Reader.File = exports.FileReader = exports.File.Reader
-exports.Reader.Link = exports.LinkReader = exports.Link.Reader
-exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader
-
-exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer
-exports.Writer.File = exports.FileWriter = exports.File.Writer
-exports.Writer.Link = exports.LinkWriter = exports.Link.Writer
-exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer
-
-exports.collect = require('./lib/collect.js')
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/abstract.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/abstract.js
deleted file mode 100644
index 97c120e1d5..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/abstract.js
+++ /dev/null
@@ -1,85 +0,0 @@
-// the parent class for all fstreams.
-
-module.exports = Abstract
-
-var Stream = require('stream').Stream
-var inherits = require('inherits')
-
-function Abstract () {
- Stream.call(this)
-}
-
-inherits(Abstract, Stream)
-
-Abstract.prototype.on = function (ev, fn) {
- if (ev === 'ready' && this.ready) {
- process.nextTick(fn.bind(this))
- } else {
- Stream.prototype.on.call(this, ev, fn)
- }
- return this
-}
-
-Abstract.prototype.abort = function () {
- this._aborted = true
- this.emit('abort')
-}
-
-Abstract.prototype.destroy = function () {}
-
-Abstract.prototype.warn = function (msg, code) {
- var self = this
- var er = decorate(msg, code, self)
-  if (!self.listeners('warn').length) {
- console.error('%s %s\n' +
- 'path = %s\n' +
- 'syscall = %s\n' +
- 'fstream_type = %s\n' +
- 'fstream_path = %s\n' +
- 'fstream_unc_path = %s\n' +
- 'fstream_class = %s\n' +
- 'fstream_stack =\n%s\n',
- code || 'UNKNOWN',
- er.stack,
- er.path,
- er.syscall,
- er.fstream_type,
- er.fstream_path,
- er.fstream_unc_path,
- er.fstream_class,
- er.fstream_stack.join('\n'))
- } else {
- self.emit('warn', er)
- }
-}
-
-Abstract.prototype.info = function (msg, code) {
- this.emit('info', msg, code)
-}
-
-Abstract.prototype.error = function (msg, code, th) {
- var er = decorate(msg, code, this)
- if (th) throw er
- else this.emit('error', er)
-}
-
-function decorate (er, code, self) {
- if (!(er instanceof Error)) er = new Error(er)
- er.code = er.code || code
- er.path = er.path || self.path
- er.fstream_type = er.fstream_type || self.type
- er.fstream_path = er.fstream_path || self.path
- if (self._path !== self.path) {
- er.fstream_unc_path = er.fstream_unc_path || self._path
- }
- if (self.linkpath) {
- er.fstream_linkpath = er.fstream_linkpath || self.linkpath
- }
- er.fstream_class = er.fstream_class || self.constructor.name
- er.fstream_stack = er.fstream_stack ||
- new Error().stack.split(/\n/).slice(3).map(function (s) {
- return s.replace(/^ {4}at /, '')
- })
-
- return er
-}
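
The warn/error path above runs every surfaced error through decorate(), which attaches fstream_* metadata before emitting. A sketch of observing it (assumes fstream@1.0.11; the path is illustrative):

```javascript
var fstream = require('fstream')

var reader = fstream.Reader({ path: '.', type: 'Directory' })
reader.on('warn', function (er) {
  // decorate() has attached code, path, fstream_type, fstream_class
  // and a trimmed fstream_stack to the error object.
  console.error('warning:', er.code, er.fstream_path)
})
```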
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/collect.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/collect.js
deleted file mode 100644
index e5d4f35833..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/collect.js
+++ /dev/null
@@ -1,70 +0,0 @@
-module.exports = collect
-
-function collect (stream) {
- if (stream._collected) return
-
- if (stream._paused) return stream.on('resume', collect.bind(null, stream))
-
- stream._collected = true
- stream.pause()
-
- stream.on('data', save)
- stream.on('end', save)
- var buf = []
- function save (b) {
- if (typeof b === 'string') b = new Buffer(b)
- if (Buffer.isBuffer(b) && !b.length) return
- buf.push(b)
- }
-
- stream.on('entry', saveEntry)
- var entryBuffer = []
- function saveEntry (e) {
- collect(e)
- entryBuffer.push(e)
- }
-
- stream.on('proxy', proxyPause)
- function proxyPause (p) {
- p.pause()
- }
-
- // replace the pipe method with a new version that will
- // unlock the buffered stuff. if you just call .pipe()
- // without a destination, then it'll re-play the events.
- stream.pipe = (function (orig) {
- return function (dest) {
- // console.error(' === open the pipes', dest && dest.path)
-
- // let the entries flow through one at a time.
- // Once they're all done, then we can resume completely.
- var e = 0
- ;(function unblockEntry () {
- var entry = entryBuffer[e++]
- // console.error(" ==== unblock entry", entry && entry.path)
- if (!entry) return resume()
- entry.on('end', unblockEntry)
- if (dest) dest.add(entry)
- else stream.emit('entry', entry)
- })()
-
- function resume () {
- stream.removeListener('entry', saveEntry)
- stream.removeListener('data', save)
- stream.removeListener('end', save)
-
- stream.pipe = orig
- if (dest) stream.pipe(dest)
-
- buf.forEach(function (b) {
- if (b) stream.emit('data', b)
- else stream.emit('end')
- })
-
- stream.resume()
- }
-
- return dest
- }
- })(stream.pipe)
-}
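
collect() pauses a stream, buffers its data/entry events, and swaps in a pipe() that replays the buffered events into the eventual destination before restoring the original. A sketch of that contract (fstream@1.0.11; the file path is hypothetical):

```javascript
var fstream = require('fstream')

var reader = fstream.Reader('some-file.txt') // hypothetical path
fstream.collect(reader) // pause and buffer everything the reader emits

setTimeout(function () {
  // The patched pipe() replays the buffered data into the destination,
  // then restores the original pipe() and resumes the stream.
  reader.pipe(process.stdout)
}, 1000)
```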
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js
deleted file mode 100644
index 820cdc85a8..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js
+++ /dev/null
@@ -1,252 +0,0 @@
-// A thing that emits "entry" events with Reader objects
-// Pausing it causes it to stop emitting entry events, and also
-// pauses the current entry if there is one.
-
-module.exports = DirReader
-
-var fs = require('graceful-fs')
-var inherits = require('inherits')
-var path = require('path')
-var Reader = require('./reader.js')
-var assert = require('assert').ok
-
-inherits(DirReader, Reader)
-
-function DirReader (props) {
- var self = this
- if (!(self instanceof DirReader)) {
- throw new Error('DirReader must be called as constructor.')
- }
-
- // should already be established as a Directory type
- if (props.type !== 'Directory' || !props.Directory) {
- throw new Error('Non-directory type ' + props.type)
- }
-
- self.entries = null
- self._index = -1
- self._paused = false
- self._length = -1
-
- if (props.sort) {
- this.sort = props.sort
- }
-
- Reader.call(this, props)
-}
-
-DirReader.prototype._getEntries = function () {
- var self = this
-
- // race condition. might pause() before calling _getEntries,
- // and then resume, and try to get them a second time.
- if (self._gotEntries) return
- self._gotEntries = true
-
- fs.readdir(self._path, function (er, entries) {
- if (er) return self.error(er)
-
- self.entries = entries
-
- self.emit('entries', entries)
- if (self._paused) self.once('resume', processEntries)
- else processEntries()
-
- function processEntries () {
- self._length = self.entries.length
- if (typeof self.sort === 'function') {
- self.entries = self.entries.sort(self.sort.bind(self))
- }
- self._read()
- }
- })
-}
-
-// start walking the dir, and emit an "entry" event for each one.
-DirReader.prototype._read = function () {
- var self = this
-
- if (!self.entries) return self._getEntries()
-
- if (self._paused || self._currentEntry || self._aborted) {
- // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted)
- return
- }
-
- self._index++
- if (self._index >= self.entries.length) {
- if (!self._ended) {
- self._ended = true
- self.emit('end')
- self.emit('close')
- }
- return
- }
-
- // ok, handle this one, then.
-
- // save creating a proxy, by stat'ing the thing now.
- var p = path.resolve(self._path, self.entries[self._index])
- assert(p !== self._path)
- assert(self.entries[self._index])
-
- // set this to prevent trying to _read() again in the stat time.
- self._currentEntry = p
- fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) {
- if (er) return self.error(er)
-
- var who = self._proxy || self
-
- stat.path = p
- stat.basename = path.basename(p)
- stat.dirname = path.dirname(p)
- var childProps = self.getChildProps.call(who, stat)
- childProps.path = p
- childProps.basename = path.basename(p)
- childProps.dirname = path.dirname(p)
-
- var entry = Reader(childProps, stat)
-
- // console.error("DR Entry", p, stat.size)
-
- self._currentEntry = entry
-
- // "entry" events are for direct entries in a specific dir.
- // "child" events are for any and all children at all levels.
- // This nomenclature is not completely final.
-
- entry.on('pause', function (who) {
- if (!self._paused && !entry._disowned) {
- self.pause(who)
- }
- })
-
- entry.on('resume', function (who) {
- if (self._paused && !entry._disowned) {
- self.resume(who)
- }
- })
-
- entry.on('stat', function (props) {
- self.emit('_entryStat', entry, props)
- if (entry._aborted) return
- if (entry._paused) {
- entry.once('resume', function () {
- self.emit('entryStat', entry, props)
- })
- } else self.emit('entryStat', entry, props)
- })
-
- entry.on('ready', function EMITCHILD () {
- // console.error("DR emit child", entry._path)
- if (self._paused) {
- // console.error(" DR emit child - try again later")
- // pause the child, and emit the "entry" event once we drain.
- // console.error("DR pausing child entry")
- entry.pause(self)
- return self.once('resume', EMITCHILD)
- }
-
- // skip over sockets. they can't be piped around properly,
- // so there's really no sense even acknowledging them.
- // if someone really wants to see them, they can listen to
- // the "socket" events.
- if (entry.type === 'Socket') {
- self.emit('socket', entry)
- } else {
- self.emitEntry(entry)
- }
- })
-
- var ended = false
- entry.on('close', onend)
- entry.on('disown', onend)
- function onend () {
- if (ended) return
- ended = true
- self.emit('childEnd', entry)
- self.emit('entryEnd', entry)
- self._currentEntry = null
- if (!self._paused) {
- self._read()
- }
- }
-
- // XXX Remove this. Works in node as of 0.6.2 or so.
- // Long filenames should not break stuff.
- entry.on('error', function (er) {
- if (entry._swallowErrors) {
- self.warn(er)
- entry.emit('end')
- entry.emit('close')
- } else {
- self.emit('error', er)
- }
- })
-
- // proxy up some events.
- ;[
- 'child',
- 'childEnd',
- 'warn'
- ].forEach(function (ev) {
- entry.on(ev, self.emit.bind(self, ev))
- })
- })
-}
-
-DirReader.prototype.disown = function (entry) {
- entry.emit('beforeDisown')
- entry._disowned = true
- entry.parent = entry.root = null
- if (entry === this._currentEntry) {
- this._currentEntry = null
- }
- entry.emit('disown')
-}
-
-DirReader.prototype.getChildProps = function () {
- return {
- depth: this.depth + 1,
- root: this.root || this,
- parent: this,
- follow: this.follow,
- filter: this.filter,
- sort: this.props.sort,
- hardlinks: this.props.hardlinks
- }
-}
-
-DirReader.prototype.pause = function (who) {
- var self = this
- if (self._paused) return
- who = who || self
- self._paused = true
- if (self._currentEntry && self._currentEntry.pause) {
- self._currentEntry.pause(who)
- }
- self.emit('pause', who)
-}
-
-DirReader.prototype.resume = function (who) {
- var self = this
- if (!self._paused) return
- who = who || self
-
- self._paused = false
- // console.error('DR Emit Resume', self._path)
- self.emit('resume', who)
- if (self._paused) {
- // console.error('DR Re-paused', self._path)
- return
- }
-
- if (self._currentEntry) {
- if (self._currentEntry.resume) self._currentEntry.resume(who)
- } else self._read()
-}
-
-DirReader.prototype.emitEntry = function (entry) {
- this.emit('entry', entry)
- this.emit('child', entry)
-}
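
As the comments above note, a DirReader emits "entry" for each direct child and proxies "child"/"childEnd" up from deeper levels. A walk sketch (fstream@1.0.11):

```javascript
var fstream = require('fstream')

fstream.Reader({ path: '/tmp', type: 'Directory' })
  .on('entry', function (entry) {
    console.log('%s %s', entry.type, entry.basename)
  })
  .on('end', function () { console.log('walk complete') })
```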
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js
deleted file mode 100644
index ec50dca900..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js
+++ /dev/null
@@ -1,174 +0,0 @@
-// It is expected that, when .add() returns false, the consumer
-// of the DirWriter will pause until a "drain" event occurs. Note
-// that this is *almost always going to be the case*, unless the
-// thing being written is some sort of unsupported type, and thus
-// skipped over.
-
-module.exports = DirWriter
-
-var Writer = require('./writer.js')
-var inherits = require('inherits')
-var mkdir = require('mkdirp')
-var path = require('path')
-var collect = require('./collect.js')
-
-inherits(DirWriter, Writer)
-
-function DirWriter (props) {
- var self = this
- if (!(self instanceof DirWriter)) {
- self.error('DirWriter must be called as constructor.', null, true)
- }
-
- // should already be established as a Directory type
- if (props.type !== 'Directory' || !props.Directory) {
- self.error('Non-directory type ' + props.type + ' ' +
- JSON.stringify(props), null, true)
- }
-
- Writer.call(this, props)
-}
-
-DirWriter.prototype._create = function () {
- var self = this
- mkdir(self._path, Writer.dirmode, function (er) {
- if (er) return self.error(er)
- // ready to start getting entries!
- self.ready = true
- self.emit('ready')
- self._process()
- })
-}
-
-// a DirWriter has an add(entry) method, but its .write() doesn't
-// do anything. Why a no-op rather than a throw? Because this
-// leaves open the door for writing directory metadata for
-// gnu/solaris style dumpdirs.
-DirWriter.prototype.write = function () {
- return true
-}
-
-DirWriter.prototype.end = function () {
- this._ended = true
- this._process()
-}
-
-DirWriter.prototype.add = function (entry) {
- var self = this
-
- // console.error('\tadd', entry._path, '->', self._path)
- collect(entry)
- if (!self.ready || self._currentEntry) {
- self._buffer.push(entry)
- return false
- }
-
- // create a new writer, and pipe the incoming entry into it.
- if (self._ended) {
- return self.error('add after end')
- }
-
- self._buffer.push(entry)
- self._process()
-
- return this._buffer.length === 0
-}
-
-DirWriter.prototype._process = function () {
- var self = this
-
- // console.error('DW Process p=%j', self._processing, self.basename)
-
- if (self._processing) return
-
- var entry = self._buffer.shift()
- if (!entry) {
- // console.error("DW Drain")
- self.emit('drain')
- if (self._ended) self._finish()
- return
- }
-
- self._processing = true
- // console.error("DW Entry", entry._path)
-
- self.emit('entry', entry)
-
- // ok, add this entry
- //
- // don't allow recursive copying
- var p = entry
- var pp
- do {
- pp = p._path || p.path
- if (pp === self.root._path || pp === self._path ||
- (pp && pp.indexOf(self._path) === 0)) {
- // console.error('DW Exit (recursive)', entry.basename, self._path)
- self._processing = false
- if (entry._collected) entry.pipe()
- return self._process()
- }
- p = p.parent
- } while (p)
-
- // console.error("DW not recursive")
-
- // chop off the entry's root dir, replace with ours
- var props = {
- parent: self,
- root: self.root || self,
- type: entry.type,
- depth: self.depth + 1
- }
-
- pp = entry._path || entry.path || entry.props.path
- if (entry.parent) {
- pp = pp.substr(entry.parent._path.length + 1)
- }
- // get rid of any ../../ shenanigans
- props.path = path.join(self.path, path.join('/', pp))
-
- // if i have a filter, the child should inherit it.
- props.filter = self.filter
-
- // all the rest of the stuff, copy over from the source.
- Object.keys(entry.props).forEach(function (k) {
- if (!props.hasOwnProperty(k)) {
- props[k] = entry.props[k]
- }
- })
-
- // not sure at this point what kind of writer this is.
- var child = self._currentChild = new Writer(props)
- child.on('ready', function () {
- // console.error("DW Child Ready", child.type, child._path)
- // console.error(" resuming", entry._path)
- entry.pipe(child)
- entry.resume()
- })
-
- // XXX Make this work in node.
- // Long filenames should not break stuff.
- child.on('error', function (er) {
- if (child._swallowErrors) {
- self.warn(er)
- child.emit('end')
- child.emit('close')
- } else {
- self.emit('error', er)
- }
- })
-
- // we fire _end internally *after* end, so that we don't move on
- // until any "end" listeners have had their chance to do stuff.
- child.on('close', onend)
- var ended = false
- function onend () {
- if (ended) return
- ended = true
- // console.error("* DW Child end", child.basename)
- self._currentChild = null
- self._processing = false
- self._process()
- }
-}
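
The add()/drain contract described in the header comment looks like this from the consumer's side (a sketch; fstream@1.0.11, with hypothetical `in`/`out` directories). Reader.prototype.pipe automates the same dance whenever the destination exposes an add() method:

```javascript
var fstream = require('fstream')

var writer = fstream.Writer({ path: 'out', type: 'Directory' })
var reader = fstream.Reader({ path: 'in', type: 'Directory' })

reader.on('entry', function (entry) {
  if (writer.add(entry) === false) {
    reader.pause() // writer is busy or not ready yet; back off
    writer.once('drain', function () { reader.resume() })
  }
})
reader.on('end', function () { writer.end() })
```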
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js
deleted file mode 100644
index baa01f4b3d..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js
+++ /dev/null
@@ -1,150 +0,0 @@
-// Basically just a wrapper around an fs.ReadStream
-
-module.exports = FileReader
-
-var fs = require('graceful-fs')
-var inherits = require('inherits')
-var Reader = require('./reader.js')
-var EOF = {EOF: true}
-var CLOSE = {CLOSE: true}
-
-inherits(FileReader, Reader)
-
-function FileReader (props) {
- // console.error(" FR create", props.path, props.size, new Error().stack)
- var self = this
- if (!(self instanceof FileReader)) {
- throw new Error('FileReader must be called as constructor.')
- }
-
- // should already be established as a File type
- // XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
- // with a HardLinkReader class.
- if (!((props.type === 'Link' && props.Link) ||
- (props.type === 'File' && props.File))) {
- throw new Error('Non-file type ' + props.type)
- }
-
- self._buffer = []
- self._bytesEmitted = 0
- Reader.call(self, props)
-}
-
-FileReader.prototype._getStream = function () {
- var self = this
- var stream = self._stream = fs.createReadStream(self._path, self.props)
-
- if (self.props.blksize) {
- stream.bufferSize = self.props.blksize
- }
-
- stream.on('open', self.emit.bind(self, 'open'))
-
- stream.on('data', function (c) {
- // console.error('\t\t%d %s', c.length, self.basename)
- self._bytesEmitted += c.length
- // no point saving empty chunks
- if (!c.length) {
- return
- } else if (self._paused || self._buffer.length) {
- self._buffer.push(c)
- self._read()
- } else self.emit('data', c)
- })
-
- stream.on('end', function () {
- if (self._paused || self._buffer.length) {
- // console.error('FR Buffering End', self._path)
- self._buffer.push(EOF)
- self._read()
- } else {
- self.emit('end')
- }
-
- if (self._bytesEmitted !== self.props.size) {
- self.error("Didn't get expected byte count\n" +
- 'expect: ' + self.props.size + '\n' +
- 'actual: ' + self._bytesEmitted)
- }
- })
-
- stream.on('close', function () {
- if (self._paused || self._buffer.length) {
- // console.error('FR Buffering Close', self._path)
- self._buffer.push(CLOSE)
- self._read()
- } else {
- // console.error('FR close 1', self._path)
- self.emit('close')
- }
- })
-
- stream.on('error', function (e) {
- self.emit('error', e)
- })
-
- self._read()
-}
-
-FileReader.prototype._read = function () {
- var self = this
- // console.error('FR _read', self._path)
- if (self._paused) {
- // console.error('FR _read paused', self._path)
- return
- }
-
- if (!self._stream) {
- // console.error('FR _getStream calling', self._path)
- return self._getStream()
- }
-
- // clear out the buffer, if there is one.
- if (self._buffer.length) {
- // console.error('FR _read has buffer', self._buffer.length, self._path)
- var buf = self._buffer
- for (var i = 0, l = buf.length; i < l; i++) {
- var c = buf[i]
- if (c === EOF) {
- // console.error('FR Read emitting buffered end', self._path)
- self.emit('end')
- } else if (c === CLOSE) {
- // console.error('FR Read emitting buffered close', self._path)
- self.emit('close')
- } else {
- // console.error('FR Read emitting buffered data', self._path)
- self.emit('data', c)
- }
-
- if (self._paused) {
- // console.error('FR Read Re-pausing at '+i, self._path)
- self._buffer = buf.slice(i)
- return
- }
- }
- self._buffer.length = 0
- }
-// console.error("FR _read done")
-// that's about all there is to it.
-}
-
-FileReader.prototype.pause = function (who) {
- var self = this
- // console.error('FR Pause', self._path)
- if (self._paused) return
- who = who || self
- self._paused = true
- if (self._stream) self._stream.pause()
- self.emit('pause', who)
-}
-
-FileReader.prototype.resume = function (who) {
- var self = this
- // console.error('FR Resume', self._path)
- if (!self._paused) return
- who = who || self
- self.emit('resume', who)
- self._paused = false
- if (self._stream) self._stream.resume()
- self._read()
-}
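
FileReader is an old-style readable stream: while paused it queues chunks (plus the EOF and CLOSE markers) in _buffer, and resume() flushes them in order. A sketch (fstream@1.0.11):

```javascript
var fstream = require('fstream')

var fr = fstream.Reader({ path: '/etc/hosts', type: 'File' })
fr.on('data', function (c) { process.stdout.write(c) })
fr.on('end', function () { console.log('[end]') })

fr.pause() // nothing is emitted (or even opened) while paused
setTimeout(function () { fr.resume() }, 100)
```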
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js
deleted file mode 100644
index 4c803d8d68..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js
+++ /dev/null
@@ -1,107 +0,0 @@
-module.exports = FileWriter
-
-var fs = require('graceful-fs')
-var Writer = require('./writer.js')
-var inherits = require('inherits')
-var EOF = {}
-
-inherits(FileWriter, Writer)
-
-function FileWriter (props) {
- var self = this
- if (!(self instanceof FileWriter)) {
- throw new Error('FileWriter must be called as constructor.')
- }
-
- // should already be established as a File type
- if (props.type !== 'File' || !props.File) {
- throw new Error('Non-file type ' + props.type)
- }
-
- self._buffer = []
- self._bytesWritten = 0
-
- Writer.call(this, props)
-}
-
-FileWriter.prototype._create = function () {
- var self = this
- if (self._stream) return
-
- var so = {}
- if (self.props.flags) so.flags = self.props.flags
- so.mode = Writer.filemode
- if (self._old && self._old.blksize) so.bufferSize = self._old.blksize
-
- self._stream = fs.createWriteStream(self._path, so)
-
- self._stream.on('open', function () {
- // console.error("FW open", self._buffer, self._path)
- self.ready = true
- self._buffer.forEach(function (c) {
- if (c === EOF) self._stream.end()
- else self._stream.write(c)
- })
- self.emit('ready')
- // give this a kick just in case it needs it.
- self.emit('drain')
- })
-
- self._stream.on('error', function (er) { self.emit('error', er) })
-
- self._stream.on('drain', function () { self.emit('drain') })
-
- self._stream.on('close', function () {
- // console.error('\n\nFW Stream Close', self._path, self.size)
- self._finish()
- })
-}
-
-FileWriter.prototype.write = function (c) {
- var self = this
-
- self._bytesWritten += c.length
-
- if (!self.ready) {
- if (!Buffer.isBuffer(c) && typeof c !== 'string') {
- throw new Error('invalid write data')
- }
- self._buffer.push(c)
- return false
- }
-
- var ret = self._stream.write(c)
- // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length)
-
- // allow 2 buffered writes, because otherwise there's just too
- // much stop and go bs.
- if (ret === false && self._stream._queue) {
- return self._stream._queue.length <= 2
- } else {
- return ret
- }
-}
-
-FileWriter.prototype.end = function (c) {
- var self = this
-
- if (c) self.write(c)
-
- if (!self.ready) {
- self._buffer.push(EOF)
- return false
- }
-
- return self._stream.end()
-}
-
-FileWriter.prototype._finish = function () {
- var self = this
- if (typeof self.size === 'number' && self._bytesWritten !== self.size) {
- self.error(
- 'Did not get expected byte count.\n' +
- 'expect: ' + self.size + '\n' +
- 'actual: ' + self._bytesWritten)
- }
- Writer.prototype._finish.call(self)
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/get-type.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/get-type.js
deleted file mode 100644
index 19f6a657db..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/get-type.js
+++ /dev/null
@@ -1,33 +0,0 @@
-module.exports = getType
-
-function getType (st) {
- var types = [
- 'Directory',
- 'File',
- 'SymbolicLink',
- 'Link', // special for hardlinks from tarballs
- 'BlockDevice',
- 'CharacterDevice',
- 'FIFO',
- 'Socket'
- ]
- var type
-
- if (st.type && types.indexOf(st.type) !== -1) {
- st[st.type] = true
- return st.type
- }
-
- for (var i = 0, l = types.length; i < l; i++) {
- type = types[i]
- var is = st[type] || st['is' + type]
- if (typeof is === 'function') is = is.call(st)
- if (is) {
- st[type] = true
- st.type = type
- return type
- }
- }
-
- return null
-}
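
getType() normalizes either an fs.Stats object (via its isDirectory()-style methods) or a plain props object into one of the type names above, and memoizes the answer on the object. It is internal to the package, so a sketch has to deep-require it (fstream@1.0.11):

```javascript
var fs = require('fs')
var getType = require('fstream/lib/get-type.js')

fs.lstat('/tmp', function (er, st) {
  if (er) throw er
  console.log(getType(st)) // "Directory" -- also sets st.Directory = true
})
```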
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js
deleted file mode 100644
index fb4cc67a98..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js
+++ /dev/null
@@ -1,53 +0,0 @@
-// Basically just a wrapper around an fs.readlink
-//
-// XXX: Enhance this to support the Link type, by keeping
-// a lookup table of {<dev+inode>:<path>}, so that hardlinks
-// can be preserved in tarballs.
-
-module.exports = LinkReader
-
-var fs = require('graceful-fs')
-var inherits = require('inherits')
-var Reader = require('./reader.js')
-
-inherits(LinkReader, Reader)
-
-function LinkReader (props) {
- var self = this
- if (!(self instanceof LinkReader)) {
- throw new Error('LinkReader must be called as constructor.')
- }
-
- if (!((props.type === 'Link' && props.Link) ||
- (props.type === 'SymbolicLink' && props.SymbolicLink))) {
- throw new Error('Non-link type ' + props.type)
- }
-
- Reader.call(self, props)
-}
-
-// When piping a LinkReader into a LinkWriter, we have to
-// already have the linkpath property set, so that has to
-// happen *before* the "ready" event, which means we need to
-// override the _stat method.
-LinkReader.prototype._stat = function (currentStat) {
- var self = this
- fs.readlink(self._path, function (er, linkpath) {
- if (er) return self.error(er)
- self.linkpath = self.props.linkpath = linkpath
- self.emit('linkpath', linkpath)
- Reader.prototype._stat.call(self, currentStat)
- })
-}
-
-LinkReader.prototype._read = function () {
- var self = this
- if (self._paused) return
- // basically just a no-op, since we got all the info we need
- // from the _stat method
- if (!self._ended) {
- self.emit('end')
- self.emit('close')
- self._ended = true
- }
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js
deleted file mode 100644
index af54284008..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js
+++ /dev/null
@@ -1,95 +0,0 @@
-module.exports = LinkWriter
-
-var fs = require('graceful-fs')
-var Writer = require('./writer.js')
-var inherits = require('inherits')
-var path = require('path')
-var rimraf = require('rimraf')
-
-inherits(LinkWriter, Writer)
-
-function LinkWriter (props) {
- var self = this
- if (!(self instanceof LinkWriter)) {
- throw new Error('LinkWriter must be called as constructor.')
- }
-
- // should already be established as a Link type
- if (!((props.type === 'Link' && props.Link) ||
- (props.type === 'SymbolicLink' && props.SymbolicLink))) {
- throw new Error('Non-link type ' + props.type)
- }
-
- if (props.linkpath === '') props.linkpath = '.'
- if (!props.linkpath) {
- self.error('Need linkpath property to create ' + props.type)
- }
-
- Writer.call(this, props)
-}
-
-LinkWriter.prototype._create = function () {
- // console.error(" LW _create")
- var self = this
- var hard = self.type === 'Link' || process.platform === 'win32'
- var link = hard ? 'link' : 'symlink'
- var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath
-
- // can only change the link path by clobbering
- // For hard links, let's just assume that's always the case, since
- // there's no good way to read them if we don't already know.
- if (hard) return clobber(self, lp, link)
-
- fs.readlink(self._path, function (er, p) {
- // only skip creation if it's exactly the same link
- if (p && p === lp) return finish(self)
- clobber(self, lp, link)
- })
-}
-
-function clobber (self, lp, link) {
- rimraf(self._path, function (er) {
- if (er) return self.error(er)
- create(self, lp, link)
- })
-}
-
-function create (self, lp, link) {
- fs[link](lp, self._path, function (er) {
- // if this is a hard link, and we're in the process of writing out a
- // directory, it's very possible that the thing we're linking to
- // doesn't exist yet (especially if it was intended as a symlink),
- // so swallow ENOENT errors here and just soldier on.
- // Additionally, an EPERM or EACCES can happen on win32 if it's trying
- // to make a link to a directory. Again, just skip it.
- // A better solution would be to have fs.symlink be supported on
- // windows in some nice fashion.
- if (er) {
- if ((er.code === 'ENOENT' ||
- er.code === 'EACCES' ||
- er.code === 'EPERM') && process.platform === 'win32') {
- self.ready = true
- self.emit('ready')
- self.emit('end')
- self.emit('close')
- self.end = self._finish = function () {}
- } else return self.error(er)
- }
- finish(self)
- })
-}
-
-function finish (self) {
- self.ready = true
- self.emit('ready')
- if (self._ended && !self._finished) self._finish()
-}
-
-LinkWriter.prototype.end = function () {
- // console.error("LW finish in end")
- this._ended = true
- if (this.ready) {
- this._finished = true
- this._finish()
- }
-}
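
A LinkWriter sketch (fstream@1.0.11, non-Windows; `tmp-link` is a hypothetical name). Note the clobber-first behavior above: anything already at the path is rimraf'd before the new link is made, unless it is already the identical symlink:

```javascript
var fstream = require('fstream')

var lw = fstream.Writer({
  path: 'tmp-link',  // hypothetical link name, created in cwd
  type: 'SymbolicLink',
  linkpath: '/tmp'   // what the new symlink points at
})
lw.on('close', function () { console.log('symlink created') })
lw.end()
```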
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js
deleted file mode 100644
index 4f431c9d9e..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js
+++ /dev/null
@@ -1,95 +0,0 @@
-// A reader for when we don't yet know what kind of thing
-// the thing is.
-
-module.exports = ProxyReader
-
-var Reader = require('./reader.js')
-var getType = require('./get-type.js')
-var inherits = require('inherits')
-var fs = require('graceful-fs')
-
-inherits(ProxyReader, Reader)
-
-function ProxyReader (props) {
- var self = this
- if (!(self instanceof ProxyReader)) {
- throw new Error('ProxyReader must be called as constructor.')
- }
-
- self.props = props
- self._buffer = []
- self.ready = false
-
- Reader.call(self, props)
-}
-
-ProxyReader.prototype._stat = function () {
- var self = this
- var props = self.props
- // stat the thing to see what the proxy should be.
- var stat = props.follow ? 'stat' : 'lstat'
-
- fs[stat](props.path, function (er, current) {
- var type
- if (er || !current) {
- type = 'File'
- } else {
- type = getType(current)
- }
-
- props[type] = true
- props.type = self.type = type
-
- self._old = current
- self._addProxy(Reader(props, current))
- })
-}
-
-ProxyReader.prototype._addProxy = function (proxy) {
- var self = this
- if (self._proxyTarget) {
- return self.error('proxy already set')
- }
-
- self._proxyTarget = proxy
- proxy._proxy = self
-
- ;[
- 'error',
- 'data',
- 'end',
- 'close',
- 'linkpath',
- 'entry',
- 'entryEnd',
- 'child',
- 'childEnd',
- 'warn',
- 'stat'
- ].forEach(function (ev) {
- // console.error('~~ proxy event', ev, self.path)
- proxy.on(ev, self.emit.bind(self, ev))
- })
-
- self.emit('proxy', proxy)
-
- proxy.on('ready', function () {
- // console.error("~~ proxy is ready!", self.path)
- self.ready = true
- self.emit('ready')
- })
-
- var calls = self._buffer
- self._buffer.length = 0
- calls.forEach(function (c) {
- proxy[c[0]].apply(proxy, c[1])
- })
-}
-
-ProxyReader.prototype.pause = function () {
- return this._proxyTarget ? this._proxyTarget.pause() : false
-}
-
-ProxyReader.prototype.resume = function () {
- return this._proxyTarget ? this._proxyTarget.resume() : false
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js
deleted file mode 100644
index a6544621bf..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js
+++ /dev/null
@@ -1,111 +0,0 @@
-// A writer for when we don't know what kind of thing
-// the thing is. That is, it's not explicitly set,
-// so we're going to make it whatever the thing already
-// is, or "File"
-//
-// Until then, collect all events.
-
-module.exports = ProxyWriter
-
-var Writer = require('./writer.js')
-var getType = require('./get-type.js')
-var inherits = require('inherits')
-var collect = require('./collect.js')
-var fs = require('fs')
-
-inherits(ProxyWriter, Writer)
-
-function ProxyWriter (props) {
- var self = this
- if (!(self instanceof ProxyWriter)) {
- throw new Error('ProxyWriter must be called as constructor.')
- }
-
- self.props = props
- self._needDrain = false
-
- Writer.call(self, props)
-}
-
-ProxyWriter.prototype._stat = function () {
- var self = this
- var props = self.props
- // stat the thing to see what the proxy should be.
- var stat = props.follow ? 'stat' : 'lstat'
-
- fs[stat](props.path, function (er, current) {
- var type
- if (er || !current) {
- type = 'File'
- } else {
- type = getType(current)
- }
-
- props[type] = true
- props.type = self.type = type
-
- self._old = current
- self._addProxy(Writer(props, current))
- })
-}
-
-ProxyWriter.prototype._addProxy = function (proxy) {
- // console.error("~~ set proxy", this.path)
- var self = this
- if (self._proxy) {
- return self.error('proxy already set')
- }
-
- self._proxy = proxy
- ;[
- 'ready',
- 'error',
- 'close',
- 'pipe',
- 'drain',
- 'warn'
- ].forEach(function (ev) {
- proxy.on(ev, self.emit.bind(self, ev))
- })
-
- self.emit('proxy', proxy)
-
- var calls = self._buffer
- calls.forEach(function (c) {
- // console.error("~~ ~~ proxy buffered call", c[0], c[1])
- proxy[c[0]].apply(proxy, c[1])
- })
- self._buffer.length = 0
- if (self._needDrain) self.emit('drain')
-}
-
-ProxyWriter.prototype.add = function (entry) {
- // console.error("~~ proxy add")
- collect(entry)
-
- if (!this._proxy) {
- this._buffer.push(['add', [entry]])
- this._needDrain = true
- return false
- }
- return this._proxy.add(entry)
-}
-
-ProxyWriter.prototype.write = function (c) {
- // console.error('~~ proxy write')
- if (!this._proxy) {
- this._buffer.push(['write', [c]])
- this._needDrain = true
- return false
- }
- return this._proxy.write(c)
-}
-
-ProxyWriter.prototype.end = function (c) {
- // console.error('~~ proxy end')
- if (!this._proxy) {
- this._buffer.push(['end', [c]])
- return false
- }
- return this._proxy.end(c)
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/reader.js
deleted file mode 100644
index be4f570eeb..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/reader.js
+++ /dev/null
@@ -1,255 +0,0 @@
-module.exports = Reader
-
-var fs = require('graceful-fs')
-var Stream = require('stream').Stream
-var inherits = require('inherits')
-var path = require('path')
-var getType = require('./get-type.js')
-var hardLinks = Reader.hardLinks = {}
-var Abstract = require('./abstract.js')
-
-// Must do this *before* loading the child classes
-inherits(Reader, Abstract)
-
-var LinkReader = require('./link-reader.js')
-
-function Reader (props, currentStat) {
- var self = this
- if (!(self instanceof Reader)) return new Reader(props, currentStat)
-
- if (typeof props === 'string') {
- props = { path: props }
- }
-
- // polymorphism.
- // call fstream.Reader(dir) to get a DirReader object, etc.
- // Note that, unlike in the Writer case, ProxyReader is going
- // to be the *normal* state of affairs, since we rarely know
- // the type of a file prior to reading it.
-
- var type
- var ClassType
-
- if (props.type && typeof props.type === 'function') {
- type = props.type
- ClassType = type
- } else {
- type = getType(props)
- ClassType = Reader
- }
-
- if (currentStat && !type) {
- type = getType(currentStat)
- props[type] = true
- props.type = type
- }
-
- switch (type) {
- case 'Directory':
- ClassType = require('./dir-reader.js')
- break
-
- case 'Link':
- // XXX hard links are just files.
- // However, it would be good to keep track of files' dev+inode
- // and nlink values, and create a HardLinkReader that emits
- // a linkpath value of the original copy, so that the tar
- // writer can preserve them.
- // ClassType = HardLinkReader
- // break
-
- case 'File':
- ClassType = require('./file-reader.js')
- break
-
- case 'SymbolicLink':
- ClassType = LinkReader
- break
-
- case 'Socket':
- ClassType = require('./socket-reader.js')
- break
-
- case null:
- ClassType = require('./proxy-reader.js')
- break
- }
-
- if (!(self instanceof ClassType)) {
- return new ClassType(props)
- }
-
- Abstract.call(self)
-
- if (!props.path) {
- self.error('Must provide a path', null, true)
- }
-
- self.readable = true
- self.writable = false
-
- self.type = type
- self.props = props
- self.depth = props.depth = props.depth || 0
- self.parent = props.parent || null
- self.root = props.root || (props.parent && props.parent.root) || self
-
- self._path = self.path = path.resolve(props.path)
- if (process.platform === 'win32') {
- self.path = self._path = self.path.replace(/\?/g, '_')
- if (self._path.length >= 260) {
- // how DOES one create files on the moon?
- // if the path has spaces in it, then UNC will fail.
- self._swallowErrors = true
- // if (self._path.indexOf(" ") === -1) {
- self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
- // }
- }
- }
- self.basename = props.basename = path.basename(self.path)
- self.dirname = props.dirname = path.dirname(self.path)
-
- // these have served their purpose, and are now just noisy clutter
- props.parent = props.root = null
-
- // console.error("\n\n\n%s setting size to", props.path, props.size)
- self.size = props.size
- self.filter = typeof props.filter === 'function' ? props.filter : null
- if (props.sort === 'alpha') props.sort = alphasort
-
- // start the ball rolling.
- // this will stat the thing, and then call self._read()
- // to start reading whatever it is.
- // console.error("calling stat", props.path, currentStat)
- self._stat(currentStat)
-}
-
-function alphasort (a, b) {
- return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
-}
-
-Reader.prototype._stat = function (currentStat) {
- var self = this
- var props = self.props
- var stat = props.follow ? 'stat' : 'lstat'
- // console.error("Reader._stat", self._path, currentStat)
- if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))
- else fs[stat](self._path, statCb)
-
- function statCb (er, props_) {
- // console.error("Reader._stat, statCb", self._path, props_, props_.nlink)
- if (er) return self.error(er)
-
- Object.keys(props_).forEach(function (k) {
- props[k] = props_[k]
- })
-
- // if it's not the expected size, then abort here.
- if (undefined !== self.size && props.size !== self.size) {
- return self.error('incorrect size')
- }
- self.size = props.size
-
- var type = getType(props)
- var handleHardlinks = props.hardlinks !== false
-
- // special little thing for handling hardlinks.
- if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) {
- var k = props.dev + ':' + props.ino
- // console.error("Reader has nlink", self._path, k)
- if (hardLinks[k] === self._path || !hardLinks[k]) {
- hardLinks[k] = self._path
- } else {
- // switch into hardlink mode.
- type = self.type = self.props.type = 'Link'
- self.Link = self.props.Link = true
- self.linkpath = self.props.linkpath = hardLinks[k]
- // console.error("Hardlink detected, switching mode", self._path, self.linkpath)
- // Setting __proto__ would arguably be the "correct"
- // approach here, but that just seems too wrong.
- self._stat = self._read = LinkReader.prototype._read
- }
- }
-
- if (self.type && self.type !== type) {
- self.error('Unexpected type: ' + type)
- }
-
- // if the filter doesn't pass, then just skip over this one.
- // still have to emit end so that dir-walking can move on.
- if (self.filter) {
- var who = self._proxy || self
- // special handling for ProxyReaders
- if (!self.filter.call(who, who, props)) {
- if (!self._disowned) {
- self.abort()
- self.emit('end')
- self.emit('close')
- }
- return
- }
- }
-
- // last chance to abort or disown before the flow starts!
- var events = ['_stat', 'stat', 'ready']
- var e = 0
- ;(function go () {
- if (self._aborted) {
- self.emit('end')
- self.emit('close')
- return
- }
-
- if (self._paused && self.type !== 'Directory') {
- self.once('resume', go)
- return
- }
-
- var ev = events[e++]
- if (!ev) {
- return self._read()
- }
- self.emit(ev, props)
- go()
- })()
- }
-}
-
-Reader.prototype.pipe = function (dest) {
- var self = this
- if (typeof dest.add === 'function') {
- // piping to a multi-compatible, and we've got directory entries.
- self.on('entry', function (entry) {
- var ret = dest.add(entry)
- if (ret === false) {
- self.pause()
- }
- })
- }
-
- // console.error("R Pipe apply Stream Pipe")
- return Stream.prototype.pipe.apply(this, arguments)
-}
-
-Reader.prototype.pause = function (who) {
- this._paused = true
- who = who || this
- this.emit('pause', who)
- if (this._stream) this._stream.pause(who)
-}
-
-Reader.prototype.resume = function (who) {
- this._paused = false
- who = who || this
- this.emit('resume', who)
- if (this._stream) this._stream.resume(who)
- this._read()
-}
-
-Reader.prototype._read = function () {
- this.error('Cannot read unknown type: ' + this.type)
-}
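
The polymorphic dispatch above means the constructor's return type depends on what is (or is not yet) known about the path. A sketch (fstream@1.0.11):

```javascript
var fstream = require('fstream')

var r1 = fstream.Reader({ path: '/tmp', type: 'Directory' })
console.log(r1 instanceof fstream.DirReader) // true

// With only a path, the type is unknown until stat(), so a proxy
// stands in and re-emits the real reader's events once it exists.
var r2 = fstream.Reader('/etc/hosts')
console.log(r2 instanceof fstream.ProxyReader) // true
```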
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js
deleted file mode 100644
index e0456ba890..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js
+++ /dev/null
@@ -1,36 +0,0 @@
-// Just get the stats, and then don't do anything.
-// You can't really "read" from a socket. You "connect" to it.
-// Mostly, this is here so that reading a dir with a socket in it
-// doesn't blow up.
-
-module.exports = SocketReader
-
-var inherits = require('inherits')
-var Reader = require('./reader.js')
-
-inherits(SocketReader, Reader)
-
-function SocketReader (props) {
- var self = this
- if (!(self instanceof SocketReader)) {
- throw new Error('SocketReader must be called as constructor.')
- }
-
- if (!(props.type === 'Socket' && props.Socket)) {
- throw new Error('Non-socket type ' + props.type)
- }
-
- Reader.call(self, props)
-}
-
-SocketReader.prototype._read = function () {
- var self = this
- if (self._paused) return
- // basically just a no-op, since we got all the info we have
- // from the _stat method
- if (!self._ended) {
- self.emit('end')
- self.emit('close')
- self._ended = true
- }
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/writer.js
deleted file mode 100644
index 140e449e06..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/lib/writer.js
+++ /dev/null
@@ -1,390 +0,0 @@
-module.exports = Writer
-
-var fs = require('graceful-fs')
-var inherits = require('inherits')
-var rimraf = require('rimraf')
-var mkdir = require('mkdirp')
-var path = require('path')
-var umask = process.platform === 'win32' ? 0 : process.umask()
-var getType = require('./get-type.js')
-var Abstract = require('./abstract.js')
-
-// Must do this *before* loading the child classes
-inherits(Writer, Abstract)
-
-Writer.dirmode = parseInt('0777', 8) & (~umask)
-Writer.filemode = parseInt('0666', 8) & (~umask)
-
-var DirWriter = require('./dir-writer.js')
-var LinkWriter = require('./link-writer.js')
-var FileWriter = require('./file-writer.js')
-var ProxyWriter = require('./proxy-writer.js')
-
-// props is the desired state. current is optionally the current stat,
-// provided here so that subclasses can avoid statting the target
-// more than necessary.
-function Writer (props, current) {
- var self = this
-
- if (typeof props === 'string') {
- props = { path: props }
- }
-
- // polymorphism.
- // call fstream.Writer(dir) to get a DirWriter object, etc.
- var type = getType(props)
- var ClassType = Writer
-
- switch (type) {
- case 'Directory':
- ClassType = DirWriter
- break
- case 'File':
- ClassType = FileWriter
- break
- case 'Link':
- case 'SymbolicLink':
- ClassType = LinkWriter
- break
- case null:
- default:
- // Don't know yet what type to create, so we wrap in a proxy.
- ClassType = ProxyWriter
- break
- }
-
- if (!(self instanceof ClassType)) return new ClassType(props)
-
- // now get down to business.
-
- Abstract.call(self)
-
- if (!props.path) self.error('Must provide a path', null, true)
-
- // props is what we want to set.
- // set some convenience properties as well.
- self.type = props.type
- self.props = props
- self.depth = props.depth || 0
- self.clobber = props.clobber === false ? props.clobber : true
- self.parent = props.parent || null
- self.root = props.root || (props.parent && props.parent.root) || self
-
- self._path = self.path = path.resolve(props.path)
- if (process.platform === 'win32') {
- self.path = self._path = self.path.replace(/\?/g, '_')
- if (self._path.length >= 260) {
- self._swallowErrors = true
- self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
- }
- }
- self.basename = path.basename(props.path)
- self.dirname = path.dirname(props.path)
- self.linkpath = props.linkpath || null
-
- props.parent = props.root = null
-
- // console.error("\n\n\n%s setting size to", props.path, props.size)
- self.size = props.size
-
- if (typeof props.mode === 'string') {
- props.mode = parseInt(props.mode, 8)
- }
-
- self.readable = false
- self.writable = true
-
- // buffer until ready, or while handling another entry
- self._buffer = []
- self.ready = false
-
- self.filter = typeof props.filter === 'function' ? props.filter : null
-
- // start the ball rolling.
- // this checks what's there already, and then calls
- // self._create() to call the impl-specific creation stuff.
- self._stat(current)
-}
-
-// Calling this means that it's something we can't create.
-// Just assert that it's already there, otherwise raise a warning.
-Writer.prototype._create = function () {
- var self = this
- fs[self.props.follow ? 'stat' : 'lstat'](self._path, function (er) {
- if (er) {
- return self.warn('Cannot create ' + self._path + '\n' +
- 'Unsupported type: ' + self.type, 'ENOTSUP')
- }
- self._finish()
- })
-}
-
-Writer.prototype._stat = function (current) {
- var self = this
- var props = self.props
- var stat = props.follow ? 'stat' : 'lstat'
- var who = self._proxy || self
-
- if (current) statCb(null, current)
- else fs[stat](self._path, statCb)
-
- function statCb (er, current) {
- if (self.filter && !self.filter.call(who, who, current)) {
- self._aborted = true
- self.emit('end')
- self.emit('close')
- return
- }
-
- // if it's not there, great. We'll just create it.
- // if it is there, then we'll need to change whatever differs
- if (er || !current) {
- return create(self)
- }
-
- self._old = current
- var currentType = getType(current)
-
- // if it's a type change, then we need to clobber or error.
- // if it's not a type change, then let the impl take care of it.
- if (currentType !== self.type) {
- return rimraf(self._path, function (er) {
- if (er) return self.error(er)
- self._old = null
- create(self)
- })
- }
-
- // otherwise, just handle in the app-specific way
- // this creates a fs.WriteStream, or mkdir's, or whatever
- create(self)
- }
-}
-
-function create (self) {
- // console.error("W create", self._path, Writer.dirmode)
-
- // XXX Need to clobber non-dirs that are in the way,
- // unless { clobber: false } in the props.
- mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) {
- // console.error("W created", path.dirname(self._path), er)
- if (er) return self.error(er)
-
- // later on, we have to set the mode and owner for these
- self._madeDir = made
- return self._create()
- })
-}
-
-function endChmod (self, want, current, path, cb) {
- var wantMode = want.mode
- var chmod = want.follow || self.type !== 'SymbolicLink'
- ? 'chmod' : 'lchmod'
-
- if (!fs[chmod]) return cb()
- if (typeof wantMode !== 'number') return cb()
-
- var curMode = current.mode & parseInt('0777', 8)
- wantMode = wantMode & parseInt('0777', 8)
- if (wantMode === curMode) return cb()
-
- fs[chmod](path, wantMode, cb)
-}
-
-function endChown (self, want, current, path, cb) {
- // Don't even try it unless root. Too easy to EPERM.
- if (process.platform === 'win32') return cb()
- if (!process.getuid || process.getuid() !== 0) return cb()
- if (typeof want.uid !== 'number' &&
- typeof want.gid !== 'number') return cb()
-
- if (current.uid === want.uid &&
- current.gid === want.gid) return cb()
-
- var chown = (self.props.follow || self.type !== 'SymbolicLink')
- ? 'chown' : 'lchown'
- if (!fs[chown]) return cb()
-
- if (typeof want.uid !== 'number') want.uid = current.uid
- if (typeof want.gid !== 'number') want.gid = current.gid
-
- fs[chown](path, want.uid, want.gid, cb)
-}
-
-function endUtimes (self, want, current, path, cb) {
- if (!fs.utimes || process.platform === 'win32') return cb()
-
- var utimes = (want.follow || self.type !== 'SymbolicLink')
- ? 'utimes' : 'lutimes'
-
- if (utimes === 'lutimes' && !fs[utimes]) {
- utimes = 'utimes'
- }
-
- if (!fs[utimes]) return cb()
-
- var curA = current.atime
- var curM = current.mtime
- var meA = want.atime
- var meM = want.mtime
-
- if (meA === undefined) meA = curA
- if (meM === undefined) meM = curM
-
- if (!isDate(meA)) meA = new Date(meA)
- if (!isDate(meM)) meM = new Date(meM)
-
- if (meA.getTime() === curA.getTime() &&
- meM.getTime() === curM.getTime()) return cb()
-
- fs[utimes](path, meA, meM, cb)
-}
-
-// XXX This function is beastly. Break it up!
-Writer.prototype._finish = function () {
- var self = this
-
- if (self._finishing) return
- self._finishing = true
-
- // console.error(" W Finish", self._path, self.size)
-
- // set up all the things.
- // At this point, we're already done writing whatever we've gotta write,
- // adding files to the dir, etc.
- var todo = 0
- var errState = null
- var done = false
-
- if (self._old) {
- // the times will almost *certainly* have changed.
- // adds the utimes syscall, but remove another stat.
- self._old.atime = new Date(0)
- self._old.mtime = new Date(0)
- // console.error(" W Finish Stale Stat", self._path, self.size)
- setProps(self._old)
- } else {
- var stat = self.props.follow ? 'stat' : 'lstat'
- // console.error(" W Finish Stating", self._path, self.size)
- fs[stat](self._path, function (er, current) {
- // console.error(" W Finish Stated", self._path, self.size, current)
- if (er) {
- // if we're in the process of writing out a
- // directory, it's very possible that the thing we're linking to
- // doesn't exist yet (especially if it was intended as a symlink),
- // so swallow ENOENT errors here and just soldier on.
- if (er.code === 'ENOENT' &&
- (self.type === 'Link' || self.type === 'SymbolicLink') &&
- process.platform === 'win32') {
- self.ready = true
- self.emit('ready')
- self.emit('end')
- self.emit('close')
- self.end = self._finish = function () {}
- return
- } else return self.error(er)
- }
- setProps(self._old = current)
- })
- }
-
- return
-
- function setProps (current) {
- todo += 3
- endChmod(self, self.props, current, self._path, next('chmod'))
- endChown(self, self.props, current, self._path, next('chown'))
- endUtimes(self, self.props, current, self._path, next('utimes'))
- }
-
- function next (what) {
- return function (er) {
- // console.error(" W Finish", what, todo)
- if (errState) return
- if (er) {
- er.fstream_finish_call = what
- return self.error(errState = er)
- }
- if (--todo > 0) return
- if (done) return
- done = true
-
- // we may still need to set the mode/etc. on some parent dirs
- // that were created previously. delay end/close until then.
- if (!self._madeDir) return end()
- else endMadeDir(self, self._path, end)
-
- function end (er) {
- if (er) {
- er.fstream_finish_call = 'setupMadeDir'
- return self.error(er)
- }
- // all the props have been set, so we're completely done.
- self.emit('end')
- self.emit('close')
- }
- }
- }
-}
-
-function endMadeDir (self, p, cb) {
- var made = self._madeDir
- // everything *between* made and path.dirname(self._path)
- // needs to be set up. Note that this may just be one dir.
- var d = path.dirname(p)
-
- endMadeDir_(self, d, function (er) {
- if (er) return cb(er)
- if (d === made) {
- return cb()
- }
- endMadeDir(self, d, cb)
- })
-}
-
-function endMadeDir_ (self, p, cb) {
- var dirProps = {}
- Object.keys(self.props).forEach(function (k) {
- dirProps[k] = self.props[k]
-
- // only make non-readable dirs if explicitly requested.
- if (k === 'mode' && self.type !== 'Directory') {
- dirProps[k] = dirProps[k] | parseInt('0111', 8)
- }
- })
-
- var todo = 3
- var errState = null
- fs.stat(p, function (er, current) {
- if (er) return cb(errState = er)
- endChmod(self, dirProps, current, p, next)
- endChown(self, dirProps, current, p, next)
- endUtimes(self, dirProps, current, p, next)
- })
-
- function next (er) {
- if (errState) return
- if (er) return cb(errState = er)
- if (--todo === 0) return cb()
- }
-}
-
-Writer.prototype.pipe = function () {
- this.error("Can't pipe from writable stream")
-}
-
-Writer.prototype.add = function () {
- this.error("Can't add to non-Directory type")
-}
-
-Writer.prototype.write = function () {
- return true
-}
-
-function objectToString (d) {
- return Object.prototype.toString.call(d)
-}
-
-function isDate (d) {
- return typeof d === 'object' && objectToString(d) === '[object Date]'
-}
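
Writer dispatches the same way as Reader, and props describe the desired end state: mode, uid/gid and utimes are applied by _finish() after the content is written. A sketch (fstream@1.0.11; `out.txt` is hypothetical):

```javascript
var fstream = require('fstream')

var w = fstream.Writer({
  path: 'out.txt',
  type: 'File',
  mode: '0644' // string modes are parsed as octal, per the constructor above
})
w.write('hello\n') // buffered until the underlying WriteStream is ready
w.end()
w.on('close', function () { console.log('written') })
```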
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/package.json
deleted file mode 100644
index 92e06be562..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/fstream/package.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "_from": "fstream@^1.0.0",
- "_id": "fstream@1.0.11",
- "_inBundle": false,
- "_integrity": "sha1-XB+x8RdHcRTwYyoOtLcbPLD9MXE=",
- "_location": "/npm-lifecycle/node-gyp/fstream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "fstream@^1.0.0",
- "name": "fstream",
- "escapedName": "fstream",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp",
- "/npm-lifecycle/node-gyp/tar"
- ],
- "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.11.tgz",
- "_shasum": "5c1fb1f117477114f0632a0eb4b71b3cb0fd3171",
- "_spec": "fstream@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/npm/fstream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "graceful-fs": "^4.1.2",
- "inherits": "~2.0.0",
- "mkdirp": ">=0.5 0",
- "rimraf": "2"
- },
- "deprecated": false,
- "description": "Advanced file system stream things",
- "devDependencies": {
- "standard": "^4.0.0",
- "tap": "^1.2.0"
- },
- "engines": {
- "node": ">=0.6"
- },
- "homepage": "https://github.com/npm/fstream#readme",
- "license": "ISC",
- "main": "fstream.js",
- "name": "fstream",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/fstream.git"
- },
- "scripts": {
- "test": "standard && tap examples/*.js"
- },
- "version": "1.0.11"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/README.md
deleted file mode 100644
index ad72b8133e..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/README.md
+++ /dev/null
@@ -1,209 +0,0 @@
-# minimatch
-
-A minimal matching utility.
-
-[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.svg)](http://travis-ci.org/isaacs/minimatch)
-
-
-This is the matching library used internally by npm.
-
-It works by converting glob expressions into JavaScript `RegExp`
-objects.
-
-## Usage
-
-```javascript
-var minimatch = require("minimatch")
-
-minimatch("bar.foo", "*.foo") // true!
-minimatch("bar.foo", "*.bar") // false!
-minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
-```
-
-## Features
-
-Supports these glob features:
-
-* Brace Expansion
-* Extended glob matching
-* "Globstar" `**` matching
-
-See:
-
-* `man sh`
-* `man bash`
-* `man 3 fnmatch`
-* `man 5 gitignore`
-
-## Minimatch Class
-
-Create a minimatch object by instantiating the `minimatch.Minimatch` class.
-
-```javascript
-var Minimatch = require("minimatch").Minimatch
-var mm = new Minimatch(pattern, options)
-```
-
-### Properties
-
-* `pattern` The original pattern the minimatch object represents.
-* `options` The options supplied to the constructor.
-* `set` A 2-dimensional array of regexp or string expressions.
- Each row in the
- array corresponds to a brace-expanded pattern. Each item in the row
- corresponds to a single path-part. For example, the pattern
- `{a,b/c}/d` would expand to a set of patterns like:
-
- [ [ a, d ]
- , [ b, c, d ] ]
-
- If a portion of the pattern doesn't have any "magic" in it
- (that is, it's something like `"foo"` rather than `fo*o?`), then it
- will be left as a string rather than converted to a regular
- expression.
-
-* `regexp` Created by the `makeRe` method. A single regular expression
- expressing the entire pattern. This is useful in cases where you wish
-  to use the pattern somewhat like `fnmatch(3)` with `FNM_PATHNAME` enabled.
-* `negate` True if the pattern is negated.
-* `comment` True if the pattern is a comment.
-* `empty` True if the pattern is `""`.
-
-### Methods
-
-* `makeRe` Generate the `regexp` member if necessary, and return it.
- Will return `false` if the pattern is invalid.
-* `match(fname)` Return true if the filename matches the pattern, or
- false otherwise.
-* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
-  filename, and match it against a single row in the `set`. This
- method is mainly for internal use, but is exposed so that it can be
- used by a glob-walker that needs to avoid excessive filesystem calls.
-
-All other methods are internal, and will be called as necessary.
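
For reference, a minimal sketch of the class API just described (an illustration only, assuming the `minimatch` package is installed; expected results are shown in comments):

```javascript
// Sketch only: assumes `npm install minimatch` and the API documented above.
var Minimatch = require("minimatch").Minimatch

var mm = new Minimatch("{a,b/c}/d")
console.log(mm.set.length)      // 2 -- one row per brace-expanded pattern
console.log(mm.match("b/c/d"))  // true
console.log(mm.makeRe())        // a single RegExp covering the whole pattern
```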
-
-### minimatch(path, pattern, options)
-
-Main export. Tests a path against the pattern using the options.
-
-```javascript
-var isJS = minimatch(file, "*.js", { matchBase: true })
-```
-
-### minimatch.filter(pattern, options)
-
-Returns a function that tests its
-supplied argument, suitable for use with `Array.filter`. Example:
-
-```javascript
-var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
-```
-
-### minimatch.match(list, pattern, options)
-
-Match against the list of
-files, in the style of fnmatch or glob. If nothing is matched, and
-options.nonull is set, then return a list containing the pattern itself.
-
-```javascript
-var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
-```
-
-### minimatch.makeRe(pattern, options)
-
-Make a regular expression object from the pattern.
-
-## Options
-
-All options are `false` by default.
-
-### debug
-
-Dump a ton of stuff to stderr.
-
-### nobrace
-
-Do not expand `{a,b}` and `{1..3}` brace sets.
-
-### noglobstar
-
-Disable `**` matching against multiple folder names.
-
-### dot
-
-Allow patterns to match filenames starting with a period, even if
-the pattern does not explicitly have a period in that spot.
-
-Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
-is set.
-
-### noext
-
-Disable "extglob" style patterns like `+(a|b)`.
-
-### nocase
-
-Perform a case-insensitive match.
-
-### nonull
-
-When a match is not found by `minimatch.match`, return a list containing
-the pattern itself if this option is set. When not set, an empty list
-is returned if there are no matches.
-
-### matchBase
-
-If set, then patterns without slashes will be matched
-against the basename of the path if it contains slashes. For example,
-`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
-
-### nocomment
-
-Suppress the behavior of treating `#` at the start of a pattern as a
-comment.
-
-### nonegate
-
-Suppress the behavior of treating a leading `!` character as negation.
-
-### flipNegate
-
-Returns from negate expressions the same as if they were not negated.
-(That is, true on a hit, false on a miss.)
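
The options above compose freely. A short sketch of a few of them (an illustration only, assuming `minimatch` is installed; expected results shown in comments):

```javascript
// Sketch only: demonstrates the dot, nocase, and nonull options above.
var minimatch = require("minimatch")

minimatch("a/.d/b", "a/**/b")                    // false -- ** skips dot segments
minimatch("a/.d/b", "a/**/b", { dot: true })     // true

minimatch("README.MD", "*.md", { nocase: true }) // true

minimatch.match([], "*.xyz", { nonull: true })   // [ '*.xyz' ]
```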
-
-
-## Comparisons to other fnmatch/glob implementations
-
-While strict compliance with the existing standards is a worthwhile
-goal, some discrepancies exist between minimatch and other
-implementations, and are intentional.
-
-If the pattern starts with a `!` character, then it is negated. Set the
-`nonegate` flag to suppress this behavior, and treat leading `!`
-characters normally. This is perhaps relevant if you wish to start the
-pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
-characters at the start of a pattern will negate the pattern multiple
-times.
-
-If a pattern starts with `#`, then it is treated as a comment, and
-will not match anything. Use `\#` to match a literal `#` at the
-start of a line, or set the `nocomment` flag to suppress this behavior.
-
-The double-star character `**` is supported by default, unless the
-`noglobstar` flag is set. This is supported in the manner of bsdglob
-and bash 4.1, where `**` only has special significance if it is the only
-thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
-`a/**b` will not.
-
-If an escaped pattern has no matches, and the `nonull` flag is set,
-then minimatch.match returns the pattern as-provided, rather than
-interpreting the character escapes. For example,
-`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
-`"*a?"`. This is akin to setting the `nullglob` option in bash, except
-that it does not resolve escaped pattern characters.
-
-If brace expansion is not disabled, then it is performed before any
-other interpretation of the glob pattern. Thus, a pattern like
-`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
-**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
-checked for validity. Since those two are valid, matching proceeds.
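
A quick sketch of the globstar and negation rules described above (an illustration only, assuming `minimatch` is installed):

```javascript
// Sketch only: ** is special only as a whole path part; a leading ! negates.
var minimatch = require("minimatch")

minimatch("a/x/y/b", "a/**/b") // true
minimatch("a/x/y/b", "a/**b")  // false -- here ** is just a star in one part

minimatch("bar.foo", "!*.foo") // false
minimatch("bar.bar", "!*.foo") // true
```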
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/minimatch.js
deleted file mode 100644
index 5b5f8cf444..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/minimatch.js
+++ /dev/null
@@ -1,923 +0,0 @@
-module.exports = minimatch
-minimatch.Minimatch = Minimatch
-
-var path = { sep: '/' }
-try {
- path = require('path')
-} catch (er) {}
-
-var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
-var expand = require('brace-expansion')
-
-var plTypes = {
- '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
- '?': { open: '(?:', close: ')?' },
- '+': { open: '(?:', close: ')+' },
- '*': { open: '(?:', close: ')*' },
- '@': { open: '(?:', close: ')' }
-}
-
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-var qmark = '[^/]'
-
-// * => any number of characters
-var star = qmark + '*?'
-
-// ** when dots are allowed. Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
-
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
-
-// characters that need to be escaped in RegExp.
-var reSpecials = charSet('().*{}+?[]^$\\!')
-
-// "abc" -> { a:true, b:true, c:true }
-function charSet (s) {
- return s.split('').reduce(function (set, c) {
- set[c] = true
- return set
- }, {})
-}
-
-// normalizes slashes.
-var slashSplit = /\/+/
-
-minimatch.filter = filter
-function filter (pattern, options) {
- options = options || {}
- return function (p, i, list) {
- return minimatch(p, pattern, options)
- }
-}
-
-function ext (a, b) {
- a = a || {}
- b = b || {}
- var t = {}
- Object.keys(b).forEach(function (k) {
- t[k] = b[k]
- })
- Object.keys(a).forEach(function (k) {
- t[k] = a[k]
- })
- return t
-}
-
-minimatch.defaults = function (def) {
- if (!def || !Object.keys(def).length) return minimatch
-
- var orig = minimatch
-
- var m = function minimatch (p, pattern, options) {
- return orig.minimatch(p, pattern, ext(def, options))
- }
-
- m.Minimatch = function Minimatch (pattern, options) {
- return new orig.Minimatch(pattern, ext(def, options))
- }
-
- return m
-}
-
-Minimatch.defaults = function (def) {
- if (!def || !Object.keys(def).length) return Minimatch
- return minimatch.defaults(def).Minimatch
-}
-
-function minimatch (p, pattern, options) {
- if (typeof pattern !== 'string') {
- throw new TypeError('glob pattern string required')
- }
-
- if (!options) options = {}
-
- // shortcut: comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === '#') {
- return false
- }
-
- // "" only matches ""
- if (pattern.trim() === '') return p === ''
-
- return new Minimatch(pattern, options).match(p)
-}
-
-function Minimatch (pattern, options) {
- if (!(this instanceof Minimatch)) {
- return new Minimatch(pattern, options)
- }
-
- if (typeof pattern !== 'string') {
- throw new TypeError('glob pattern string required')
- }
-
- if (!options) options = {}
- pattern = pattern.trim()
-
- // windows support: need to use /, not \
- if (path.sep !== '/') {
- pattern = pattern.split(path.sep).join('/')
- }
-
- this.options = options
- this.set = []
- this.pattern = pattern
- this.regexp = null
- this.negate = false
- this.comment = false
- this.empty = false
-
- // make the set of regexps etc.
- this.make()
-}
-
-Minimatch.prototype.debug = function () {}
-
-Minimatch.prototype.make = make
-function make () {
- // don't do it more than once.
- if (this._made) return
-
- var pattern = this.pattern
- var options = this.options
-
- // empty patterns and comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === '#') {
- this.comment = true
- return
- }
- if (!pattern) {
- this.empty = true
- return
- }
-
- // step 1: figure out negation, etc.
- this.parseNegate()
-
- // step 2: expand braces
- var set = this.globSet = this.braceExpand()
-
- if (options.debug) this.debug = console.error
-
- this.debug(this.pattern, set)
-
- // step 3: now we have a set, so turn each one into a series of path-portion
- // matching patterns.
- // These will be regexps, except in the case of "**", which is
- // set to the GLOBSTAR object for globstar behavior,
- // and will not contain any / characters
- set = this.globParts = set.map(function (s) {
- return s.split(slashSplit)
- })
-
- this.debug(this.pattern, set)
-
- // glob --> regexps
- set = set.map(function (s, si, set) {
- return s.map(this.parse, this)
- }, this)
-
- this.debug(this.pattern, set)
-
- // filter out everything that didn't compile properly.
- set = set.filter(function (s) {
- return s.indexOf(false) === -1
- })
-
- this.debug(this.pattern, set)
-
- this.set = set
-}
-
-Minimatch.prototype.parseNegate = parseNegate
-function parseNegate () {
- var pattern = this.pattern
- var negate = false
- var options = this.options
- var negateOffset = 0
-
- if (options.nonegate) return
-
- for (var i = 0, l = pattern.length
- ; i < l && pattern.charAt(i) === '!'
- ; i++) {
- negate = !negate
- negateOffset++
- }
-
- if (negateOffset) this.pattern = pattern.substr(negateOffset)
- this.negate = negate
-}
-
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-minimatch.braceExpand = function (pattern, options) {
- return braceExpand(pattern, options)
-}
-
-Minimatch.prototype.braceExpand = braceExpand
-
-function braceExpand (pattern, options) {
- if (!options) {
- if (this instanceof Minimatch) {
- options = this.options
- } else {
- options = {}
- }
- }
-
- pattern = typeof pattern === 'undefined'
- ? this.pattern : pattern
-
- if (typeof pattern === 'undefined') {
- throw new TypeError('undefined pattern')
- }
-
- if (options.nobrace ||
- !pattern.match(/\{.*\}/)) {
- // shortcut. no need to expand.
- return [pattern]
- }
-
- return expand(pattern)
-}
-
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion. Otherwise, any series
-// of * is equivalent to a single *. Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-Minimatch.prototype.parse = parse
-var SUBPARSE = {}
-function parse (pattern, isSub) {
- if (pattern.length > 1024 * 64) {
- throw new TypeError('pattern is too long')
- }
-
- var options = this.options
-
- // shortcuts
- if (!options.noglobstar && pattern === '**') return GLOBSTAR
- if (pattern === '') return ''
-
- var re = ''
- var hasMagic = !!options.nocase
- var escaping = false
- // ? => one single character
- var patternListStack = []
- var negativeLists = []
- var stateChar
- var inClass = false
- var reClassStart = -1
- var classStart = -1
- // . and .. never match anything that doesn't start with .,
- // even when options.dot is set.
- var patternStart = pattern.charAt(0) === '.' ? '' // anything
- // not (start or / followed by . or .. followed by / or end)
- : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
- : '(?!\\.)'
- var self = this
-
- function clearStateChar () {
- if (stateChar) {
- // we had some state-tracking character
- // that wasn't consumed by this pass.
- switch (stateChar) {
- case '*':
- re += star
- hasMagic = true
- break
- case '?':
- re += qmark
- hasMagic = true
- break
- default:
- re += '\\' + stateChar
- break
- }
- self.debug('clearStateChar %j %j', stateChar, re)
- stateChar = false
- }
- }
-
- for (var i = 0, len = pattern.length, c
- ; (i < len) && (c = pattern.charAt(i))
- ; i++) {
- this.debug('%s\t%s %s %j', pattern, i, re, c)
-
- // skip over any that are escaped.
- if (escaping && reSpecials[c]) {
- re += '\\' + c
- escaping = false
- continue
- }
-
- switch (c) {
- case '/':
- // completely not allowed, even escaped.
- // Should already be path-split by now.
- return false
-
- case '\\':
- clearStateChar()
- escaping = true
- continue
-
- // the various stateChar values
- // for the "extglob" stuff.
- case '?':
- case '*':
- case '+':
- case '@':
- case '!':
- this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
-
- // all of those are literals inside a class, except that
- // the glob [!a] means [^a] in regexp
- if (inClass) {
- this.debug(' in class')
- if (c === '!' && i === classStart + 1) c = '^'
- re += c
- continue
- }
-
- // if we already have a stateChar, then it means
- // that there was something like ** or +? in there.
- // Handle the stateChar, then proceed with this one.
- self.debug('call clearStateChar %j', stateChar)
- clearStateChar()
- stateChar = c
- // if extglob is disabled, then +(asdf|foo) isn't a thing.
- // just clear the statechar *now*, rather than even diving into
- // the patternList stuff.
- if (options.noext) clearStateChar()
- continue
-
- case '(':
- if (inClass) {
- re += '('
- continue
- }
-
- if (!stateChar) {
- re += '\\('
- continue
- }
-
- patternListStack.push({
- type: stateChar,
- start: i - 1,
- reStart: re.length,
- open: plTypes[stateChar].open,
- close: plTypes[stateChar].close
- })
- // negation is (?:(?!js)[^/]*)
- re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
- this.debug('plType %j %j', stateChar, re)
- stateChar = false
- continue
-
- case ')':
- if (inClass || !patternListStack.length) {
- re += '\\)'
- continue
- }
-
- clearStateChar()
- hasMagic = true
- var pl = patternListStack.pop()
- // negation is (?:(?!js)[^/]*)
- // The others are (?:<pattern>)<type>
- re += pl.close
- if (pl.type === '!') {
- negativeLists.push(pl)
- }
- pl.reEnd = re.length
- continue
-
- case '|':
- if (inClass || !patternListStack.length || escaping) {
- re += '\\|'
- escaping = false
- continue
- }
-
- clearStateChar()
- re += '|'
- continue
-
- // these are mostly the same in regexp and glob
- case '[':
- // swallow any state-tracking char before the [
- clearStateChar()
-
- if (inClass) {
- re += '\\' + c
- continue
- }
-
- inClass = true
- classStart = i
- reClassStart = re.length
- re += c
- continue
-
- case ']':
- // a right bracket shall lose its special
- // meaning and represent itself in
- // a bracket expression if it occurs
- // first in the list. -- POSIX.2 2.8.3.2
- if (i === classStart + 1 || !inClass) {
- re += '\\' + c
- escaping = false
- continue
- }
-
- // handle the case where we left a class open.
- // "[z-a]" is valid, equivalent to "\[z-a\]"
- if (inClass) {
- // split where the last [ was, make sure we don't have
- // an invalid re. if so, re-walk the contents of the
- // would-be class to re-translate any characters that
- // were passed through as-is
- // TODO: It would probably be faster to determine this
- // without a try/catch and a new RegExp, but it's tricky
- // to do safely. For now, this is safe and works.
- var cs = pattern.substring(classStart + 1, i)
- try {
- RegExp('[' + cs + ']')
- } catch (er) {
- // not a valid class!
- var sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
- hasMagic = hasMagic || sp[1]
- inClass = false
- continue
- }
- }
-
- // finish up the class.
- hasMagic = true
- inClass = false
- re += c
- continue
-
- default:
- // swallow any state char that wasn't consumed
- clearStateChar()
-
- if (escaping) {
- // no need
- escaping = false
- } else if (reSpecials[c]
- && !(c === '^' && inClass)) {
- re += '\\'
- }
-
- re += c
-
- } // switch
- } // for
-
- // handle the case where we left a class open.
- // "[abc" is valid, equivalent to "\[abc"
- if (inClass) {
- // split where the last [ was, and escape it
- // this is a huge pita. We now have to re-walk
- // the contents of the would-be class to re-translate
- // any characters that were passed through as-is
- cs = pattern.substr(classStart + 1)
- sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + '\\[' + sp[0]
- hasMagic = hasMagic || sp[1]
- }
-
- // handle the case where we had a +( thing at the *end*
- // of the pattern.
- // each pattern list stack adds 3 chars, and we need to go through
- // and escape any | chars that were passed through as-is for the regexp.
- // Go through and escape them, taking care not to double-escape any
- // | chars that were already escaped.
- for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
- var tail = re.slice(pl.reStart + pl.open.length)
- this.debug('setting tail', re, pl)
- // maybe some even number of \, then maybe 1 \, followed by a |
- tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
- if (!$2) {
- // the | isn't already escaped, so escape it.
- $2 = '\\'
- }
-
- // need to escape all those slashes *again*, without escaping the
- // one that we need for escaping the | character. As it works out,
- // escaping an even number of slashes can be done by simply repeating
- // it exactly after itself. That's why this trick works.
- //
- // I am sorry that you have to see this.
- return $1 + $1 + $2 + '|'
- })
-
- this.debug('tail=%j\n %s', tail, tail, pl, re)
- var t = pl.type === '*' ? star
- : pl.type === '?' ? qmark
- : '\\' + pl.type
-
- hasMagic = true
- re = re.slice(0, pl.reStart) + t + '\\(' + tail
- }
-
- // handle trailing things that only matter at the very end.
- clearStateChar()
- if (escaping) {
- // trailing \\
- re += '\\\\'
- }
-
- // only need to apply the nodot start if the re starts with
- // something that could conceivably capture a dot
- var addPatternStart = false
- switch (re.charAt(0)) {
- case '.':
- case '[':
- case '(': addPatternStart = true
- }
-
- // Hack to work around lack of negative lookbehind in JS
- // A pattern like: *.!(x).!(y|z) needs to ensure that a name
- // like 'a.xyz.yz' doesn't match. So, the first negative
- // lookahead, has to look ALL the way ahead, to the end of
- // the pattern.
- for (var n = negativeLists.length - 1; n > -1; n--) {
- var nl = negativeLists[n]
-
- var nlBefore = re.slice(0, nl.reStart)
- var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
- var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
- var nlAfter = re.slice(nl.reEnd)
-
- nlLast += nlAfter
-
- // Handle nested stuff like *(*.js|!(*.json)), where open parens
- // mean that we should *not* include the ) in the bit that is considered
- // "after" the negated section.
- var openParensBefore = nlBefore.split('(').length - 1
- var cleanAfter = nlAfter
- for (i = 0; i < openParensBefore; i++) {
- cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
- }
- nlAfter = cleanAfter
-
- var dollar = ''
- if (nlAfter === '' && isSub !== SUBPARSE) {
- dollar = '$'
- }
- var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
- re = newRe
- }
-
- // if the re is not "" at this point, then we need to make sure
- // it doesn't match against an empty path part.
- // Otherwise a/* will match a/, which it should not.
- if (re !== '' && hasMagic) {
- re = '(?=.)' + re
- }
-
- if (addPatternStart) {
- re = patternStart + re
- }
-
- // parsing just a piece of a larger pattern.
- if (isSub === SUBPARSE) {
- return [re, hasMagic]
- }
-
- // skip the regexp for non-magical patterns
- // unescape anything in it, though, so that it'll be
- // an exact match against a file etc.
- if (!hasMagic) {
- return globUnescape(pattern)
- }
-
- var flags = options.nocase ? 'i' : ''
- try {
- var regExp = new RegExp('^' + re + '$', flags)
- } catch (er) {
- // If it was an invalid regular expression, then it can't match
- // anything. This trick looks for a character after the end of
- // the string, which is of course impossible, except in multi-line
- // mode, but it's not a /m regex.
- return new RegExp('$.')
- }
-
- regExp._glob = pattern
- regExp._src = re
-
- return regExp
-}
-
-minimatch.makeRe = function (pattern, options) {
- return new Minimatch(pattern, options || {}).makeRe()
-}
-
-Minimatch.prototype.makeRe = makeRe
-function makeRe () {
- if (this.regexp || this.regexp === false) return this.regexp
-
- // at this point, this.set is a 2d array of partial
- // pattern strings, or "**".
- //
- // It's better to use .match(). This function shouldn't
- // be used, really, but it's pretty convenient sometimes,
- // when you just want to work with a regex.
- var set = this.set
-
- if (!set.length) {
- this.regexp = false
- return this.regexp
- }
- var options = this.options
-
- var twoStar = options.noglobstar ? star
- : options.dot ? twoStarDot
- : twoStarNoDot
- var flags = options.nocase ? 'i' : ''
-
- var re = set.map(function (pattern) {
- return pattern.map(function (p) {
- return (p === GLOBSTAR) ? twoStar
- : (typeof p === 'string') ? regExpEscape(p)
- : p._src
- }).join('\\\/')
- }).join('|')
-
- // must match entire pattern
- // ending in a * or ** will make it less strict.
- re = '^(?:' + re + ')$'
-
- // can match anything, as long as it's not this.
- if (this.negate) re = '^(?!' + re + ').*$'
-
- try {
- this.regexp = new RegExp(re, flags)
- } catch (ex) {
- this.regexp = false
- }
- return this.regexp
-}
-
-minimatch.match = function (list, pattern, options) {
- options = options || {}
- var mm = new Minimatch(pattern, options)
- list = list.filter(function (f) {
- return mm.match(f)
- })
- if (mm.options.nonull && !list.length) {
- list.push(pattern)
- }
- return list
-}
-
-Minimatch.prototype.match = match
-function match (f, partial) {
- this.debug('match', f, this.pattern)
- // short-circuit in the case of busted things.
- // comments, etc.
- if (this.comment) return false
- if (this.empty) return f === ''
-
- if (f === '/' && partial) return true
-
- var options = this.options
-
- // windows: need to use /, not \
- if (path.sep !== '/') {
- f = f.split(path.sep).join('/')
- }
-
- // treat the test path as a set of pathparts.
- f = f.split(slashSplit)
- this.debug(this.pattern, 'split', f)
-
- // just ONE of the pattern sets in this.set needs to match
- // in order for it to be valid. If negating, then just one
- // match means that we have failed.
- // Either way, return on the first hit.
-
- var set = this.set
- this.debug(this.pattern, 'set', set)
-
- // Find the basename of the path by looking for the last non-empty segment
- var filename
- var i
- for (i = f.length - 1; i >= 0; i--) {
- filename = f[i]
- if (filename) break
- }
-
- for (i = 0; i < set.length; i++) {
- var pattern = set[i]
- var file = f
- if (options.matchBase && pattern.length === 1) {
- file = [filename]
- }
- var hit = this.matchOne(file, pattern, partial)
- if (hit) {
- if (options.flipNegate) return true
- return !this.negate
- }
- }
-
- // didn't get any hits. this is success if it's a negative
- // pattern, failure otherwise.
- if (options.flipNegate) return false
- return this.negate
-}
-
-// set partial to true to test if, for example,
-// "/a/b" matches the start of "/*/b/*/d"
-// Partial means, if you run out of file before you run
-// out of pattern, then that's fine, as long as all
-// the parts match.
-Minimatch.prototype.matchOne = function (file, pattern, partial) {
- var options = this.options
-
- this.debug('matchOne',
- { 'this': this, file: file, pattern: pattern })
-
- this.debug('matchOne', file.length, pattern.length)
-
- for (var fi = 0,
- pi = 0,
- fl = file.length,
- pl = pattern.length
- ; (fi < fl) && (pi < pl)
- ; fi++, pi++) {
- this.debug('matchOne loop')
- var p = pattern[pi]
- var f = file[fi]
-
- this.debug(pattern, p, f)
-
- // should be impossible.
- // some invalid regexp stuff in the set.
- if (p === false) return false
-
- if (p === GLOBSTAR) {
- this.debug('GLOBSTAR', [pattern, p, f])
-
- // "**"
- // a/**/b/**/c would match the following:
- // a/b/x/y/z/c
- // a/x/y/z/b/c
- // a/b/x/b/x/c
- // a/b/c
- // To do this, take the rest of the pattern after
- // the **, and see if it would match the file remainder.
- // If so, return success.
- // If not, the ** "swallows" a segment, and try again.
- // This is recursively awful.
- //
- // a/**/b/**/c matching a/b/x/y/z/c
- // - a matches a
- // - doublestar
- // - matchOne(b/x/y/z/c, b/**/c)
- // - b matches b
- // - doublestar
- // - matchOne(x/y/z/c, c) -> no
- // - matchOne(y/z/c, c) -> no
- // - matchOne(z/c, c) -> no
- // - matchOne(c, c) yes, hit
- var fr = fi
- var pr = pi + 1
- if (pr === pl) {
- this.debug('** at the end')
- // a ** at the end will just swallow the rest.
- // We have found a match.
- // however, it will not swallow /.x, unless
- // options.dot is set.
- // . and .. are *never* matched by **, for explosively
- // exponential reasons.
- for (; fi < fl; fi++) {
- if (file[fi] === '.' || file[fi] === '..' ||
- (!options.dot && file[fi].charAt(0) === '.')) return false
- }
- return true
- }
-
- // ok, let's see if we can swallow whatever we can.
- while (fr < fl) {
- var swallowee = file[fr]
-
- this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
-
- // XXX remove this slice. Just pass the start index.
- if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
- this.debug('globstar found match!', fr, fl, swallowee)
- // found a match.
- return true
- } else {
- // can't swallow "." or ".." ever.
- // can only swallow ".foo" when explicitly asked.
- if (swallowee === '.' || swallowee === '..' ||
- (!options.dot && swallowee.charAt(0) === '.')) {
- this.debug('dot detected!', file, fr, pattern, pr)
- break
- }
-
- // ** swallows a segment, and continue.
- this.debug('globstar swallow a segment, and continue')
- fr++
- }
- }
-
- // no match was found.
- // However, in partial mode, we can't say this is necessarily over.
-      // If there's more *pattern* left, then a partial match is still possible.
- if (partial) {
- // ran out of file
- this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
- if (fr === fl) return true
- }
- return false
- }
-
- // something other than **
- // non-magic patterns just have to match exactly
- // patterns with magic have been turned into regexps.
- var hit
- if (typeof p === 'string') {
- if (options.nocase) {
- hit = f.toLowerCase() === p.toLowerCase()
- } else {
- hit = f === p
- }
- this.debug('string match', p, f, hit)
- } else {
- hit = f.match(p)
- this.debug('pattern match', p, f, hit)
- }
-
- if (!hit) return false
- }
-
- // Note: ending in / means that we'll get a final ""
- // at the end of the pattern. This can only match a
- // corresponding "" at the end of the file.
- // If the file ends in /, then it can only match a
- // a pattern that ends in /, unless the pattern just
- // doesn't have any more for it. But, a/b/ should *not*
- // match "a/b/*", even though "" matches against the
- // [^/]*? pattern, except in partial mode, where it might
- // simply not be reached yet.
- // However, a/b/ should still satisfy a/*
-
- // now either we fell off the end of the pattern, or we're done.
- if (fi === fl && pi === pl) {
- // ran out of pattern and filename at the same time.
- // an exact hit!
- return true
- } else if (fi === fl) {
- // ran out of file, but still had pattern left.
- // this is ok if we're doing the match as part of
- // a glob fs traversal.
- return partial
- } else if (pi === pl) {
- // ran out of pattern, still have file left.
- // this is only acceptable if we're on the very last
- // empty segment of a file with a trailing slash.
- // a/* should match a/b/
- var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
- return emptyFileEnd
- }
-
- // should be unreachable.
- throw new Error('wtf?')
-}
-
-// replace stuff like \* with *
-function globUnescape (s) {
- return s.replace(/\\(.)/g, '$1')
-}
-
-function regExpEscape (s) {
- return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
-}
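
As the comment in `makeRe` above notes, `.match()` is the more exact path, while `makeRe()` is a convenience. A minimal sketch of both (an illustration only, assuming the package is installed):

```javascript
// Sketch only: .match() walks the parsed set; makeRe() builds one regex.
var Minimatch = require("minimatch").Minimatch

var mm = new Minimatch("src/**/*.js")
console.log(mm.match("src/a/b/c.js"))   // true
console.log(mm.match("src/.git/c.js"))  // false -- ** never swallows dot segments

console.log(mm.makeRe().test("src/a/b/c.js")) // true
```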
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/LICENSE
deleted file mode 100644
index de3226673c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md
deleted file mode 100644
index bbfd3fcb88..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md
+++ /dev/null
@@ -1,129 +0,0 @@
-# brace-expansion
-
-[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
-as known from sh/bash, in JavaScript.
-
-[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
-[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
-[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
-
-[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
-
-## Example
-
-```js
-var expand = require('brace-expansion');
-
-expand('file-{a,b,c}.jpg')
-// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
-
-expand('-v{,,}')
-// => ['-v', '-v', '-v']
-
-expand('file{0..2}.jpg')
-// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
-
-expand('file-{a..c}.jpg')
-// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
-
-expand('file{2..0}.jpg')
-// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
-
-expand('file{0..4..2}.jpg')
-// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
-
-expand('file-{a..e..2}.jpg')
-// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
-
-expand('file{00..10..5}.jpg')
-// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
-
-expand('{{A..C},{a..c}}')
-// => ['A', 'B', 'C', 'a', 'b', 'c']
-
-expand('ppp{,config,oe{,conf}}')
-// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
-```
-
-## API
-
-```js
-var expand = require('brace-expansion');
-```
-
-### var expanded = expand(str)
-
-Return an array of all possible and valid expansions of `str`. If none are
-found, `[str]` is returned.
-
-Valid expansions are:
-
-```js
-/^(.*,)+(.+)?$/
-// {a,b,...}
-```
-
-A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
-
-```js
-/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
-// {x..y[..incr]}
-```
-
-A numeric sequence from `x` to `y` inclusive, with optional increment.
-If `x` or `y` start with a leading `0`, all the numbers will be padded
-to have equal length. Negative numbers and backwards iteration work too.
-
-```js
-/^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/
-// {x..y[..incr]}
-```
-
-An alphabetic sequence from `x` to `y` inclusive, with optional increment.
-`x` and `y` must be exactly one character, and if given, `incr` must be a
-number.
-
-For compatibility reasons, the string `${` is not eligible for brace expansion.
-
-## Installation
-
-With [npm](https://npmjs.org) do:
-
-```bash
-npm install brace-expansion
-```
-
-## Contributors
-
-- [Julian Gruber](https://github.com/juliangruber)
-- [Isaac Z. Schlueter](https://github.com/isaacs)
-
-## Sponsors
-
-This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
-
-Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
-
-## License
-
-(MIT)
-
-Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
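
A short sketch of the sequence rules above, including zero-padding and increments (an illustration only, assuming `brace-expansion` is installed; expected output shown in comments):

```js
// Sketch only: a padded numeric sequence with an increment, plus nesting.
var expand = require('brace-expansion');

expand('img{01..13..4}.png')
// => [ 'img01.png', 'img05.png', 'img09.png', 'img13.png' ]

expand('{a,b{1,2}}')
// => [ 'a', 'b1', 'b2' ]
```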
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js
deleted file mode 100644
index 2b6f4f85c9..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js
+++ /dev/null
@@ -1,200 +0,0 @@
-var concatMap = require('concat-map');
-var balanced = require('balanced-match');
-
-module.exports = expandTop;
-
-var escSlash = '\0SLASH'+Math.random()+'\0';
-var escOpen = '\0OPEN'+Math.random()+'\0';
-var escClose = '\0CLOSE'+Math.random()+'\0';
-var escComma = '\0COMMA'+Math.random()+'\0';
-var escPeriod = '\0PERIOD'+Math.random()+'\0';
-
-function numeric(str) {
- return parseInt(str, 10) == str
- ? parseInt(str, 10)
- : str.charCodeAt(0);
-}
-
-function escapeBraces(str) {
- return str.split('\\\\').join(escSlash)
- .split('\\{').join(escOpen)
- .split('\\}').join(escClose)
- .split('\\,').join(escComma)
- .split('\\.').join(escPeriod);
-}
-
-function unescapeBraces(str) {
- return str.split(escSlash).join('\\')
- .split(escOpen).join('{')
- .split(escClose).join('}')
- .split(escComma).join(',')
- .split(escPeriod).join('.');
-}
-
-
-// Basically just str.split(","), but handling cases
-// where we have nested braced sections, which should be
-// treated as individual members, like {a,{b,c},d}
-function parseCommaParts(str) {
- if (!str)
- return [''];
-
- var parts = [];
- var m = balanced('{', '}', str);
-
- if (!m)
- return str.split(',');
-
- var pre = m.pre;
- var body = m.body;
- var post = m.post;
- var p = pre.split(',');
-
- p[p.length-1] += '{' + body + '}';
- var postParts = parseCommaParts(post);
- if (post.length) {
- p[p.length-1] += postParts.shift();
- p.push.apply(p, postParts);
- }
-
- parts.push.apply(parts, p);
-
- return parts;
-}
-
-function expandTop(str) {
- if (!str)
- return [];
-
- // I don't know why Bash 4.3 does this, but it does.
- // Anything starting with {} will have the first two bytes preserved
- // but *only* at the top level, so {},a}b will not expand to anything,
- // but a{},b}c will be expanded to [a}c,abc].
- // One could argue that this is a bug in Bash, but since the goal of
- // this module is to match Bash's rules, we escape a leading {}
- if (str.substr(0, 2) === '{}') {
- str = '\\{\\}' + str.substr(2);
- }
-
- return expand(escapeBraces(str), true).map(unescapeBraces);
-}
-
-function identity(e) {
- return e;
-}
-
-function embrace(str) {
- return '{' + str + '}';
-}
-function isPadded(el) {
- return /^-?0\d/.test(el);
-}
-
-function lte(i, y) {
- return i <= y;
-}
-function gte(i, y) {
- return i >= y;
-}
-
-function expand(str, isTop) {
- var expansions = [];
-
- var m = balanced('{', '}', str);
- if (!m || /\$$/.test(m.pre)) return [str];
-
- var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
- var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
- var isSequence = isNumericSequence || isAlphaSequence;
- var isOptions = m.body.indexOf(',') >= 0;
- if (!isSequence && !isOptions) {
- // {a},b}
- if (m.post.match(/,.*\}/)) {
- str = m.pre + '{' + m.body + escClose + m.post;
- return expand(str);
- }
- return [str];
- }
-
- var n;
- if (isSequence) {
- n = m.body.split(/\.\./);
- } else {
- n = parseCommaParts(m.body);
- if (n.length === 1) {
- // x{{a,b}}y ==> x{a}y x{b}y
- n = expand(n[0], false).map(embrace);
- if (n.length === 1) {
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
- return post.map(function(p) {
- return m.pre + n[0] + p;
- });
- }
- }
- }
-
- // at this point, n is the parts, and we know it's not a comma set
- // with a single entry.
-
- // no need to expand pre, since it is guaranteed to be free of brace-sets
- var pre = m.pre;
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
-
- var N;
-
- if (isSequence) {
- var x = numeric(n[0]);
- var y = numeric(n[1]);
- var width = Math.max(n[0].length, n[1].length)
- var incr = n.length == 3
- ? Math.abs(numeric(n[2]))
- : 1;
- var test = lte;
- var reverse = y < x;
- if (reverse) {
- incr *= -1;
- test = gte;
- }
- var pad = n.some(isPadded);
-
- N = [];
-
- for (var i = x; test(i, y); i += incr) {
- var c;
- if (isAlphaSequence) {
- c = String.fromCharCode(i);
- if (c === '\\')
- c = '';
- } else {
- c = String(i);
- if (pad) {
- var need = width - c.length;
- if (need > 0) {
- var z = new Array(need + 1).join('0');
- if (i < 0)
- c = '-' + z + c.slice(1);
- else
- c = z + c;
- }
- }
- }
- N.push(c);
- }
- } else {
- N = concatMap(n, function(el) { return expand(el, false) });
- }
-
- for (var j = 0; j < N.length; j++) {
- for (var k = 0; k < post.length; k++) {
- var expansion = pre + N[j] + post[k];
- if (!isTop || isSequence || expansion)
- expansions.push(expansion);
- }
- }
-
- return expansions;
-}
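
The leading-`{}` escape in `expandTop` above can be checked directly; a minimal sketch (an illustration only, assuming the package is installed):

```js
// Sketch only: mirrors the Bash 4.3 behavior described in expandTop above.
var expand = require('brace-expansion');

expand('{},a}b')  // => [ '{},a}b' ] -- a leading {} is kept literally
expand('a{},b}c') // => [ 'a}c', 'abc' ]
```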
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
deleted file mode 100644
index ae5d8c36ac..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
+++ /dev/null
@@ -1,5 +0,0 @@
-test
-.gitignore
-.travis.yml
-Makefile
-example.js
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
deleted file mode 100644
index 2cdc8e4148..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
+++ /dev/null
@@ -1,21 +0,0 @@
-(MIT)
-
-Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
deleted file mode 100644
index 08e918c0db..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
+++ /dev/null
@@ -1,91 +0,0 @@
-# balanced-match
-
-Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
-
-[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match)
-[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match)
-
-[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match)
-
-## Example
-
-Get the first matching pair of braces:
-
-```js
-var balanced = require('balanced-match');
-
-console.log(balanced('{', '}', 'pre{in{nested}}post'));
-console.log(balanced('{', '}', 'pre{first}between{second}post'));
-console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'));
-```
-
-The matches are:
-
-```bash
-$ node example.js
-{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
-{ start: 3,
- end: 9,
- pre: 'pre',
- body: 'first',
- post: 'between{second}post' }
-{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
-```
-
-## API
-
-### var m = balanced(a, b, str)
-
-For the first non-nested matching pair of `a` and `b` in `str`, return an
-object with those keys:
-
-* **start** the index of the first match of `a`
-* **end** the index of the matching `b`
-* **pre** the preamble, `a` and `b` not included
-* **body** the match, `a` and `b` not included
-* **post** the postscript, `a` and `b` not included
-
-If there's no match, `undefined` will be returned.
-
-If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
-
-### var r = balanced.range(a, b, str)
-
-For the first non-nested matching pair of `a` and `b` in `str`, return an
-array with indexes: `[ <a index>, <b index> ]`.
-
-If there's no match, `undefined` will be returned.
-
-If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
-
-## Installation
-
-With [npm](https://npmjs.org) do:
-
-```bash
-npm install balanced-match
-```
-
-## License
-
-(MIT)
-
-Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
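
A minimal sketch of the unmatched-pair behavior described above (an illustration only, assuming `balanced-match` is installed; expected output shown in comments):

```js
// Sketch only: the first pair that was closed wins when pairs are unmatched.
var balanced = require('balanced-match');

balanced('{', '}', '{{a}')
// => { start: 1, end: 3, pre: '{', body: 'a', post: '' }

balanced.range('{', '}', '{a}}')
// => [ 0, 2 ]
```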
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
deleted file mode 100644
index 1685a76293..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
+++ /dev/null
@@ -1,59 +0,0 @@
-'use strict';
-module.exports = balanced;
-function balanced(a, b, str) {
- if (a instanceof RegExp) a = maybeMatch(a, str);
- if (b instanceof RegExp) b = maybeMatch(b, str);
-
- var r = range(a, b, str);
-
- return r && {
- start: r[0],
- end: r[1],
- pre: str.slice(0, r[0]),
- body: str.slice(r[0] + a.length, r[1]),
- post: str.slice(r[1] + b.length)
- };
-}
-
-function maybeMatch(reg, str) {
- var m = str.match(reg);
- return m ? m[0] : null;
-}
-
-balanced.range = range;
-function range(a, b, str) {
- var begs, beg, left, right, result;
- var ai = str.indexOf(a);
- var bi = str.indexOf(b, ai + 1);
- var i = ai;
-
- if (ai >= 0 && bi > 0) {
- begs = [];
- left = str.length;
-
- while (i >= 0 && !result) {
- if (i == ai) {
- begs.push(i);
- ai = str.indexOf(a, i + 1);
- } else if (begs.length == 1) {
- result = [ begs.pop(), bi ];
- } else {
- beg = begs.pop();
- if (beg < left) {
- left = beg;
- right = bi;
- }
-
- bi = str.indexOf(b, i + 1);
- }
-
- i = ai < bi && ai >= 0 ? ai : bi;
- }
-
- if (begs.length) {
- result = [ left, right ];
- }
- }
-
- return result;
-}
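
Regular-expression delimiters are reduced to their first match by `maybeMatch` above before `range` runs; a minimal sketch (an illustration only, assuming the package is installed):

```js
// Sketch only: regexp delimiters collapse to their first literal match.
var balanced = require('balanced-match');

balanced(/\{+/, /\}+/, 'a{{b}}c')
// => { start: 1, end: 4, pre: 'a', body: 'b', post: 'c' }
```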
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
deleted file mode 100644
index c8a1dfc7c3..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
+++ /dev/null
@@ -1,77 +0,0 @@
-{
- "_from": "balanced-match@^1.0.0",
- "_id": "balanced-match@1.0.0",
- "_inBundle": false,
- "_integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
- "_location": "/npm-lifecycle/node-gyp/minimatch/brace-expansion/balanced-match",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "balanced-match@^1.0.0",
- "name": "balanced-match",
- "escapedName": "balanced-match",
- "rawSpec": "^1.0.0",
- "saveSpec": null,
- "fetchSpec": "^1.0.0"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp/minimatch/brace-expansion"
- ],
- "_resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
- "_shasum": "89b4d199ab2bee49de164ea02b89ce462d71b767",
- "_spec": "balanced-match@^1.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion",
- "author": {
- "name": "Julian Gruber",
- "email": "mail@juliangruber.com",
- "url": "http://juliangruber.com"
- },
- "bugs": {
- "url": "https://github.com/juliangruber/balanced-match/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "Match balanced character pairs, like \"{\" and \"}\"",
- "devDependencies": {
- "matcha": "^0.7.0",
- "tape": "^4.6.0"
- },
- "homepage": "https://github.com/juliangruber/balanced-match",
- "keywords": [
- "match",
- "regexp",
- "test",
- "balanced",
- "parse"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "balanced-match",
- "repository": {
- "type": "git",
- "url": "git://github.com/juliangruber/balanced-match.git"
- },
- "scripts": {
- "bench": "make bench",
- "test": "make test"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": [
- "ie/8..latest",
- "firefox/20..latest",
- "firefox/nightly",
- "chrome/25..latest",
- "chrome/canary",
- "opera/12..latest",
- "opera/next",
- "safari/5.1..latest",
- "ipad/6.0..latest",
- "iphone/6.0..latest",
- "android-browser/4.2..latest"
- ]
- },
- "version": "1.0.0"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
deleted file mode 100644
index f1d0f13c8a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-language: node_js
-node_js:
- - 0.4
- - 0.6
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
deleted file mode 100644
index ee27ba4b44..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-This software is released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
deleted file mode 100644
index 408f70a1be..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
+++ /dev/null
@@ -1,62 +0,0 @@
-concat-map
-==========
-
-Concatenative mapdashery.
-
-[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map)
-
-[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map)
-
-example
-=======
-
-``` js
-var concatMap = require('concat-map');
-var xs = [ 1, 2, 3, 4, 5, 6 ];
-var ys = concatMap(xs, function (x) {
- return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
-});
-console.dir(ys);
-```
-
-***
-
-```
-[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]
-```
-
-methods
-=======
-
-``` js
-var concatMap = require('concat-map')
-```
-
-concatMap(xs, fn)
------------------
-
-Return an array of concatenated elements by calling `fn(x, i)` for each element
-`x` and each index `i` in the array `xs`.
-
-When `fn(x, i)` returns an array, its result will be concatenated with the
-result array. If `fn(x, i)` returns anything else, that value will be pushed
-onto the end of the result array.
-
-install
-=======
-
-With [npm](http://npmjs.org) do:
-
-```
-npm install concat-map
-```
-
-license
-=======
-
-MIT
-
-notes
-=====
-
-This module was written while sitting high above the ground in a tree.
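
A minimal sketch of the scalar-versus-array rule described above (an illustration only, assuming `concat-map` is installed; expected output shown in the comment):

```js
// Sketch only: arrays are flattened one level; scalars are pushed as-is.
var concatMap = require('concat-map');

concatMap([ 'a', 'b' ], function (x) {
    return x === 'b' ? [ 'B', 'B' ] : x;
});
// => [ 'a', 'B', 'B' ]
```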
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
deleted file mode 100644
index 33656217b6..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
+++ /dev/null
@@ -1,6 +0,0 @@
-var concatMap = require('../');
-var xs = [ 1, 2, 3, 4, 5, 6 ];
-var ys = concatMap(xs, function (x) {
- return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
-});
-console.dir(ys);
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
deleted file mode 100644
index b29a7812e5..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
+++ /dev/null
@@ -1,13 +0,0 @@
-module.exports = function (xs, fn) {
- var res = [];
- for (var i = 0; i < xs.length; i++) {
- var x = fn(xs[i], i);
- if (isArray(x)) res.push.apply(res, x);
- else res.push(x);
- }
- return res;
-};
-
-var isArray = Array.isArray || function (xs) {
- return Object.prototype.toString.call(xs) === '[object Array]';
-};
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
deleted file mode 100644
index d27e688606..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
+++ /dev/null
@@ -1,93 +0,0 @@
-{
- "_from": "concat-map@0.0.1",
- "_id": "concat-map@0.0.1",
- "_inBundle": false,
- "_integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
- "_location": "/npm-lifecycle/node-gyp/minimatch/brace-expansion/concat-map",
- "_phantomChildren": {},
- "_requested": {
- "type": "version",
- "registry": true,
- "raw": "concat-map@0.0.1",
- "name": "concat-map",
- "escapedName": "concat-map",
- "rawSpec": "0.0.1",
- "saveSpec": null,
- "fetchSpec": "0.0.1"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp/minimatch/brace-expansion"
- ],
- "_resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
- "_shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
- "_shrinkwrap": null,
- "_spec": "concat-map@0.0.1",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion",
- "author": {
- "name": "James Halliday",
- "email": "mail@substack.net",
- "url": "http://substack.net"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/substack/node-concat-map/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "concatenative mapdashery",
- "devDependencies": {
- "tape": "~2.4.0"
- },
- "directories": {
- "example": "example",
- "test": "test"
- },
- "homepage": "https://github.com/substack/node-concat-map#readme",
- "keywords": [
- "concat",
- "concatMap",
- "map",
- "functional",
- "higher-order"
- ],
- "license": "MIT",
- "main": "index.js",
- "name": "concat-map",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/substack/node-concat-map.git"
- },
- "scripts": {
- "test": "tape test/*.js"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": {
- "ie": [
- 6,
- 7,
- 8,
- 9
- ],
- "ff": [
- 3.5,
- 10,
- 15
- ],
- "chrome": [
- 10,
- 22
- ],
- "safari": [
- 5.1
- ],
- "opera": [
- 12
- ]
- }
- },
- "version": "0.0.1"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
deleted file mode 100644
index fdbd7022f6..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
+++ /dev/null
@@ -1,39 +0,0 @@
-var concatMap = require('../');
-var test = require('tape');
-
-test('empty or not', function (t) {
- var xs = [ 1, 2, 3, 4, 5, 6 ];
- var ixes = [];
- var ys = concatMap(xs, function (x, ix) {
- ixes.push(ix);
- return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
- });
- t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]);
- t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]);
- t.end();
-});
-
-test('always something', function (t) {
- var xs = [ 'a', 'b', 'c', 'd' ];
- var ys = concatMap(xs, function (x) {
- return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ];
- });
- t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]);
- t.end();
-});
-
-test('scalars', function (t) {
- var xs = [ 'a', 'b', 'c', 'd' ];
- var ys = concatMap(xs, function (x) {
- return x === 'b' ? [ 'B', 'B', 'B' ] : x;
- });
- t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]);
- t.end();
-});
-
-test('undefs', function (t) {
- var xs = [ 'a', 'b', 'c', 'd' ];
- var ys = concatMap(xs, function () {});
- t.same(ys, [ undefined, undefined, undefined, undefined ]);
- t.end();
-});
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json
deleted file mode 100644
index 4c6256179c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json
+++ /dev/null
@@ -1,75 +0,0 @@
-{
- "_from": "brace-expansion@^1.1.7",
- "_id": "brace-expansion@1.1.11",
- "_inBundle": false,
- "_integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
- "_location": "/npm-lifecycle/node-gyp/minimatch/brace-expansion",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "brace-expansion@^1.1.7",
- "name": "brace-expansion",
- "escapedName": "brace-expansion",
- "rawSpec": "^1.1.7",
- "saveSpec": null,
- "fetchSpec": "^1.1.7"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp/minimatch"
- ],
- "_resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
- "_shasum": "3c7fcbf529d87226f3d2f52b966ff5271eb441dd",
- "_spec": "brace-expansion@^1.1.7",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch",
- "author": {
- "name": "Julian Gruber",
- "email": "mail@juliangruber.com",
- "url": "http://juliangruber.com"
- },
- "bugs": {
- "url": "https://github.com/juliangruber/brace-expansion/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
- },
- "deprecated": false,
- "description": "Brace expansion as known from sh/bash",
- "devDependencies": {
- "matcha": "^0.7.0",
- "tape": "^4.6.0"
- },
- "homepage": "https://github.com/juliangruber/brace-expansion",
- "keywords": [],
- "license": "MIT",
- "main": "index.js",
- "name": "brace-expansion",
- "repository": {
- "type": "git",
- "url": "git://github.com/juliangruber/brace-expansion.git"
- },
- "scripts": {
- "bench": "matcha test/perf/bench.js",
- "gentest": "bash test/generate.sh",
- "test": "tape test/*.js"
- },
- "testling": {
- "files": "test/*.js",
- "browsers": [
- "ie/8..latest",
- "firefox/20..latest",
- "firefox/nightly",
- "chrome/25..latest",
- "chrome/canary",
- "opera/12..latest",
- "opera/next",
- "safari/5.1..latest",
- "ipad/6.0..latest",
- "iphone/6.0..latest",
- "android-browser/4.2..latest"
- ]
- },
- "version": "1.1.11"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/package.json
deleted file mode 100644
index 4e2da98f85..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/minimatch/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
- "_from": "minimatch@^3.0.2",
- "_id": "minimatch@3.0.4",
- "_inBundle": false,
- "_integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
- "_location": "/npm-lifecycle/node-gyp/minimatch",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "minimatch@^3.0.2",
- "name": "minimatch",
- "escapedName": "minimatch",
- "rawSpec": "^3.0.2",
- "saveSpec": null,
- "fetchSpec": "^3.0.2"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp"
- ],
- "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
- "_shasum": "5166e286457f03306064be5497e8dbb0c3d32083",
- "_spec": "minimatch@^3.0.2",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me"
- },
- "bugs": {
- "url": "https://github.com/isaacs/minimatch/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "brace-expansion": "^1.1.7"
- },
- "deprecated": false,
- "description": "a glob matcher in javascript",
- "devDependencies": {
- "tap": "^10.3.2"
- },
- "engines": {
- "node": "*"
- },
- "files": [
- "minimatch.js"
- ],
- "homepage": "https://github.com/isaacs/minimatch#readme",
- "license": "ISC",
- "main": "minimatch.js",
- "name": "minimatch",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/minimatch.git"
- },
- "scripts": {
- "postpublish": "git push origin --all; git push origin --tags",
- "postversion": "npm publish",
- "preversion": "npm test",
- "test": "tap test/*.js --cov"
- },
- "version": "3.0.4"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.npmignore b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.npmignore
deleted file mode 100644
index 3c3629e647..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.npmignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.travis.yml b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.travis.yml
deleted file mode 100644
index 99f2bbf506..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/.travis.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-language: node_js
-node_js:
- - '0.8'
- - '0.10'
- - '0.12'
- - 'iojs'
-before_install:
- - npm install -g npm@latest
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/README.md
deleted file mode 100644
index f21a4b31c5..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/README.md
+++ /dev/null
@@ -1,211 +0,0 @@
-If you want to write an option parser, and have it be good, there are
-two ways to do it. The Right Way, and the Wrong Way.
-
-The Wrong Way is to sit down and write an option parser. We've all done
-that.
-
-The Right Way is to write some complex configurable program with so many
-options that you hit the limit of your frustration just trying to
-manage them all, and defer it with duct-tape solutions until you see
-exactly to the core of the problem, and finally snap and write an
-awesome option parser.
-
-If you want to write an option parser, don't write an option parser.
-Write a package manager, or a source control system, or a service
-restarter, or an operating system. You probably won't end up with a
-good one of those, but if you don't give up, and you are relentless and
-diligent enough in your procrastination, you may just end up with a very
-nice option parser.
-
-## USAGE
-
- // my-program.js
- var nopt = require("nopt")
- , Stream = require("stream").Stream
- , path = require("path")
- , knownOpts = { "foo" : [String, null]
- , "bar" : [Stream, Number]
- , "baz" : path
- , "bloo" : [ "big", "medium", "small" ]
- , "flag" : Boolean
- , "pick" : Boolean
- , "many1" : [String, Array]
- , "many2" : [path]
- }
- , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
- , "b7" : ["--bar", "7"]
- , "m" : ["--bloo", "medium"]
- , "p" : ["--pick"]
- , "f" : ["--flag"]
- }
- // everything is optional.
- // knownOpts and shorthands default to {}
- // arg list defaults to process.argv
- // slice defaults to 2
- , parsed = nopt(knownOpts, shortHands, process.argv, 2)
- console.log(parsed)
-
-This would give you support for any of the following:
-
-```bash
-$ node my-program.js --foo "blerp" --no-flag
-{ "foo" : "blerp", "flag" : false }
-
-$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
-{ bar: 7, foo: "Mr. Hand", flag: true }
-
-$ node my-program.js --foo "blerp" -f -----p
-{ foo: "blerp", flag: true, pick: true }
-
-$ node my-program.js -fp --foofoo
-{ foo: "Mr. Foo", flag: true, pick: true }
-
-$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
-{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }
-
-$ node my-program.js --blatzk -fp # unknown opts are ok.
-{ blatzk: true, flag: true, pick: true }
-
-$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value
-{ blatzk: 1000, flag: true, pick: true }
-
-$ node my-program.js --no-blatzk -fp # unless they start with "no-"
-{ blatzk: false, flag: true, pick: true }
-
-$ node my-program.js --baz b/a/z # known paths are resolved.
-{ baz: "/Users/isaacs/b/a/z" }
-
-# if Array is one of the types, then it can take many
-# values, and will always be an array. The other types provided
-# specify what types are allowed in the list.
-
-$ node my-program.js --many1 5 --many1 null --many1 foo
-{ many1: ["5", "null", "foo"] }
-
-$ node my-program.js --many2 foo --many2 bar
-{ many2: ["/path/to/foo", "/path/to/bar"] }
-```
-
-Read the tests in `test/basic.js` for more examples of
-what this puppy can do.
-
-## Types
-
-The following types are supported, and defined on `nopt.typeDefs`
-
-* String: A normal string. No parsing is done.
-* path: A file system path. Gets resolved against cwd if not absolute.
-* url: A url. If it doesn't parse, it isn't accepted.
-* Number: Must be numeric.
-* Date: Must parse as a date. If it does, and `Date` is one of the options,
- then it will return a Date object, not a string.
-* Boolean: Must be either `true` or `false`. If an option is a boolean,
- then it does not need a value, and its presence will imply `true` as
- the value. To negate boolean flags, do `--no-whatever` or `--whatever
- false`
-* NaN: Means that the option is strictly not allowed. Any value will
- fail.
-* Stream: An object matching the "Stream" class in node. Valuable
- for use when validating programmatically. (npm uses this to let you
- supply any WriteStream on the `outfd` and `logfd` config options.)
-* Array: If `Array` is specified as one of the types, then the value
- will be parsed as a list of options. This means that multiple values
- can be specified, and that the value will always be an array.
-
-If a type is an array of values not on this list, then those are
-considered valid values. For instance, in the example above, the
-`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
-and any other value will be rejected.
-
-When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
-interpreted as their JavaScript equivalents.
-
-You can also mix types and values, or multiple types, in a list. For
-instance `{ blah: [Number, null] }` would allow a value to be set to
-either a Number or null. When types are ordered, this implies a
-preference, and the first type that can be used to properly interpret
-the value will be used.
-
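-A minimal sketch of that ordering (the option name `blah` is just an
-example):
-
-```javascript
-var nopt = require("nopt")
-var a = nopt({ blah: [Number, null] }, {}, ["--blah", "7"], 0)
-// a.blah === 7, since Number comes first in the list and "7" parses as one
-var b = nopt({ blah: [Number, null] }, {}, ["--no-blah"], 0)
-// b.blah === null, since null is allowed and --no-blah negates the option
-```
-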
-To define a new type, add it to `nopt.typeDefs`. Each item in that
-hash is an object with a `type` member and a `validate` method. The
-`type` member is an object that matches what goes in the type list. The
-`validate` method is a function that gets called with `validate(data,
-key, val)`. Validate methods should assign `data[key]` to the valid
-value of `val` if it can be handled properly, or return boolean
-`false` if it cannot.
-
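-For example, a sketch of a custom type (the `lowercase` type is invented
-here for illustration; it is not part of nopt):
-
-```javascript
-var nopt = require("nopt")
-
-// marker object to put in type lists
-var lowercase = { name: "lowercase" }
-
-nopt.typeDefs.lowercase = {
-  type: lowercase,
-  validate: function (data, k, val) {
-    if (typeof val !== "string") return false // reject non-strings
-    data[k] = val.toLowerCase()
-    return true
-  }
-}
-
-var parsed = nopt({ name: lowercase }, {}, ["--name", "Foo"], 0)
-// parsed.name === "foo"
-```
-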
-You can also call `nopt.clean(data, types, typeDefs)` to clean up a
-config object and remove its invalid properties.
-
-## Error Handling
-
-By default, nopt outputs a warning to standard error when invalid values for
-known options are found. You can change this behavior by assigning a method
-to `nopt.invalidHandler`. This method will be called with the offending
-key, value, and type list, as `nopt.invalidHandler(key, val, types)`.
-
-If no `nopt.invalidHandler` is assigned, then it will console.error
-its whining. If it is assigned to boolean `false` then the warning is
-suppressed.
-
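-For example, a sketch of a stricter handler that raises instead of
-warning (the error text is made up):
-
-```javascript
-var nopt = require("nopt")
-nopt.invalidHandler = function (key, val, types) {
-  throw new Error("invalid value " + JSON.stringify(val) + " for --" + key)
-}
-```
-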
-## Abbreviations
-
-Yes, they are supported. If you define options like this:
-
-```javascript
-{ "foolhardyelephants" : Boolean
-, "pileofmonkeys" : Boolean }
-```
-
-Then this will work:
-
-```bash
-node program.js --foolhar --pil
-node program.js --no-f --pileofmon
-# etc.
-```
-
-## Shorthands
-
-Shorthands are a hash of shorter option names to a snippet of args that
-they expand to.
-
-If multiple one-character shorthands are all combined, and the
-combination does not unambiguously match any other option or shorthand,
-then they will be broken up into their constituent parts. For example:
-
-```json
-{ "s" : ["--loglevel", "silent"]
-, "g" : "--global"
-, "f" : "--force"
-, "p" : "--parseable"
-, "l" : "--long"
-}
-```
-
-```bash
-npm ls -sgflp
-# just like doing this:
-npm ls --loglevel silent --global --force --long --parseable
-```
-
-## The Rest of the args
-
-The config object returned by nopt is given a special member called
-`argv`, which is an object with the following fields:
-
-* `remain`: The remaining args after all the parsing has occurred.
-* `original`: The args as they originally appeared.
-* `cooked`: The args after flags and shorthands are expanded.
-
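-A minimal sketch of those fields (argument values invented):
-
-```javascript
-var nopt = require("nopt")
-var parsed = nopt({ flag: Boolean }, {}, ["node", "x.js", "--flag", "rest"], 2)
-// parsed.argv.remain   => ["rest"]
-// parsed.argv.original => ["--flag", "rest"]
-// parsed.argv.cooked   => ["--flag", "rest"]
-```
-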
-## Slicing
-
-Node programs are called with more or less the exact argv as it appears
-in C land, after the v8 and node-specific options have been plucked off.
-As such, `argv[0]` is always `node` and `argv[1]` is always the
-JavaScript program being run.
-
-That's usually not very useful to you. So they're sliced off by
-default. If you want them, then you can pass in `0` as the last
-argument, or any other number that you'd like to slice off the start of
-the list.
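-
-A quick sketch of that (the option name is invented):
-
-```javascript
-var nopt = require("nopt")
-var parsed = nopt({ flag: Boolean }, {}, ["--flag", "hello"], 0)
-// with a slice of 0, nothing is removed up front:
-// parsed.flag === true, parsed.argv.remain => ["hello"]
-```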
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/bin/nopt.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/bin/nopt.js
deleted file mode 100755
index 3232d4c570..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/bin/nopt.js
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env node
-var nopt = require("../lib/nopt")
- , path = require("path")
- , types = { num: Number
- , bool: Boolean
- , help: Boolean
- , list: Array
- , "num-list": [Number, Array]
- , "str-list": [String, Array]
- , "bool-list": [Boolean, Array]
- , str: String
- , clear: Boolean
- , config: Boolean
- , length: Number
- , file: path
- }
- , shorthands = { s: [ "--str", "astring" ]
- , b: [ "--bool" ]
- , nb: [ "--no-bool" ]
- , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
- , "?": ["--help"]
- , h: ["--help"]
- , H: ["--help"]
- , n: [ "--num", "125" ]
- , c: ["--config"]
- , l: ["--length"]
- , f: ["--file"]
- }
- , parsed = nopt( types
- , shorthands
- , process.argv
- , 2 )
-
-console.log("parsed", parsed)
-
-if (parsed.help) {
- console.log("")
- console.log("nopt cli tester")
- console.log("")
- console.log("types")
- console.log(Object.keys(types).map(function M (t) {
- var type = types[t]
- if (Array.isArray(type)) {
- return [t, type.map(function (type) { return type.name })]
- }
- return [t, type && type.name]
- }).reduce(function (s, i) {
- s[i[0]] = i[1]
- return s
- }, {}))
- console.log("")
- console.log("shorthands")
- console.log(shorthands)
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/examples/my-program.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/examples/my-program.js
deleted file mode 100755
index 142447e18e..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/examples/my-program.js
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env node
-
-//process.env.DEBUG_NOPT = 1
-
-// my-program.js
-var nopt = require("../lib/nopt")
- , Stream = require("stream").Stream
- , path = require("path")
- , knownOpts = { "foo" : [String, null]
- , "bar" : [Stream, Number]
- , "baz" : path
- , "bloo" : [ "big", "medium", "small" ]
- , "flag" : Boolean
- , "pick" : Boolean
- }
- , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
- , "b7" : ["--bar", "7"]
- , "m" : ["--bloo", "medium"]
- , "p" : ["--pick"]
- , "f" : ["--flag", "true"]
- , "g" : ["--flag"]
- , "s" : "--flag"
- }
- // everything is optional.
- // knownOpts and shorthands default to {}
- // arg list defaults to process.argv
- // slice defaults to 2
- , parsed = nopt(knownOpts, shortHands, process.argv, 2)
-
-console.log("parsed =\n"+ require("util").inspect(parsed))
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/lib/nopt.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/lib/nopt.js
deleted file mode 100644
index 97707e7842..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/lib/nopt.js
+++ /dev/null
@@ -1,415 +0,0 @@
-// info about each config option.
-
-var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
- ? function () { console.error.apply(console, arguments) }
- : function () {}
-
-var url = require("url")
- , path = require("path")
- , Stream = require("stream").Stream
- , abbrev = require("abbrev")
-
-module.exports = exports = nopt
-exports.clean = clean
-
-exports.typeDefs =
- { String : { type: String, validate: validateString }
- , Boolean : { type: Boolean, validate: validateBoolean }
- , url : { type: url, validate: validateUrl }
- , Number : { type: Number, validate: validateNumber }
- , path : { type: path, validate: validatePath }
- , Stream : { type: Stream, validate: validateStream }
- , Date : { type: Date, validate: validateDate }
- }
-
-function nopt (types, shorthands, args, slice) {
- args = args || process.argv
- types = types || {}
- shorthands = shorthands || {}
- if (typeof slice !== "number") slice = 2
-
- debug(types, shorthands, args, slice)
-
- args = args.slice(slice)
- var data = {}
- , key
- , remain = []
- , cooked = args
- , original = args.slice(0)
-
- parse(args, data, remain, types, shorthands)
- // now data is full
- clean(data, types, exports.typeDefs)
- data.argv = {remain:remain,cooked:cooked,original:original}
- Object.defineProperty(data.argv, 'toString', { value: function () {
- return this.original.map(JSON.stringify).join(" ")
- }, enumerable: false })
- return data
-}
-
-function clean (data, types, typeDefs) {
- typeDefs = typeDefs || exports.typeDefs
- var remove = {}
- , typeDefault = [false, true, null, String, Array]
-
- Object.keys(data).forEach(function (k) {
- if (k === "argv") return
- var val = data[k]
- , isArray = Array.isArray(val)
- , type = types[k]
- if (!isArray) val = [val]
- if (!type) type = typeDefault
- if (type === Array) type = typeDefault.concat(Array)
- if (!Array.isArray(type)) type = [type]
-
- debug("val=%j", val)
- debug("types=", type)
- val = val.map(function (val) {
- // if it's an unknown value, then parse false/true/null/numbers/dates
- if (typeof val === "string") {
- debug("string %j", val)
- val = val.trim()
- if ((val === "null" && ~type.indexOf(null))
- || (val === "true" &&
- (~type.indexOf(true) || ~type.indexOf(Boolean)))
- || (val === "false" &&
- (~type.indexOf(false) || ~type.indexOf(Boolean)))) {
- val = JSON.parse(val)
- debug("jsonable %j", val)
- } else if (~type.indexOf(Number) && !isNaN(val)) {
- debug("convert to number", val)
- val = +val
- } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
- debug("convert to date", val)
- val = new Date(val)
- }
- }
-
- if (!types.hasOwnProperty(k)) {
- return val
- }
-
- // allow `--no-blah` to set 'blah' to null if null is allowed
- if (val === false && ~type.indexOf(null) &&
- !(~type.indexOf(false) || ~type.indexOf(Boolean))) {
- val = null
- }
-
- var d = {}
- d[k] = val
- debug("prevalidated val", d, val, types[k])
- if (!validate(d, k, val, types[k], typeDefs)) {
- if (exports.invalidHandler) {
- exports.invalidHandler(k, val, types[k], data)
- } else if (exports.invalidHandler !== false) {
- debug("invalid: "+k+"="+val, types[k])
- }
- return remove
- }
- debug("validated val", d, val, types[k])
- return d[k]
- }).filter(function (val) { return val !== remove })
-
- if (!val.length) delete data[k]
- else if (isArray) {
- debug(isArray, data[k], val)
- data[k] = val
- } else data[k] = val[0]
-
- debug("k=%s val=%j", k, val, data[k])
- })
-}
-
-function validateString (data, k, val) {
- data[k] = String(val)
-}
-
-function validatePath (data, k, val) {
- if (val === true) return false
- if (val === null) return true
-
- val = String(val)
- var homePattern = process.platform === 'win32' ? /^~(\/|\\)/ : /^~\//
- if (val.match(homePattern) && process.env.HOME) {
- val = path.resolve(process.env.HOME, val.substr(2))
- }
- data[k] = path.resolve(String(val))
- return true
-}
-
-function validateNumber (data, k, val) {
- debug("validate Number %j %j %j", k, val, isNaN(val))
- if (isNaN(val)) return false
- data[k] = +val
-}
-
-function validateDate (data, k, val) {
- debug("validate Date %j %j %j", k, val, Date.parse(val))
- var s = Date.parse(val)
- if (isNaN(s)) return false
- data[k] = new Date(val)
-}
-
-function validateBoolean (data, k, val) {
- if (val instanceof Boolean) val = val.valueOf()
- else if (typeof val === "string") {
- if (!isNaN(val)) val = !!(+val)
- else if (val === "null" || val === "false") val = false
- else val = true
- } else val = !!val
- data[k] = val
-}
-
-function validateUrl (data, k, val) {
- val = url.parse(String(val))
- if (!val.host) return false
- data[k] = val.href
-}
-
-function validateStream (data, k, val) {
- if (!(val instanceof Stream)) return false
- data[k] = val
-}
-
-function validate (data, k, val, type, typeDefs) {
- // arrays are lists of types.
- if (Array.isArray(type)) {
- for (var i = 0, l = type.length; i < l; i ++) {
- if (type[i] === Array) continue
- if (validate(data, k, val, type[i], typeDefs)) return true
- }
- delete data[k]
- return false
- }
-
- // an array of anything?
- if (type === Array) return true
-
- // NaN is poisonous. Means that something is not allowed.
- if (type !== type) {
- debug("Poison NaN", k, val, type)
- delete data[k]
- return false
- }
-
- // explicit list of values
- if (val === type) {
- debug("Explicitly allowed %j", val)
- // if (isArray) (data[k] = data[k] || []).push(val)
- // else data[k] = val
- data[k] = val
- return true
- }
-
- // now go through the list of typeDefs, validate against each one.
- var ok = false
- , types = Object.keys(typeDefs)
- for (var i = 0, l = types.length; i < l; i ++) {
- debug("test type %j %j %j", k, val, types[i])
- var t = typeDefs[types[i]]
- if (t &&
- ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) {
- var d = {}
- ok = false !== t.validate(d, k, val)
- val = d[k]
- if (ok) {
- // if (isArray) (data[k] = data[k] || []).push(val)
- // else data[k] = val
- data[k] = val
- break
- }
- }
- }
- debug("OK? %j (%j %j %j)", ok, k, val, types[i])
-
- if (!ok) delete data[k]
- return ok
-}
-
-function parse (args, data, remain, types, shorthands) {
- debug("parse", args, data, remain)
-
- var key = null
- , abbrevs = abbrev(Object.keys(types))
- , shortAbbr = abbrev(Object.keys(shorthands))
-
- for (var i = 0; i < args.length; i ++) {
- var arg = args[i]
- debug("arg", arg)
-
- if (arg.match(/^-{2,}$/)) {
- // done with keys.
- // the rest are args.
- remain.push.apply(remain, args.slice(i + 1))
- args[i] = "--"
- break
- }
- var hadEq = false
- if (arg.charAt(0) === "-" && arg.length > 1) {
- if (arg.indexOf("=") !== -1) {
- hadEq = true
- var v = arg.split("=")
- arg = v.shift()
- v = v.join("=")
- args.splice.apply(args, [i, 1].concat([arg, v]))
- }
-
- // see if it's a shorthand
- // if so, splice and back up to re-parse it.
- var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
- debug("arg=%j shRes=%j", arg, shRes)
- if (shRes) {
- debug(arg, shRes)
- args.splice.apply(args, [i, 1].concat(shRes))
- if (arg !== shRes[0]) {
- i --
- continue
- }
- }
- arg = arg.replace(/^-+/, "")
- var no = null
- while (arg.toLowerCase().indexOf("no-") === 0) {
- no = !no
- arg = arg.substr(3)
- }
-
- if (abbrevs[arg]) arg = abbrevs[arg]
-
- var isArray = types[arg] === Array ||
- Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1
-
- // allow unknown things to be arrays if specified multiple times.
- if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) {
- if (!Array.isArray(data[arg]))
- data[arg] = [data[arg]]
- isArray = true
- }
-
- var val
- , la = args[i + 1]
-
- var isBool = typeof no === 'boolean' ||
- types[arg] === Boolean ||
- Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 ||
- (typeof types[arg] === 'undefined' && !hadEq) ||
- (la === "false" &&
- (types[arg] === null ||
- Array.isArray(types[arg]) && ~types[arg].indexOf(null)))
-
- if (isBool) {
- // just set and move along
- val = !no
- // however, also support --bool true or --bool false
- if (la === "true" || la === "false") {
- val = JSON.parse(la)
- la = null
- if (no) val = !val
- i ++
- }
-
- // also support "foo":[Boolean, "bar"] and "--foo bar"
- if (Array.isArray(types[arg]) && la) {
- if (~types[arg].indexOf(la)) {
- // an explicit type
- val = la
- i ++
- } else if ( la === "null" && ~types[arg].indexOf(null) ) {
- // null allowed
- val = null
- i ++
- } else if ( !la.match(/^-{2,}[^-]/) &&
- !isNaN(la) &&
- ~types[arg].indexOf(Number) ) {
- // number
- val = +la
- i ++
- } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) {
- // string
- val = la
- i ++
- }
- }
-
- if (isArray) (data[arg] = data[arg] || []).push(val)
- else data[arg] = val
-
- continue
- }
-
- if (types[arg] === String && la === undefined)
- la = ""
-
- if (la && la.match(/^-{2,}$/)) {
- la = undefined
- i --
- }
-
- val = la === undefined ? true : la
- if (isArray) (data[arg] = data[arg] || []).push(val)
- else data[arg] = val
-
- i ++
- continue
- }
- remain.push(arg)
- }
-}
-
-function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
- // handle single-char shorthands glommed together, like
- // npm ls -glp, but only if there is one dash, and only if
- // all of the chars are single-char shorthands, and it's
- // not a match to some other abbrev.
- arg = arg.replace(/^-+/, '')
-
- // if it's an exact known option, then don't go any further
- if (abbrevs[arg] === arg)
- return null
-
- // if it's an exact known shortopt, same deal
- if (shorthands[arg]) {
- // make it an array, if it's a list of words
- if (shorthands[arg] && !Array.isArray(shorthands[arg]))
- shorthands[arg] = shorthands[arg].split(/\s+/)
-
- return shorthands[arg]
- }
-
- // first check to see if this arg is a set of single-char shorthands
- var singles = shorthands.___singles
- if (!singles) {
- singles = Object.keys(shorthands).filter(function (s) {
- return s.length === 1
- }).reduce(function (l,r) {
- l[r] = true
- return l
- }, {})
- shorthands.___singles = singles
- debug('shorthand singles', singles)
- }
-
- var chrs = arg.split("").filter(function (c) {
- return singles[c]
- })
-
- if (chrs.join("") === arg) return chrs.map(function (c) {
- return shorthands[c]
- }).reduce(function (l, r) {
- return l.concat(r)
- }, [])
-
-
- // if it's an arg abbrev, and not a literal shorthand, then prefer the arg
- if (abbrevs[arg] && !shorthands[arg])
- return null
-
- // if it's an abbr for a shorthand, then use that
- if (shortAbbr[arg])
- arg = shortAbbr[arg]
-
- // make it an array, if it's a list of words
- if (shorthands[arg] && !Array.isArray(shorthands[arg]))
- shorthands[arg] = shorthands[arg].split(/\s+/)
-
- return shorthands[arg]
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/package.json
deleted file mode 100644
index 62d2bcafb3..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "_from": "nopt@2 || 3",
- "_id": "nopt@3.0.6",
- "_inBundle": false,
- "_integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=",
- "_location": "/npm-lifecycle/node-gyp/nopt",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "nopt@2 || 3",
- "name": "nopt",
- "escapedName": "nopt",
- "rawSpec": "2 || 3",
- "saveSpec": null,
- "fetchSpec": "2 || 3"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp"
- ],
- "_resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz",
- "_shasum": "c6465dbf08abcd4db359317f79ac68a646b28ff9",
- "_shrinkwrap": null,
- "_spec": "nopt@2 || 3",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bin": {
- "nopt": "./bin/nopt.js"
- },
- "bugs": {
- "url": "https://github.com/npm/nopt/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "abbrev": "1"
- },
- "deprecated": false,
- "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
- "devDependencies": {
- "tap": "^1.2.0"
- },
- "homepage": "https://github.com/npm/nopt#readme",
- "license": "ISC",
- "main": "lib/nopt.js",
- "name": "nopt",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/nopt.git"
- },
- "scripts": {
- "test": "tap test/*.js"
- },
- "version": "3.0.6"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/test/basic.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/test/basic.js
deleted file mode 100644
index d399de9209..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/nopt/test/basic.js
+++ /dev/null
@@ -1,273 +0,0 @@
-var nopt = require("../")
- , test = require('tap').test
-
-
-test("passing a string results in a string", function (t) {
- var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0)
- t.same(parsed.key, "myvalue")
- t.end()
-})
-
-// https://github.com/npm/nopt/issues/31
-test("Empty String results in empty string, not true", function (t) {
- var parsed = nopt({ empty: String }, {}, ["--empty"], 0)
- t.same(parsed.empty, "")
- t.end()
-})
-
-test("~ path is resolved to $HOME", function (t) {
- var path = require("path")
- if (!process.env.HOME) process.env.HOME = "/tmp"
- var parsed = nopt({key: path}, {}, ["--key=~/val"], 0)
- t.same(parsed.key, path.resolve(process.env.HOME, "val"))
- t.end()
-})
-
-// https://github.com/npm/nopt/issues/24
-test("Unknown options are not parsed as numbers", function (t) {
- var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0)
- t.equal(parsed['leave-as-is'], '1.20')
- t.equal(parsed['parse-me'], 1.2)
- t.end()
-});
-
-// https://github.com/npm/nopt/issues/48
-test("Check types based on name of type", function (t) {
- var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0)
- t.equal(parsed['parse-me'], 1.2)
- t.end()
-})
-
-
-test("Missing types are not parsed", function (t) {
- var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0)
- //should only contain argv
- t.equal(Object.keys(parsed).length, 1)
- t.end()
-})
-
-test("Types passed without a name are not parsed", function (t) {
- var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0)
- //should only contain argv
- t.equal(Object.keys(parsed).length, 1)
- t.end()
-})
-
-test("other tests", function (t) {
-
- var util = require("util")
- , Stream = require("stream")
- , path = require("path")
- , url = require("url")
-
- , shorthands =
- { s : ["--loglevel", "silent"]
- , d : ["--loglevel", "info"]
- , dd : ["--loglevel", "verbose"]
- , ddd : ["--loglevel", "silly"]
- , noreg : ["--no-registry"]
- , reg : ["--registry"]
- , "no-reg" : ["--no-registry"]
- , silent : ["--loglevel", "silent"]
- , verbose : ["--loglevel", "verbose"]
- , h : ["--usage"]
- , H : ["--usage"]
- , "?" : ["--usage"]
- , help : ["--usage"]
- , v : ["--version"]
- , f : ["--force"]
- , desc : ["--description"]
- , "no-desc" : ["--no-description"]
- , "local" : ["--no-global"]
- , l : ["--long"]
- , p : ["--parseable"]
- , porcelain : ["--parseable"]
- , g : ["--global"]
- }
-
- , types =
- { aoa: Array
- , nullstream: [null, Stream]
- , date: Date
- , str: String
- , browser : String
- , cache : path
- , color : ["always", Boolean]
- , depth : Number
- , description : Boolean
- , dev : Boolean
- , editor : path
- , force : Boolean
- , global : Boolean
- , globalconfig : path
- , group : [String, Number]
- , gzipbin : String
- , logfd : [Number, Stream]
- , loglevel : ["silent","win","error","warn","info","verbose","silly"]
- , long : Boolean
- , "node-version" : [false, String]
- , npaturl : url
- , npat : Boolean
- , "onload-script" : [false, String]
- , outfd : [Number, Stream]
- , parseable : Boolean
- , pre: Boolean
- , prefix: path
- , proxy : url
- , "rebuild-bundle" : Boolean
- , registry : url
- , searchopts : String
- , searchexclude: [null, String]
- , shell : path
- , t: [Array, String]
- , tag : String
- , tar : String
- , tmp : path
- , "unsafe-perm" : Boolean
- , usage : Boolean
- , user : String
- , username : String
- , userconfig : path
- , version : Boolean
- , viewer: path
- , _exit : Boolean
- , path: path
- }
-
- ; [["-v", {version:true}, []]
- ,["---v", {version:true}, []]
- ,["ls -s --no-reg connect -d",
- {loglevel:"info",registry:null},["ls","connect"]]
- ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]]
- ,["ls --registry blargle", {}, ["ls"]]
- ,["--no-registry", {registry:null}, []]
- ,["--no-color true", {color:false}, []]
- ,["--no-color false", {color:true}, []]
- ,["--no-color", {color:false}, []]
- ,["--color false", {color:false}, []]
- ,["--color --logfd 7", {logfd:7,color:true}, []]
- ,["--color=true", {color:true}, []]
- ,["--logfd=10", {logfd:10}, []]
- ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]]
- ,["--tmp=tmp -tar=gtar",
- {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]]
- ,["--logfd x", {}, []]
- ,["a -true -- -no-false", {true:true},["a","-no-false"]]
- ,["a -no-false", {false:false},["a"]]
- ,["a -no-no-true", {true:true}, ["a"]]
- ,["a -no-no-no-false", {false:false}, ["a"]]
- ,["---NO-no-No-no-no-no-nO-no-no"+
- "-No-no-no-no-no-no-no-no-no"+
- "-no-no-no-no-NO-NO-no-no-no-no-no-no"+
- "-no-body-can-do-the-boogaloo-like-I-do"
- ,{"body-can-do-the-boogaloo-like-I-do":false}, []]
- ,["we are -no-strangers-to-love "+
- "--you-know=the-rules --and=so-do-i "+
- "---im-thinking-of=a-full-commitment "+
- "--no-you-would-get-this-from-any-other-guy "+
- "--no-gonna-give-you-up "+
- "-no-gonna-let-you-down=true "+
- "--no-no-gonna-run-around false "+
- "--desert-you=false "+
- "--make-you-cry false "+
- "--no-tell-a-lie "+
- "--no-no-and-hurt-you false"
- ,{"strangers-to-love":false
- ,"you-know":"the-rules"
- ,"and":"so-do-i"
- ,"you-would-get-this-from-any-other-guy":false
- ,"gonna-give-you-up":false
- ,"gonna-let-you-down":false
- ,"gonna-run-around":false
- ,"desert-you":false
- ,"make-you-cry":false
- ,"tell-a-lie":false
- ,"and-hurt-you":false
- },["we", "are"]]
- ,["-t one -t two -t three"
- ,{t: ["one", "two", "three"]}
- ,[]]
- ,["-t one -t null -t three four five null"
- ,{t: ["one", "null", "three"]}
- ,["four", "five", "null"]]
- ,["-t foo"
- ,{t:["foo"]}
- ,[]]
- ,["--no-t"
- ,{t:["false"]}
- ,[]]
- ,["-no-no-t"
- ,{t:["true"]}
- ,[]]
- ,["-aoa one -aoa null -aoa 100"
- ,{aoa:["one", null, '100']}
- ,[]]
- ,["-str 100"
- ,{str:"100"}
- ,[]]
- ,["--color always"
- ,{color:"always"}
- ,[]]
- ,["--no-nullstream"
- ,{nullstream:null}
- ,[]]
- ,["--nullstream false"
- ,{nullstream:null}
- ,[]]
- ,["--notadate=2011-01-25"
- ,{notadate: "2011-01-25"}
- ,[]]
- ,["--date 2011-01-25"
- ,{date: new Date("2011-01-25")}
- ,[]]
- ,["-cl 1"
- ,{config: true, length: 1}
- ,[]
- ,{config: Boolean, length: Number, clear: Boolean}
- ,{c: "--config", l: "--length"}]
- ,["--acount bla"
- ,{"acount":true}
- ,["bla"]
- ,{account: Boolean, credentials: Boolean, options: String}
- ,{a:"--account", c:"--credentials",o:"--options"}]
- ,["--clear"
- ,{clear:true}
- ,[]
- ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean}
- ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}]
- ,["--file -"
- ,{"file":"-"}
- ,[]
- ,{file:String}
- ,{}]
- ,["--file -"
- ,{"file":true}
- ,["-"]
- ,{file:Boolean}
- ,{}]
- ,["--path"
- ,{"path":null}
- ,[]]
- ,["--path ."
- ,{"path":process.cwd()}
- ,[]]
- ].forEach(function (test) {
- var argv = test[0].split(/\s+/)
- , opts = test[1]
- , rem = test[2]
- , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0)
- , parsed = actual.argv
- delete actual.argv
- for (var i in opts) {
- var e = JSON.stringify(opts[i])
- , a = JSON.stringify(actual[i] === undefined ? null : actual[i])
- if (e && typeof e === "object") {
- t.deepEqual(e, a)
- } else {
- t.equal(e, a)
- }
- }
- t.deepEqual(rem, parsed.remain)
- })
- t.end()
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/README.md
deleted file mode 100644
index cbd956549d..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/README.md
+++ /dev/null
@@ -1,350 +0,0 @@
-semver(1) -- The semantic versioner for npm
-===========================================
-
-## Usage
-
- $ npm install semver
- $ node
- var semver = require('semver')
-
- semver.valid('1.2.3') // '1.2.3'
- semver.valid('a.b.c') // null
- semver.clean(' =v1.2.3 ') // '1.2.3'
- semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
- semver.gt('1.2.3', '9.8.7') // false
- semver.lt('1.2.3', '9.8.7') // true
-
-As a command-line utility:
-
- $ semver -h
-
- SemVer 5.1.0
-
- A JavaScript implementation of the http://semver.org/ specification
- Copyright Isaac Z. Schlueter
-
- Usage: semver [options] <version> [<version> [...]]
- Prints valid versions sorted by SemVer precedence
-
- Options:
- -r --range <range>
- Print versions that match the specified range.
-
- -i --increment [<level>]
- Increment a version by the specified level. Level can
- be one of: major, minor, patch, premajor, preminor,
- prepatch, or prerelease. Default level is 'patch'.
- Only one version may be specified.
-
- --preid <identifier>
- Identifier to be used to prefix premajor, preminor,
- prepatch or prerelease version increments.
-
- -l --loose
- Interpret versions and ranges loosely
-
- Program exits successfully if any valid version satisfies
- all supplied ranges, and prints all satisfying versions.
-
- If no satisfying versions are found, then exits failure.
-
- Versions are printed in ascending order, so supplying
- multiple versions to the utility will just sort them.
-
-## Versions
-
-A "version" is described by the `v2.0.0` specification found at
-<http://semver.org/>.
-
-A leading `"="` or `"v"` character is stripped off and ignored.
-
-## Ranges
-
-A `version range` is a set of `comparators` which specify versions
-that satisfy the range.
-
-A `comparator` is composed of an `operator` and a `version`. The set
-of primitive `operators` is:
-
-* `<` Less than
-* `<=` Less than or equal to
-* `>` Greater than
-* `>=` Greater than or equal to
-* `=` Equal. If no operator is specified, then equality is assumed,
- so this operator is optional, but MAY be included.
-
-For example, the comparator `>=1.2.7` would match the versions
-`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
-or `1.1.0`.
-
-Comparators can be joined by whitespace to form a `comparator set`,
-which is satisfied by the **intersection** of all of the comparators
-it includes.
-
-A range is composed of one or more comparator sets, joined by `||`. A
-version matches a range if and only if every comparator in at least
-one of the `||`-separated comparator sets is satisfied by the version.
-
-For example, the range `>=1.2.7 <1.3.0` would match the versions
-`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
-or `1.1.0`.
-
-The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
-`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
-
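-The examples above, checked in code (expected results shown as comments):
-
-```javascript
-var semver = require("semver")
-semver.satisfies("1.2.7", "1.2.7 || >=1.2.9 <2.0.0") // true
-semver.satisfies("1.2.8", "1.2.7 || >=1.2.9 <2.0.0") // false
-semver.satisfies("2.0.0", "1.2.7 || >=1.2.9 <2.0.0") // false
-```
-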
-### Prerelease Tags
-
-If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
-it will only be allowed to satisfy comparator sets if at least one
-comparator with the same `[major, minor, patch]` tuple also has a
-prerelease tag.
-
-For example, the range `>1.2.3-alpha.3` would be allowed to match the
-version `1.2.3-alpha.7`, but it would *not* be satisfied by
-`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
-than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
-range only accepts prerelease tags on the `1.2.3` version. The
-version `3.4.5` *would* satisfy the range, because it does not have a
-prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
-
-The purpose for this behavior is twofold. First, prerelease versions
-frequently are updated very quickly, and contain many breaking changes
-that are (by the author's design) not yet fit for public consumption.
-Therefore, by default, they are excluded from range matching
-semantics.
-
-Second, a user who has opted into using a prerelease version has
-clearly indicated the intent to use *that specific* set of
-alpha/beta/rc versions. By including a prerelease tag in the range,
-the user is indicating that they are aware of the risk. However, it
-is still not appropriate to assume that they have opted into taking a
-similar risk on the *next* set of prerelease versions.
-
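-The alpha example above, as code (expected results shown as comments):
-
-```javascript
-var semver = require("semver")
-semver.satisfies("1.2.3-alpha.7", ">1.2.3-alpha.3") // true
-semver.satisfies("3.4.5-alpha.9", ">1.2.3-alpha.3") // false
-semver.satisfies("3.4.5", ">1.2.3-alpha.3")         // true
-```
-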
-#### Prerelease Identifiers
-
-The method `.inc` takes an additional `identifier` string argument that
-will append the value of the string as a prerelease identifier:
-
-```javascript
-> semver.inc('1.2.3', 'prerelease', 'beta')
-'1.2.4-beta.0'
-```
-
-command-line example:
-
-```shell
-$ semver 1.2.3 -i prerelease --preid beta
-1.2.4-beta.0
-```
-
-Which then can be used to increment further:
-
-```shell
-$ semver 1.2.4-beta.0 -i prerelease
-1.2.4-beta.1
-```
-
-### Advanced Range Syntax
-
-Advanced range syntax desugars to primitive comparators in
-deterministic ways.
-
-Advanced ranges may be combined in the same way as primitive
-comparators using white space or `||`.
-
-#### Hyphen Ranges `X.Y.Z - A.B.C`
-
-Specifies an inclusive set.
-
-* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
-
-If a partial version is provided as the first version in the inclusive
-range, then the missing pieces are replaced with zeroes.
-
-* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
-
-If a partial version is provided as the second version in the
-inclusive range, then all versions that start with the supplied parts
-of the tuple are accepted, but nothing that would be greater than the
-provided tuple parts.
-
-* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
-* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
-
-#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
-
-Any of `X`, `x`, or `*` may be used to "stand in" for one of the
-numeric values in the `[major, minor, patch]` tuple.
-
-* `*` := `>=0.0.0` (Any version satisfies)
-* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
-* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
-
-A partial version range is treated as an X-Range, so the special
-character is in fact optional.
-
-* `""` (empty string) := `*` := `>=0.0.0`
-* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
-* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
-
-#### Tilde Ranges `~1.2.3` `~1.2` `~1`
-
-Allows patch-level changes if a minor version is specified on the
-comparator. Allows minor-level changes if not.
-
-* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
-* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
-* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
-* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
-* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
-* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
-* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-
-#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
-
-Allows changes that do not modify the left-most non-zero digit in the
-`[major, minor, patch]` tuple. In other words, this allows patch and
-minor updates for versions `1.0.0` and above, patch updates for
-versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
-
-Many authors treat a `0.x` version as if the `x` were the major
-"breaking-change" indicator.
-
-Caret ranges are ideal when an author may make breaking changes
-between `0.2.4` and `0.3.0` releases, which is a common practice.
-However, it presumes that there will *not* be breaking changes between
-`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
-additive (but non-breaking), according to commonly observed practices.
-
-* `^1.2.3` := `>=1.2.3 <2.0.0`
-* `^0.2.3` := `>=0.2.3 <0.3.0`
-* `^0.0.3` := `>=0.0.3 <0.0.4`
-* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
- the `1.2.3` version will be allowed, if they are greater than or
- equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
- `1.2.4-beta.2` would not, because it is a prerelease of a
- different `[major, minor, patch]` tuple.
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
- `0.0.3` version *only* will be allowed, if they are greater than or
- equal to `beta`. So, `0.0.3-pr.2` would be allowed.
-
-When parsing caret ranges, a missing `patch` value desugars to the
-number `0`, but will allow flexibility within that value, even if the
-major and minor versions are both `0`.
-
-* `^1.2.x` := `>=1.2.0 <2.0.0`
-* `^0.0.x` := `>=0.0.0 <0.1.0`
-* `^0.0` := `>=0.0.0 <0.1.0`
-
-Missing `minor` and `patch` values will desugar to zero, but also
-allow flexibility within those values, even if the major version is
-zero.
-
-* `^1.x` := `>=1.0.0 <2.0.0`
-* `^0.x` := `>=0.0.0 <1.0.0`
-
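-One way to see these desugarings is `validRange` (documented below),
-which returns the expanded comparator set; a sketch, with expected
-output as comments:
-
-```javascript
-var semver = require("semver")
-semver.validRange("1.2.3 - 2.3") // '>=1.2.3 <2.4.0'
-semver.validRange("~1.2.3")      // '>=1.2.3 <1.3.0'
-semver.validRange("^0.2.3")      // '>=0.2.3 <0.3.0'
-```
-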
-### Range Grammar
-
-Putting all this together, here is a Backus-Naur grammar for ranges,
-for the benefit of parser authors:
-
-```bnf
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
-```
-
-## Functions
-
-All methods and classes take a final `loose` boolean argument that, if
-true, will be more forgiving about not-quite-valid semver strings.
-The resulting output will always be 100% strict, of course.
-
-Strict-mode Comparators and Ranges will be strict about the SemVer
-strings that they parse.
-
-* `valid(v)`: Return the parsed version, or null if it's not valid.
-* `inc(v, release)`: Return the version incremented by the release
- type (`major`, `premajor`, `minor`, `preminor`, `patch`,
- `prepatch`, or `prerelease`), or null if it's not valid
- * `premajor` in one call will bump the version up to the next major
- version and down to a prerelease of that major version.
- `preminor`, and `prepatch` work the same way.
- * If called from a non-prerelease version, the `prerelease` will work the
- same as `prepatch`. It increments the patch version, then makes a
- prerelease. If the input version is already a prerelease it simply
- increments it.
-* `prerelease(v)`: Returns an array of prerelease components, or null
- if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
-* `major(v)`: Return the major version number.
-* `minor(v)`: Return the minor version number.
-* `patch(v)`: Return the patch version number.
-
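-A few of these in action (a sketch; expected results shown as comments):
-
-```javascript
-var semver = require("semver")
-semver.inc("1.2.3", "prerelease")  // '1.2.4-0'
-semver.prerelease("1.2.3-alpha.1") // ['alpha', 1]
-semver.major("1.2.3")              // 1
-```
-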
-### Comparison
-
-* `gt(v1, v2)`: `v1 > v2`
-* `gte(v1, v2)`: `v1 >= v2`
-* `lt(v1, v2)`: `v1 < v2`
-* `lte(v1, v2)`: `v1 <= v2`
-* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
- even if they're not the exact same string. You already know how to
- compare strings.
-* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
-* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
- the corresponding function above. `"==="` and `"!=="` do simple
- string comparison, but are included for completeness. Throws if an
- invalid comparison string is provided.
-* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
- `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
-* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
- in descending order when passed to `Array.sort()`.
-* `diff(v1, v2)`: Returns difference between two versions by the release type
- (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
- or null if the versions are the same.
-
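-For example, a short sketch of `compare` and `diff`:
-
-```javascript
-var semver = require("semver")
-var sorted = ["1.2.3", "0.1.0", "2.0.0"].sort(semver.compare)
-// sorted => ["0.1.0", "1.2.3", "2.0.0"]
-semver.diff("1.2.3", "1.3.0") // 'minor'
-```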
-
-### Ranges
-
-* `validRange(range)`: Return the valid range or null if it's not valid
-* `satisfies(version, range)`: Return true if the version satisfies the
- range.
-* `maxSatisfying(versions, range)`: Return the highest version in the list
- that satisfies the range, or `null` if none of them do.
-* `minSatisfying(versions, range)`: Return the lowest version in the list
- that satisfies the range, or `null` if none of them do.
-* `gtr(version, range)`: Return `true` if version is greater than all the
- versions possible in the range.
-* `ltr(version, range)`: Return `true` if version is less than all the
- versions possible in the range.
-* `outside(version, range, hilo)`: Return true if the version is outside
- the bounds of the range in either the high or low direction. The
- `hilo` argument must be either the string `'>'` or `'<'`. (This is
- the function called by `gtr` and `ltr`.)
-
-Note that, since ranges may be non-contiguous, a version might not be
-greater than a range, less than a range, *or* satisfy a range! For
-example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
-until `2.0.0`, so the version `1.2.10` would not be greater than the
-range (because `2.0.1` satisfies, which is higher), nor less than the
-range (since `1.2.8` satisfies, which is lower), and it also does not
-satisfy the range.
-
-If you want to know if a version satisfies or does not satisfy a
-range, use the `satisfies(version, range)` function.
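-
-The hole example above, as a sketch:
-
-```javascript
-var semver = require("semver")
-var range = "1.2 <1.2.9 || >2.0.0"
-semver.satisfies("1.2.10", range) // false (falls in the hole)
-semver.gtr("1.2.10", range)       // false: 2.0.1 in the range is higher
-semver.ltr("1.2.10", range)       // false: 1.2.8 in the range is lower
-```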
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/bin/semver b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/bin/semver
deleted file mode 100755
index c5f2e857e8..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/bin/semver
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/usr/bin/env node
-// Standalone semver comparison program.
-// Exits successfully and prints matching version(s) if
-// any supplied version is valid and passes all tests.
-
-var argv = process.argv.slice(2)
- , versions = []
- , range = []
- , gt = []
- , lt = []
- , eq = []
- , inc = null
- , version = require("../package.json").version
- , loose = false
- , identifier = undefined
- , semver = require("../semver")
- , reverse = false
-
-main()
-
-function main () {
- if (!argv.length) return help()
- while (argv.length) {
- var a = argv.shift()
- var i = a.indexOf('=')
- if (i !== -1) {
- a = a.slice(0, i)
- argv.unshift(a.slice(i + 1))
- }
- switch (a) {
- case "-rv": case "-rev": case "--rev": case "--reverse":
- reverse = true
- break
- case "-l": case "--loose":
- loose = true
- break
- case "-v": case "--version":
- versions.push(argv.shift())
- break
- case "-i": case "--inc": case "--increment":
- switch (argv[0]) {
- case "major": case "minor": case "patch": case "prerelease":
- case "premajor": case "preminor": case "prepatch":
- inc = argv.shift()
- break
- default:
- inc = "patch"
- break
- }
- break
- case "--preid":
- identifier = argv.shift()
- break
- case "-r": case "--range":
- range.push(argv.shift())
- break
- case "-h": case "--help": case "-?":
- return help()
- default:
- versions.push(a)
- break
- }
- }
-
- versions = versions.filter(function (v) {
- return semver.valid(v, loose)
- })
- if (!versions.length) return fail()
- if (inc && (versions.length !== 1 || range.length))
- return failInc()
-
- for (var i = 0, l = range.length; i < l ; i ++) {
- versions = versions.filter(function (v) {
- return semver.satisfies(v, range[i], loose)
- })
- if (!versions.length) return fail()
- }
- return success(versions)
-}
-
-function failInc () {
- console.error("--inc can only be used on a single version with no range")
- fail()
-}
-
-function fail () { process.exit(1) }
-
-function success () {
- var compare = reverse ? "rcompare" : "compare"
- versions.sort(function (a, b) {
- return semver[compare](a, b, loose)
- }).map(function (v) {
- return semver.clean(v, loose)
- }).map(function (v) {
- return inc ? semver.inc(v, inc, loose, identifier) : v
- }).forEach(function (v,i,_) { console.log(v) })
-}
-
-function help () {
- console.log(["SemVer " + version
- ,""
- ,"A JavaScript implementation of the http://semver.org/ specification"
- ,"Copyright Isaac Z. Schlueter"
- ,""
- ,"Usage: semver [options] <version> [<version> [...]]"
- ,"Prints valid versions sorted by SemVer precedence"
- ,""
- ,"Options:"
- ,"-r --range <range>"
- ," Print versions that match the specified range."
- ,""
- ,"-i --increment [<level>]"
- ," Increment a version by the specified level. Level can"
- ," be one of: major, minor, patch, premajor, preminor,"
- ," prepatch, or prerelease. Default level is 'patch'."
- ," Only one version may be specified."
- ,""
- ,"--preid <identifier>"
- ," Identifier to be used to prefix premajor, preminor,"
- ," prepatch or prerelease version increments."
- ,""
- ,"-l --loose"
- ," Interpret versions and ranges loosely"
- ,""
- ,"Program exits successfully if any valid version satisfies"
- ,"all supplied ranges, and prints all satisfying versions."
- ,""
- ,"If no satisfying versions are found, then exits failure."
- ,""
- ,"Versions are printed in ascending order, so supplying"
- ,"multiple versions to the utility will just sort them."
- ].join("\n"))
-}
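-
-// Illustrative usage (a sketch, not part of this file):
-//
-//   $ semver 1.2.3 0.9.3 -r ">=1.0.0"
-//   1.2.3
-//   $ semver 1.2.3 --increment minor
-//   1.3.0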
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/package.json
deleted file mode 100644
index 8fbdcec9c7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/package.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
- "_from": "semver@~5.3.0",
- "_id": "semver@5.3.0",
- "_inBundle": false,
- "_integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=",
- "_location": "/npm-lifecycle/node-gyp/semver",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "semver@~5.3.0",
- "name": "semver",
- "escapedName": "semver",
- "rawSpec": "~5.3.0",
- "saveSpec": null,
- "fetchSpec": "~5.3.0"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp"
- ],
- "_resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz",
- "_shasum": "9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f",
- "_spec": "semver@~5.3.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp",
- "bin": {
- "semver": "./bin/semver"
- },
- "bugs": {
- "url": "https://github.com/npm/node-semver/issues"
- },
- "bundleDependencies": false,
- "deprecated": false,
- "description": "The semantic version parser used by npm.",
- "devDependencies": {
- "tap": "^2.0.0"
- },
- "files": [
- "bin",
- "range.bnf",
- "semver.js"
- ],
- "homepage": "https://github.com/npm/node-semver#readme",
- "license": "ISC",
- "main": "semver.js",
- "name": "semver",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/npm/node-semver.git"
- },
- "scripts": {
- "test": "tap test/*.js"
- },
- "version": "5.3.0"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/range.bnf b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/range.bnf
deleted file mode 100644
index 25ebd5c832..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/range.bnf
+++ /dev/null
@@ -1,16 +0,0 @@
-range-set ::= range ( logical-or range ) *
-logical-or ::= ( ' ' ) * '||' ( ' ' ) *
-range ::= hyphen | simple ( ' ' simple ) * | ''
-hyphen ::= partial ' - ' partial
-simple ::= primitive | partial | tilde | caret
-primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial
-partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
-xr ::= 'x' | 'X' | '*' | nr
-nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
-tilde ::= '~' partial
-caret ::= '^' partial
-qualifier ::= ( '-' pre )? ( '+' build )?
-pre ::= parts
-build ::= parts
-parts ::= part ( '.' part ) *
-part ::= nr | [-0-9A-Za-z]+
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/semver.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/semver.js
deleted file mode 100644
index 5f1a3c5c9e..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/semver/semver.js
+++ /dev/null
@@ -1,1203 +0,0 @@
-exports = module.exports = SemVer;
-
-// The debug function is excluded entirely from the minified version.
-/* nomin */ var debug;
-/* nomin */ if (typeof process === 'object' &&
- /* nomin */ process.env &&
- /* nomin */ process.env.NODE_DEBUG &&
- /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG))
- /* nomin */ debug = function() {
- /* nomin */ var args = Array.prototype.slice.call(arguments, 0);
- /* nomin */ args.unshift('SEMVER');
- /* nomin */ console.log.apply(console, args);
- /* nomin */ };
-/* nomin */ else
- /* nomin */ debug = function() {};
-
-// Note: this is the version of the semver.org spec that this code
-// implements, not necessarily the package version of this module.
-exports.SEMVER_SPEC_VERSION = '2.0.0';
-
-var MAX_LENGTH = 256;
-var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
-
-// The actual regexps go on exports.re
-var re = exports.re = [];
-var src = exports.src = [];
-var R = 0;
-
-// The following Regular Expressions can be used for tokenizing,
-// validating, and parsing SemVer version strings.
-
-// ## Numeric Identifier
-// A single `0`, or a non-zero digit followed by zero or more digits.
-
-var NUMERICIDENTIFIER = R++;
-src[NUMERICIDENTIFIER] = '0|[1-9]\\d*';
-var NUMERICIDENTIFIERLOOSE = R++;
-src[NUMERICIDENTIFIERLOOSE] = '[0-9]+';
-
-
-// ## Non-numeric Identifier
-// Zero or more digits, followed by a letter or hyphen, and then zero or
-// more letters, digits, or hyphens.
-
-var NONNUMERICIDENTIFIER = R++;
-src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*';
-
-
-// ## Main Version
-// Three dot-separated numeric identifiers.
-
-var MAINVERSION = R++;
-src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')';
-
-var MAINVERSIONLOOSE = R++;
-src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')';
-
-// ## Pre-release Version Identifier
-// A numeric identifier, or a non-numeric identifier.
-
-var PRERELEASEIDENTIFIER = R++;
-src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
- '|' + src[NONNUMERICIDENTIFIER] + ')';
-
-var PRERELEASEIDENTIFIERLOOSE = R++;
-src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
- '|' + src[NONNUMERICIDENTIFIER] + ')';
-
-
-// ## Pre-release Version
-// Hyphen, followed by one or more dot-separated pre-release version
-// identifiers.
-
-var PRERELEASE = R++;
-src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
- '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))';
-
-var PRERELEASELOOSE = R++;
-src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
- '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))';
-
-// ## Build Metadata Identifier
-// Any combination of digits, letters, or hyphens.
-
-var BUILDIDENTIFIER = R++;
-src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+';
-
-// ## Build Metadata
-// Plus sign, followed by one or more period-separated build metadata
-// identifiers.
-
-var BUILD = R++;
-src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
- '(?:\\.' + src[BUILDIDENTIFIER] + ')*))';
-
-
-// ## Full Version String
-// A main version, followed optionally by a pre-release version and
-// build metadata.
-
-// Note that only the major, minor, patch, and pre-release sections of
-// the version string are capturing groups. The build metadata is not a
-// capturing group, because it should never be used in version
-// comparison.
-
-var FULL = R++;
-var FULLPLAIN = 'v?' + src[MAINVERSION] +
- src[PRERELEASE] + '?' +
- src[BUILD] + '?';
-
-src[FULL] = '^' + FULLPLAIN + '$';
-
-// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
-// common in the npm registry.
-var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
- src[PRERELEASELOOSE] + '?' +
- src[BUILD] + '?';
-
-var LOOSE = R++;
-src[LOOSE] = '^' + LOOSEPLAIN + '$';
-
-var GTLT = R++;
-src[GTLT] = '((?:<|>)?=?)';
-
-// Something like "2.*" or "1.2.x".
-// Note that "x.x" is a valid xRange identifier, meaning "any version".
-// Only the first item is strictly required.
-var XRANGEIDENTIFIERLOOSE = R++;
-src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*';
-var XRANGEIDENTIFIER = R++;
-src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*';
-
-var XRANGEPLAIN = R++;
-src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:' + src[PRERELEASE] + ')?' +
- src[BUILD] + '?' +
- ')?)?';
-
-var XRANGEPLAINLOOSE = R++;
-src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:' + src[PRERELEASELOOSE] + ')?' +
- src[BUILD] + '?' +
- ')?)?';
-
-var XRANGE = R++;
-src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$';
-var XRANGELOOSE = R++;
-src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$';
-
-// Tilde ranges.
-// Meaning is "reasonably at or greater than"
-var LONETILDE = R++;
-src[LONETILDE] = '(?:~>?)';
-
-var TILDETRIM = R++;
-src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+';
-re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g');
-var tildeTrimReplace = '$1~';
-
-var TILDE = R++;
-src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$';
-var TILDELOOSE = R++;
-src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$';
-
-// Caret ranges.
-// Meaning is "at least and backwards compatible with"
-var LONECARET = R++;
-src[LONECARET] = '(?:\\^)';
-
-var CARETTRIM = R++;
-src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+';
-re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g');
-var caretTrimReplace = '$1^';
-
-var CARET = R++;
-src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$';
-var CARETLOOSE = R++;
-src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$';
-
-// A simple gt/lt/eq thing, or just "" to indicate "any version"
-var COMPARATORLOOSE = R++;
-src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$';
-var COMPARATOR = R++;
-src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$';
-
-
-// An expression to strip any whitespace between the gtlt and the thing
-// it modifies, so that `> 1.2.3` ==> `>1.2.3`
-var COMPARATORTRIM = R++;
-src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
- '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')';
-
-// this one has to use the /g flag
-re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g');
-var comparatorTrimReplace = '$1$2$3';
-
-
-// Something like `1.2.3 - 1.2.4`
-// Note that these all use the loose form, because they'll be
-// checked against either the strict or loose comparator form
-// later.
-var HYPHENRANGE = R++;
-src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAIN] + ')' +
- '\\s*$';
-
-var HYPHENRANGELOOSE = R++;
-src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s*$';
-
-// Star ranges basically just allow anything at all.
-var STAR = R++;
-src[STAR] = '(<|>)?=?\\s*\\*';
-
-// Compile to actual regexp objects.
-// All are flag-free, unless they were created above with a flag.
-for (var i = 0; i < R; i++) {
- debug(i, src[i]);
- if (!re[i])
- re[i] = new RegExp(src[i]);
-}
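-
-// A quick illustration of the capture groups in the compiled FULL
-// regexp (a sketch, not part of the original source):
-//   '1.2.3-alpha.1+build.5'.match(re[FULL])
-//   // ['1.2.3-alpha.1+build.5', '1', '2', '3', 'alpha.1', 'build.5']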
-
-exports.parse = parse;
-function parse(version, loose) {
- if (version instanceof SemVer)
- return version;
-
- if (typeof version !== 'string')
- return null;
-
- if (version.length > MAX_LENGTH)
- return null;
-
- var r = loose ? re[LOOSE] : re[FULL];
- if (!r.test(version))
- return null;
-
- try {
- return new SemVer(version, loose);
- } catch (er) {
- return null;
- }
-}
-
-exports.valid = valid;
-function valid(version, loose) {
- var v = parse(version, loose);
- return v ? v.version : null;
-}
-
-
-exports.clean = clean;
-function clean(version, loose) {
- var s = parse(version.trim().replace(/^[=v]+/, ''), loose);
- return s ? s.version : null;
-}
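-
-// For example (a sketch):
-//   valid('1.2.3')       // '1.2.3'
-//   valid('a.b.c')       // null
-//   clean('  =v1.2.3  ') // '1.2.3'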
-
-exports.SemVer = SemVer;
-
-function SemVer(version, loose) {
- if (version instanceof SemVer) {
- if (version.loose === loose)
- return version;
- else
- version = version.version;
- } else if (typeof version !== 'string') {
- throw new TypeError('Invalid Version: ' + version);
- }
-
- if (version.length > MAX_LENGTH)
- throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
-
- if (!(this instanceof SemVer))
- return new SemVer(version, loose);
-
- debug('SemVer', version, loose);
- this.loose = loose;
- var m = version.trim().match(loose ? re[LOOSE] : re[FULL]);
-
- if (!m)
- throw new TypeError('Invalid Version: ' + version);
-
- this.raw = version;
-
- // these are actually numbers
- this.major = +m[1];
- this.minor = +m[2];
- this.patch = +m[3];
-
- if (this.major > MAX_SAFE_INTEGER || this.major < 0)
- throw new TypeError('Invalid major version')
-
- if (this.minor > MAX_SAFE_INTEGER || this.minor < 0)
- throw new TypeError('Invalid minor version')
-
- if (this.patch > MAX_SAFE_INTEGER || this.patch < 0)
- throw new TypeError('Invalid patch version')
-
- // numberify any prerelease numeric ids
- if (!m[4])
- this.prerelease = [];
- else
- this.prerelease = m[4].split('.').map(function(id) {
- if (/^[0-9]+$/.test(id)) {
- var num = +id;
- if (num >= 0 && num < MAX_SAFE_INTEGER)
- return num;
- }
- return id;
- });
-
- this.build = m[5] ? m[5].split('.') : [];
- this.format();
-}
-
-SemVer.prototype.format = function() {
- this.version = this.major + '.' + this.minor + '.' + this.patch;
- if (this.prerelease.length)
- this.version += '-' + this.prerelease.join('.');
- return this.version;
-};
-
-SemVer.prototype.toString = function() {
- return this.version;
-};
-
-SemVer.prototype.compare = function(other) {
- debug('SemVer.compare', this.version, this.loose, other);
- if (!(other instanceof SemVer))
- other = new SemVer(other, this.loose);
-
- return this.compareMain(other) || this.comparePre(other);
-};
-
-SemVer.prototype.compareMain = function(other) {
- if (!(other instanceof SemVer))
- other = new SemVer(other, this.loose);
-
- return compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch);
-};
-
-SemVer.prototype.comparePre = function(other) {
- if (!(other instanceof SemVer))
- other = new SemVer(other, this.loose);
-
- // NOT having a prerelease is > having one
- if (this.prerelease.length && !other.prerelease.length)
- return -1;
- else if (!this.prerelease.length && other.prerelease.length)
- return 1;
- else if (!this.prerelease.length && !other.prerelease.length)
- return 0;
-
- var i = 0;
- do {
- var a = this.prerelease[i];
- var b = other.prerelease[i];
- debug('prerelease compare', i, a, b);
- if (a === undefined && b === undefined)
- return 0;
- else if (b === undefined)
- return 1;
- else if (a === undefined)
- return -1;
- else if (a === b)
- continue;
- else
- return compareIdentifiers(a, b);
- } while (++i);
-};
-
-// preminor will bump the version up to the next minor release, and immediately
-// down to pre-release. premajor and prepatch work the same way.
-SemVer.prototype.inc = function(release, identifier) {
- switch (release) {
- case 'premajor':
- this.prerelease.length = 0;
- this.patch = 0;
- this.minor = 0;
- this.major++;
- this.inc('pre', identifier);
- break;
- case 'preminor':
- this.prerelease.length = 0;
- this.patch = 0;
- this.minor++;
- this.inc('pre', identifier);
- break;
- case 'prepatch':
-      // If this is already a prerelease, it will bump to the next version.
-      // Drop any prereleases that might already exist, since they are not
-      // relevant at this point.
- this.prerelease.length = 0;
- this.inc('patch', identifier);
- this.inc('pre', identifier);
- break;
- // If the input is a non-prerelease version, this acts the same as
- // prepatch.
- case 'prerelease':
- if (this.prerelease.length === 0)
- this.inc('patch', identifier);
- this.inc('pre', identifier);
- break;
-
- case 'major':
- // If this is a pre-major version, bump up to the same major version.
- // Otherwise increment major.
- // 1.0.0-5 bumps to 1.0.0
- // 1.1.0 bumps to 2.0.0
- if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0)
- this.major++;
- this.minor = 0;
- this.patch = 0;
- this.prerelease = [];
- break;
- case 'minor':
- // If this is a pre-minor version, bump up to the same minor version.
- // Otherwise increment minor.
- // 1.2.0-5 bumps to 1.2.0
- // 1.2.1 bumps to 1.3.0
- if (this.patch !== 0 || this.prerelease.length === 0)
- this.minor++;
- this.patch = 0;
- this.prerelease = [];
- break;
- case 'patch':
- // If this is not a pre-release version, it will increment the patch.
- // If it is a pre-release it will bump up to the same patch version.
- // 1.2.0-5 patches to 1.2.0
- // 1.2.0 patches to 1.2.1
- if (this.prerelease.length === 0)
- this.patch++;
- this.prerelease = [];
- break;
- // This probably shouldn't be used publicly.
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
- case 'pre':
- if (this.prerelease.length === 0)
- this.prerelease = [0];
- else {
- var i = this.prerelease.length;
- while (--i >= 0) {
- if (typeof this.prerelease[i] === 'number') {
- this.prerelease[i]++;
- i = -2;
- }
- }
- if (i === -1) // didn't increment anything
- this.prerelease.push(0);
- }
- if (identifier) {
- // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
- // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
- if (this.prerelease[0] === identifier) {
- if (isNaN(this.prerelease[1]))
- this.prerelease = [identifier, 0];
- } else
- this.prerelease = [identifier, 0];
- }
- break;
-
- default:
- throw new Error('invalid increment argument: ' + release);
- }
- this.format();
- this.raw = this.version;
- return this;
-};
-
-exports.inc = inc;
-function inc(version, release, loose, identifier) {
- if (typeof(loose) === 'string') {
- identifier = loose;
- loose = undefined;
- }
-
- try {
- return new SemVer(version, loose).inc(release, identifier).version;
- } catch (er) {
- return null;
- }
-}
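-
-// For example (a sketch):
-//   inc('1.2.3', 'premajor')           // '2.0.0-0'
-//   inc('1.2.3', 'prerelease', 'beta') // '1.2.4-beta.0'
-//   inc('1.2.3-beta.0', 'prerelease')  // '1.2.3-beta.1'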
-
-exports.diff = diff;
-function diff(version1, version2) {
- if (eq(version1, version2)) {
- return null;
- } else {
- var v1 = parse(version1);
- var v2 = parse(version2);
- if (v1.prerelease.length || v2.prerelease.length) {
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return 'pre'+key;
- }
- }
- }
- return 'prerelease';
- }
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return key;
- }
- }
- }
- }
-}
-
-exports.compareIdentifiers = compareIdentifiers;
-
-var numeric = /^[0-9]+$/;
-function compareIdentifiers(a, b) {
- var anum = numeric.test(a);
- var bnum = numeric.test(b);
-
- if (anum && bnum) {
- a = +a;
- b = +b;
- }
-
- return (anum && !bnum) ? -1 :
- (bnum && !anum) ? 1 :
- a < b ? -1 :
- a > b ? 1 :
- 0;
-}
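-
-// Numeric identifiers compare numerically and always sort below
-// alphanumeric ones (a sketch):
-//   compareIdentifiers('2', '10')     // -1
-//   compareIdentifiers('10', 'alpha') // -1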
-
-exports.rcompareIdentifiers = rcompareIdentifiers;
-function rcompareIdentifiers(a, b) {
- return compareIdentifiers(b, a);
-}
-
-exports.major = major;
-function major(a, loose) {
- return new SemVer(a, loose).major;
-}
-
-exports.minor = minor;
-function minor(a, loose) {
- return new SemVer(a, loose).minor;
-}
-
-exports.patch = patch;
-function patch(a, loose) {
- return new SemVer(a, loose).patch;
-}
-
-exports.compare = compare;
-function compare(a, b, loose) {
- return new SemVer(a, loose).compare(b);
-}
-
-exports.compareLoose = compareLoose;
-function compareLoose(a, b) {
- return compare(a, b, true);
-}
-
-exports.rcompare = rcompare;
-function rcompare(a, b, loose) {
- return compare(b, a, loose);
-}
-
-exports.sort = sort;
-function sort(list, loose) {
- return list.sort(function(a, b) {
- return exports.compare(a, b, loose);
- });
-}
-
-exports.rsort = rsort;
-function rsort(list, loose) {
- return list.sort(function(a, b) {
- return exports.rcompare(a, b, loose);
- });
-}
-
-exports.gt = gt;
-function gt(a, b, loose) {
- return compare(a, b, loose) > 0;
-}
-
-exports.lt = lt;
-function lt(a, b, loose) {
- return compare(a, b, loose) < 0;
-}
-
-exports.eq = eq;
-function eq(a, b, loose) {
- return compare(a, b, loose) === 0;
-}
-
-exports.neq = neq;
-function neq(a, b, loose) {
- return compare(a, b, loose) !== 0;
-}
-
-exports.gte = gte;
-function gte(a, b, loose) {
- return compare(a, b, loose) >= 0;
-}
-
-exports.lte = lte;
-function lte(a, b, loose) {
- return compare(a, b, loose) <= 0;
-}
-
-exports.cmp = cmp;
-function cmp(a, op, b, loose) {
- var ret;
- switch (op) {
- case '===':
- if (typeof a === 'object') a = a.version;
- if (typeof b === 'object') b = b.version;
- ret = a === b;
- break;
- case '!==':
- if (typeof a === 'object') a = a.version;
- if (typeof b === 'object') b = b.version;
- ret = a !== b;
- break;
- case '': case '=': case '==': ret = eq(a, b, loose); break;
- case '!=': ret = neq(a, b, loose); break;
- case '>': ret = gt(a, b, loose); break;
- case '>=': ret = gte(a, b, loose); break;
- case '<': ret = lt(a, b, loose); break;
- case '<=': ret = lte(a, b, loose); break;
- default: throw new TypeError('Invalid operator: ' + op);
- }
- return ret;
-}
-
-exports.Comparator = Comparator;
-function Comparator(comp, loose) {
- if (comp instanceof Comparator) {
- if (comp.loose === loose)
- return comp;
- else
- comp = comp.value;
- }
-
- if (!(this instanceof Comparator))
- return new Comparator(comp, loose);
-
- debug('comparator', comp, loose);
- this.loose = loose;
- this.parse(comp);
-
- if (this.semver === ANY)
- this.value = '';
- else
- this.value = this.operator + this.semver.version;
-
- debug('comp', this);
-}
-
-var ANY = {};
-Comparator.prototype.parse = function(comp) {
- var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
- var m = comp.match(r);
-
- if (!m)
- throw new TypeError('Invalid comparator: ' + comp);
-
- this.operator = m[1];
- if (this.operator === '=')
- this.operator = '';
-
- // if it literally is just '>' or '' then allow anything.
- if (!m[2])
- this.semver = ANY;
- else
- this.semver = new SemVer(m[2], this.loose);
-};
-
-Comparator.prototype.toString = function() {
- return this.value;
-};
-
-Comparator.prototype.test = function(version) {
- debug('Comparator.test', version, this.loose);
-
- if (this.semver === ANY)
- return true;
-
- if (typeof version === 'string')
- version = new SemVer(version, this.loose);
-
- return cmp(version, this.operator, this.semver, this.loose);
-};
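-
-// For example (a sketch):
-//   new Comparator('>=1.2.7').test('1.2.8') // true
-//   new Comparator('>=1.2.7').test('1.2.6') // false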
-
-
-exports.Range = Range;
-function Range(range, loose) {
- if ((range instanceof Range) && range.loose === loose)
- return range;
-
- if (!(this instanceof Range))
- return new Range(range, loose);
-
- this.loose = loose;
-
- // First, split based on boolean or ||
- this.raw = range;
- this.set = range.split(/\s*\|\|\s*/).map(function(range) {
- return this.parseRange(range.trim());
- }, this).filter(function(c) {
- // throw out any that are not relevant for whatever reason
- return c.length;
- });
-
- if (!this.set.length) {
- throw new TypeError('Invalid SemVer Range: ' + range);
- }
-
- this.format();
-}
-
-Range.prototype.format = function() {
- this.range = this.set.map(function(comps) {
- return comps.join(' ').trim();
- }).join('||').trim();
- return this.range;
-};
-
-Range.prototype.toString = function() {
- return this.range;
-};
-
-Range.prototype.parseRange = function(range) {
- var loose = this.loose;
- range = range.trim();
- debug('range', range, loose);
- // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
- var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE];
- range = range.replace(hr, hyphenReplace);
- debug('hyphen replace', range);
- // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
- range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace);
- debug('comparator trim', range, re[COMPARATORTRIM]);
-
- // `~ 1.2.3` => `~1.2.3`
- range = range.replace(re[TILDETRIM], tildeTrimReplace);
-
- // `^ 1.2.3` => `^1.2.3`
- range = range.replace(re[CARETTRIM], caretTrimReplace);
-
- // normalize spaces
- range = range.split(/\s+/).join(' ');
-
- // At this point, the range is completely trimmed and
- // ready to be split into comparators.
-
- var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
- var set = range.split(' ').map(function(comp) {
- return parseComparator(comp, loose);
- }).join(' ').split(/\s+/);
- if (this.loose) {
- // in loose mode, throw out any that are not valid comparators
- set = set.filter(function(comp) {
- return !!comp.match(compRe);
- });
- }
- set = set.map(function(comp) {
- return new Comparator(comp, loose);
- });
-
- return set;
-};
-
-// Mostly just for testing and legacy API reasons
-exports.toComparators = toComparators;
-function toComparators(range, loose) {
- return new Range(range, loose).set.map(function(comp) {
- return comp.map(function(c) {
- return c.value;
- }).join(' ').trim().split(' ');
- });
-}
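-
-// For example (a sketch):
-//   toComparators('>=1.2.3 <2.0.0 || 3.x')
-//   // [['>=1.2.3', '<2.0.0'], ['>=3.0.0', '<4.0.0']]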
-
-// At this point the comp is made up of xranges, tildes, stars, and gtlt's.
-// The hyphen ranges have already been replaced.
-// Turn it into a set of JUST comparators.
-function parseComparator(comp, loose) {
- debug('comp', comp);
- comp = replaceCarets(comp, loose);
- debug('caret', comp);
- comp = replaceTildes(comp, loose);
- debug('tildes', comp);
- comp = replaceXRanges(comp, loose);
- debug('xrange', comp);
- comp = replaceStars(comp, loose);
- debug('stars', comp);
- return comp;
-}
-
-function isX(id) {
- return !id || id.toLowerCase() === 'x' || id === '*';
-}
-
-// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
-function replaceTildes(comp, loose) {
- return comp.trim().split(/\s+/).map(function(comp) {
- return replaceTilde(comp, loose);
- }).join(' ');
-}
-
-function replaceTilde(comp, loose) {
- var r = loose ? re[TILDELOOSE] : re[TILDE];
- return comp.replace(r, function(_, M, m, p, pr) {
- debug('tilde', comp, _, M, m, p, pr);
- var ret;
-
- if (isX(M))
- ret = '';
- else if (isX(m))
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
- else if (isX(p))
- // ~1.2 == >=1.2.0 <1.3.0
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
- else if (pr) {
- debug('replaceTilde pr', pr);
- if (pr.charAt(0) !== '-')
- pr = '-' + pr;
- ret = '>=' + M + '.' + m + '.' + p + pr +
- ' <' + M + '.' + (+m + 1) + '.0';
- } else
- // ~1.2.3 == >=1.2.3 <1.3.0
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0';
-
- debug('tilde return', ret);
- return ret;
- });
-}
-
-// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
-function replaceCarets(comp, loose) {
- return comp.trim().split(/\s+/).map(function(comp) {
- return replaceCaret(comp, loose);
- }).join(' ');
-}
-
-function replaceCaret(comp, loose) {
- debug('caret', comp, loose);
- var r = loose ? re[CARETLOOSE] : re[CARET];
- return comp.replace(r, function(_, M, m, p, pr) {
- debug('caret', comp, _, M, m, p, pr);
- var ret;
-
- if (isX(M))
- ret = '';
- else if (isX(m))
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
- else if (isX(p)) {
- if (M === '0')
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
- else
- ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0';
- } else if (pr) {
- debug('replaceCaret pr', pr);
- if (pr.charAt(0) !== '-')
- pr = '-' + pr;
- if (M === '0') {
- if (m === '0')
- ret = '>=' + M + '.' + m + '.' + p + pr +
- ' <' + M + '.' + m + '.' + (+p + 1);
- else
- ret = '>=' + M + '.' + m + '.' + p + pr +
- ' <' + M + '.' + (+m + 1) + '.0';
- } else
- ret = '>=' + M + '.' + m + '.' + p + pr +
- ' <' + (+M + 1) + '.0.0';
- } else {
- debug('no pr');
- if (M === '0') {
- if (m === '0')
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + m + '.' + (+p + 1);
- else
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0';
- } else
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + (+M + 1) + '.0.0';
- }
-
- debug('caret return', ret);
- return ret;
- });
-}
-
-function replaceXRanges(comp, loose) {
- debug('replaceXRanges', comp, loose);
- return comp.split(/\s+/).map(function(comp) {
- return replaceXRange(comp, loose);
- }).join(' ');
-}
-
-function replaceXRange(comp, loose) {
- comp = comp.trim();
- var r = loose ? re[XRANGELOOSE] : re[XRANGE];
- return comp.replace(r, function(ret, gtlt, M, m, p, pr) {
- debug('xRange', comp, ret, gtlt, M, m, p, pr);
- var xM = isX(M);
- var xm = xM || isX(m);
- var xp = xm || isX(p);
- var anyX = xp;
-
- if (gtlt === '=' && anyX)
- gtlt = '';
-
- if (xM) {
- if (gtlt === '>' || gtlt === '<') {
- // nothing is allowed
- ret = '<0.0.0';
- } else {
- // nothing is forbidden
- ret = '*';
- }
- } else if (gtlt && anyX) {
- // replace X with 0
- if (xm)
- m = 0;
- if (xp)
- p = 0;
-
- if (gtlt === '>') {
- // >1 => >=2.0.0
- // >1.2 => >=1.3.0
- // >1.2.3 => >= 1.2.4
- gtlt = '>=';
- if (xm) {
- M = +M + 1;
- m = 0;
- p = 0;
- } else if (xp) {
- m = +m + 1;
- p = 0;
- }
- } else if (gtlt === '<=') {
- // <=0.7.x is actually <0.8.0, since any 0.7.x should
- // pass. Similarly, <=7.x is actually <8.0.0, etc.
- gtlt = '<';
- if (xm)
- M = +M + 1;
- else
- m = +m + 1;
- }
-
- ret = gtlt + M + '.' + m + '.' + p;
- } else if (xm) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
- } else if (xp) {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
- }
-
- debug('xRange return', ret);
-
- return ret;
- });
-}
-
-// Because * is AND-ed with everything else in the comparator,
-// and '' means "any version", just remove the *s entirely.
-function replaceStars(comp, loose) {
- debug('replaceStars', comp, loose);
- // Looseness is ignored here. star is always as loose as it gets!
- return comp.trim().replace(re[STAR], '');
-}
-
-// This function is passed to string.replace(re[HYPHENRANGE])
-// M, m, patch, prerelease, build
-// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.3 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-function hyphenReplace($0,
- from, fM, fm, fp, fpr, fb,
- to, tM, tm, tp, tpr, tb) {
-
- if (isX(fM))
- from = '';
- else if (isX(fm))
- from = '>=' + fM + '.0.0';
- else if (isX(fp))
- from = '>=' + fM + '.' + fm + '.0';
- else
- from = '>=' + from;
-
- if (isX(tM))
- to = '';
- else if (isX(tm))
- to = '<' + (+tM + 1) + '.0.0';
- else if (isX(tp))
- to = '<' + tM + '.' + (+tm + 1) + '.0';
- else if (tpr)
- to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr;
- else
- to = '<=' + to;
-
- return (from + ' ' + to).trim();
-}
-
-
-// If ANY of the sets matches ALL of its comparators, then pass
-Range.prototype.test = function(version) {
- if (!version)
- return false;
-
- if (typeof version === 'string')
- version = new SemVer(version, this.loose);
-
- for (var i = 0; i < this.set.length; i++) {
- if (testSet(this.set[i], version))
- return true;
- }
- return false;
-};
-
-function testSet(set, version) {
- for (var i = 0; i < set.length; i++) {
- if (!set[i].test(version))
- return false;
- }
-
- if (version.prerelease.length) {
- // Find the set of versions that are allowed to have prereleases
- // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
- // That should allow `1.2.3-pr.2` to pass.
- // However, `1.2.4-alpha.notready` should NOT be allowed,
- // even though it's within the range set by the comparators.
- for (var i = 0; i < set.length; i++) {
- debug(set[i].semver);
- if (set[i].semver === ANY)
- continue;
-
- if (set[i].semver.prerelease.length > 0) {
- var allowed = set[i].semver;
- if (allowed.major === version.major &&
- allowed.minor === version.minor &&
- allowed.patch === version.patch)
- return true;
- }
- }
-
- // Version has a -pre, but it's not one of the ones we like.
- return false;
- }
-
- return true;
-}
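-
-// For example (a sketch), since ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0:
-//   satisfies('1.2.3-pr.2', '^1.2.3-pr.1')  // true: same major.minor.patch
-//   satisfies('1.2.4-alpha', '^1.2.3-pr.1') // false: prerelease elsewhere
-//   satisfies('1.2.4', '^1.2.3-pr.1')       // true: not a prerelease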
-
-exports.satisfies = satisfies;
-function satisfies(version, range, loose) {
- try {
- range = new Range(range, loose);
- } catch (er) {
- return false;
- }
- return range.test(version);
-}
-
-exports.maxSatisfying = maxSatisfying;
-function maxSatisfying(versions, range, loose) {
- return versions.filter(function(version) {
- return satisfies(version, range, loose);
- }).sort(function(a, b) {
- return rcompare(a, b, loose);
- })[0] || null;
-}
-
-exports.minSatisfying = minSatisfying;
-function minSatisfying(versions, range, loose) {
- return versions.filter(function(version) {
- return satisfies(version, range, loose);
- }).sort(function(a, b) {
- return compare(a, b, loose);
- })[0] || null;
-}
-
-exports.validRange = validRange;
-function validRange(range, loose) {
- try {
- // Return '*' instead of '' so that truthiness works.
- // This will throw if it's invalid anyway
- return new Range(range, loose).range || '*';
- } catch (er) {
- return null;
- }
-}
-
-// Determine if version is less than all the versions possible in the range
-exports.ltr = ltr;
-function ltr(version, range, loose) {
- return outside(version, range, '<', loose);
-}
-
-// Determine if version is greater than all the versions possible in the range.
-exports.gtr = gtr;
-function gtr(version, range, loose) {
- return outside(version, range, '>', loose);
-}
-
-exports.outside = outside;
-function outside(version, range, hilo, loose) {
- version = new SemVer(version, loose);
- range = new Range(range, loose);
-
- var gtfn, ltefn, ltfn, comp, ecomp;
- switch (hilo) {
- case '>':
- gtfn = gt;
- ltefn = lte;
- ltfn = lt;
- comp = '>';
- ecomp = '>=';
- break;
- case '<':
- gtfn = lt;
- ltefn = gte;
- ltfn = gt;
- comp = '<';
- ecomp = '<=';
- break;
- default:
- throw new TypeError('Must provide a hilo val of "<" or ">"');
- }
-
-  // If it satisfies the range it is not outside
- if (satisfies(version, range, loose)) {
- return false;
- }
-
-  // From now on, variable terms are as if we're in "gtr" mode,
-  // but note that everything is flipped for the "ltr" function.
-
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i];
-
- var high = null;
- var low = null;
-
- comparators.forEach(function(comparator) {
- if (comparator.semver === ANY) {
- comparator = new Comparator('>=0.0.0')
- }
- high = high || comparator;
- low = low || comparator;
- if (gtfn(comparator.semver, high.semver, loose)) {
- high = comparator;
- } else if (ltfn(comparator.semver, low.semver, loose)) {
- low = comparator;
- }
- });
-
-    // If the edge version comparator has a matching operator then the
-    // range is unbounded on that side, so our version isn't outside it.
- if (high.operator === comp || high.operator === ecomp) {
- return false;
- }
-
- // If the lowest version comparator has an operator and our version
- // is less than it then it isn't higher than the range
- if ((!low.operator || low.operator === comp) &&
- ltefn(version, low.semver)) {
- return false;
- } else if (low.operator === ecomp && ltfn(version, low.semver)) {
- return false;
- }
- }
- return true;
-}
-
-exports.prerelease = prerelease;
-function prerelease(version, loose) {
- var parsed = parse(version, loose);
- return (parsed && parsed.prerelease.length) ? parsed.prerelease : null;
-}
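-
-// For example (a sketch):
-//   prerelease('1.2.3-alpha.1') // ['alpha', 1]
-//   prerelease('1.2.3')         // null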
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.npmignore b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.npmignore
deleted file mode 100644
index c167ad5b1c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.npmignore
+++ /dev/null
@@ -1,5 +0,0 @@
-.*.swp
-node_modules
-examples/extract/
-test/tmp/
-test/fixtures/
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.travis.yml b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.travis.yml
deleted file mode 100644
index fca8ef0194..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/.travis.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-language: node_js
-node_js:
- - 0.10
- - 0.11
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/LICENSE
deleted file mode 100644
index 019b7e40ea..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/LICENSE
+++ /dev/null
@@ -1,12 +0,0 @@
-The ISC License
-Copyright (c) Isaac Z. Schlueter and Contributors
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/README.md
deleted file mode 100644
index cfda2ac180..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/README.md
+++ /dev/null
@@ -1,50 +0,0 @@
-# node-tar
-
-Tar for Node.js.
-
-[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/)
-
-## API
-
-See `examples/` for usage examples.
-
-### var tar = require('tar')
-
-Returns an object with `.Pack`, `.Extract` and `.Parse` methods.
-
-### tar.Pack([properties])
-
-Returns a through stream. Use
-[fstream](https://npmjs.org/package/fstream) to write files into the
-pack stream and you will receive tar archive data from the pack
-stream.
-
-This only works with directories; it does not work with individual files.
-
-The optional `properties` object is used to set properties in the tar
-'Global Extended Header'. If the `fromBase` property is set to true,
-the tar will contain files relative to the path passed, and not with
-the path included.
-
-### tar.Extract([options])
-
-Returns a through stream. Write tar data to the stream and the files
-in the tarball will be extracted onto the filesystem.
-
-`options` can be:
-
-```js
-{
- path: '/path/to/extract/tar/into',
- strip: 0, // how many path segments to strip from the root when extracting
-}
-```
-
-`options` also get passed to the `fstream.Writer` instance that `tar`
-uses internally.
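-
-A minimal sketch (the archive and target paths are illustrative):
-
-```js
-var tar = require('tar')
-var fs = require('fs')
-
-fs.createReadStream('archive.tar')
-  .pipe(tar.Extract({ path: '/tmp/extracted', strip: 1 }))
-  .on('error', function (err) { console.error(err) })
-  .on('end', function () { console.log('done extracting') })
-```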
-
-### tar.Parse()
-
-Returns a writable stream. Write tar data to it and it will emit
-`entry` events for each entry parsed from the tarball. This is used by
-`tar.Extract`.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/extracter.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/extracter.js
deleted file mode 100644
index f6253a72c5..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/extracter.js
+++ /dev/null
@@ -1,19 +0,0 @@
-var tar = require("../tar.js")
- , fs = require("fs")
-
-
-function onError(err) {
- console.error('An error occurred:', err)
-}
-
-function onEnd() {
- console.log('Extracted!')
-}
-
-var extractor = tar.Extract({path: __dirname + "/extract"})
- .on('error', onError)
- .on('end', onEnd);
-
-fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
- .on('error', onError)
- .pipe(extractor);
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/packer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/packer.js
deleted file mode 100644
index 039969ce30..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/packer.js
+++ /dev/null
@@ -1,24 +0,0 @@
-var tar = require("../tar.js")
- , fstream = require("fstream")
- , fs = require("fs")
-
-var dirDest = fs.createWriteStream('dir.tar')
-
-
-function onError(err) {
- console.error('An error occurred:', err)
-}
-
-function onEnd() {
- console.log('Packed!')
-}
-
-var packer = tar.Pack({ noProprietary: true })
- .on('error', onError)
- .on('end', onEnd);
-
-// This must be a "directory"
-fstream.Reader({ path: __dirname, type: "Directory" })
- .on('error', onError)
- .pipe(packer)
- .pipe(dirDest)
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/reader.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/reader.js
deleted file mode 100644
index 8d113ad30d..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/examples/reader.js
+++ /dev/null
@@ -1,35 +0,0 @@
-var tar = require("../tar.js")
- , fs = require("fs")
-
-fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
- .pipe(tar.Parse())
- .on("extendedHeader", function (e) {
- console.error("extended pax header", e.props)
- e.on("end", function () {
- console.error("extended pax fields:", e.fields)
- })
- })
- .on("ignoredEntry", function (e) {
- console.error("ignoredEntry?!?", e.props)
- })
- .on("longLinkpath", function (e) {
- console.error("longLinkpath entry", e.props)
- e.on("end", function () {
- console.error("value=%j", e.body.toString())
- })
- })
- .on("longPath", function (e) {
- console.error("longPath entry", e.props)
- e.on("end", function () {
- console.error("value=%j", e.body.toString())
- })
- })
- .on("entry", function (e) {
- console.error("entry", e.props)
- e.on("data", function (c) {
- console.error(" >>>" + c.toString().replace(/\n/g, "\\n"))
- })
- e.on("end", function () {
- console.error(" <<<EOF")
- })
- })
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js
deleted file mode 100644
index 6c1da2373a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// just like the Entry class, but it buffers the contents
-//
-// XXX It would be good to set a maximum BufferEntry filesize,
-// since it eats up memory. In normal operation,
-// these are only for long filenames or link names, which are
-// rarely very big.
-
-module.exports = BufferEntry
-
-var inherits = require("inherits")
- , Entry = require("./entry.js")
-
-function BufferEntry () {
- Entry.apply(this, arguments)
- this._buffer = new Buffer(this.props.size)
- this._offset = 0
- this.body = ""
- this.on("end", function () {
- this.body = this._buffer.toString().slice(0, -1)
- })
-}
-
-inherits(BufferEntry, Entry)
-
-// collect the bytes as they come in.
-BufferEntry.prototype.write = function (c) {
- c.copy(this._buffer, this._offset)
- this._offset += c.length
- Entry.prototype.write.call(this, c)
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js
deleted file mode 100644
index 8e09042d01..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js
+++ /dev/null
@@ -1,169 +0,0 @@
-module.exports = EntryWriter
-
-var tar = require("../tar.js")
- , TarHeader = require("./header.js")
- , Entry = require("./entry.js")
- , inherits = require("inherits")
- , BlockStream = require("block-stream")
- , ExtendedHeaderWriter
- , Stream = require("stream").Stream
- , EOF = {}
-
-inherits(EntryWriter, Stream)
-
-function EntryWriter (props) {
- var me = this
-
- if (!(me instanceof EntryWriter)) {
- return new EntryWriter(props)
- }
-
- Stream.apply(this)
-
- me.writable = true
- me.readable = true
-
- me._stream = new BlockStream(512)
-
- me._stream.on("data", function (c) {
- me.emit("data", c)
- })
-
- me._stream.on("drain", function () {
- me.emit("drain")
- })
-
- me._stream.on("end", function () {
- me.emit("end")
- me.emit("close")
- })
-
- me.props = props
- if (props.type === "Directory") {
- props.size = 0
- }
- props.ustar = "ustar\0"
- props.ustarver = "00"
- me.path = props.path
-
- me._buffer = []
- me._didHeader = false
- me._meta = false
-
- me.on("pipe", function () {
- me._process()
- })
-}
-
-EntryWriter.prototype.write = function (c) {
- // console.error(".. ew write")
- if (this._ended) return this.emit("error", new Error("write after end"))
- this._buffer.push(c)
- this._process()
- this._needDrain = this._buffer.length > 0
- return !this._needDrain
-}
-
-EntryWriter.prototype.end = function (c) {
- // console.error(".. ew end")
- if (c) this._buffer.push(c)
- this._buffer.push(EOF)
- this._ended = true
- this._process()
- this._needDrain = this._buffer.length > 0
-}
-
-EntryWriter.prototype.pause = function () {
- // console.error(".. ew pause")
- this._paused = true
- this.emit("pause")
-}
-
-EntryWriter.prototype.resume = function () {
- // console.error(".. ew resume")
- this._paused = false
- this.emit("resume")
- this._process()
-}
-
-EntryWriter.prototype.add = function (entry) {
- // console.error(".. ew add")
- if (!this.parent) return this.emit("error", new Error("no parent"))
-
- // make sure that the _header and such is emitted, and clear out
- // the _currentEntry link on the parent.
- if (!this._ended) this.end()
-
- return this.parent.add(entry)
-}
-
-EntryWriter.prototype._header = function () {
- // console.error(".. ew header")
- if (this._didHeader) return
- this._didHeader = true
-
- var headerBlock = TarHeader.encode(this.props)
-
- if (this.props.needExtended && !this._meta) {
- var me = this
-
- ExtendedHeaderWriter = ExtendedHeaderWriter ||
- require("./extended-header-writer.js")
-
- ExtendedHeaderWriter(this.props)
- .on("data", function (c) {
- me.emit("data", c)
- })
- .on("error", function (er) {
- me.emit("error", er)
- })
- .end()
- }
-
- // console.error(".. .. ew headerBlock emitting")
- this.emit("data", headerBlock)
- this.emit("header")
-}
-
-EntryWriter.prototype._process = function () {
- // console.error(".. .. ew process")
- if (!this._didHeader && !this._meta) {
- this._header()
- }
-
- if (this._paused || this._processing) {
- // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing)
- return
- }
-
- this._processing = true
-
- var buf = this._buffer
- for (var i = 0; i < buf.length; i ++) {
- // console.error(".. .. .. i=%d", i)
-
- var c = buf[i]
-
- if (c === EOF) this._stream.end()
- else this._stream.write(c)
-
- if (this._paused) {
- // console.error(".. .. .. paused mid-emission")
- this._processing = false
- if (i < buf.length) {
- this._needDrain = true
- this._buffer = buf.slice(i + 1)
- }
- return
- }
- }
-
- // console.error(".. .. .. emitted")
- this._buffer.length = 0
- this._processing = false
-
- // console.error(".. .. .. emitting drain")
- this.emit("drain")
-}
-
-EntryWriter.prototype.destroy = function () {}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry.js
deleted file mode 100644
index 5f5dd3c268..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/entry.js
+++ /dev/null
@@ -1,220 +0,0 @@
-// A passthrough read/write stream that sets its properties
-// based on a header, extendedHeader, and globalHeader
-//
-// Can be either a file system object of some sort, or
-// a pax/ustar metadata entry.
-
-module.exports = Entry
-
-var TarHeader = require("./header.js")
- , tar = require("../tar")
- , assert = require("assert").ok
- , Stream = require("stream").Stream
- , inherits = require("inherits")
- , fstream = require("fstream").Abstract
-
-function Entry (header, extended, global) {
- Stream.call(this)
- this.readable = true
- this.writable = true
-
- this._needDrain = false
- this._paused = false
- this._reading = false
- this._ending = false
- this._ended = false
- this._remaining = 0
- this._abort = false
- this._queue = []
- this._index = 0
- this._queueLen = 0
-
- this._read = this._read.bind(this)
-
- this.props = {}
- this._header = header
- this._extended = extended || {}
-
- // globals can change throughout the course of
- // a file parse operation. Freeze it at its current state.
- this._global = {}
- var me = this
- Object.keys(global || {}).forEach(function (g) {
- me._global[g] = global[g]
- })
-
- this._setProps()
-}
-
-inherits(Entry, Stream)
-
-Entry.prototype.write = function (c) {
- if (this._ending) this.error("write() after end()", null, true)
- if (this._remaining === 0) {
- this.error("invalid bytes past eof")
- }
-
- // often we'll get a bunch of \0 at the end of the last write,
- // since chunks will always be 512 bytes when reading a tarball.
- if (c.length > this._remaining) {
- c = c.slice(0, this._remaining)
- }
- this._remaining -= c.length
-
- // put it on the stack.
- var ql = this._queueLen
- this._queue.push(c)
- this._queueLen ++
-
- this._read()
-
- // either paused, or buffered
- if (this._paused || ql > 0) {
- this._needDrain = true
- return false
- }
-
- return true
-}
-
-Entry.prototype.end = function (c) {
- if (c) this.write(c)
- this._ending = true
- this._read()
-}
-
-Entry.prototype.pause = function () {
- this._paused = true
- this.emit("pause")
-}
-
-Entry.prototype.resume = function () {
- // console.error(" Tar Entry resume", this.path)
- this.emit("resume")
- this._paused = false
- this._read()
- return this._queueLen - this._index > 1
-}
-
-// This is bound to the instance.
-Entry.prototype._read = function () {
- // console.error(" Tar Entry _read", this.path)
-
- if (this._paused || this._reading || this._ended) return
-
- // set this flag so that event handlers don't inadvertently
- // get multiple _read() calls running.
- this._reading = true
-
- // have any data to emit?
- while (this._index < this._queueLen && !this._paused) {
- var chunk = this._queue[this._index ++]
- this.emit("data", chunk)
- }
-
- // check if we're drained
- if (this._index >= this._queueLen) {
- this._queue.length = this._queueLen = this._index = 0
- if (this._needDrain) {
- this._needDrain = false
- this.emit("drain")
- }
- if (this._ending) {
- this._ended = true
- this.emit("end")
- }
- }
-
- // if the queue gets too big, then pluck off whatever we can.
- // this should be fairly rare.
- var mql = this._maxQueueLen
- if (this._queueLen > mql && this._index > 0) {
- mql = Math.min(this._index, mql)
- this._index -= mql
- this._queueLen -= mql
- this._queue = this._queue.slice(mql)
- }
-
- this._reading = false
-}
-
-Entry.prototype._setProps = function () {
- // props = extended->global->header->{}
- var header = this._header
- , extended = this._extended
- , global = this._global
- , props = this.props
-
- // first get the values from the normal header.
- var fields = tar.fields
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , val = header[field]
- if (typeof val !== "undefined") props[field] = val
- }
-
- // next, the global header for this file.
- // numeric values, etc, will have already been parsed.
- ;[global, extended].forEach(function (p) {
- Object.keys(p).forEach(function (f) {
- if (typeof p[f] !== "undefined") props[f] = p[f]
- })
- })
-
- // no nulls allowed in path or linkpath
- ;["path", "linkpath"].forEach(function (p) {
- if (props.hasOwnProperty(p)) {
- props[p] = props[p].split("\0")[0]
- }
- })
-
-
- // set date fields to be a proper date
- ;["mtime", "ctime", "atime"].forEach(function (p) {
- if (props.hasOwnProperty(p)) {
- props[p] = new Date(props[p] * 1000)
- }
- })
-
- // set the type so that we know what kind of file to create
- var type
- switch (tar.types[props.type]) {
- case "OldFile":
- case "ContiguousFile":
- type = "File"
- break
-
- case "GNUDumpDir":
- type = "Directory"
- break
-
- case undefined:
- type = "Unknown"
- break
-
- case "Link":
- case "SymbolicLink":
- case "CharacterDevice":
- case "BlockDevice":
- case "Directory":
- case "FIFO":
- default:
- type = tar.types[props.type]
- }
-
- this.type = type
- this.path = props.path
- this.size = props.size
-
- // size is special, since it signals when the file needs to end.
- this._remaining = props.size
-}
-
-// The parser must not call write() while _abort is true.
-// Useful for quickly skipping the data of some entries.
-Entry.prototype.abort = function(){
- this._abort = true
-}
-
-Entry.prototype.warn = fstream.warn
-Entry.prototype.error = fstream.error
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js
deleted file mode 100644
index 1728c4583a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js
+++ /dev/null
@@ -1,191 +0,0 @@
-
-module.exports = ExtendedHeaderWriter
-
-var inherits = require("inherits")
- , EntryWriter = require("./entry-writer.js")
-
-inherits(ExtendedHeaderWriter, EntryWriter)
-
-var tar = require("../tar.js")
- , path = require("path")
- , TarHeader = require("./header.js")
-
-// props is the props of the thing we need to write an
-// extended header for.
-// Don't be shy with it. Just encode everything.
-function ExtendedHeaderWriter (props) {
- // console.error(">> ehw ctor")
- var me = this
-
- if (!(me instanceof ExtendedHeaderWriter)) {
- return new ExtendedHeaderWriter(props)
- }
-
- me.fields = props
-
- var p =
- { path : ("PaxHeader" + path.join("/", props.path || ""))
- .replace(/\\/g, "/").substr(0, 100)
- , mode : props.mode || 0666
- , uid : props.uid || 0
- , gid : props.gid || 0
- , size : 0 // will be set later
- , mtime : props.mtime || Date.now() / 1000
- , type : "x"
- , linkpath : ""
- , ustar : "ustar\0"
- , ustarver : "00"
- , uname : props.uname || ""
- , gname : props.gname || ""
- , devmaj : props.devmaj || 0
- , devmin : props.devmin || 0
- }
-
-
- EntryWriter.call(me, p)
- // console.error(">> ehw props", me.props)
- me.props = p
-
- me._meta = true
-}
-
-ExtendedHeaderWriter.prototype.end = function () {
- // console.error(">> ehw end")
- var me = this
-
- if (me._ended) return
- me._ended = true
-
- me._encodeFields()
-
- if (me.props.size === 0) {
- // nothing to write!
- me._ready = true
- me._stream.end()
- return
- }
-
- me._stream.write(TarHeader.encode(me.props))
- me.body.forEach(function (l) {
- me._stream.write(l)
- })
- me._ready = true
-
- // console.error(">> ehw _process calling end()", me.props)
- this._stream.end()
-}
-
-ExtendedHeaderWriter.prototype._encodeFields = function () {
- // console.error(">> ehw _encodeFields")
- this.body = []
- if (this.fields.prefix) {
- this.fields.path = this.fields.prefix + "/" + this.fields.path
- this.fields.prefix = ""
- }
- encodeFields(this.fields, "", this.body, this.fields.noProprietary)
- var me = this
- this.body.forEach(function (l) {
- me.props.size += l.length
- })
-}
-
-function encodeFields (fields, prefix, body, nop) {
- // console.error(">> >> ehw encodeFields")
- // "%d %s=%s\n", <length>, <keyword>, <value>
- // The length is a decimal number, and includes itself and the \n
- // Numeric values are decimal strings.
-
- Object.keys(fields).forEach(function (k) {
- var val = fields[k]
- , numeric = tar.numeric[k]
-
- if (prefix) k = prefix + "." + k
-
- // already including NODETAR.type, don't need File=true also
- if (k === fields.type && val === true) return
-
- switch (k) {
- // don't include anything that's always handled just fine
- // in the normal header, or only meaningful in the context
- // of nodetar
- case "mode":
- case "cksum":
- case "ustar":
- case "ustarver":
- case "prefix":
- case "basename":
- case "dirname":
- case "needExtended":
- case "block":
- case "filter":
- return
-
- case "rdev":
- if (val === 0) return
- break
-
- case "nlink":
- case "dev": // Truly a hero among men, Creator of Star!
- case "ino": // Speak his name with reverent awe! It is:
- k = "SCHILY." + k
- break
-
- default: break
- }
-
- if (val && typeof val === "object" &&
- !Buffer.isBuffer(val)) encodeFields(val, k, body, nop)
- else if (val === null || val === undefined) return
- else body.push.apply(body, encodeField(k, val, nop))
- })
-
- return body
-}
-
-function encodeField (k, v, nop) {
- // keys that start lowercase must be in the known set of extended
- // header keywords; anything else gets a "NODETAR." prefix.
- if (k.charAt(0) === k.charAt(0).toLowerCase()) {
- var m = k.split(".")[0]
- if (!tar.knownExtended[m]) k = "NODETAR." + k
- }
-
- // skip proprietary (non-lowercase, e.g. NODETAR.*, SCHILY.*)
- // fields entirely when the noProprietary option is set
- if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) {
- return []
- }
-
- if (typeof v === "number") v = v.toString(10)
-
- var s = new Buffer(" " + k + "=" + v + "\n")
- , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1
-
- // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
-
- // if adding that many digits will make it go over that length,
- // then add one to it. For example, if the string is:
- // " foo=bar\n"
- // then that's 9 characters. With the "9", that bumps the length
- // up to 10. However, this is invalid:
- // "10 foo=bar\n"
- // but, since that's actually 11 characters, since 10 adds another
- // character to the length, and the length includes the number
- // itself. In that case, just bump it up again.
- if (s.length + digits >= Math.pow(10, digits)) digits += 1
- // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
-
- var len = digits + s.length
- // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len)
- var lenBuf = new Buffer("" + len)
- if (lenBuf.length + s.length !== len) {
- throw new Error("Bad length calculation\n"+
- "len="+len+"\n"+
- "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+
- "lenBuf.length="+lenBuf.length+"\n"+
- "digits="+digits+"\n"+
- "s="+JSON.stringify(s.toString())+"\n"+
- "s.length="+s.length)
- }
-
- return [lenBuf, s]
-}
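The trickiest part of `encodeField` above is the self-referential length: the decimal count at the front of each pax record includes its own digits, so writing the count can add a digit. A standalone sketch of that calculation (hypothetical helper, not part of the module):

```javascript
// "%d %s=%s\n": the length counts the whole record, itself included.
// (Uses string length for brevity; the real code measures Buffer bytes.)
function paxRecordLength (key, value) {
  var body = " " + key + "=" + value + "\n" // everything but the digits
  var len = body.length
  var digits = 1
  // adding the digits may itself need another digit; re-check until stable
  while (String(len + digits).length > digits) digits++
  return len + digits
}

// " foo=bar\n" is 9 bytes; "9" would make 10, which needs two digits,
// so the record becomes the 11-byte "11 foo=bar\n".
console.log(paxRecordLength("foo", "bar")) // 11
```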
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header.js
deleted file mode 100644
index 74f432ceee..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extended-header.js
+++ /dev/null
@@ -1,140 +0,0 @@
-// An Entry consisting of:
-//
-// "%d %s=%s\n", <length>, <keyword>, <value>
-//
-// The length is a decimal number, and includes itself and the \n
-// \0 does not terminate anything. Only the length terminates the string.
-// Numeric values are decimal strings.
-
-module.exports = ExtendedHeader
-
-var Entry = require("./entry.js")
- , inherits = require("inherits")
- , tar = require("../tar.js")
- , numeric = tar.numeric
- , keyTrans = { "SCHILY.dev": "dev"
- , "SCHILY.ino": "ino"
- , "SCHILY.nlink": "nlink" }
-
-function ExtendedHeader () {
- Entry.apply(this, arguments)
- this.on("data", this._parse)
- this.fields = {}
- this._position = 0
- this._fieldPos = 0
- this._state = SIZE
- this._sizeBuf = []
- this._keyBuf = []
- this._valBuf = []
- this._size = -1
- this._key = ""
-}
-
-inherits(ExtendedHeader, Entry)
-ExtendedHeader.prototype._parse = parse
-
-var s = 0
- , states = ExtendedHeader.states = {}
- , SIZE = states.SIZE = s++
- , KEY = states.KEY = s++
- , VAL = states.VAL = s++
- , ERR = states.ERR = s++
-
-Object.keys(states).forEach(function (s) {
- states[states[s]] = states[s]
-})
-
-states[s] = null
-
-// char code values for comparison
-var _0 = "0".charCodeAt(0)
- , _9 = "9".charCodeAt(0)
- , point = ".".charCodeAt(0)
- , a = "a".charCodeAt(0)
- , Z = "Z".charCodeAt(0)
- , a = "a".charCodeAt(0)
- , z = "z".charCodeAt(0)
- , space = " ".charCodeAt(0)
- , eq = "=".charCodeAt(0)
- , cr = "\n".charCodeAt(0)
-
-function parse (c) {
- if (this._state === ERR) return
-
- for ( var i = 0, l = c.length
- ; i < l
- ; this._position++, this._fieldPos++, i++) {
- // console.error("top of loop, size="+this._size)
-
- var b = c[i]
-
- if (this._size >= 0 && this._fieldPos > this._size) {
- error(this, "field exceeds length="+this._size)
- return
- }
-
- switch (this._state) {
- case ERR: return
-
- case SIZE:
- // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString())
- if (b === space) {
- this._state = KEY
- // this._fieldPos = this._sizeBuf.length
- this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
- this._sizeBuf.length = 0
- continue
- }
- if (b < _0 || b > _9) {
- error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
- return
- }
- this._sizeBuf.push(b)
- continue
-
- case KEY:
- // can be any char except =, not > size.
- if (b === eq) {
- this._state = VAL
- this._key = new Buffer(this._keyBuf).toString()
- if (keyTrans[this._key]) this._key = keyTrans[this._key]
- this._keyBuf.length = 0
- continue
- }
- this._keyBuf.push(b)
- continue
-
- case VAL:
- // field must end with cr
- if (this._fieldPos === this._size - 1) {
- // console.error("finished with "+this._key)
- if (b !== cr) {
- error(this, "expected \\n at end of field")
- return
- }
- var val = new Buffer(this._valBuf).toString()
- if (numeric[this._key]) {
- val = parseFloat(val)
- }
- this.fields[this._key] = val
-
- this._valBuf.length = 0
- this._state = SIZE
- this._size = -1
- this._fieldPos = -1
- continue
- }
- this._valBuf.push(b)
- continue
- }
- }
-}
-
-function error (me, msg) {
- msg = "invalid header: " + msg
- + "\nposition=" + me._position
- + "\nfield position=" + me._fieldPos
-
- me.error(msg)
- me._state = ERR
-}
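To make the SIZE → KEY → VAL state machine above concrete, here is the kind of input it consumes; the record lengths follow the pax rule (the length counts the entire record, itself included):

```javascript
// Two pax records; parse() above yields
// { path: "some/long/path", mtime: 1350107340.123 }
// (mtime is in tar.numeric, so its value is parseFloat-ed).
var body = new Buffer(
  "23 path=some/long/path\n" +
  "24 mtime=1350107340.123\n")
```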
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extract.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extract.js
deleted file mode 100644
index fe1bb976eb..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/extract.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// give it a tarball and a path, and it'll dump the contents
-
-module.exports = Extract
-
-var tar = require("../tar.js")
- , fstream = require("fstream")
- , inherits = require("inherits")
- , path = require("path")
-
-function Extract (opts) {
- if (!(this instanceof Extract)) return new Extract(opts)
- tar.Parse.apply(this)
-
- if (typeof opts !== "object") {
- opts = { path: opts }
- }
-
- // better to drop in cwd? seems more standard.
- opts.path = opts.path || path.resolve("node-tar-extract")
- opts.type = "Directory"
- opts.Directory = true
-
- // similar to --strip or --strip-components
- opts.strip = +opts.strip
- if (!opts.strip || opts.strip <= 0) opts.strip = 0
-
- this._fst = fstream.Writer(opts)
-
- this.pause()
- var me = this
-
- // Hardlinks in tarballs are relative to the root
- // of the tarball. So, they need to be resolved against
- // the target directory in order to be created properly.
- me.on("entry", function (entry) {
- // if there's a "strip" argument, then strip off that many
- // path components.
- if (opts.strip) {
- var p = entry.path.split("/").slice(opts.strip).join("/")
- entry.path = entry.props.path = p
- if (entry.linkpath) {
- var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
- entry.linkpath = entry.props.linkpath = lp
- }
- }
- if (entry.type === "Link") {
- entry.linkpath = entry.props.linkpath =
- path.join(opts.path, path.join("/", entry.props.linkpath))
- }
-
- if (entry.type === "SymbolicLink") {
- var dn = path.dirname(entry.path) || ""
- var linkpath = entry.props.linkpath
- var target = path.resolve(opts.path, dn, linkpath)
- if (target.indexOf(opts.path) !== 0) {
- linkpath = path.join(opts.path, path.join("/", linkpath))
- }
- entry.linkpath = entry.props.linkpath = linkpath
- }
- })
-
- this._fst.on("ready", function () {
- me.pipe(me._fst, { end: false })
- me.resume()
- })
-
- this._fst.on('error', function(err) {
- me.emit('error', err)
- })
-
- this._fst.on('drain', function() {
- me.emit('drain')
- })
-
- // this._fst.on("end", function () {
- // console.error("\nEEEE Extract End", me._fst.path)
- // })
-
- this._fst.on("close", function () {
- // console.error("\nEEEE Extract End", me._fst.path)
- me.emit("finish")
- me.emit("end")
- me.emit("close")
- })
-}
-
-inherits(Extract, tar.Parse)
-
-Extract.prototype._streamEnd = function () {
- var me = this
- if (!me._ended || me._entry) me.error("unexpected eof")
- me._fst.end()
- // our own "end" event is emitted later, when the fstream writer closes.
-}
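A typical use of the `Extract` stream removed above, for reference; the file names are illustrative:

```javascript
// Sketch only: legacy tar@2 API.
var fs = require("fs")
var zlib = require("zlib")
var tar = require("tar") // tar@2.x

fs.createReadStream("package.tgz")
  .pipe(zlib.createGunzip())
  .pipe(tar.Extract({ path: "/tmp/unpacked", strip: 1 })) // drop top dir
  .on("error", function (er) { console.error("extract failed", er) })
  .on("end", function () { console.log("done") })
```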
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js
deleted file mode 100644
index 0bfc7b80aa..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js
+++ /dev/null
@@ -1,14 +0,0 @@
-module.exports = GlobalHeaderWriter
-
-var ExtendedHeaderWriter = require("./extended-header-writer.js")
- , inherits = require("inherits")
-
-inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
-
-function GlobalHeaderWriter (props) {
- if (!(this instanceof GlobalHeaderWriter)) {
- return new GlobalHeaderWriter(props)
- }
- ExtendedHeaderWriter.call(this, props)
- this.props.type = "g"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/header.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/header.js
deleted file mode 100644
index 3741d5d3f2..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/header.js
+++ /dev/null
@@ -1,384 +0,0 @@
-// parse a 512-byte header block to a data object, or vice-versa
-// If the data won't fit nicely in a simple header, then generate
-// the appropriate extended header file, and return that.
-
-module.exports = TarHeader
-
-var tar = require("../tar.js")
- , fields = tar.fields
- , fieldOffs = tar.fieldOffs
- , fieldEnds = tar.fieldEnds
- , fieldSize = tar.fieldSize
- , numeric = tar.numeric
- , assert = require("assert").ok
- , space = " ".charCodeAt(0)
- , slash = "/".charCodeAt(0)
- , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
-
-function TarHeader (block) {
- if (!(this instanceof TarHeader)) return new TarHeader(block)
- if (block) this.decode(block)
-}
-
-TarHeader.prototype =
- { decode : decode
- , encode: encode
- , calcSum: calcSum
- , checkSum: checkSum
- }
-
-TarHeader.parseNumeric = parseNumeric
-TarHeader.encode = encode
-TarHeader.decode = decode
-
-// note that this will only do the normal ustar header, not any kind
-// of extended posix header file. If something doesn't fit comfortably,
-// then it will set obj.needExtended = true, and set the block to
-// the closest approximation.
-function encode (obj) {
- if (!obj && !(this instanceof TarHeader)) throw new Error(
- "encode must be called on a TarHeader, or supplied an object")
-
- obj = obj || this
- var block = obj.block = new Buffer(512)
-
- // if the object has a "prefix", then that's actually an extension of
- // the path field.
- if (obj.prefix) {
- // console.error("%% header encoding, got a prefix", obj.prefix)
- obj.path = obj.prefix + "/" + obj.path
- // console.error("%% header encoding, prefixed path", obj.path)
- obj.prefix = ""
- }
-
- obj.needExtended = false
-
- if (obj.mode) {
- if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
- obj.mode = obj.mode & 0777
- }
-
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , off = fieldOffs[f]
- , end = fieldEnds[f]
- , ret
-
- switch (field) {
- case "cksum":
- // special, done below, after all the others
- break
-
- case "prefix":
- // special, this is an extension of the "path" field.
- // console.error("%% header encoding, skip prefix later")
- break
-
- case "type":
- // convert from long name to a single char.
- var type = obj.type || "0"
- if (type.length > 1) {
- type = tar.types[obj.type]
- if (!type) type = "0"
- }
- writeText(block, off, end, type)
- break
-
- case "path":
- // uses the "prefix" field if > 100 bytes, but <= 255
- var pathLen = Buffer.byteLength(obj.path)
- , pathFSize = fieldSize[fields.path]
- , prefFSize = fieldSize[fields.prefix]
-
- // paths between 100 and 255 chars use the prefix field;
- // anything longer needs an extended header.
- if (pathLen > pathFSize &&
- pathLen <= pathFSize + prefFSize) {
- // need to find a slash somewhere in the middle so that
- // path and prefix both fit in their respective fields
- var searchStart = pathLen - 1 - pathFSize
- , searchEnd = prefFSize
- , found = false
- , pathBuf = new Buffer(obj.path)
-
- for ( var s = searchStart
- ; (s <= searchEnd)
- ; s ++ ) {
- if (pathBuf[s] === slash || pathBuf[s] === bslash) {
- found = s
- break
- }
- }
-
- if (found !== false) {
- var prefix = pathBuf.slice(0, found).toString("utf8")
- , path = pathBuf.slice(found + 1).toString("utf8")
-
- ret = writeText(block, off, end, path)
- off = fieldOffs[fields.prefix]
- end = fieldEnds[fields.prefix]
- // console.error("%% header writing prefix", off, end, prefix)
- ret = writeText(block, off, end, prefix) || ret
- break
- }
- }
-
- // paths less than 100 chars don't need a prefix
- // and paths longer than 255 need an extended header and will fail
- // on old implementations no matter what we do here.
- // Null out the prefix, and fallthrough to default.
- // console.error("%% header writing no prefix")
- var poff = fieldOffs[fields.prefix]
- , pend = fieldEnds[fields.prefix]
- writeText(block, poff, pend, "")
- // fallthrough
-
- // all other fields are numeric or text
- default:
- ret = numeric[field]
- ? writeNumeric(block, off, end, obj[field])
- : writeText(block, off, end, obj[field] || "")
- break
- }
- obj.needExtended = obj.needExtended || ret
- }
-
- var off = fieldOffs[fields.cksum]
- , end = fieldEnds[fields.cksum]
-
- writeNumeric(block, off, end, calcSum.call(this, block))
-
- return block
-}
-
-// if it's a negative number, or greater than will fit,
-// then use write256.
-var MAXNUM = { 12: 077777777777
- , 11: 07777777777
- , 8 : 07777777
- , 7 : 0777777 }
-function writeNumeric (block, off, end, num) {
- var writeLen = end - off
- , maxNum = MAXNUM[writeLen] || 0
-
- num = num || 0
- // console.error(" numeric", num)
-
- if (num instanceof Date ||
- Object.prototype.toString.call(num) === "[object Date]") {
- num = num.getTime() / 1000
- }
-
- if (num > maxNum || num < 0) {
- write256(block, off, end, num)
- // need an extended header if negative or too big.
- return true
- }
-
- // god, tar is so annoying
- // if the string is small enough, you should put a space
- // between the octal string and the \0, but if it doesn't
- // fit, then don't.
- var numStr = Math.floor(num).toString(8)
- if (num < MAXNUM[writeLen - 1]) numStr += " "
-
- // pad with "0" chars
- if (numStr.length < writeLen) {
- numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
- }
-
- if (numStr.length !== writeLen - 1) {
- throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
- "expected: "+writeLen)
- }
- block.write(numStr, off, writeLen, "utf8")
- block[end - 1] = 0
-}
-
-function write256 (block, off, end, num) {
- var buf = block.slice(off, end)
- var positive = num >= 0
- buf[0] = positive ? 0x80 : 0xFF
-
- // get the number as a base-256 tuple
- if (!positive) num *= -1
- var tuple = []
- do {
- var n = num % 256
- tuple.push(n)
- num = (num - n) / 256
- } while (num)
-
- var bytes = tuple.length
-
- var fill = buf.length - bytes
- for (var i = 1; i < fill; i ++) {
- buf[i] = positive ? 0 : 0xFF
- }
-
- // tuple is a base256 number, with [0] as the *least* significant byte
- // if it's negative, then we need to flip all the bits once we hit the
- // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's-
- // complement is (0xFF - n).
- var zero = true
- for (i = bytes; i > 0; i --) {
- var byte = tuple[bytes - i]
- if (positive) buf[fill + i - 1] = byte
- else if (zero && byte === 0) buf[fill + i - 1] = 0
- else if (zero) {
- zero = false
- buf[fill + i - 1] = 0x100 - byte
- } else buf[fill + i - 1] = 0xFF - byte
- }
-}
-
-function writeText (block, off, end, str) {
- // strings are written as utf8, then padded with \0
- var strLen = Buffer.byteLength(str)
- , writeLen = Math.min(strLen, end - off)
- // non-ascii fields need extended headers
- // long fields get truncated
- , needExtended = strLen !== str.length || strLen > writeLen
-
- // write the string, and null-pad
- if (writeLen > 0) block.write(str, off, writeLen, "utf8")
- for (var i = off + writeLen; i < end; i ++) block[i] = 0
-
- return needExtended
-}
-
-function calcSum (block) {
- block = block || this.block
- if (!block) throw new Error("Need block to checksum")
- assert(Buffer.isBuffer(block) && block.length === 512)
-
- // now figure out what it would be if the cksum was " "
- var sum = 0
- , start = fieldOffs[fields.cksum]
- , end = fieldEnds[fields.cksum]
-
- for (var i = 0; i < fieldOffs[fields.cksum]; i ++) {
- sum += block[i]
- }
-
- for (var i = start; i < end; i ++) {
- sum += space
- }
-
- for (var i = end; i < 512; i ++) {
- sum += block[i]
- }
-
- return sum
-}
-
-
-function checkSum (block) {
- var sum = calcSum.call(this, block)
- block = block || this.block
-
- var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
- cksum = parseNumeric(cksum)
-
- return cksum === sum
-}
-
-function decode (block) {
- block = block || this.block
- assert(Buffer.isBuffer(block) && block.length === 512)
-
- this.block = block
- this.cksumValid = this.checkSum()
-
- var prefix = null
-
- // slice off each field.
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , val = block.slice(fieldOffs[f], fieldEnds[f])
-
- switch (field) {
- case "ustar":
- // if not ustar, then everything after that is just padding.
- if (val.toString() !== "ustar\0") {
- this.ustar = false
- return
- } else {
- // console.error("ustar:", val, val.toString())
- this.ustar = val.toString()
- }
- break
-
- // prefix is special, since it might signal the xstar header
- case "prefix":
- var atime = parseNumeric(val.slice(131, 131 + 12))
- , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
- if ((val[130] === 0 || val[130] === space) &&
- typeof atime === "number" &&
- typeof ctime === "number" &&
- val[131 + 12 - 1] === space &&
- val[131 + 12 + 12 - 1] === space) {
- this.atime = atime
- this.ctime = ctime
- val = val.slice(0, 130)
- }
- prefix = val.toString("utf8").replace(/\0+$/, "")
- // console.error("%% header reading prefix", prefix)
- break
-
- // all other fields are null-padding text
- // or a number.
- default:
- if (numeric[field]) {
- this[field] = parseNumeric(val)
- } else {
- this[field] = val.toString("utf8").replace(/\0+$/, "")
- }
- break
- }
- }
-
- // if we got a prefix, then prepend it to the path.
- if (prefix) {
- this.path = prefix + "/" + this.path
- // console.error("%% header got a prefix", this.path)
- }
-}
-
-function parse256 (buf) {
- // first byte MUST be either 80 or FF
- // 80 for positive, FF for 2's comp
- var positive
- if (buf[0] === 0x80) positive = true
- else if (buf[0] === 0xFF) positive = false
- else return null
-
- // build up a base-256 tuple from the least sig to the highest
- var zero = false
- , tuple = []
- for (var i = buf.length - 1; i > 0; i --) {
- var byte = buf[i]
- if (positive) tuple.push(byte)
- else if (zero && byte === 0) tuple.push(0)
- else if (zero) {
- zero = false
- tuple.push(0x100 - byte)
- } else tuple.push(0xFF - byte)
- }
-
- for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
- sum += tuple[i] * Math.pow(256, i)
- }
-
- return positive ? sum : -1 * sum
-}
-
-function parseNumeric (f) {
- if (f[0] & 0x80) return parse256(f)
-
- var str = f.toString("utf8").split("\0")[0].trim()
- , res = parseInt(str, 8)
-
- return isNaN(res) ? null : res
-}
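The octal size field tops out at `077777777777` (8 GiB minus one byte), which is why `writeNumeric` falls back to the base-256 form that `parse256` reads back. A simplified, positive-only sketch of that encoding (hypothetical helper, not part of the module):

```javascript
// Base-256 ("binary") numeric field: marker byte 0x80, then the value
// as big-endian base-256 digits, zero-padded on the left.
function encodeBase256 (fieldLen, num) {
  var buf = new Buffer(fieldLen)
  buf.fill(0)
  buf[0] = 0x80 // positive marker (0xFF would mean negative)
  for (var i = fieldLen - 1; i > 0 && num > 0; i--) {
    buf[i] = num % 256
    num = Math.floor(num / 256)
  }
  return buf
}

// 8 GiB is one more than the octal field's max, so it needs base-256:
console.log(encodeBase256(12, 8 * 1024 * 1024 * 1024))
// <Buffer 80 00 00 00 00 00 00 02 00 00 00 00>
```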
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/pack.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/pack.js
deleted file mode 100644
index 5a3bb95a12..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/pack.js
+++ /dev/null
@@ -1,236 +0,0 @@
-// pipe in an fstream, and it'll make a tarball.
-// key-value pair argument is global extended header props.
-
-module.exports = Pack
-
-var EntryWriter = require("./entry-writer.js")
- , Stream = require("stream").Stream
- , path = require("path")
- , inherits = require("inherits")
- , GlobalHeaderWriter = require("./global-header-writer.js")
- , collect = require("fstream").collect
- , eof = new Buffer(512)
-
-for (var i = 0; i < 512; i ++) eof[i] = 0
-
-inherits(Pack, Stream)
-
-function Pack (props) {
- // console.error("-- p ctor")
- var me = this
- if (!(me instanceof Pack)) return new Pack(props)
-
- if (props) me._noProprietary = props.noProprietary
- else me._noProprietary = false
-
- me._global = props
-
- me.readable = true
- me.writable = true
- me._buffer = []
- // console.error("-- -- set current to null in ctor")
- me._currentEntry = null
- me._processing = false
-
- me._pipeRoot = null
- me.on("pipe", function (src) {
- if (src.root === me._pipeRoot) return
- me._pipeRoot = src
- src.on("end", function () {
- me._pipeRoot = null
- })
- me.add(src)
- })
-}
-
-Pack.prototype.addGlobal = function (props) {
- // console.error("-- p addGlobal")
- if (this._didGlobal) return
- this._didGlobal = true
-
- var me = this
- GlobalHeaderWriter(props)
- .on("data", function (c) {
- me.emit("data", c)
- })
- .end()
-}
-
-Pack.prototype.add = function (stream) {
- if (this._global && !this._didGlobal) this.addGlobal(this._global)
-
- if (this._ended) return this.emit("error", new Error("add after end"))
-
- collect(stream)
- this._buffer.push(stream)
- this._process()
- this._needDrain = this._buffer.length > 0
- return !this._needDrain
-}
-
-Pack.prototype.pause = function () {
- this._paused = true
- if (this._currentEntry) this._currentEntry.pause()
- this.emit("pause")
-}
-
-Pack.prototype.resume = function () {
- this._paused = false
- if (this._currentEntry) this._currentEntry.resume()
- this.emit("resume")
- this._process()
-}
-
-Pack.prototype.end = function () {
- this._ended = true
- this._buffer.push(eof)
- this._process()
-}
-
-Pack.prototype._process = function () {
- var me = this
- if (me._paused || me._processing) {
- return
- }
-
- var entry = me._buffer.shift()
-
- if (!entry) {
- if (me._needDrain) {
- me.emit("drain")
- }
- return
- }
-
- if (entry.ready === false) {
- // console.error("-- entry is not ready", entry)
- me._buffer.unshift(entry)
- entry.on("ready", function () {
- // console.error("-- -- ready!", entry)
- me._process()
- })
- return
- }
-
- me._processing = true
-
- if (entry === eof) {
- // need 2 ending null blocks.
- me.emit("data", eof)
- me.emit("data", eof)
- me.emit("end")
- me.emit("close")
- return
- }
-
- // Change the path to be relative to the root dir that was
- // added to the tarball.
- //
- // XXX This should be more like how -C works, so you can
- // explicitly set a root dir, and also explicitly set a pathname
- // in the tarball to use. That way we can skip a lot of extra
- // work when resolving symlinks for bundled dependencies in npm.
-
- var root = path.dirname((entry.root || entry).path);
- if (me._global && me._global.fromBase && entry.root && entry.root.path) {
- // user set 'fromBase: true' indicating tar root should be directory itself
- root = entry.root.path;
- }
-
- var wprops = {}
-
- Object.keys(entry.props || {}).forEach(function (k) {
- wprops[k] = entry.props[k]
- })
-
- if (me._noProprietary) wprops.noProprietary = true
-
- wprops.path = path.relative(root, entry.path || '')
-
- // actually not a matter of opinion or taste.
- if (process.platform === "win32") {
- wprops.path = wprops.path.replace(/\\/g, "/")
- }
-
- if (!wprops.type)
- wprops.type = 'Directory'
-
- switch (wprops.type) {
- // sockets not supported
- case "Socket":
- return
-
- case "Directory":
- wprops.path += "/"
- wprops.size = 0
- break
-
- case "Link":
- var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
- wprops.linkpath = path.relative(root, lp) || "."
- wprops.size = 0
- break
-
- case "SymbolicLink":
- var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
- wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
- wprops.size = 0
- break
- }
-
- // console.error("-- new writer", wprops)
- // if (!wprops.type) {
- // // console.error("-- no type?", entry.constructor.name, entry)
- // }
-
- // console.error("-- -- set current to new writer", wprops.path)
- var writer = me._currentEntry = EntryWriter(wprops)
-
- writer.parent = me
-
- // writer.on("end", function () {
- // // console.error("-- -- writer end", writer.path)
- // })
-
- writer.on("data", function (c) {
- me.emit("data", c)
- })
-
- writer.on("header", function () {
- Buffer.prototype.toJSON = function () {
- return this.toString().split(/\0/).join(".")
- }
- // console.error("-- -- writer header %j", writer.props)
- if (writer.props.size === 0) nextEntry()
- })
- writer.on("close", nextEntry)
-
- var ended = false
- function nextEntry () {
- if (ended) return
- ended = true
-
- // console.error("-- -- writer close", writer.path)
- // console.error("-- -- set current to null", wprops.path)
- me._currentEntry = null
- me._processing = false
- me._process()
- }
-
- writer.on("error", function (er) {
- // console.error("-- -- writer error", writer.path)
- me.emit("error", er)
- })
-
- // if it's the root, then there's no need to add its entries,
- // or data, since they'll be added directly.
- if (entry === me._pipeRoot) {
- // console.error("-- is the root, don't auto-add")
- writer.add = null
- }
-
- entry.pipe(writer)
-}
-
-Pack.prototype.destroy = function () {}
-Pack.prototype.write = function () {}
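For reference, the intended use of `Pack`: pipe an fstream directory reader in, get tarball bytes out. The `noProprietary` flag maps to the `_noProprietary` handling above; any other props become a global extended header via `addGlobal`:

```javascript
// Sketch only: legacy tar@2 + fstream API.
var fs = require("fs")
var fstream = require("fstream")
var tar = require("tar") // tar@2.x

fstream.Reader({ path: "some-dir", type: "Directory" })
  .pipe(tar.Pack({ noProprietary: true })) // omit NODETAR.*/SCHILY.* fields
  .pipe(fs.createWriteStream("some-dir.tar"))
```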
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/parse.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/parse.js
deleted file mode 100644
index 600ad782f0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/lib/parse.js
+++ /dev/null
@@ -1,275 +0,0 @@
-
-// A writable stream.
-// It emits "entry" events, which provide a readable stream that has
-// header info attached.
-
-module.exports = Parse.create = Parse
-
-var stream = require("stream")
- , Stream = stream.Stream
- , BlockStream = require("block-stream")
- , tar = require("../tar.js")
- , TarHeader = require("./header.js")
- , Entry = require("./entry.js")
- , BufferEntry = require("./buffer-entry.js")
- , ExtendedHeader = require("./extended-header.js")
- , assert = require("assert").ok
- , inherits = require("inherits")
- , fstream = require("fstream")
-
-// reading a tar is a lot like reading a directory
-// However, we're actually not going to run the ctor,
-// since it does a stat and various other stuff.
-// This inheritance gives us the pause/resume/pipe
-// behavior that is desired.
-inherits(Parse, fstream.Reader)
-
-function Parse () {
- var me = this
- if (!(me instanceof Parse)) return new Parse()
-
- // doesn't apply fstream.Reader ctor?
- // no, becasue we don't want to stat/etc, we just
- // want to get the entry/add logic from .pipe()
- Stream.apply(me)
-
- me.writable = true
- me.readable = true
- me._stream = new BlockStream(512)
- me.position = 0
- me._ended = false
-
- me._stream.on("error", function (e) {
- me.emit("error", e)
- })
-
- me._stream.on("data", function (c) {
- me._process(c)
- })
-
- me._stream.on("end", function () {
- me._streamEnd()
- })
-
- me._stream.on("drain", function () {
- me.emit("drain")
- })
-}
-
-// overridden in Extract class, since it needs to
-// wait for its DirWriter part to finish before
-// emitting "end"
-Parse.prototype._streamEnd = function () {
- var me = this
- if (!me._ended || me._entry) me.error("unexpected eof")
- me.emit("end")
-}
-
-// a tar reader is actually a filter, not just a readable stream.
-// So, you should pipe a tarball stream into it, and it needs these
-// write/end methods to do that.
-Parse.prototype.write = function (c) {
- if (this._ended) {
- // gnutar puts a LOT of nulls at the end.
- // you can keep writing these things forever.
- // Just ignore them.
- for (var i = 0, l = c.length; i < l; i ++) {
- if (c[i] !== 0) return this.error("write() after end()")
- }
- return
- }
- return this._stream.write(c)
-}
-
-Parse.prototype.end = function (c) {
- this._ended = true
- return this._stream.end(c)
-}
-
-// don't need to do anything, since we're just
-// proxying the data up from the _stream.
-// Just need to override the parent's "Not Implemented"
-// error-thrower.
-Parse.prototype._read = function () {}
-
-Parse.prototype._process = function (c) {
- assert(c && c.length === 512, "block size should be 512")
-
- // one of three cases.
- // 1. A new header
- // 2. A part of a file/extended header
- // 3. One of two or more EOF null blocks
-
- if (this._entry) {
- var entry = this._entry
- if(!entry._abort) entry.write(c)
- else {
- entry._remaining -= c.length
- if(entry._remaining < 0) entry._remaining = 0
- }
- if (entry._remaining === 0) {
- entry.end()
- this._entry = null
- }
- } else {
- // either zeroes or a header
- var zero = true
- for (var i = 0; i < 512 && zero; i ++) {
- zero = c[i] === 0
- }
-
- // eof is *at least* 2 blocks of nulls, and then the end of the
- // file. you can put blocks of nulls between entries anywhere,
- // so appending one tarball to another is technically valid.
- // ending without the eof null blocks is not allowed, however.
- if (zero) {
- if (this._eofStarted)
- this._ended = true
- this._eofStarted = true
- } else {
- this._eofStarted = false
- this._startEntry(c)
- }
- }
-
- this.position += 512
-}
-
-// take a header chunk, start the right kind of entry.
-Parse.prototype._startEntry = function (c) {
- var header = new TarHeader(c)
- , self = this
- , entry
- , ev
- , EntryType
- , onend
- , meta = false
-
- if (null === header.size || !header.cksumValid) {
- var e = new Error("invalid tar file")
- e.header = header
- e.tar_file_offset = this.position
- e.tar_block = this.position / 512
- return this.emit("error", e)
- }
-
- switch (tar.types[header.type]) {
- case "File":
- case "OldFile":
- case "Link":
- case "SymbolicLink":
- case "CharacterDevice":
- case "BlockDevice":
- case "Directory":
- case "FIFO":
- case "ContiguousFile":
- case "GNUDumpDir":
- // start a file.
- // pass in any extended headers.
- // These are the entries consumers are typically most interested in.
- EntryType = Entry
- ev = "entry"
- break
-
- case "GlobalExtendedHeader":
- // extended headers that apply to the rest of the tarball
- EntryType = ExtendedHeader
- onend = function () {
- self._global = self._global || {}
- Object.keys(entry.fields).forEach(function (k) {
- self._global[k] = entry.fields[k]
- })
- }
- ev = "globalExtendedHeader"
- meta = true
- break
-
- case "ExtendedHeader":
- case "OldExtendedHeader":
- // extended headers that apply to the next entry
- EntryType = ExtendedHeader
- onend = function () {
- self._extended = entry.fields
- }
- ev = "extendedHeader"
- meta = true
- break
-
- case "NextFileHasLongLinkpath":
- // set linkpath=<contents> in extended header
- EntryType = BufferEntry
- onend = function () {
- self._extended = self._extended || {}
- self._extended.linkpath = entry.body
- }
- ev = "longLinkpath"
- meta = true
- break
-
- case "NextFileHasLongPath":
- case "OldGnuLongPath":
- // set path=<contents> in file-extended header
- EntryType = BufferEntry
- onend = function () {
- self._extended = self._extended || {}
- self._extended.path = entry.body
- }
- ev = "longPath"
- meta = true
- break
-
- default:
- // all the rest we skip, but still set the _entry
- // member, so that we can skip over their data appropriately.
- // emit an event to say that this is an ignored entry type?
- EntryType = Entry
- ev = "ignoredEntry"
- break
- }
-
- var global, extended
- if (meta) {
- global = extended = null
- } else {
- global = this._global
- extended = this._extended
-
- // extendedHeader only applies to one entry, so once we start
- // an entry, it's over.
- this._extended = null
- }
- entry = new EntryType(header, extended, global)
- entry.meta = meta
-
- // only proxy data events of normal files.
- if (!meta) {
- entry.on("data", function (c) {
- self.emit("data", c)
- })
- }
-
- if (onend) entry.on("end", onend)
-
- this._entry = entry
-
- entry.on("pause", function () {
- self.pause()
- })
-
- entry.on("resume", function () {
- self.resume()
- })
-
- if (this.listeners("*").length) {
- this.emit("*", ev, entry)
- }
-
- this.emit(ev, entry)
-
- // Zero-byte entry. End immediately.
- if (entry.props.size === 0) {
- entry.end()
- this._entry = null
- }
-}
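And the read side, for reference: `Parse` is a writable filter, so a tarball stream is piped into it, and it emits `entry` events plus the meta events wired up in `_startEntry`:

```javascript
// Sketch only: legacy tar@2 API.
var fs = require("fs")
var tar = require("tar") // tar@2.x

fs.createReadStream("archive.tar")
  .pipe(tar.Parse())
  .on("entry", function (entry) {
    console.log(entry.type, entry.path, entry.size)
  })
  .on("ignoredEntry", function (entry) {
    console.log("skipping unsupported type", entry.props.type)
  })
```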
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE
deleted file mode 100644
index 74489e2e26..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) Isaac Z. Schlueter
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
-``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE
deleted file mode 100644
index 19129e315f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md
deleted file mode 100644
index c16e9c4688..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# block-stream
-
-A stream of blocks.
-
-Write data into it, and it'll output data in buffer blocks the size you
-specify, padding with zeroes if necessary.
-
-```javascript
-var block = new BlockStream(512)
-fs.createReadStream("some-file").pipe(block)
-block.pipe(fs.createWriteStream("block-file"))
-```
-
-When `.end()` or `.flush()` is called, it'll pad the block with zeroes.
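The implementation also accepts a `nopad` option that this README doesn't mention; a sketch (option name taken from the code):

```javascript
var BlockStream = require("block-stream")

// With nopad, the final partial block is emitted as-is
// instead of being zero-padded to the block size.
var block = new BlockStream(512, { nopad: true })
block.on("data", function (c) { console.log("chunk of", c.length, "bytes") })
block.write("only 10 b!") // 10 bytes buffered
block.end()               // emits a 10-byte chunk, not a padded 512-byte one
```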
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js
deleted file mode 100644
index 008de035c2..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js
+++ /dev/null
@@ -1,209 +0,0 @@
-// write data to it, and it'll emit data in 512 byte blocks.
-// if you .end() or .flush(), it'll emit whatever it's got,
-// padded with nulls to 512 bytes.
-
-module.exports = BlockStream
-
-var Stream = require("stream").Stream
- , inherits = require("inherits")
- , assert = require("assert").ok
- , debug = process.env.DEBUG ? console.error : function () {}
-
-function BlockStream (size, opt) {
- this.writable = this.readable = true
- this._opt = opt || {}
- this._chunkSize = size || 512
- this._offset = 0
- this._buffer = []
- this._bufferLength = 0
- if (this._opt.nopad) this._zeroes = false
- else {
- this._zeroes = new Buffer(this._chunkSize)
- for (var i = 0; i < this._chunkSize; i ++) {
- this._zeroes[i] = 0
- }
- }
-}
-
-inherits(BlockStream, Stream)
-
-BlockStream.prototype.write = function (c) {
- // debug(" BS write", c)
- if (this._ended) throw new Error("BlockStream: write after end")
- if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "")
- if (c.length) {
- this._buffer.push(c)
- this._bufferLength += c.length
- }
- // debug("pushed onto buffer", this._bufferLength)
- if (this._bufferLength >= this._chunkSize) {
- if (this._paused) {
- // debug(" BS paused, return false, need drain")
- this._needDrain = true
- return false
- }
- this._emitChunk()
- }
- return true
-}
-
-BlockStream.prototype.pause = function () {
- // debug(" BS pausing")
- this._paused = true
-}
-
-BlockStream.prototype.resume = function () {
- // debug(" BS resume")
- this._paused = false
- return this._emitChunk()
-}
-
-BlockStream.prototype.end = function (chunk) {
- // debug("end", chunk)
- if (typeof chunk === "function") chunk = null // callback arg is accepted but never called
- if (chunk) this.write(chunk)
- this._ended = true
- this.flush()
-}
-
-BlockStream.prototype.flush = function () {
- this._emitChunk(true)
-}
-
-BlockStream.prototype._emitChunk = function (flush) {
- // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused)
-
- // emit a <chunkSize> chunk
- if (flush && this._zeroes) {
- // debug(" BS push zeroes", this._bufferLength)
- // push a chunk of zeroes
- var padBytes = (this._bufferLength % this._chunkSize)
- if (padBytes !== 0) padBytes = this._chunkSize - padBytes
- if (padBytes > 0) {
- // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes))
- this._buffer.push(this._zeroes.slice(0, padBytes))
- this._bufferLength += padBytes
- // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength)
- }
- }
-
- if (this._emitting || this._paused) return
- this._emitting = true
-
- // debug(" BS entering loops")
- var bufferIndex = 0
- while (this._bufferLength >= this._chunkSize &&
- (flush || !this._paused)) {
- // debug(" BS data emission loop", this._bufferLength)
-
- var out
- , outOffset = 0
- , outHas = this._chunkSize
-
- while (outHas > 0 && (flush || !this._paused) ) {
- // debug(" BS data inner emit loop", this._bufferLength)
- var cur = this._buffer[bufferIndex]
- , curHas = cur.length - this._offset
- // debug("cur=", cur)
- // debug("curHas=%j", curHas)
- // If it's not big enough to fill the whole thing, then we'll need
- // to copy multiple buffers into one. However, if it is big enough,
- // then just slice out the part we want, to save unnecessary copying.
- // Also, need to copy if we've already done some copying, since buffers
- // can't be joined like cons strings.
- if (out || curHas < outHas) {
- out = out || new Buffer(this._chunkSize)
- cur.copy(out, outOffset,
- this._offset, this._offset + Math.min(curHas, outHas))
- } else if (cur.length === outHas && this._offset === 0) {
- // shortcut -- cur is exactly long enough, and no offset.
- out = cur
- } else {
- // slice out the piece of cur that we need.
- out = cur.slice(this._offset, this._offset + outHas)
- }
-
- if (curHas > outHas) {
- // means that the current buffer couldn't be completely output
- // update this._offset to reflect how much WAS written
- this._offset += outHas
- outHas = 0
- } else {
- // output the entire current chunk.
- // toss it away
- outHas -= curHas
- outOffset += curHas
- bufferIndex ++
- this._offset = 0
- }
- }
-
- this._bufferLength -= this._chunkSize
- assert(out.length === this._chunkSize)
- // debug("emitting data", out)
- // debug(" BS emitting, paused=%j", this._paused, this._bufferLength)
- this.emit("data", out)
- out = null
- }
- // debug(" BS out of loops", this._bufferLength)
-
- // whatever is left, it's not enough to fill up a block, or we're paused
- this._buffer = this._buffer.slice(bufferIndex)
- if (this._paused) {
- // debug(" BS paused, leaving", this._bufferLength)
- this._needDrain = true
- this._emitting = false
- return
- }
-
- // if flushing, and not using null-padding, then need to emit the last
- // chunk(s) sitting in the queue. We know that it's not enough to
- // fill up a whole block, because otherwise it would have been emitted
- // above, but there may be some offset.
- var l = this._buffer.length
- if (flush && !this._zeroes && l) {
- if (l === 1) {
- if (this._offset) {
- this.emit("data", this._buffer[0].slice(this._offset))
- } else {
- this.emit("data", this._buffer[0])
- }
- } else {
- var outHas = this._bufferLength
- , out = new Buffer(outHas)
- , outOffset = 0
- for (var i = 0; i < l; i ++) {
- var cur = this._buffer[i]
- , curHas = cur.length - this._offset
- cur.copy(out, outOffset, this._offset)
- this._offset = 0
- outOffset += curHas
- this._bufferLength -= curHas
- }
- this.emit("data", out)
- }
- // truncate
- this._buffer.length = 0
- this._bufferLength = 0
- this._offset = 0
- }
-
- // now either drained or ended
- // debug("either draining, or ended", this._bufferLength, this._ended)
- // means that we've flushed out all that we can so far.
- if (this._needDrain) {
- // debug("emitting drain", this._bufferLength)
- this._needDrain = false
- this.emit("drain")
- }
-
- if ((this._bufferLength === 0) && this._ended && !this._endEmitted) {
- // debug("emitting end", this._bufferLength)
- this._endEmitted = true
- this.emit("end")
- }
-
- this._emitting = false
-
- // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize)
-}
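One consequence of the copy-avoidance logic in `_emitChunk` is worth spelling out: a write of exactly one chunk with no pending offset is re-emitted as the same Buffer object, not a copy. A small sketch demonstrating that shortcut branch:

```javascript
var BlockStream = require("block-stream")

var bs = new BlockStream(4)
var input = new Buffer("abcd") // exactly one chunk long
bs.on("data", function (out) {
  // the shortcut branch passed the buffer through without copying
  console.log(out === input) // true
})
bs.write(input)
```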
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json
deleted file mode 100644
index 19895afd9a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "_args": [
- [
- "block-stream@0.0.9",
- "/Users/rebecca/code/npm"
- ]
- ],
- "_from": "block-stream@*",
- "_id": "block-stream@0.0.9",
- "_inBundle": false,
- "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=",
- "_location": "/npm-lifecycle/node-gyp/tar/block-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "block-stream@*",
- "name": "block-stream",
- "escapedName": "block-stream",
- "rawSpec": "*",
- "saveSpec": null,
- "fetchSpec": "*"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp/tar"
- ],
- "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz",
- "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a",
- "_spec": "block-stream@*",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/block-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "inherits": "~2.0.0"
- },
- "deprecated": false,
- "description": "a stream of blocks",
- "devDependencies": {
- "tap": "^5.7.1"
- },
- "engines": {
- "node": "0.4 || >=0.5.8"
- },
- "files": [
- "block-stream.js"
- ],
- "homepage": "https://github.com/isaacs/block-stream#readme",
- "license": "ISC",
- "main": "block-stream.js",
- "name": "block-stream",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/block-stream.git"
- },
- "scripts": {
- "test": "tap test/*.js --cov"
- },
- "version": "0.0.9"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/package.json
deleted file mode 100644
index 50badc2fbb..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/package.json
+++ /dev/null
@@ -1,67 +0,0 @@
-{
- "_args": [
- [
- "tar@2.2.1",
- "/Users/rebecca/code/npm"
- ]
- ],
- "_from": "tar@^2.0.0",
- "_id": "tar@2.2.1",
- "_inBundle": false,
- "_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=",
- "_location": "/npm-lifecycle/node-gyp/tar",
- "_phantomChildren": {
- "inherits": "2.0.3"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "tar@^2.0.0",
- "name": "tar",
- "escapedName": "tar",
- "rawSpec": "^2.0.0",
- "saveSpec": null,
- "fetchSpec": "^2.0.0"
- },
- "_requiredBy": [
- "/npm-lifecycle/node-gyp"
- ],
- "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz",
- "_shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1",
- "_spec": "tar@^2.0.0",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle/node_modules/node-gyp",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bugs": {
- "url": "https://github.com/isaacs/node-tar/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "block-stream": "*",
- "fstream": "^1.0.2",
- "inherits": "2"
- },
- "deprecated": false,
- "description": "tar for node",
- "devDependencies": {
- "graceful-fs": "^4.1.2",
- "mkdirp": "^0.5.0",
- "rimraf": "1.x",
- "tap": "0.x"
- },
- "homepage": "https://github.com/isaacs/node-tar#readme",
- "license": "ISC",
- "main": "tar.js",
- "name": "tar",
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/node-tar.git"
- },
- "scripts": {
- "test": "tap test/*.js"
- },
- "version": "2.2.1"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/tar.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/tar.js
deleted file mode 100644
index a81298b9a0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/tar.js
+++ /dev/null
@@ -1,173 +0,0 @@
-// field names that every tar file must have.
-// header is padded to 512 bytes.
-var f = 0
- , fields = {}
- , path = fields.path = f++
- , mode = fields.mode = f++
- , uid = fields.uid = f++
- , gid = fields.gid = f++
- , size = fields.size = f++
- , mtime = fields.mtime = f++
- , cksum = fields.cksum = f++
- , type = fields.type = f++
- , linkpath = fields.linkpath = f++
- , headerSize = 512
- , blockSize = 512
- , fieldSize = []
-
-fieldSize[path] = 100
-fieldSize[mode] = 8
-fieldSize[uid] = 8
-fieldSize[gid] = 8
-fieldSize[size] = 12
-fieldSize[mtime] = 12
-fieldSize[cksum] = 8
-fieldSize[type] = 1
-fieldSize[linkpath] = 100
-
-// "ustar\0" may introduce another bunch of headers.
-// these are optional, and will be nulled out if not present.
-
-var ustar = fields.ustar = f++
- , ustarver = fields.ustarver = f++
- , uname = fields.uname = f++
- , gname = fields.gname = f++
- , devmaj = fields.devmaj = f++
- , devmin = fields.devmin = f++
- , prefix = fields.prefix = f++
- , fill = fields.fill = f++
-
-// terminate fields.
-fields[f] = null
-
-fieldSize[ustar] = 6
-fieldSize[ustarver] = 2
-fieldSize[uname] = 32
-fieldSize[gname] = 32
-fieldSize[devmaj] = 8
-fieldSize[devmin] = 8
-fieldSize[prefix] = 155
-fieldSize[fill] = 12
-
-// nb: prefix field may in fact be 130 bytes of prefix,
-// a null char, 12 bytes for atime, 12 bytes for ctime.
-//
-// To recognize this format:
-// 1. prefix[130] === ' ' or '\0'
-// 2. atime and ctime are octal numeric values
-// 3. atime and ctime have ' ' in their last byte
-
-var fieldEnds = {}
- , fieldOffs = {}
- , fe = 0
-for (var i = 0; i < f; i ++) {
- fieldOffs[i] = fe
- fieldEnds[i] = (fe += fieldSize[i])
-}
-
-// build a translation table of field names.
-Object.keys(fields).forEach(function (f) {
- if (fields[f] !== null) fields[fields[f]] = f
-})
-
-// different values of the 'type' field
-// names match the values of Stats.isX() functions, where appropriate
-var types =
- { 0: "File"
- , "\0": "OldFile" // like 0
- , "": "OldFile"
- , 1: "Link"
- , 2: "SymbolicLink"
- , 3: "CharacterDevice"
- , 4: "BlockDevice"
- , 5: "Directory"
- , 6: "FIFO"
- , 7: "ContiguousFile" // like 0
- // posix headers
- , g: "GlobalExtendedHeader" // k=v for the rest of the archive
- , x: "ExtendedHeader" // k=v for the next file
- // vendor-specific stuff
- , A: "SolarisACL" // skip
- , D: "GNUDumpDir" // like 5, but with data, which should be skipped
- , I: "Inode" // metadata only, skip
- , K: "NextFileHasLongLinkpath" // data = link path of next file
- , L: "NextFileHasLongPath" // data = path of next file
- , M: "ContinuationFile" // skip
- , N: "OldGnuLongPath" // like L
- , S: "SparseFile" // skip
- , V: "TapeVolumeHeader" // skip
- , X: "OldExtendedHeader" // like x
- }
-
-Object.keys(types).forEach(function (t) {
- types[types[t]] = types[types[t]] || t
-})
-
-// values for the mode field
-var modes =
- { suid: 04000 // set uid on extraction
- , sgid: 02000 // set gid on extraction
- , svtx: 01000 // set restricted deletion flag on dirs on extraction
- , uread: 0400
- , uwrite: 0200
- , uexec: 0100
- , gread: 040
- , gwrite: 020
- , gexec: 010
- , oread: 4
- , owrite: 2
- , oexec: 1
- , all: 07777
- }
-
-var numeric =
- { mode: true
- , uid: true
- , gid: true
- , size: true
- , mtime: true
- , devmaj: true
- , devmin: true
- , cksum: true
- , atime: true
- , ctime: true
- , dev: true
- , ino: true
- , nlink: true
- }
-
-Object.keys(modes).forEach(function (t) {
- modes[modes[t]] = modes[modes[t]] || t
-})
-
-var knownExtended =
- { atime: true
- , charset: true
- , comment: true
- , ctime: true
- , gid: true
- , gname: true
- , linkpath: true
- , mtime: true
- , path: true
- , realtime: true
- , security: true
- , size: true
- , uid: true
- , uname: true }
-
-
-exports.fields = fields
-exports.fieldSize = fieldSize
-exports.fieldOffs = fieldOffs
-exports.fieldEnds = fieldEnds
-exports.types = types
-exports.modes = modes
-exports.numeric = numeric
-exports.headerSize = headerSize
-exports.blockSize = blockSize
-exports.knownExtended = knownExtended
-
-exports.Pack = require("./lib/pack.js")
-exports.Parse = require("./lib/parse.js")
-exports.Extract = require("./lib/extract.js")
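Since the field sizes above are fixed, the offsets they imply are fixed too; `fieldOffs`/`fieldEnds` are just the running totals. For example, `size` starts at byte 124 and `cksum` at byte 148 of the 512-byte header:

```javascript
// Using the tables exported above (tar@2.x internals):
var tar = require("tar")
console.log(tar.fieldOffs[tar.fields.size])  // 124 (100+8+8+8)
console.log(tar.fieldEnds[tar.fields.size])  // 136 (12-byte octal field)
console.log(tar.fieldOffs[tar.fields.cksum]) // 148
```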
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js
deleted file mode 100644
index 1524ff7af0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js
+++ /dev/null
@@ -1,53 +0,0 @@
-// the fixtures have some weird stuff that is painful
-// to include directly in the repo for various reasons.
-//
-// So, unpack the fixtures with the system tar first.
-//
-// This means, of course, that it'll only work if you
-// already have a tar implementation, and some of them
-// will not properly unpack the fixtures anyway.
-//
-// But, since usually those tests will fail on Windows
-// and other systems with less capable filesystems anyway,
-// at least this way we don't cause inconveniences by
-// merely cloning the repo or installing the package.
-
-var tap = require("tap")
-, child_process = require("child_process")
-, rimraf = require("rimraf")
-, test = tap.test
-, path = require("path")
-
-test("clean fixtures", function (t) {
- rimraf(path.resolve(__dirname, "fixtures"), function (er) {
- t.ifError(er, "rimraf ./fixtures/")
- t.end()
- })
-})
-
-test("clean tmp", function (t) {
- rimraf(path.resolve(__dirname, "tmp"), function (er) {
- t.ifError(er, "rimraf ./tmp/")
- t.end()
- })
-})
-
-test("extract fixtures", function (t) {
- var c = child_process.spawn("tar"
- ,["xzvf", "fixtures.tgz"]
- ,{ cwd: __dirname })
-
- c.stdout.on("data", errwrite)
- c.stderr.on("data", errwrite)
- function errwrite (chunk) {
- process.stderr.write(chunk)
- }
-
- c.on("exit", function (code) {
- t.equal(code, 0, "extract fixtures should exit with 0")
- if (code) {
- t.comment("Note, all tests from here on out will fail because of this.")
- }
- t.end()
- })
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz
deleted file mode 100644
index 9e7014d85a..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js
deleted file mode 100644
index 9719c42f35..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js
+++ /dev/null
@@ -1,177 +0,0 @@
-// Set the umask, so that it works the same everywhere.
-process.umask(parseInt('22', 8))
-
-var fs = require('fs')
-var path = require('path')
-
-var fstream = require('fstream')
-var test = require('tap').test
-
-var tar = require('../tar.js')
-var file = path.resolve(__dirname, 'dir-normalization.tar')
-var target = path.resolve(__dirname, 'tmp/dir-normalization-test')
-var ee = 0
-
-var expectEntries = [
- { path: 'fixtures/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/a/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/the-chumbler',
- mode: '755',
- type: '2',
- linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'),
- },
- { path: 'fixtures/a/b/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/a/x',
- mode: '644',
- type: '0',
- linkpath: ''
- },
- { path: 'fixtures/a/b/c/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/a/b/c/y',
- mode: '755',
- type: '2',
- linkpath: '../../x',
- }
-]
-
-var ef = 0
-var expectFiles = [
- { path: '',
- mode: '40755',
- type: 'Directory',
- depth: 0,
- linkpath: undefined
- },
- { path: '/fixtures',
- mode: '40755',
- type: 'Directory',
- depth: 1,
- linkpath: undefined
- },
- { path: '/fixtures/a',
- mode: '40755',
- type: 'Directory',
- depth: 2,
- linkpath: undefined
- },
- { path: '/fixtures/a/b',
- mode: '40755',
- type: 'Directory',
- depth: 3,
- linkpath: undefined
- },
- { path: '/fixtures/a/b/c',
- mode: '40755',
- type: 'Directory',
- depth: 4,
- linkpath: undefined
- },
- { path: '/fixtures/a/b/c/y',
- mode: '120755',
- type: 'SymbolicLink',
- depth: 5,
- linkpath: '../../x'
- },
- { path: '/fixtures/a/x',
- mode: '100644',
- type: 'File',
- depth: 3,
- linkpath: undefined
- },
- { path: '/fixtures/the-chumbler',
- mode: '120755',
- type: 'SymbolicLink',
- depth: 2,
- linkpath: path.resolve(target, 'a/b/c/d/the-chumbler')
- }
-]
-
-test('preclean', function (t) {
- require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test'))
- t.pass('cleaned!')
- t.end()
-})
-
-test('extract test', function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- inp.pipe(extract)
-
- extract.on('end', function () {
- t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries')
-
- // should get no more entries after end
- extract.removeAllListeners('entry')
- extract.on('entry', function (e) {
- t.fail('Should not get entries after end!')
- })
-
- next()
- })
-
- extract.on('entry', function (entry) {
- var mode = entry.props.mode & (~parseInt('22', 8))
- var found = {
- path: entry.path,
- mode: mode.toString(8),
- type: entry.props.type,
- linkpath: entry.props.linkpath,
- }
-
- var wanted = expectEntries[ee++]
- t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path))
- })
-
- function next () {
- var r = fstream.Reader({
- path: target,
- type: 'Directory',
- sort: 'alpha'
- })
-
- r.on('ready', function () {
- foundEntry(r)
- })
-
- r.on('end', finish)
-
- function foundEntry (entry) {
- var p = entry.path.substr(target.length)
- var mode = entry.props.mode & (~parseInt('22', 8))
- var found = {
- path: p,
- mode: mode.toString(8),
- type: entry.props.type,
- depth: entry.props.depth,
- linkpath: entry.props.linkpath
- }
-
- var wanted = expectFiles[ef++]
- t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path))
-
- entry.on('entry', foundEntry)
- }
-
- function finish () {
- t.equal(ef, expectFiles.length, 'should have ' + ef + ' items')
- t.end()
- }
- }
-})
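
The deleted test above clears the umask bits from each observed mode before comparing (the `& ~parseInt('22', 8)` mask applied to entry.props.mode), so its expectations hold regardless of the environment's default umask. A minimal standalone sketch of that masking, with illustrative mode values:

    // Clear the umask bits (octal 022) from a raw mode before comparing,
    // mirroring the mask the deleted test applies to entry.props.mode.
    var umask = parseInt('22', 8)              // 0o022
    var raw = parseInt('777', 8)               // an illustrative raw mode
    console.log((raw & ~umask).toString(8))    // prints "755"
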
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar
deleted file mode 100644
index 3c4845356c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js
deleted file mode 100644
index e484920fd9..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js
+++ /dev/null
@@ -1,33 +0,0 @@
-var fs = require('fs')
-var path = require('path')
-var zlib = require('zlib')
-
-var tap = require('tap')
-
-var tar = require('../tar.js')
-
-var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz')
-var target = path.join(__dirname, 'tmp/extract-test')
-
-tap.test('preclean', function (t) {
- require('rimraf').sync(__dirname + '/tmp/extract-test')
- t.pass('cleaned!')
- t.end()
-})
-
-tap.test('extract test', function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- inp.pipe(zlib.createGunzip()).pipe(extract)
-
- extract.on('error', function (er) {
- t.equal(er.message, 'unexpected eof', 'error noticed')
- t.end()
- })
-
- extract.on('end', function () {
- t.fail('shouldn\'t reach this point due to errors')
- t.end()
- })
-})
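
The deleted test above pipes a deliberately truncated .tgz through zlib before extraction and asserts that an 'unexpected eof' error fires instead of 'end'. A minimal sketch of that gunzip-then-extract pipeline, assuming the legacy tar package's Extract API and an illustrative file name:

    var fs = require('fs')
    var zlib = require('zlib')
    var tar = require('tar')   // the legacy tar package vendored here

    fs.createReadStream('archive.tgz')
      .pipe(zlib.createGunzip())
      .pipe(tar.Extract({ path: 'tmp/out' }))
      .on('error', function (er) { console.error('bad archive:', er.message) })
      .on('end', function () { console.log('extracted cleanly') })
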
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract-move.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract-move.js
deleted file mode 100644
index 45400cd9bb..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract-move.js
+++ /dev/null
@@ -1,132 +0,0 @@
-// Set the umask, so that it works the same everywhere.
-process.umask(parseInt('22', 8))
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , gfs = require("graceful-fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/dir.tar")
- , target = path.resolve(__dirname, "tmp/extract-test")
- , index = 0
- , fstream = require("fstream")
- , rimraf = require("rimraf")
- , mkdirp = require("mkdirp")
-
- , ee = 0
- , expectEntries = [
- {
- "path" : "dir/",
- "mode" : "750",
- "type" : "5",
- "depth" : undefined,
- "size" : 0,
- "linkpath" : "",
- "nlink" : undefined,
- "dev" : undefined,
- "ino" : undefined
- },
- {
- "path" : "dir/sub/",
- "mode" : "750",
- "type" : "5",
- "depth" : undefined,
- "size" : 0,
- "linkpath" : "",
- "nlink" : undefined,
- "dev" : undefined,
- "ino" : undefined
- } ]
-
-function slow (fs, method, t1, t2) {
- var orig = fs[method]
- if (!orig) return null
- fs[method] = function () {
- var args = [].slice.call(arguments)
- console.error("slow", method, args[0])
- var cb = args.pop()
-
- setTimeout(function () {
- orig.apply(fs, args.concat(function(er, data) {
- setTimeout(function() {
- cb(er, data)
- }, t2)
- }))
- }, t1)
- }
-}
-
-// Make sure we get the graceful-fs that fstream is using.
-var gfs2
-try {
- gfs2 = require("fstream/node_modules/graceful-fs")
-} catch (er) {}
-
-var slowMethods = ["chown", "chmod", "utimes", "lutimes"]
-slowMethods.forEach(function (method) {
- var t1 = 500
- var t2 = 0
- slow(fs, method, t1, t2)
- slow(gfs, method, t1, t2)
- if (gfs2) {
- slow(gfs2, method, t1, t2)
- }
-})
-
-
-
-// The extract class basically just pipes the input
-// to a Reader, and then to a fstream.DirWriter
-
-// So, this is as much a test of fstream.Reader and fstream.Writer
-// as it is of tar.Extract, but it sort of makes sense.
-
-tap.test("preclean", function (t) {
- rimraf.sync(target)
-  mkdirp.sync(target)
- t.pass("cleaned!")
- t.end()
-})
-
-tap.test("extract test", function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- // give it a weird buffer size to try to break in odd places
- inp.bufferSize = 1234
-
- inp.pipe(extract)
-
- extract.on("end", function () {
- rimraf.sync(target)
-
- t.equal(ee, expectEntries.length, "should see "+ee+" entries")
-
- // should get no more entries after end
- extract.removeAllListeners("entry")
- extract.on("entry", function (e) {
- t.fail("Should not get entries after end!")
- })
-
- t.end()
- })
-
-
- extract.on("entry", function (entry) {
- var found =
- { path: entry.path
- , mode: entry.props.mode.toString(8)
- , type: entry.props.type
- , depth: entry.props.depth
- , size: entry.props.size
- , linkpath: entry.props.linkpath
- , nlink: entry.props.nlink
- , dev: entry.props.dev
- , ino: entry.props.ino
- }
-
- var wanted = expectEntries[ee ++]
-
- t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
- })
-})
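
The slow() helper in the deleted test above is a general delay-injection trick: it monkey-patches async fs methods so their callbacks fire late, flushing out ordering bugs in code that assumes metadata calls complete quickly. A standalone sketch of the same idea for a single method (the 500ms delay mirrors the test's t1 value):

    var fs = require('fs')

    // Replace fs.chmod with a wrapper that defers the real call, so any
    // caller racing against it is exercised under slow-filesystem timing.
    var origChmod = fs.chmod
    fs.chmod = function () {
      var args = [].slice.call(arguments)
      var cb = args.pop()
      setTimeout(function () {
        origChmod.apply(fs, args.concat(cb))
      }, 500)
    }
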
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract.js
deleted file mode 100644
index eca4e7cc96..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/extract.js
+++ /dev/null
@@ -1,367 +0,0 @@
-// Set the umask, so that it works the same everywhere.
-process.umask(parseInt('22', 8))
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/c.tar")
- , target = path.resolve(__dirname, "tmp/extract-test")
- , index = 0
- , fstream = require("fstream")
-
- , ee = 0
- , expectEntries =
-[ { path: 'c.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 513,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'cc.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 513,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 100,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'Ω.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 2,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'Ω.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 2,
- linkpath: '',
- nlink: 1,
- dev: 234881026,
- ino: 51693379 },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 200,
- linkpath: '',
- nlink: 1,
- dev: 234881026,
- ino: 51681874 },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 201,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
- mode: '777',
- type: '2',
- depth: undefined,
- size: 0,
- linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: '200-hard',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 200,
- linkpath: '',
- nlink: 2,
- dev: 234881026,
- ino: 51681874 },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '1',
- depth: undefined,
- size: 0,
- linkpath: path.resolve(target, '200-hard'),
- nlink: 2,
- dev: 234881026,
- ino: 51681874 } ]
-
- , ef = 0
- , expectFiles =
-[ { path: '',
- mode: '40755',
- type: 'Directory',
- depth: 0,
- linkpath: undefined },
- { path: '/200-hard',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 200,
- linkpath: undefined,
- nlink: 2 },
- { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
- mode: '120777',
- type: 'SymbolicLink',
- depth: 1,
- size: 200,
- linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- nlink: 1 },
- { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '100644',
- type: 'Link',
- depth: 1,
- size: 200,
- linkpath: path.join(target, '200-hard'),
- nlink: 2 },
- { path: '/c.txt',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 513,
- linkpath: undefined,
- nlink: 1 },
- { path: '/cc.txt',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 513,
- linkpath: undefined,
- nlink: 1 },
- { path: '/r',
- mode: '40755',
- type: 'Directory',
- depth: 1,
- linkpath: undefined },
- { path: '/r/e',
- mode: '40755',
- type: 'Directory',
- depth: 2,
- linkpath: undefined },
- { path: '/r/e/a',
- mode: '40755',
- type: 'Directory',
- depth: 3,
- linkpath: undefined },
- { path: '/r/e/a/l',
- mode: '40755',
- type: 'Directory',
- depth: 4,
- linkpath: undefined },
- { path: '/r/e/a/l/l',
- mode: '40755',
- type: 'Directory',
- depth: 5,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y',
- mode: '40755',
- type: 'Directory',
- depth: 6,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-',
- mode: '40755',
- type: 'Directory',
- depth: 7,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d',
- mode: '40755',
- type: 'Directory',
- depth: 8,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e',
- mode: '40755',
- type: 'Directory',
- depth: 9,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e',
- mode: '40755',
- type: 'Directory',
- depth: 10,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p',
- mode: '40755',
- type: 'Directory',
- depth: 11,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-',
- mode: '40755',
- type: 'Directory',
- depth: 12,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f',
- mode: '40755',
- type: 'Directory',
- depth: 13,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o',
- mode: '40755',
- type: 'Directory',
- depth: 14,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l',
- mode: '40755',
- type: 'Directory',
- depth: 15,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d',
- mode: '40755',
- type: 'Directory',
- depth: 16,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e',
- mode: '40755',
- type: 'Directory',
- depth: 17,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r',
- mode: '40755',
- type: 'Directory',
- depth: 18,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-',
- mode: '40755',
- type: 'Directory',
- depth: 19,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p',
- mode: '40755',
- type: 'Directory',
- depth: 20,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a',
- mode: '40755',
- type: 'Directory',
- depth: 21,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t',
- mode: '40755',
- type: 'Directory',
- depth: 22,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h',
- mode: '40755',
- type: 'Directory',
- depth: 23,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '100644',
- type: 'File',
- depth: 24,
- size: 100,
- linkpath: undefined,
- nlink: 1 },
- { path: '/Ω.txt',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 2,
- linkpath: undefined,
- nlink: 1 } ]
-
-
-
-// The extract class basically just pipes the input
-// to a Reader, and then to a fstream.DirWriter
-
-// So, this is as much a test of fstream.Reader and fstream.Writer
-// as it is of tar.Extract, but it sort of makes sense.
-
-tap.test("preclean", function (t) {
- require("rimraf").sync(__dirname + "/tmp/extract-test")
- t.pass("cleaned!")
- t.end()
-})
-
-tap.test("extract test", function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- // give it a weird buffer size to try to break in odd places
- inp.bufferSize = 1234
-
- inp.pipe(extract)
-
- extract.on("end", function () {
- t.equal(ee, expectEntries.length, "should see "+ee+" entries")
-
- // should get no more entries after end
- extract.removeAllListeners("entry")
- extract.on("entry", function (e) {
- t.fail("Should not get entries after end!")
- })
-
- next()
- })
-
- extract.on("entry", function (entry) {
- var found =
- { path: entry.path
- , mode: entry.props.mode.toString(8)
- , type: entry.props.type
- , depth: entry.props.depth
- , size: entry.props.size
- , linkpath: entry.props.linkpath
- , nlink: entry.props.nlink
- , dev: entry.props.dev
- , ino: entry.props.ino
- }
-
- var wanted = expectEntries[ee ++]
-
- t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
- })
-
- function next () {
- var r = fstream.Reader({ path: target
- , type: "Directory"
- // this is just to encourage consistency
- , sort: "alpha" })
-
- r.on("ready", function () {
- foundEntry(r)
- })
-
- r.on("end", finish)
-
- function foundEntry (entry) {
- var p = entry.path.substr(target.length)
- var found =
- { path: p
- , mode: entry.props.mode.toString(8)
- , type: entry.props.type
- , depth: entry.props.depth
- , size: entry.props.size
- , linkpath: entry.props.linkpath
- , nlink: entry.props.nlink
- }
-
- var wanted = expectFiles[ef ++]
-
- t.has(found, wanted, "unpacked file " + ef + " " + wanted.path)
-
- entry.on("entry", foundEntry)
- }
-
- function finish () {
- t.equal(ef, expectFiles.length, "should have "+ef+" items")
- t.end()
- }
- }
-})
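
As the comment in the deleted test notes, tar.Extract is largely a pipeline into fstream, and the test verifies the result by walking the unpacked tree with an fstream.Reader. A minimal sketch of that walk, using the same ready/entry events the test relies on (the target path is illustrative):

    var fstream = require('fstream')

    var r = fstream.Reader({ path: 'tmp/extract-test'
                           , type: 'Directory'
                           , sort: 'alpha' })

    r.on('ready', function () { walk(r) })

    function walk (entry) {
      console.log(entry.props.type, entry.path)
      entry.on('entry', walk)   // directory entries emit their children
    }
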
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz
deleted file mode 100644
index f1676023af..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/header.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/header.js
deleted file mode 100644
index 8ea6f79500..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/header.js
+++ /dev/null
@@ -1,183 +0,0 @@
-var tap = require("tap")
-var TarHeader = require("../lib/header.js")
-var tar = require("../tar.js")
-var fs = require("fs")
-
-
-var headers =
- { "a.txt file header":
- [ "612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'a.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 257
- , mtime: 1319493851
- , cksum: 5417
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
-
- , "omega pax": // the extended header from omega tar.
- [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'PaxHeader/Ω.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 120
- , mtime: 1301254537
- , cksum: 6697
- , type: 'x'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' } ]
-
- , "omega file header":
- [ "cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'Ω.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 2
- , mtime: 1301254537
- , cksum: 5690
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' } ]
-
- , "foo.js file header":
- [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'foo.js'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 4
- , mtime: 1301246433
- , cksum: 5519
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
-
- , "b.txt file header":
- [ "622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'b.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 512
- , mtime: 1319494079
- , cksum: 5425
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
-
- , "deep nested file":
- [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true,
- path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 100
- , mtime: 1319687003
- , cksum: 18124
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
- }
-
-tap.test("parsing", function (t) {
- Object.keys(headers).forEach(function (name) {
- var h = headers[name]
- , header = new Buffer(h[0], "hex")
- , expect = h[1]
- , parsed = new TarHeader(header)
-
- // console.error(parsed)
- t.has(parsed, expect, "parse " + name)
- })
- t.end()
-})
-
-tap.test("encoding", function (t) {
- Object.keys(headers).forEach(function (name) {
- var h = headers[name]
- , expect = new Buffer(h[0], "hex")
- , encoded = TarHeader.encode(h[1])
-
- // might have slightly different bytes, since the standard
- // isn't very strict, but should have the same semantics.
- // checkSum will be different, but cksumValid will be true
-
- var th = new TarHeader(encoded)
- delete h[1].block
- delete h[1].needExtended
- delete h[1].cksum
- t.has(th, h[1], "fields "+name)
- })
- t.end()
-})
-
-// test these manually. they're a bit rare to find in the wild
-tap.test("parseNumeric tests", function (t) {
- var parseNumeric = TarHeader.parseNumeric
- , numbers =
- { "303737373737373700": 2097151
- , "30373737373737373737373700": 8589934591
- , "303030303036343400": 420
- , "800000ffffffffffff": 281474976710655
- , "ffffff000000000001": -281474976710654
- , "ffffff000000000000": -281474976710655
- , "800000000000200000": 2097152
- , "8000000000001544c5": 1393861
- , "ffffffffffff1544c5": -15383354 }
- Object.keys(numbers).forEach(function (n) {
- var b = new Buffer(n, "hex")
- t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n])
- })
- t.end()
-})
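
The parseNumeric vectors in the deleted test above cover both of tar's numeric encodings: plain octal ASCII, and the base-256 binary extension flagged by the 0x80 bit of the first byte. A hedged sketch of a decoder for the octal and positive base-256 cases (negative values use a two's-complement-style form whose exact semantics the deleted tests pin to the legacy library, so they are omitted here):

    function parseNumeric (buf) {
      if (buf[0] & 0x80) {
        // base-256: low 7 bits of the first byte, then big-endian bytes
        var sum = buf[0] & 0x7f
        for (var i = 1; i < buf.length; i++) sum = sum * 256 + buf[i]
        return sum
      }
      // octal ASCII, NUL/space terminated
      var s = buf.toString('ascii').replace(/\0.*$/, '').trim()
      return s ? parseInt(s, 8) : 0
    }

    // e.g. parseNumeric(new Buffer('303737373737373700', 'hex')) === 2097151
    //      parseNumeric(new Buffer('800000000000200000', 'hex')) === 2097152
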
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js
deleted file mode 100644
index d4b03a1fe9..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js
+++ /dev/null
@@ -1,886 +0,0 @@
-// This is exactly like test/pack.js, except that it's excluding
-// any proprietary headers.
-//
-// This loses some information about the filesystem, but creates
-// tarballs that are supported by more versions of tar, especially
-// old non-spec-compliant copies of gnutar.
-
-// the symlink file is excluded from git, because it makes
-// windows freak the hell out.
-var fs = require("fs")
- , path = require("path")
- , symlink = path.resolve(__dirname, "fixtures/symlink")
-try { fs.unlinkSync(symlink) } catch (e) {}
-fs.symlinkSync("./hardlink-1", symlink)
-process.on("exit", function () {
- fs.unlinkSync(symlink)
-})
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , pkg = require("../package.json")
- , Pack = tar.Pack
- , fstream = require("fstream")
- , Reader = fstream.Reader
- , Writer = fstream.Writer
- , input = path.resolve(__dirname, "fixtures/")
- , target = path.resolve(__dirname, "tmp/pack.tar")
- , uid = process.getuid ? process.getuid() : 0
- , gid = process.getgid ? process.getgid() : 0
-
- , entries =
-
- // the global header and root fixtures/ dir are going to get
- // a different date each time, so omit that bit.
- // Also, dev/ino values differ across machines, so that's not
- // included.
- [ [ 'entry',
- { path: 'fixtures/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- uid: uid,
- gid: gid,
- size: 200 } ]
-
- , [ 'entry',
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/a.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 257,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/b.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 512,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/c.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/cc.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/sub/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/foo.js',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 4,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-1',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-2',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 0,
- type: '1',
- linkpath: 'fixtures/hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/star.4.html',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 54081,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/packtest/Ω.txt',
- uid: uid,
- gid: gid,
- size: 2 } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 100,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/symlink',
- uid: uid,
- gid: gid,
- size: 0,
- type: '2',
- linkpath: 'hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: "fixtures/Ω.txt"
- , uid: uid
- , gid: gid
- , size: 2 } ]
-
- , [ 'entry',
- { path: 'fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
- ]
-
-
-// first, make sure that the hardlinks are actually hardlinks, or this
-// won't work. Git has a way of replacing them with a copy.
-var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
- , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
- , fs = require("fs")
-
-try { fs.unlinkSync(hard2) } catch (e) {}
-fs.linkSync(hard1, hard2)
-
-tap.test("with global header", { timeout: 10000 }, function (t) {
- runTest(t, true)
-})
-
-tap.test("without global header", { timeout: 10000 }, function (t) {
- runTest(t, false)
-})
-
-function alphasort (a, b) {
- return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
-}
-
-
-function runTest (t, doGH) {
- var reader = Reader({ path: input
- , filter: function () {
- return !this.path.match(/\.(tar|hex)$/)
- }
- , sort: alphasort
- })
-
- var props = doGH ? pkg : {}
- props.noProprietary = true
- var pack = Pack(props)
- var writer = Writer(target)
-
- // global header should be skipped regardless, since it has no content.
- var entry = 0
-
- t.ok(reader, "reader ok")
- t.ok(pack, "pack ok")
- t.ok(writer, "writer ok")
-
- pack.pipe(writer)
-
- var parse = tar.Parse()
- t.ok(parse, "parser should be ok")
-
- pack.on("data", function (c) {
- // console.error("PACK DATA")
- if (c.length !== 512) {
- // this one is too noisy, only assert if it'll be relevant
- t.equal(c.length, 512, "parser should emit data in 512byte blocks")
- }
- parse.write(c)
- })
-
- pack.on("end", function () {
- // console.error("PACK END")
- t.pass("parser ends")
- parse.end()
- })
-
- pack.on("error", function (er) {
- t.fail("pack error", er)
- })
-
- parse.on("error", function (er) {
- t.fail("parse error", er)
- })
-
- writer.on("error", function (er) {
- t.fail("writer error", er)
- })
-
- reader.on("error", function (er) {
- t.fail("reader error", er)
- })
-
- parse.on("*", function (ev, e) {
- var wanted = entries[entry++]
- if (!wanted) {
- t.fail("unexpected event: "+ev)
- return
- }
- t.equal(ev, wanted[0], "event type should be "+wanted[0])
-
- if (ev !== wanted[0] || e.path !== wanted[1].path) {
- console.error("wanted", wanted)
- console.error([ev, e.props])
- e.on("end", function () {
- console.error(e.fields)
- throw "break"
- })
- }
-
- t.has(e.props, wanted[1], "properties "+wanted[1].path)
- if (wanted[2]) {
- e.on("end", function () {
- if (!e.fields) {
- t.ok(e.fields, "should get fields")
- } else {
- t.has(e.fields, wanted[2], "should get expected fields")
- }
- })
- }
- })
-
- reader.pipe(pack)
-
- writer.on("close", function () {
- t.equal(entry, entries.length, "should get all expected entries")
- t.pass("it finished")
- t.end()
- })
-
-}
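
The deleted test above exercises Pack's noProprietary option, which drops the NODETAR.* pax fields (depth, blksize, and so on) so the output is readable by older, less spec-tolerant tar implementations. A minimal sketch of that packing pipeline, with illustrative paths:

    var fs = require('fs')
    var tar = require('tar')        // the legacy tar package vendored here
    var fstream = require('fstream')

    // Walk a directory, serialize it as a tarball, and keep the archive
    // free of NODETAR.* extended headers.
    fstream.Reader({ path: 'fixtures', type: 'Directory' })
      .pipe(tar.Pack({ noProprietary: true }))
      .pipe(fs.createWriteStream('tmp/pack.tar'))
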
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack.js
deleted file mode 100644
index 0f16c07bb0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/pack.js
+++ /dev/null
@@ -1,952 +0,0 @@
-
-// the symlink file is excluded from git, because it makes
-// windows freak the hell out.
-var fs = require("fs")
- , path = require("path")
- , symlink = path.resolve(__dirname, "fixtures/symlink")
-try { fs.unlinkSync(symlink) } catch (e) {}
-fs.symlinkSync("./hardlink-1", symlink)
-process.on("exit", function () {
- fs.unlinkSync(symlink)
-})
-
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , pkg = require("../package.json")
- , Pack = tar.Pack
- , fstream = require("fstream")
- , Reader = fstream.Reader
- , Writer = fstream.Writer
- , input = path.resolve(__dirname, "fixtures/")
- , target = path.resolve(__dirname, "tmp/pack.tar")
- , uid = process.getuid ? process.getuid() : 0
- , gid = process.getgid ? process.getgid() : 0
-
- , entries =
-
- // the global header and root fixtures/ dir are going to get
- // a different date each time, so omit that bit.
- // Also, dev/ino values differ across machines, so that's not
- // included.
- [ [ 'globalExtendedHeader',
- { path: 'PaxHeader/',
- mode: 438,
- uid: 0,
- gid: 0,
- type: 'g',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { "NODETAR.author": pkg.author,
- "NODETAR.name": pkg.name,
- "NODETAR.description": pkg.description,
- "NODETAR.version": pkg.version,
- "NODETAR.repository.type": pkg.repository.type,
- "NODETAR.repository.url": pkg.repository.url,
- "NODETAR.main": pkg.main,
- "NODETAR.scripts.test": pkg.scripts.test } ]
-
- , [ 'entry',
- { path: 'fixtures/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- 'NODETAR.depth': '1',
- 'NODETAR.type': 'File',
- nlink: 1,
- uid: uid,
- gid: gid,
- size: 200,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '',
- 'NODETAR.depth': '1',
- 'NODETAR.type': 'File',
- nlink: 1,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/a.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 257,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/b.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 512,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/c.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/cc.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/sub/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
-
- , [ 'entry',
- { path: 'fixtures/foo.js',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 4,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-1',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-2',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 0,
- type: '1',
- linkpath: 'fixtures/hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/star.4.html',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 54081,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/packtest/Ω.txt',
- 'NODETAR.depth': '2',
- 'NODETAR.type': 'File',
- nlink: 1,
- uid: uid,
- gid: gid,
- size: 2,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '',
- 'NODETAR.depth': '2',
- 'NODETAR.type': 'File',
- nlink: 1,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 100,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/symlink',
- uid: uid,
- gid: gid,
- size: 0,
- type: '2',
- linkpath: 'hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: "fixtures/Ω.txt"
- , "NODETAR.depth": "1"
- , "NODETAR.type": "File"
- , nlink: 1
- , uid: uid
- , gid: gid
- , size: 2
- , "NODETAR.blksize": "4096"
- , "NODETAR.blocks": "8" } ]
-
- , [ 'entry',
- { path: 'fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '',
- 'NODETAR.depth': '1',
- 'NODETAR.type': 'File',
- nlink: 1,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
- ]
-
-
-// first, make sure that the hardlinks are actually hardlinks, or this
-// won't work. Git has a way of replacing them with a copy.
-var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
- , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
- , fs = require("fs")
-
-try { fs.unlinkSync(hard2) } catch (e) {}
-fs.linkSync(hard1, hard2)
-
-tap.test("with global header", { timeout: 10000 }, function (t) {
- runTest(t, true)
-})
-
-tap.test("without global header", { timeout: 10000 }, function (t) {
- runTest(t, false)
-})
-
-tap.test("with from base", { timeout: 10000 }, function (t) {
- runTest(t, true, true)
-})
-
-function alphasort (a, b) {
- return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
-}
-
-
-function runTest (t, doGH, doFromBase) {
- var reader = Reader({ path: input
- , filter: function () {
- return !this.path.match(/\.(tar|hex)$/)
- }
- , sort: alphasort
- })
-
- var props = doGH ? pkg : {}
- if(doFromBase) props.fromBase = true;
-
- var pack = Pack(props)
- var writer = Writer(target)
-
- // skip the global header if we're not doing that.
- var entry = doGH ? 0 : 1
-
- t.ok(reader, "reader ok")
- t.ok(pack, "pack ok")
- t.ok(writer, "writer ok")
-
- pack.pipe(writer)
-
- var parse = tar.Parse()
- t.ok(parse, "parser should be ok")
-
- pack.on("data", function (c) {
- // console.error("PACK DATA")
- if (c.length !== 512) {
- // this one is too noisy, only assert if it'll be relevant
-      t.equal(c.length, 512, "parser should emit data in 512-byte blocks")
- }
- parse.write(c)
- })
-
- pack.on("end", function () {
- // console.error("PACK END")
- t.pass("parser ends")
- parse.end()
- })
-
- pack.on("error", function (er) {
- t.fail("pack error", er)
- })
-
- parse.on("error", function (er) {
- t.fail("parse error", er)
- })
-
- writer.on("error", function (er) {
- t.fail("writer error", er)
- })
-
- reader.on("error", function (er) {
- t.fail("reader error", er)
- })
-
- parse.on("*", function (ev, e) {
- var wanted = entries[entry++]
- if (!wanted) {
- t.fail("unexpected event: "+ev)
- return
- }
- t.equal(ev, wanted[0], "event type should be "+wanted[0])
-
- if(doFromBase) {
- if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100)
- wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc'
-
- if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/')
- if(wanted[1].path == '') wanted[1].path = '/'
- if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '')
-
- wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '')
- }
-
- if (ev !== wanted[0] || e.path !== wanted[1].path) {
- console.error("wanted", wanted)
- console.error([ev, e.props])
- e.on("end", function () {
- console.error(e.fields)
- throw "break"
- })
- }
-
-
- t.has(e.props, wanted[1], "properties "+wanted[1].path)
- if (wanted[2]) {
- e.on("end", function () {
- if (!e.fields) {
- t.ok(e.fields, "should get fields")
- } else {
- t.has(e.fields, wanted[2], "should get expected fields")
- }
- })
- }
- })
-
- reader.pipe(pack)
-
- writer.on("close", function () {
- t.equal(entry, entries.length, "should get all expected entries")
- t.pass("it finished")
- t.end()
- })
-
-}
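
The pipeline this test drove end-to-end — an fstream directory Reader piped
through tar.Pack and into a file Writer — reduces to a few lines. A minimal
sketch against the legacy tar@2/fstream APIs used above; "./some-dir" and
"out.tar" are placeholder paths:

    var tar = require("tar")            // tar@2.x, as bundled here
    var fstream = require("fstream")

    fstream.Reader({ path: "./some-dir", type: "Directory" })
      .pipe(tar.Pack({ noProprietary: true }))  // omit NODETAR.* extended headers
      .pipe(fstream.Writer("out.tar"))
      .on("close", function () {
        console.log("archive written")
      })
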
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse-discard.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
deleted file mode 100644
index 79408c274b..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/c.tar")
-
-tap.test("parser test", function (t) {
- var parser = tar.Parse()
- var total = 0
- var dataTotal = 0
-
- parser.on("end", function () {
-
-    t.equals(total - 513, dataTotal, 'should have discarded only c.txt')
-
- t.end()
- })
-
- fs.createReadStream(file)
- .pipe(parser)
- .on('entry',function(entry){
- if(entry.path === 'c.txt') entry.abort()
-
- total += entry.size;
- entry.on('data',function(data){
- dataTotal += data.length
- })
- })
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse.js
deleted file mode 100644
index f765a50129..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/parse.js
+++ /dev/null
@@ -1,359 +0,0 @@
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/c.tar")
- , index = 0
-
- , expect =
-[ [ 'entry',
- { path: 'c.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 513,
- mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'),
- cksum: 5422,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'entry',
- { path: 'cc.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 513,
- mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'),
- cksum: 5525,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'entry',
- { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 100,
- mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
- cksum: 18124,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'entry',
- { path: 'Ω.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 2,
- mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- cksum: 5695,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/Ω.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 120,
- mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- cksum: 6702,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'Ω.txt',
- ctime: 1319737909,
- atime: 1319739061,
- dev: 234881026,
- ino: 51693379,
- nlink: 1 } ],
- [ 'entry',
- { path: 'Ω.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 2,
- mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- cksum: 5695,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'),
- dev: 234881026,
- ino: 51693379,
- nlink: 1 },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 353,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 14488,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- ctime: 1319686868,
- atime: 1319741254,
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 1 } ],
- [ 'entry',
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 200,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 14570,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'),
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 1 },
- undefined ],
- [ 'longPath',
- { path: '././@LongLink',
- mode: 0,
- uid: 0,
- gid: 0,
- size: 201,
- mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
- cksum: 4976,
- type: 'L',
- linkpath: '',
- ustar: false },
- '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
- [ 'entry',
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 1000,
- gid: 1000,
- size: 201,
- mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'),
- cksum: 14086,
- type: '0',
- linkpath: '',
- ustar: false },
- undefined ],
- [ 'longLinkpath',
- { path: '././@LongLink',
- mode: 0,
- uid: 0,
- gid: 0,
- size: 201,
- mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
- cksum: 4975,
- type: 'K',
- linkpath: '',
- ustar: false },
- '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
- [ 'longPath',
- { path: '././@LongLink',
- mode: 0,
- uid: 0,
- gid: 0,
- size: 201,
- mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
- cksum: 4976,
- type: 'L',
- linkpath: '',
- ustar: false },
- '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ],
- [ 'entry',
- { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
- mode: 511,
- uid: 1000,
- gid: 1000,
- size: 0,
- mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'),
- cksum: 21603,
- type: '2',
- linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- ustar: false },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/200-hard',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 143,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 6533,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { ctime: 1320617144,
- atime: 1320617232,
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 } ],
- [ 'entry',
- { path: '200-hard',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 200,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 5526,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
- atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'),
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 353,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 14488,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- ctime: 1320617144,
- atime: 1320617406,
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 } ],
- [ 'entry',
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 0,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 15173,
- type: '1',
- linkpath: '200-hard',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
- atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'),
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 },
- undefined ] ]
-
-
-tap.test("parser test", function (t) {
- var parser = tar.Parse()
-
- parser.on("end", function () {
- t.equal(index, expect.length, "saw all expected events")
- t.end()
- })
-
- fs.createReadStream(file)
- .pipe(parser)
- .on("*", function (ev, entry) {
- var wanted = expect[index]
- if (!wanted) {
- return t.fail("Unexpected event: " + ev)
- }
- var result = [ev, entry.props]
- entry.on("end", function () {
- result.push(entry.fields || entry.body)
-
- t.equal(ev, wanted[0], index + " event type")
- t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties")
- if (wanted[2]) {
- t.equivalent(result[2], wanted[2], "metadata values")
- }
- index ++
- })
- })
-})
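
The catch-all "*" listener is the entire parsing surface this test relies on:
each header block yields an event name ('entry', 'extendedHeader', 'longPath'
or 'longLinkpath') plus an entry stream carrying the parsed header in .props.
A minimal sketch with the legacy tar@2 API; "archive.tar" is a placeholder:

    var tar = require("tar")   // tar@2.x
    var fs = require("fs")

    fs.createReadStream("archive.tar")
      .pipe(tar.Parse())
      .on("*", function (ev, entry) {
        // entry.props holds the raw header fields compared above
        console.log(ev, entry.path, entry.props.type)
      })
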
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js
deleted file mode 100644
index a00ff7faa0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// clean up the fixtures
-
-var tap = require("tap")
-, rimraf = require("rimraf")
-, test = tap.test
-, path = require("path")
-
-test("clean fixtures", function (t) {
- rimraf(path.resolve(__dirname, "fixtures"), function (er) {
- t.ifError(er, "rimraf ./fixtures/")
- t.end()
- })
-})
-
-test("clean tmp", function (t) {
- rimraf(path.resolve(__dirname, "tmp"), function (er) {
- t.ifError(er, "rimraf ./tmp/")
- t.end()
- })
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/package.json b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/package.json
deleted file mode 100644
index e778e439b1..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/package.json
+++ /dev/null
@@ -1,92 +0,0 @@
-{
- "_from": "node-gyp@^3.6.2",
- "_id": "node-gyp@3.6.2",
- "_inBundle": false,
- "_integrity": "sha1-m/vlRWIoYoSDjnUOrAUpWFP6HGA=",
- "_location": "/npm-lifecycle/node-gyp",
- "_phantomChildren": {
- "abbrev": "1.1.1",
- "graceful-fs": "4.1.11",
- "inherits": "2.0.3",
- "mkdirp": "0.5.1",
- "rimraf": "2.6.2"
- },
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "node-gyp@^3.6.2",
- "name": "node-gyp",
- "escapedName": "node-gyp",
- "rawSpec": "^3.6.2",
- "saveSpec": null,
- "fetchSpec": "^3.6.2"
- },
- "_requiredBy": [
- "/npm-lifecycle"
- ],
- "_resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.6.2.tgz",
- "_shasum": "9bfbe54562286284838e750eac05295853fa1c60",
- "_spec": "node-gyp@^3.6.2",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/npm-lifecycle",
- "author": {
- "name": "Nathan Rajlich",
- "email": "nathan@tootallnate.net",
- "url": "http://tootallnate.net"
- },
- "bin": {
- "node-gyp": "./bin/node-gyp.js"
- },
- "bugs": {
- "url": "https://github.com/nodejs/node-gyp/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "fstream": "^1.0.0",
- "glob": "^7.0.3",
- "graceful-fs": "^4.1.2",
- "minimatch": "^3.0.2",
- "mkdirp": "^0.5.0",
- "nopt": "2 || 3",
- "npmlog": "0 || 1 || 2 || 3 || 4",
- "osenv": "0",
- "request": "2",
- "rimraf": "2",
- "semver": "~5.3.0",
- "tar": "^2.0.0",
- "which": "1"
- },
- "deprecated": false,
- "description": "Node.js native addon build tool",
- "devDependencies": {
- "bindings": "~1.2.1",
- "nan": "^2.0.0",
- "require-inject": "~1.3.0",
- "tape": "~4.2.0"
- },
- "engines": {
- "node": ">= 0.8.0"
- },
- "homepage": "https://github.com/nodejs/node-gyp#readme",
- "installVersion": 9,
- "keywords": [
- "native",
- "addon",
- "module",
- "c",
- "c++",
- "bindings",
- "gyp"
- ],
- "license": "MIT",
- "main": "./lib/node-gyp.js",
- "name": "node-gyp",
- "preferGlobal": true,
- "repository": {
- "type": "git",
- "url": "git://github.com/nodejs/node-gyp.git"
- },
- "scripts": {
- "test": "tape test/test-*"
- },
- "version": "3.6.2"
-}
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/src/win_delay_load_hook.cc b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/src/win_delay_load_hook.cc
deleted file mode 100644
index e75954b605..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/src/win_delay_load_hook.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * When this file is linked to a DLL, it sets up a delay-load hook that
- * intervenes when the DLL is trying to load 'node.exe' or 'iojs.exe'
- * dynamically. Instead of trying to locate the .exe file it'll just return
- * a handle to the process image.
- *
- * This allows compiled addons to work when node.exe or iojs.exe is renamed.
- */
-
-#ifdef _MSC_VER
-
-#ifndef WIN32_LEAN_AND_MEAN
-#define WIN32_LEAN_AND_MEAN
-#endif
-
-#include <windows.h>
-
-#include <delayimp.h>
-#include <string.h>
-
-static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) {
- HMODULE m;
- if (event != dliNotePreLoadLibrary)
- return NULL;
-
- if (_stricmp(info->szDll, "iojs.exe") != 0 &&
- _stricmp(info->szDll, "node.exe") != 0)
- return NULL;
-
- m = GetModuleHandle(NULL);
- return (FARPROC) m;
-}
-
-decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook;
-
-#endif
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/docker.sh b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/docker.sh
deleted file mode 100755
index ac21aa8d75..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/docker.sh
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/bin/bash
-
-#set -e
-
-test_node_versions="0.8.28 0.10.40 0.12.7 4.3.0 5.6.0"
-test_iojs_versions="1.8.4 2.4.0 3.3.0"
-
-myuid=$(id -u)
-mygid=$(id -g)
-__dirname="$(CDPATH= cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-dot_node_gyp=${__dirname}/.node-gyp/
-
-# borrows from https://github.com/rvagg/dnt/
-
-# Simple setup function for a container:
-# setup_container(image id, base image, commands to run to set up)
-setup_container() {
- local container_id="$1"
- local base_container="$2"
- local run_cmd="$3"
-
- # Does this image exist? If yes, ignore
- docker inspect "$container_id" &> /dev/null
- if [[ $? -eq 0 ]]; then
- echo "Found existing container [$container_id]"
- else
- # No such image, so make it
- echo "Did not find container [$container_id], creating..."
- docker run -i $base_container /bin/bash -c "$run_cmd"
- sleep 2
- docker commit $(docker ps -l -q) $container_id
- fi
-}
-
-# Run tests inside each of the versioned containers, copy cwd into npm's copy of node-gyp
-# so it'll be invoked by npm when a compile is needed
-# run_tests(version, test-commands)
-run_tests() {
- local version="$1"
- local run_cmd="$2"
-
- run_cmd="rsync -aAXx --delete --exclude .git --exclude build /node-gyp-src/ /usr/lib/node_modules/npm/node_modules/node-gyp/;
- /bin/su -s /bin/bash node-gyp -c 'cd && ${run_cmd}'"
-
- rm -rf $dot_node_gyp
- mkdir $dot_node_gyp
-
- docker run \
- --rm -i \
- -v ~/.npm/:/node-gyp/.npm/ \
- -v ${dot_node_gyp}:/node-gyp/.node-gyp/ \
- -v $(pwd):/node-gyp-src/:ro \
- node-gyp-test/${version} /bin/bash -c "${run_cmd}"
-}
-
-# A base image with build tools and a user account
-setup_container "node-gyp-test/base" "ubuntu:14.04" "
- adduser --gecos node-gyp --home /node-gyp/ --disabled-login node-gyp --uid $myuid &&
- echo "node-gyp:node-gyp" | chpasswd &&
- apt-get update &&
- apt-get install -y build-essential python git rsync curl
-"
-
-# An image on top of the base containing clones of repos we want to use for testing
-setup_container "node-gyp-test/clones" "node-gyp-test/base" "
- cd /node-gyp/ && git clone https://github.com/justmoon/node-bignum.git &&
- cd /node-gyp/ && git clone https://github.com/bnoordhuis/node-buffertools.git &&
- chown -R node-gyp.node-gyp /node-gyp/
-"
-
-# An image for each of the node versions we want to test with that version installed and the latest npm
-for v in $test_node_versions; do
- setup_container "node-gyp-test/${v}" "node-gyp-test/clones" "
- curl -sL https://nodejs.org/dist/v${v}/node-v${v}-linux-x64.tar.gz | tar -zxv --strip-components=1 -C /usr/ &&
- npm install npm@latest -g &&
- node -v && npm -v
- "
-done
-
-# An image for each of the io.js versions we want to test with that version installed and the latest npm
-for v in $test_iojs_versions; do
- setup_container "node-gyp-test/${v}" "node-gyp-test/clones" "
- curl -sL https://iojs.org/dist/v${v}/iojs-v${v}-linux-x64.tar.gz | tar -zxv --strip-components=1 -C /usr/ &&
- npm install npm@latest -g &&
- node -v && npm -v
- "
-done
-
-# Run the tests for all of the test images we've created,
-# we should see node-gyp doing its download, configure and run thing
-# _NOTE: bignum doesn't compile on 0.8 currently so it'll fail for that version only_
-for v in $test_node_versions $test_iojs_versions; do
- run_tests $v "
- cd node-buffertools && npm install --loglevel=info && npm test && cd
- "
- # removed for now, too noisy: cd node-bignum && npm install --loglevel=info && npm test
-done
-
-# Test use of --target=x.y.z to compile against alternate versions
-test_download_node_version() {
- local run_with_ver="$1"
- local expected_dir="$2"
- local expected_ver="$3"
- run_tests $run_with_ver "cd node-buffertools && npm install --loglevel=info --target=${expected_ver}"
- local node_ver=$(cat "${dot_node_gyp}${expected_dir}/node_version.h" | grep '#define NODE_\w*_VERSION [0-9]*$')
- node_ver=$(echo $node_ver | sed 's/#define NODE_[A-Z]*_VERSION //g' | sed 's/ /./g')
- if [ "X$(echo $node_ver)" != "X${expected_ver}" ]; then
- echo "Did not download v${expected_ver} using --target, instead got: $(echo $node_ver)"
- exit 1
- fi
- echo "Verified correct download of [v${node_ver}]"
-}
-
-test_download_node_version "0.12.7" "0.10.30/src" "0.10.30"
-test_download_node_version "3.3.0" "iojs-1.8.4/src" "1.8.4"
-# should download the headers file
-test_download_node_version "3.3.0" "iojs-3.3.0/include/node" "3.3.0"
-test_download_node_version "4.3.0" "4.3.0/include/node" "4.3.0"
-test_download_node_version "5.6.0" "5.6.0/include/node" "5.6.0"
-
-# TODO: test --dist-url by starting up a localhost server and serving up tarballs
-
-# testing --dist-url, using simple-proxy.js to make localhost work as a distribution
-# point for tarballs
-# we can test whether it uses the proxy because after 2 connections the proxy will
-# die and therefore should not be running at the end of the test, `nc` can tell us this
-run_tests "3.3.0" "
- (node /node-gyp-src/test/simple-proxy.js 8080 /foobar/ https://iojs.org/dist/ &) &&
- cd node-buffertools &&
- /node-gyp-src/bin/node-gyp.js --loglevel=info --dist-url=http://localhost:8080/foobar/ rebuild &&
- nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\"
-"
-
-# REMOVE after next semver-major
-run_tests "3.3.0" "
- (node /node-gyp-src/test/simple-proxy.js 8080 /doobar/ https://iojs.org/dist/ &) &&
- cd node-buffertools &&
- NVM_IOJS_ORG_MIRROR=http://localhost:8080/doobar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild &&
- nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\"
-"
-
-# REMOVE after next semver-major
-run_tests "0.12.7" "
- (node /node-gyp-src/test/simple-proxy.js 8080 /boombar/ https://nodejs.org/dist/ &) &&
- cd node-buffertools &&
- NVM_NODEJS_ORG_MIRROR=http://localhost:8080/boombar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild &&
- nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\"
-"
-
-run_tests "3.3.0" "
- (node /node-gyp-src/test/simple-proxy.js 8080 /doobar/ https://iojs.org/dist/ &) &&
- cd node-buffertools &&
- IOJS_ORG_MIRROR=http://localhost:8080/doobar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild &&
- nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\"
-"
-
-run_tests "0.12.7" "
- (node /node-gyp-src/test/simple-proxy.js 8080 /boombar/ https://nodejs.org/dist/ &) &&
- cd node-buffertools &&
- NODEJS_ORG_MIRROR=http://localhost:8080/boombar/ /node-gyp-src/bin/node-gyp.js --loglevel=info rebuild &&
- nc -z localhost 8080 && echo -e \"\\n\\n\\033[31mFAILED TO USE LOCAL PROXY\\033[39m\\n\\n\"
-"
-
-rm -rf $dot_node_gyp
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca-bundle.crt b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca-bundle.crt
deleted file mode 100644
index fb1dea98a7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca-bundle.crt
+++ /dev/null
@@ -1,40 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD
-VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n
-TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv
-bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV
-BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
-CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt
-Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM
-cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT
-n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia
-SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy
-0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA
-hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf
-jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH
-jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie
-Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0
-PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1
-na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW
------END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD
-VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n
-TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv
-bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ
-BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ
-MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow
-GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE
-H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv
-lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P
-foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo
-xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ
-mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC
-AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a
-K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae
-KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+
-YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n
-VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+
-uGZtfEvhbNm6m2i4UNmpCXxUZQ==
------END CERTIFICATE-----
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca.crt b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca.crt
deleted file mode 100644
index 9d2755a74f..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/ca.crt
+++ /dev/null
@@ -1,21 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD
-VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n
-TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv
-bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ
-BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ
-MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow
-GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP
-ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE
-H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv
-lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P
-foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo
-xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ
-mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC
-AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a
-K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae
-KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+
-YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n
-VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+
-uGZtfEvhbNm6m2i4UNmpCXxUZQ==
------END CERTIFICATE-----
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.crt b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.crt
deleted file mode 100644
index fe13bb96c5..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.crt
+++ /dev/null
@@ -1,19 +0,0 @@
------BEGIN CERTIFICATE-----
-MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD
-VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n
-TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv
-bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV
-BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
-CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt
-Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM
-cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT
-n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia
-SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy
-0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA
-hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf
-jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH
-jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie
-Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0
-PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1
-na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW
------END CERTIFICATE-----
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.key b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.key
deleted file mode 100644
index f8227f4c0c..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/fixtures/server.key
+++ /dev/null
@@ -1,28 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDA5gjs5nYVf9iN
-GAtmgNCVs9zlRldT+PR7XDCIk0+08RrH1jlSjNrQ+e0iTORzZJ+9D4gmuFejfzm0
-jRbmO66psecTtSuNS4+NRfo0d6+TFyz9+WaNqfnCdkxw+waEfvVYO5QQR+W9mKF/
-ZJ7bH7GBm0yFzlcaZWZG5kVgq+SBSEDb/Dz9Csr6M9Of1BEnFmNY2HZEF0LUHu7i
-N99KY5JSWNgUfUjZ5N5j5F2LphQhhJT2AShnGy/dOJpKiZ8wXmO3Yyi6jozMD4iE
-u+8WNytymHVDGCL1GR90Iwx34imb1hYQuTIbVsNGlDLSLIcPiC9WwCrPVjYp1FJ1
-zcq36dOrAgMBAAECggEACg60Xm2xsHNG/ixHw+NpfLSxCr89JGKxlJD88tIDcOK1
-S8AOoxA3BHhTddteeenALmJV7fbkkuC6SICmtgBcnfppmuxyRd6vsGT6o6ut2tR1
-gxRy1WYMYKg8WhOshlH8RspscODeyKDhorvDUJd5cNGBDuTwQ68PwxiUe3La6iac
-EVQoKohg9EmRIhMF1i8I00zXE8p3XENrlTc491ipc+gLPIP5vtqHyQztEUkZHkWd
-dXbs+n1hGCr+4FxrphGYEW80HINzmume7dGChr8nvF4ZZcuWW13DJuNim6pQno1i
-hM8VdXm8XphLh0XEGI5OCfu/CetkBILZRXKltZk6AQKBgQDoBqJzRlp7regYNU4q
-usfS+43tPNaJ0o4DIzcLawqpmK/B/cZStzHl14Sm62BVkKV6cnWAJPeLkENPMFoV
-7Q7wLZBJxpPzqXkpeiDkKN4Wovca891Rffne5Sz6IDB5mOxMjfKIEPd5RkmB5Lkp
-qQLwm3YJ2AJcLagG/Gi1DFDRAQKBgQDU1G9T43Mjke6TXG0u7gCSb+VwyDRsrvJA
-u2vy6+MANRc1EEF31YLmTKOU5XxUmhtIu7TUbgPoNi0HuRFXx4Zul3BPlAosLMJv
-kNQbA/9d0YQAfSgTsploN5CX65dLZ4ejIzVgDZREzpIBWTze6YZTA2DT5iOIet84
-DD5DujY4qwKBgG0PuUo/9oYOD3tZiv1wwD5+uY6auykbTF9TLStzzBY9y9d+hrsY
-mx6zOAoRtz1g+TdeF7b9KVJzo//T9XQ68nuYnyreaWrt7SK+4jj8sK+pOEd1+0Cz
-20CXLpX/jWmKpP+y9R5aA0kA7cpdjV90rwoTuN8Vpr5XQ5TNDhaTzGUBAoGABYig
-fGXlkH8y3NICZL37ddNC+/O4qTrDQbudyusnM9ItkEuj6CG9DY/gkPaGjQyUuQdo
-ZD2YDGmcMh81vDqL3ERDv03yFcP0KkJxwWIRObdA32JhsGFsa7FGKS0O+f7vH+bC
-dITl3gQg97gCRSl9PJtR4TCSq/HF7Acld01YK5ECgYEAwLFB5JIuxrowJe74cCMP
-n5Rwuc8vWdOsg+ytvQTv0/hVCdzcaLet6YvagnWTWaU7PUwTFxZs/mLQ9CAWVutK
-IRzs/GWxGFjH5xotDaJdDDzSdQye4tUqvUVxv7zzzsVycCPBYFkyRQ8Tmr5FLtUJ
-Cl48TZ6J8Rx5avjdtOw3QC8=
------END PRIVATE KEY-----
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/simple-proxy.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/simple-proxy.js
deleted file mode 100644
index e55330c445..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/simple-proxy.js
+++ /dev/null
@@ -1,24 +0,0 @@
-var http = require('http')
- , https = require('https')
- , server = http.createServer(handler)
- , port = +process.argv[2]
- , prefix = process.argv[3]
- , upstream = process.argv[4]
- , calls = 0
-
-server.listen(port)
-
-function handler (req, res) {
- if (req.url.indexOf(prefix) != 0)
- throw new Error('request url [' + req.url + '] does not start with [' + prefix + ']')
-
- var upstreamUrl = upstream + req.url.substring(prefix.length)
- console.log(req.url + ' -> ' + upstreamUrl)
- https.get(upstreamUrl, function (ures) {
- ures.on('end', function () {
- if (++calls == 2)
- server.close()
- })
- ures.pipe(res)
- })
-}
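
The proxy reads its port, path prefix, and upstream base from argv, and shuts
itself down after proxying two requests — which is what lets docker.sh probe
with `nc -z` afterwards to prove the proxy was actually used. A hedged sketch
of launching it from Node rather than bash, mirroring the docker.sh invocation:

    var spawn = require('child_process').spawn

    var proxy = spawn(process.execPath,
        ['test/simple-proxy.js', '8080', '/foobar/', 'https://iojs.org/dist/'],
        { stdio: 'inherit' })

    proxy.on('exit', function (code) {
      // reached once the proxy has served its two requests and closed
      console.log('proxy exited with code', code)
    })
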
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-addon.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-addon.js
deleted file mode 100644
index c2a71f4498..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-addon.js
+++ /dev/null
@@ -1,28 +0,0 @@
-'use strict'
-
-var test = require('tape')
-var execFile = require('child_process').execFile
-var path = require('path')
-var addonPath = path.resolve(__dirname, 'node_modules', 'hello_world')
-var nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js')
-
-test('build simple addon', function (t) {
- t.plan(3)
-
- // Set the loglevel otherwise the output disappears when run via 'npm test'
- var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose']
- var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) {
- var logLines = stderr.toString().trim().split(/\r?\n/)
- var lastLine = logLines[logLines.length-1]
- t.strictEqual(err, null)
- t.strictEqual(lastLine, 'gyp info ok', 'should end in ok')
- try {
- var binding = require('hello_world')
- t.strictEqual(binding.hello(), 'world')
- } catch (error) {
- t.error(error, 'load module')
- }
- })
- proc.stdout.setEncoding('utf-8')
- proc.stderr.setEncoding('utf-8')
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-configure-python.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-configure-python.js
deleted file mode 100644
index f235bdbba1..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-configure-python.js
+++ /dev/null
@@ -1,74 +0,0 @@
-'use strict'
-
-var test = require('tape')
-var path = require('path')
-var gyp = require('../lib/node-gyp')
-var requireInject = require('require-inject')
-var configure = requireInject('../lib/configure', {
- 'graceful-fs': {
- 'openSync': function (file, mode) { return 0; },
- 'closeSync': function (fd) { },
- 'writeFile': function (file, data, cb) { cb() },
- 'stat': function (file, cb) { cb(null, {}) }
- }
-})
-
-var EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib')
-var SEPARATOR = process.platform == 'win32' ? ';' : ':'
-var SPAWN_RESULT = { on: function () { } }
-
-test('configure PYTHONPATH with no existing env', function (t) {
- t.plan(1)
-
- delete process.env.PYTHONPATH
-
- var prog = gyp()
- prog.parseArgv([])
- prog.spawn = function () {
- t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH)
- return SPAWN_RESULT
- }
- configure(prog, [], t.fail)
-})
-
-test('configure PYTHONPATH with existing env of one dir', function (t) {
- t.plan(2)
-
- var existingPath = path.join('a', 'b')
- process.env.PYTHONPATH = existingPath
-
- var prog = gyp()
- prog.parseArgv([])
- prog.spawn = function () {
-
- t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR))
-
- var dirs = process.env.PYTHONPATH.split(SEPARATOR)
- t.deepEqual(dirs, [EXPECTED_PYPATH, existingPath])
-
- return SPAWN_RESULT
- }
- configure(prog, [], t.fail)
-})
-
-test('configure PYTHONPATH with existing env of multiple dirs', function (t) {
- t.plan(2)
-
- var pythonDir1 = path.join('a', 'b')
- var pythonDir2 = path.join('b', 'c')
- var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR)
- process.env.PYTHONPATH = existingPath
-
- var prog = gyp()
- prog.parseArgv([])
- prog.spawn = function () {
-
- t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR))
-
- var dirs = process.env.PYTHONPATH.split(SEPARATOR)
- t.deepEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2])
-
- return SPAWN_RESULT
- }
- configure(prog, [], t.fail)
-})
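
The require-inject pattern above is what keeps these tests hermetic:
lib/configure is loaded with graceful-fs swapped for in-memory stubs, so no
real files are touched. The same pattern in isolation — 'some-module' and the
stubbed 'fs' shape are placeholders:

    var requireInject = require('require-inject')

    // Load a module with one of its dependencies replaced by a stub for
    // the duration of this require; other requires are unaffected.
    var mod = requireInject('some-module', {
      fs: {
        readFile: function (file, cb) { cb(null, 'stubbed contents') }
      }
    })
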
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-download.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-download.js
deleted file mode 100644
index 6e6f64f058..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-download.js
+++ /dev/null
@@ -1,102 +0,0 @@
-'use strict'
-
-var fs = require('fs')
-var http = require('http')
-var https = require('https')
-var test = require('tape')
-var install = require('../lib/install')
-
-test('download over http', function (t) {
- t.plan(2)
-
- var server = http.createServer(function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
- res.end('ok')
- server.close()
- })
-
- var host = '127.0.0.1'
- server.listen(0, host, function () {
- var port = this.address().port
- var gyp = {
- opts: {},
- version: '42',
- }
- var url = 'http://' + host + ':' + port
- var req = install.test.download(gyp, {}, url)
- req.on('response', function (res) {
- var body = ''
- res.setEncoding('utf8')
- res.on('data', function(data) {
- body += data
- })
- res.on('end', function() {
- t.strictEqual(body, 'ok')
- })
- })
- })
-})
-
-test('download over https with custom ca', function (t) {
- t.plan(3)
-
- var cert = fs.readFileSync(__dirname + '/fixtures/server.crt', 'utf8')
- var key = fs.readFileSync(__dirname + '/fixtures/server.key', 'utf8')
-
- var cafile = __dirname + '/fixtures/ca.crt'
- var ca = install.test.readCAFile(cafile)
- t.strictEqual(ca.length, 1)
-
- var options = { ca: ca, cert: cert, key: key }
- var server = https.createServer(options, function (req, res) {
- t.strictEqual(req.headers['user-agent'],
- 'node-gyp v42 (node ' + process.version + ')')
- res.end('ok')
- server.close()
- })
-
- server.on('clientError', function (err) {
- throw err
- })
-
- var host = '127.0.0.1'
- server.listen(8000, host, function () {
- var port = this.address().port
- var gyp = {
- opts: { cafile: cafile },
- version: '42',
- }
- var url = 'https://' + host + ':' + port
- var req = install.test.download(gyp, {}, url)
- req.on('response', function (res) {
- var body = ''
- res.setEncoding('utf8')
- res.on('data', function(data) {
- body += data
- })
- res.on('end', function() {
- t.strictEqual(body, 'ok')
- })
- })
- })
-})
-
-test('download with missing cafile', function (t) {
- t.plan(1)
- var gyp = {
- opts: { cafile: 'no.such.file' },
- }
- try {
- install.test.download(gyp, {}, 'http://bad/')
- } catch (e) {
- t.ok(/no.such.file/.test(e.message))
- }
-})
-
-test('check certificate splitting', function (t) {
- var cas = install.test.readCAFile(__dirname + '/fixtures/ca-bundle.crt')
- t.plan(2)
- t.strictEqual(cas.length, 2)
- t.notStrictEqual(cas[0], cas[1])
-})
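
The 'certificate splitting' case depends on install.test.readCAFile() turning
the two-certificate ca-bundle.crt fixture into an array of individual PEM
strings. A standalone sketch of that behavior, assuming a simple split on the
END CERTIFICATE footer (the real helper may differ in detail):

    var fs = require('fs')

    // Split a concatenated PEM bundle into one string per certificate.
    function splitCAFile (filename) {
      var footer = '-----END CERTIFICATE-----'
      return fs.readFileSync(filename, 'utf8')
        .split(footer)
        .filter(function (part) {
          return part.indexOf('-----BEGIN CERTIFICATE-----') !== -1
        })
        .map(function (part) { return part + footer + '\n' })
    }

    // splitCAFile(__dirname + '/fixtures/ca-bundle.crt').length === 2
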
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-accessible-sync.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-accessible-sync.js
deleted file mode 100644
index d336243dd0..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-accessible-sync.js
+++ /dev/null
@@ -1,86 +0,0 @@
-'use strict'
-
-var test = require('tape')
-var path = require('path')
-var requireInject = require('require-inject')
-var configure = requireInject('../lib/configure', {
- 'graceful-fs': {
- 'closeSync': function (fd) { return undefined },
- 'openSync': function (path) {
- if (readableFiles.some(function (f) { return f === path} )) {
- return 0
- } else {
- var error = new Error('ENOENT - not found')
- throw error
- }
- }
- }
-})
-
-var dir = path.sep + 'testdir'
-var readableFile = 'readable_file'
-var anotherReadableFile = 'another_readable_file'
-var readableFileInDir = 'somedir' + path.sep + readableFile
-var readableFiles = [
- path.resolve(dir, readableFile),
- path.resolve(dir, anotherReadableFile),
- path.resolve(dir, readableFileInDir)
-]
-
-test('find accessible - empty array', function (t) {
- t.plan(1)
-
- var candidates = []
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, undefined)
-})
-
-test('find accessible - single item array, readable', function (t) {
- t.plan(1)
-
- var candidates = [ readableFile ]
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, path.resolve(dir, readableFile))
-})
-
-test('find accessible - single item array, readable in subdir', function (t) {
- t.plan(1)
-
- var candidates = [ readableFileInDir ]
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, path.resolve(dir, readableFileInDir))
-})
-
-test('find accessible - single item array, unreadable', function (t) {
- t.plan(1)
-
- var candidates = [ 'unreadable_file' ]
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, undefined)
-})
-
-
-test('find accessible - multi item array, no matches', function (t) {
- t.plan(1)
-
- var candidates = [ 'non_existent_file', 'unreadable_file' ]
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, undefined)
-})
-
-
-test('find accessible - multi item array, single match', function (t) {
- t.plan(1)
-
- var candidates = [ 'non_existent_file', readableFile ]
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, path.resolve(dir, readableFile))
-})
-
-test('find accessible - multi item array, return first match', function (t) {
- t.plan(1)
-
- var candidates = [ 'non_existent_file', anotherReadableFile, readableFile ]
- var found = configure.test.findAccessibleSync('test', dir, candidates)
- t.strictEqual(found, path.resolve(dir, anotherReadableFile))
-})
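
Taken together, these cases pin down the helper's contract: resolve each
candidate against the base directory, return the first one that can be opened
for reading, and return undefined when none can. A reconstruction from the
tests (not the actual lib/configure.js source):

    var fs = require('graceful-fs')
    var path = require('path')

    function findAccessibleSync (logprefix, dir, candidates) {
      // logprefix is only used for log output in the real helper
      for (var i = 0; i < candidates.length; i++) {
        var candidate = path.resolve(dir, candidates[i])
        var fd
        try {
          fd = fs.openSync(candidate, 'r')
        } catch (e) {
          continue // missing or unreadable: try the next candidate
        }
        fs.closeSync(fd)
        return candidate
      }
      return undefined
    }
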
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-node-directory.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-node-directory.js
deleted file mode 100644
index 46659d0cfe..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-node-directory.js
+++ /dev/null
@@ -1,115 +0,0 @@
-var test = require('tape')
-var path = require('path')
-var findNodeDirectory = require('../lib/find-node-directory')
-
-var platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix']
-
-// we should find the directory based on the directory
-// the script is running in and it should match the layout
-// in a build tree where npm is installed in
-// .... /deps/npm
-test('test find-node-directory - node install', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
- var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]}
- t.equal(
- findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj),
- path.join('/x'))
- }
-})
-
-// we should find the directory based on the directory
-// the script is running in and it should match the layout
-// in an installed tree where npm is installed in
-// .... /lib/node_modules/npm or .../node_modules/npm
-// depending on the platform
-test('test find-node-directory - node build', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
- var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]}
- if (platforms[next] === 'win32') {
- t.equal(
- findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib',
- processObj), path.join('/y'))
- } else {
- t.equal(
- findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib',
- processObj), path.join('/y'))
- }
- }
-})
-
-// we should find the directory based on the execPath
-// for node and match because it was in the bin directory
-test('test find-node-directory - node in bin directory', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
- var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]}
- t.equal(
- findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
- path.join('/x/y'))
- }
-})
-
-// we should find the directory based on the execPath
-// for node and match because it was in the Release directory
-test('test find-node-directory - node in build release dir', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
- var processObj
- if (platforms[next] === 'win32') {
- processObj = {execPath: '/x/y/Release/node', platform: platforms[next]}
- } else {
- processObj = {execPath: '/x/y/out/Release/node',
- platform: platforms[next]}
- }
-
- t.equal(
- findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
- path.join('/x/y'))
- }
-})
-
-// we should find the directory based on the execPath
-// for node and match because it was in the Debug directory
-test('test find-node-directory - node in Debug release dir', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
- var processObj
- if (platforms[next] === 'win32') {
- processObj = {execPath: '/a/b/Debug/node', platform: platforms[next]}
- } else {
- processObj = {execPath: '/a/b/out/Debug/node', platform: platforms[next]}
- }
-
- t.equal(
- findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj),
- path.join('/a/b'))
- }
-})
-
-// we should not find it, as it will match neither the execPath nor
-// the directory from which the script is running
-test('test find-node-directory - not found', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
-    var processObj = {execPath: '/x/y/z/y', platform: platforms[next]}
- t.equal(findNodeDirectory('/a/b/c/d', processObj), '')
- }
-})
-
-// we should find the directory based on the directory
-// the script is running in and it should match the layout
-// in a build tree where npm is installed in
-// .... /deps/npm
-// same test as above but make sure additional directory entries
-// don't cause an issue
-test('test find-node-directory - node install', function (t) {
- t.plan(platforms.length)
- for (var next = 0; next < platforms.length; next++) {
- var processObj = {execPath: '/x/y/bin/node', platform: platforms[next]}
- t.equal(
- findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib',
- processObj), path.join('/x/y/z/a/b/c'))
- }
-})
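
The execPath-based cases above all reduce to one heuristic: drop the binary
name, then walk up past a bin/, Release/ or Debug/ directory, plus an
enclosing out/ on non-Windows layouts. A rough reconstruction of that
fallback (not the actual lib/find-node-directory.js source):

    var path = require('path')

    function dirFromExecPath (execPath) {
      var dir = path.dirname(execPath)
      var base = path.basename(dir)
      if (base === 'bin' || base === 'Release' || base === 'Debug') {
        dir = path.dirname(dir)
        if (path.basename(dir) === 'out') dir = path.dirname(dir)
      }
      return dir
    }

    // dirFromExecPath('/x/y/bin/node')         => '/x/y'
    // dirFromExecPath('/x/y/out/Release/node') => '/x/y'
    // dirFromExecPath('/a/b/Debug/node')       => '/a/b'
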
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-python.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-python.js
deleted file mode 100644
index 2d9f171c57..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-find-python.js
+++ /dev/null
@@ -1,339 +0,0 @@
-'use strict'
-
-var test = require('tape')
-var path = require('path')
-var configure = require('../lib/configure')
-var execFile = require('child_process').execFile
-var PythonFinder = configure.test.PythonFinder
-
-test('find python', function (t) {
- t.plan(4)
-
- configure.test.findPython('python', function (err, found) {
- t.strictEqual(err, null)
- var proc = execFile(found, ['-V'], function (err, stdout, stderr) {
- t.strictEqual(err, null)
- t.strictEqual(stdout, '')
- t.ok(/Python 2/.test(stderr))
- })
- proc.stdout.setEncoding('utf-8')
- proc.stderr.setEncoding('utf-8')
- })
-})
-
-function poison(object, property) {
- function fail() {
- throw new Error('Property ' + property + ' should not have been accessed.')
- }
-  var descriptor = {
-    configurable: true,
-    enumerable: false,
-    get: fail,
-    set: fail,
-  }
- Object.defineProperty(object, property, descriptor)
-}
-
-// Work around a v0.10.x CI issue where path.resolve() on UNIX systems prefixes
-// Windows paths with the current working directory. v0.12 and up are free of
-// this issue because they use path.win32.resolve() which does the right thing.
-var resolve = path.win32 && path.win32.resolve || function() {
- function rstrip(s) { return s.replace(/\\+$/, '') }
- return [].slice.call(arguments).map(rstrip).join('\\')
-}
-
-function TestPythonFinder() { PythonFinder.apply(this, arguments) }
-TestPythonFinder.prototype = Object.create(PythonFinder.prototype)
-poison(TestPythonFinder.prototype, 'env')
-poison(TestPythonFinder.prototype, 'execFile')
-poison(TestPythonFinder.prototype, 'resolve')
-poison(TestPythonFinder.prototype, 'stat')
-poison(TestPythonFinder.prototype, 'which')
-poison(TestPythonFinder.prototype, 'win')
-
-test('find python - python', function (t) {
- t.plan(5)
-
- var f = new TestPythonFinder('python', done)
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(null, program)
- }
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'python')
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.7.0')
- }
- f.checkPython()
-
- function done(err, python) {
- t.strictEqual(err, null)
- t.strictEqual(python, 'python')
- }
-})
-
-test('find python - python too old', function (t) {
- t.plan(4)
-
- var f = new TestPythonFinder('python', done)
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(null, program)
- }
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'python')
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.3.4')
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(/is not supported by gyp/.test(err))
- }
-})
-
-test('find python - python too new', function (t) {
- t.plan(4)
-
- var f = new TestPythonFinder('python', done)
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(null, program)
- }
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'python')
- t.ok(/import platform/.test(args[1]))
- cb(null, '3.0.0')
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(/is not supported by gyp/.test(err))
- }
-})
-
-test('find python - no python', function (t) {
- t.plan(2)
-
- var f = new TestPythonFinder('python', done)
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(new Error('not found'))
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(/Can't find Python executable/.test(err))
- }
-})
-
-test('find python - no python2', function (t) {
- t.plan(6)
-
- var f = new TestPythonFinder('python2', done)
- f.which = function(program, cb) {
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(null, program)
- }
- t.strictEqual(program, 'python2')
- cb(new Error('not found'))
- }
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'python')
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.7.0')
- }
- f.checkPython()
-
- function done(err, python) {
- t.strictEqual(err, null)
- t.strictEqual(python, 'python')
- }
-})
-
-test('find python - no python2, no python, unix', function (t) {
- t.plan(3)
-
- var f = new TestPythonFinder('python2', done)
- poison(f, 'checkPythonLauncher')
- f.win = false
-
- f.which = function(program, cb) {
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(new Error('not found'))
- }
- t.strictEqual(program, 'python2')
- cb(new Error('not found'))
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(/Can't find Python executable/.test(err))
- }
-})
-
-test('find python - no python, use python launcher', function (t) {
- t.plan(8)
-
- var f = new TestPythonFinder('python', done)
- f.env = {}
- f.win = true
-
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(new Error('not found'))
- }
- f.execFile = function(program, args, opts, cb) {
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'Z:\\snake.exe')
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.7.0')
- }
- t.strictEqual(program, 'py.exe')
- t.notEqual(args.indexOf('-2'), -1)
- t.notEqual(args.indexOf('-c'), -1)
- cb(null, 'Z:\\snake.exe')
- }
- f.checkPython()
-
- function done(err, python) {
- t.strictEqual(err, null)
- t.strictEqual(python, 'Z:\\snake.exe')
- }
-})
-
-test('find python - python 3, use python launcher', function (t) {
- t.plan(10)
-
- var f = new TestPythonFinder('python', done)
- f.env = {}
- f.win = true
-
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(null, program)
- }
- f.execFile = function(program, args, opts, cb) {
- f.execFile = function(program, args, opts, cb) {
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'Z:\\snake.exe')
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.7.0')
- }
- t.strictEqual(program, 'py.exe')
- t.notEqual(args.indexOf('-2'), -1)
- t.notEqual(args.indexOf('-c'), -1)
- cb(null, 'Z:\\snake.exe')
- }
- t.strictEqual(program, 'python')
- t.ok(/import platform/.test(args[1]))
- cb(null, '3.0.0')
- }
- f.checkPython()
-
- function done(err, python) {
- t.strictEqual(err, null)
- t.strictEqual(python, 'Z:\\snake.exe')
- }
-})
-
-test('find python - python 3, use python launcher, python 2 too old',
- function (t) {
- t.plan(9)
-
- var f = new TestPythonFinder('python', done)
- f.checkedPythonLauncher = false
- f.env = {}
- f.win = true
-
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(null, program)
- }
- f.execFile = function(program, args, opts, cb) {
- f.execFile = function(program, args, opts, cb) {
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'Z:\\snake.exe')
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.3.4')
- }
- t.strictEqual(program, 'py.exe')
- t.notEqual(args.indexOf('-2'), -1)
- t.notEqual(args.indexOf('-c'), -1)
- cb(null, 'Z:\\snake.exe')
- }
- t.strictEqual(program, 'python')
- t.ok(/import platform/.test(args[1]))
- cb(null, '3.0.0')
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(/is not supported by gyp/.test(err))
- }
-})
-
-test('find python - no python, no python launcher, good guess', function (t) {
- t.plan(6)
-
- var re = /C:[\\\/]Python27[\\\/]python[.]exe/
- var f = new TestPythonFinder('python', done)
- f.env = {}
- f.win = true
-
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(new Error('not found'))
- }
- f.execFile = function(program, args, opts, cb) {
- f.execFile = function(program, args, opts, cb) {
- t.ok(re.test(program))
- t.ok(/import platform/.test(args[1]))
- cb(null, '2.7.0')
- }
- t.strictEqual(program, 'py.exe')
- cb(new Error('not found'))
- }
- f.resolve = resolve
- f.stat = function(path, cb) {
- t.ok(re.test(path))
- cb(null, {})
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(re.test(python))
- }
-})
-
-test('find python - no python, no python launcher, bad guess', function (t) {
- t.plan(4)
-
- var f = new TestPythonFinder('python', done)
- f.env = { SystemDrive: 'Z:\\' }
- f.win = true
-
- f.which = function(program, cb) {
- t.strictEqual(program, 'python')
- cb(new Error('not found'))
- }
- f.execFile = function(program, args, opts, cb) {
- t.strictEqual(program, 'py.exe')
- cb(new Error('not found'))
- }
- f.resolve = resolve
- f.stat = function(path, cb) {
-    t.ok(/Z:[\\\/]Python27[\\\/]python[.]exe/.test(path))
- var err = new Error('not found')
- err.code = 'ENOENT'
- cb(err)
- }
- f.checkPython()
-
- function done(err, python) {
- t.ok(/Can't find Python executable/.test(err))
- }
-})
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-options.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-options.js
deleted file mode 100644
index d097f81be6..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-options.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict';
-
-var test = require('tape')
-var gyp = require('../lib/node-gyp')
-
-test('options in environment', function (t) {
- t.plan(1)
-
- // `npm test` dumps a ton of npm_config_* variables in the environment.
- Object.keys(process.env)
- .filter(function(key) { return /^npm_config_/.test(key) })
- .forEach(function(key) { delete process.env[key] })
-
- // Zero-length keys should get filtered out.
- process.env.npm_config_ = '42'
- // Other keys should get added.
- process.env.npm_config_x = '42'
- // Except loglevel.
- process.env.npm_config_loglevel = 'debug'
-
- var g = gyp();
- g.parseArgv(['rebuild']) // Also sets opts.argv.
-
- t.deepEqual(Object.keys(g.opts).sort(), ['argv', 'x'])
-})
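-
-// A minimal sketch of the npm_config_* handling asserted above. It is
-// illustrative only (envOpts is not node-gyp's API; the real logic lives
-// in lib/node-gyp.js): strip the prefix, skip zero-length keys, and
-// ignore loglevel.
-function envOpts (env) {
-  var opts = {}
-  Object.keys(env).forEach(function (key) {
-    if (!/^npm_config_/.test(key)) return
-    var name = key.slice('npm_config_'.length)
-    if (name === '' || name === 'loglevel') return // filtered out above
-    opts[name] = env[key]
-  })
-  return opts
-}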
diff --git a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-process-release.js b/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-process-release.js
deleted file mode 100644
index 48411ae0a7..0000000000
--- a/deps/npm/node_modules/npm-lifecycle/node_modules/node-gyp/test/test-process-release.js
+++ /dev/null
@@ -1,637 +0,0 @@
-var test = require('tape')
-var processRelease = require('../lib/process-release')
-
-test('test process release - process.version = 0.8.20', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v0.8.20', null)
-
- t.equal(release.semver.version, '0.8.20')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.8.20',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.8.20/',
- tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt',
- versionDir: '0.8.20',
- libUrl32: 'https://nodejs.org/dist/v0.8.20/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.8.20/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
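-
-// With no process.release object (null above), processRelease falls back
-// to nodejs.org/dist URLs and the full source tarball; the separate
-// -headers tarball only appears from 0.10.42 and 0.12.10 onward, as the
-// tests below show.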
-
-test('test process release - process.version = 0.10.21', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v0.10.21', null)
-
- t.equal(release.semver.version, '0.10.21')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.10.21',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.10.21/',
- tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt',
- versionDir: '0.10.21',
- libUrl32: 'https://nodejs.org/dist/v0.10.21/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.10.21/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
-
-// prior to -headers.tar.gz
-test('test process release - process.version = 0.12.9', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v0.12.9', null)
-
- t.equal(release.semver.version, '0.12.9')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.12.9',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.12.9/',
- tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt',
- versionDir: '0.12.9',
- libUrl32: 'https://nodejs.org/dist/v0.12.9/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.12.9/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
-
-// prior to -headers.tar.gz
-test('test process release - process.version = 0.10.41', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v0.10.41', null)
-
- t.equal(release.semver.version, '0.10.41')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.10.41',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.10.41/',
- tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt',
- versionDir: '0.10.41',
- libUrl32: 'https://nodejs.org/dist/v0.10.41/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.10.41/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
-
-// has -headers.tar.gz
-test('test process release - process.release ~ node@0.10.42', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v0.10.42', null)
-
- t.equal(release.semver.version, '0.10.42')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.10.42',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.10.42/',
- tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt',
- versionDir: '0.10.42',
- libUrl32: 'https://nodejs.org/dist/v0.10.42/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.10.42/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
-
-// has -headers.tar.gz
-test('test process release - process.release ~ node@0.12.10', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v0.12.10', null)
-
- t.equal(release.semver.version, '0.12.10')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.12.10',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.12.10/',
- tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt',
- versionDir: '0.12.10',
- libUrl32: 'https://nodejs.org/dist/v0.12.10/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.12.10/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
-
-test('test process release - process.release ~ node@4.1.23', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v4.1.23/',
- tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt',
- versionDir: '4.1.23',
- libUrl32: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib',
- libUrl64: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-})
-
-test('test process release - process.release ~ node@4.1.23 / corp build', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'node',
- baseUrl: 'https://some.custom.location/',
- tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz',
- shasumsUrl: 'https://some.custom.location/SHASUMS256.txt',
- versionDir: '4.1.23',
- libUrl32: 'https://some.custom.location/win-x86/node.lib',
- libUrl64: 'https://some.custom.location/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-})
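-
-// Note that when process.release is present, every URL is derived from
-// the directory portion of headersUrl: baseUrl, SHASUMS256.txt, and the
-// win-x86/win-x64 node.lib locations all follow the headers location, as
-// the corp-build case above demonstrates.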
-
-test('test process release - process.version = 1.8.4', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v1.8.4', null)
-
- t.equal(release.semver.version, '1.8.4')
- delete release.semver
-
- t.deepEqual(release, {
- version: '1.8.4',
- name: 'iojs',
- baseUrl: 'https://iojs.org/download/release/v1.8.4/',
- tarballUrl: 'https://iojs.org/download/release/v1.8.4/iojs-v1.8.4.tar.gz',
- shasumsUrl: 'https://iojs.org/download/release/v1.8.4/SHASUMS256.txt',
- versionDir: 'iojs-1.8.4',
- libUrl32: 'https://iojs.org/download/release/v1.8.4/win-x86/iojs.lib',
- libUrl64: 'https://iojs.org/download/release/v1.8.4/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-})
-
-test('test process release - process.release ~ iojs@3.2.24', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v3.2.24', {
- name: 'io.js',
- headersUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '3.2.24')
- delete release.semver
-
- t.deepEqual(release, {
- version: '3.2.24',
- name: 'iojs',
- baseUrl: 'https://iojs.org/download/release/v3.2.24/',
- tarballUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz',
- shasumsUrl: 'https://iojs.org/download/release/v3.2.24/SHASUMS256.txt',
- versionDir: 'iojs-3.2.24',
- libUrl32: 'https://iojs.org/download/release/v3.2.24/win-x86/iojs.lib',
- libUrl64: 'https://iojs.org/download/release/v3.2.24/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-})
-
-test('test process release - process.release ~ iojs@3.2.11 +libUrl32', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v3.2.11', {
- name: 'io.js',
- headersUrl: 'https://iojs.org/download/release/v3.2.11/iojs-v3.2.11-headers.tar.gz',
- libUrl: 'https://iojs.org/download/release/v3.2.11/win-x86/iojs.lib' // custom
- })
-
- t.equal(release.semver.version, '3.2.11')
- delete release.semver
-
- t.deepEqual(release, {
- version: '3.2.11',
- name: 'iojs',
- baseUrl: 'https://iojs.org/download/release/v3.2.11/',
- tarballUrl: 'https://iojs.org/download/release/v3.2.11/iojs-v3.2.11-headers.tar.gz',
- shasumsUrl: 'https://iojs.org/download/release/v3.2.11/SHASUMS256.txt',
- versionDir: 'iojs-3.2.11',
- libUrl32: 'https://iojs.org/download/release/v3.2.11/win-x86/iojs.lib',
- libUrl64: 'https://iojs.org/download/release/v3.2.11/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-})
-
-test('test process release - process.release ~ iojs@3.2.101 +libUrl64', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v3.2.101', {
- name: 'io.js',
- headersUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz',
- libUrl: 'https://iojs.org/download/release/v3.2.101/win-x64/iojs.lib' // custom
- })
-
- t.equal(release.semver.version, '3.2.101')
- delete release.semver
-
- t.deepEqual(release, {
- version: '3.2.101',
- name: 'iojs',
- baseUrl: 'https://iojs.org/download/release/v3.2.101/',
- tarballUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz',
- shasumsUrl: 'https://iojs.org/download/release/v3.2.101/SHASUMS256.txt',
- versionDir: 'iojs-3.2.101',
- libUrl32: 'https://iojs.org/download/release/v3.2.101/win-x86/iojs.lib',
- libUrl64: 'https://iojs.org/download/release/v3.2.101/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-})
-
-test('test process release - process.release ~ iojs@3.3.0 - borked win-ia32', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v3.2.101', {
- name: 'io.js',
- headersUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz',
- libUrl: 'https://iojs.org/download/release/v3.2.101/win-ia32/iojs.lib' // custom
- })
-
- t.equal(release.semver.version, '3.2.101')
- delete release.semver
-
- t.deepEqual(release, {
- version: '3.2.101',
- name: 'iojs',
- baseUrl: 'https://iojs.org/download/release/v3.2.101/',
- tarballUrl: 'https://iojs.org/download/release/v3.2.101/iojs-v3.2.101-headers.tar.gz',
- shasumsUrl: 'https://iojs.org/download/release/v3.2.101/SHASUMS256.txt',
- versionDir: 'iojs-3.2.101',
- libUrl32: 'https://iojs.org/download/release/v3.2.101/win-x86/iojs.lib',
- libUrl64: 'https://iojs.org/download/release/v3.2.101/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-})
-
-test('test process release - process.release ~ node@4.1.23 --target=0.10.40', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '0.10.40')
- delete release.semver
-
- t.deepEqual(release, {
- version: '0.10.40',
- name: 'node',
- baseUrl: 'https://nodejs.org/dist/v0.10.40/',
- tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz',
- shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt',
- versionDir: '0.10.40',
- libUrl32: 'https://nodejs.org/dist/v0.10.40/node.lib',
- libUrl64: 'https://nodejs.org/dist/v0.10.40/x64/node.lib',
- libPath32: 'node.lib',
- libPath64: 'x64/node.lib'
- })
-})
-
-test('test process release - process.release ~ node@4.1.23 --target=1.8.4', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: { target: '1.8.4' } }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '1.8.4')
- delete release.semver
-
- t.deepEqual(release, {
- version: '1.8.4',
- name: 'iojs',
- baseUrl: 'https://iojs.org/download/release/v1.8.4/',
- tarballUrl: 'https://iojs.org/download/release/v1.8.4/iojs-v1.8.4.tar.gz',
- shasumsUrl: 'https://iojs.org/download/release/v1.8.4/SHASUMS256.txt',
- versionDir: 'iojs-1.8.4',
- libUrl32: 'https://iojs.org/download/release/v1.8.4/win-x86/iojs.lib',
- libUrl64: 'https://iojs.org/download/release/v1.8.4/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-})
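-
-// A --target in the io.js range (1.x-3.x) switches the release name and
-// download location to iojs even though the running binary is node.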
-
-test('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'node',
- baseUrl: 'https://foo.bar/baz/v4.1.23/',
- tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz',
- shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt',
- versionDir: '4.1.23',
- libUrl32: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib',
- libUrl64: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-})
-
-test('test process release - process.release ~ frankenstein@4.1.23', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v4.1.23', {
- name: 'frankenstein',
- headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'frankenstein',
- baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/',
- tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz',
- shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt',
- versionDir: 'frankenstein-4.1.23',
- libUrl32: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib',
- libUrl64: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib',
- libPath32: 'win-x86/frankenstein.lib',
- libPath64: 'win-x64/frankenstein.lib'
- })
-})
-
-test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', {
- name: 'frankenstein',
- headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'frankenstein',
- baseUrl: 'http://foo.bar/baz/v4.1.23/',
- tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz',
- shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt',
- versionDir: 'frankenstein-4.1.23',
- libUrl32: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib',
- libUrl64: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib',
- libPath32: 'win-x86/frankenstein.lib',
- libPath64: 'win-x64/frankenstein.lib'
- })
-})
-
-test('test process release - process.release ~ node@4.0.0-rc.4', function (t) {
- t.plan(2)
-
- var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', {
- name: 'node',
- headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.0.0-rc.4')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.0.0-rc.4',
- name: 'node',
- baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
- tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
- shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
- versionDir: '4.0.0-rc.4',
- libUrl32: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib',
- libUrl64: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-})
-
-test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) {
- t.plan(2)
-
-  // note the missing 'v' on the arg; processRelease should normalise it
-  // when checking whether the requested version matches the default
- var release = processRelease([ '4.0.0-rc.4' ], { opts: {} }, 'v4.0.0-rc.4', {
- name: 'node',
- headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.0.0-rc.4')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.0.0-rc.4',
- name: 'node',
- baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
- tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
- shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
- versionDir: '4.0.0-rc.4',
- libUrl32: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib',
- libUrl64: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-})
-
-
-test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) {
- t.plan(2)
-
-  // additional command-line arguments should be ignored when argv[0] is
-  // not a valid version string
- var release = processRelease([ 'this is no version!' ], { opts: {} }, 'v4.0.0-rc.4', {
- name: 'node',
- headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.0.0-rc.4')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.0.0-rc.4',
- name: 'node',
- baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
- tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
- shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
- versionDir: '4.0.0-rc.4',
- libUrl32: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib',
- libUrl64: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-})
-
-test('test process release - NODEJS_ORG_MIRROR', function (t) {
- t.plan(2)
-
- process.env.NODEJS_ORG_MIRROR = 'http://foo.bar'
-
- var release = processRelease([], { opts: {} }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'node',
- baseUrl: 'http://foo.bar/v4.1.23/',
- tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz',
- shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt',
- versionDir: '4.1.23',
- libUrl32: 'http://foo.bar/v4.1.23/win-x86/node.lib',
- libUrl64: 'http://foo.bar/v4.1.23/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-
- delete process.env.NODEJS_ORG_MIRROR
-})
-
-test('test process release - NVM_NODEJS_ORG_MIRROR', function (t) {
- t.plan(2)
-
- process.env.NVM_NODEJS_ORG_MIRROR = 'http://foo.bar'
-
- var release = processRelease([], { opts: {} }, 'v4.1.23', {
- name: 'node',
- headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '4.1.23')
- delete release.semver
-
- t.deepEqual(release, {
- version: '4.1.23',
- name: 'node',
- baseUrl: 'http://foo.bar/v4.1.23/',
- tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz',
- shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt',
- versionDir: '4.1.23',
- libUrl32: 'http://foo.bar/v4.1.23/win-x86/node.lib',
- libUrl64: 'http://foo.bar/v4.1.23/win-x64/node.lib',
- libPath32: 'win-x86/node.lib',
- libPath64: 'win-x64/node.lib'
- })
-
- delete process.env.NVM_NODEJS_ORG_MIRROR
-})
-
-test('test process release - IOJS_ORG_MIRROR', function (t) {
- t.plan(2)
-
- process.env.IOJS_ORG_MIRROR = 'http://foo.bar'
-
- var release = processRelease([], { opts: {} }, 'v3.2.24', {
- name: 'io.js',
- headersUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '3.2.24')
- delete release.semver
-
- t.deepEqual(release, {
- version: '3.2.24',
- name: 'iojs',
- baseUrl: 'http://foo.bar/v3.2.24/',
- tarballUrl: 'http://foo.bar/v3.2.24/iojs-v3.2.24-headers.tar.gz',
- shasumsUrl: 'http://foo.bar/v3.2.24/SHASUMS256.txt',
- versionDir: 'iojs-3.2.24',
- libUrl32: 'http://foo.bar/v3.2.24/win-x86/iojs.lib',
- libUrl64: 'http://foo.bar/v3.2.24/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-
- delete process.env.IOJS_ORG_MIRROR
-})
-
-test('test process release - NVM_IOJS_ORG_MIRROR', function (t) {
- t.plan(2)
-
- process.env.NVM_IOJS_ORG_MIRROR = 'http://foo.bar'
-
- var release = processRelease([], { opts: {} }, 'v3.2.24', {
- name: 'io.js',
- headersUrl: 'https://iojs.org/download/release/v3.2.24/iojs-v3.2.24-headers.tar.gz'
- })
-
- t.equal(release.semver.version, '3.2.24')
- delete release.semver
-
- t.deepEqual(release, {
- version: '3.2.24',
- name: 'iojs',
- baseUrl: 'http://foo.bar/v3.2.24/',
- tarballUrl: 'http://foo.bar/v3.2.24/iojs-v3.2.24-headers.tar.gz',
- shasumsUrl: 'http://foo.bar/v3.2.24/SHASUMS256.txt',
- versionDir: 'iojs-3.2.24',
- libUrl32: 'http://foo.bar/v3.2.24/win-x86/iojs.lib',
- libUrl64: 'http://foo.bar/v3.2.24/win-x64/iojs.lib',
- libPath32: 'win-x86/iojs.lib',
- libPath64: 'win-x64/iojs.lib'
- })
-
- delete process.env.NVM_IOJS_ORG_MIRROR
-})
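-
-// A sketch of the mirror override the four tests above exercise. This is
-// illustrative, not lib/process-release.js itself, and the precedence
-// between the NVM_ and plain variants is an assumption; the release
-// directory (v<version>/) is appended to whatever base is returned.
-function mirrorBase (env, name, defaultBase) {
-  var mirror = (name === 'iojs')
-    ? (env.IOJS_ORG_MIRROR || env.NVM_IOJS_ORG_MIRROR)
-    : (env.NODEJS_ORG_MIRROR || env.NVM_NODEJS_ORG_MIRROR)
-  return mirror || defaultBase
-}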