aboutsummaryrefslogtreecommitdiff
path: root/deps/npm/node_modules/node-gyp/node_modules
diff options
context:
space:
mode:
Diffstat (limited to 'deps/npm/node_modules/node-gyp/node_modules')
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/.npmignore5
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/.travis.yml9
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/README.md76
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js134
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/examples/pipe.js118
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/examples/reader.js68
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js27
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/fstream.js35
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/abstract.js85
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/collect.js70
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js252
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js174
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js150
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js107
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/get-type.js33
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js53
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js95
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js95
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js111
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/reader.js255
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js36
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/lib/writer.js390
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/fstream/package.json62
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/README.md209
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/minimatch.js923
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md123
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js200
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore5
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md21
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md91
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js59
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json77
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml4
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE18
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown62
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js6
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js13
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json92
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js39
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json75
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/minimatch/package.json63
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/.npmignore1
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/.travis.yml9
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/README.md211
-rwxr-xr-xdeps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js54
-rwxr-xr-xdeps/npm/node_modules/node-gyp/node_modules/nopt/examples/my-program.js30
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js415
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/package.json59
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/nopt/test/basic.js273
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/README.md350
-rwxr-xr-xdeps/npm/node_modules/node-gyp/node_modules/semver/bin/semver133
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/package.json54
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf16
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/semver/semver.js1203
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/.npmignore5
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/.travis.yml4
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE12
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/README.md50
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/examples/extracter.js19
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/examples/packer.js24
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js35
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js30
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js169
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry.js220
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js191
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header.js140
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/extract.js94
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js14
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/header.js384
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js236
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js275
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE25
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE15
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md14
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js209
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json63
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/package.json64
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/tar.js173
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js53
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgzbin0 -> 4096 bytes
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js177
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tarbin0 -> 4608 bytes
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js33
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/extract-move.js132
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/extract.js367
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/fixtures.tgzbin0 -> 19352 bytes
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/header.js183
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js886
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/pack.js952
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js29
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/parse.js359
-rw-r--r--deps/npm/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js20
96 files changed, 13044 insertions, 0 deletions
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/fstream/.npmignore
new file mode 100644
index 0000000000..494272a81a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/.npmignore
@@ -0,0 +1,5 @@
+.*.swp
+node_modules/
+examples/deep-copy/
+examples/path/
+examples/filter-copy/
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/fstream/.travis.yml
new file mode 100644
index 0000000000..9f5972ab5a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/.travis.yml
@@ -0,0 +1,9 @@
+language: node_js
+node_js:
+ - "6"
+ - "4"
+ - "0.10"
+ - "0.12"
+before_install:
+ - "npm config set spin false"
+ - "npm install -g npm/npm"
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/fstream/LICENSE
new file mode 100644
index 0000000000..19129e315f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/README.md b/deps/npm/node_modules/node-gyp/node_modules/fstream/README.md
new file mode 100644
index 0000000000..9d8cb77e5c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/README.md
@@ -0,0 +1,76 @@
+Like FS streams, but with stat on them, and supporting directories and
+symbolic links, as well as normal files. Also, you can use this to set
+the stats on a file, even if you don't change its contents, or to create
+a symlink, etc.
+
+So, for example, you can "write" a directory, and it'll call `mkdir`. You
+can specify a uid and gid, and it'll call `chown`. You can specify a
+`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink
+and provide a `linkpath` and it'll call `symlink`.
+
+Note that it won't automatically resolve symbolic links. So, if you
+call `fstream.Reader('/some/symlink')` then you'll get an object
+that stats and then ends immediately (since it has no data). To follow
+symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:
+true })`.
+
+There are various checks to make sure that the bytes emitted are the
+same as the intended size, if the size is set.
+
+## Examples
+
+```javascript
+fstream
+ .Writer({ path: "path/to/file"
+ , mode: 0755
+ , size: 6
+ })
+ .write("hello\n")
+ .end()
+```
+
+This will create the directories if they're missing, and then write
+`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have
+been written when it's done.
+
+```javascript
+fstream
+ .Writer({ path: "path/to/file"
+ , mode: 0755
+ , size: 6
+ , flags: "a"
+ })
+ .write("hello\n")
+ .end()
+```
+
+You can pass flags in, if you want to append to a file.
+
+```javascript
+fstream
+ .Writer({ path: "path/to/symlink"
+ , linkpath: "./file"
+ , SymbolicLink: true
+ , mode: "0755" // octal strings supported
+ })
+ .end()
+```
+
+If isSymbolicLink is a function, it'll be called, and if it returns
+true, then it'll treat it as a symlink. If it's not a function, then
+any truish value will make a symlink, or you can set `type:
+'SymbolicLink'`, which does the same thing.
+
+Note that the linkpath is relative to the symbolic link location, not
+the parent dir or cwd.
+
+```javascript
+fstream
+ .Reader("path/to/dir")
+ .pipe(fstream.Writer("path/to/other/dir"))
+```
+
+This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other
+dir exists and isn't a directory, then it'll emit an error. It'll also
+set the uid, gid, mode, etc. to be identical. In this way, it's more
+like `rsync -a` than simply a copy.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js
new file mode 100644
index 0000000000..83dadef8a6
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/filter-pipe.js
@@ -0,0 +1,134 @@
+var fstream = require('../fstream.js')
+var path = require('path')
+
+var r = fstream.Reader({
+ path: path.dirname(__dirname),
+ filter: function () {
+ return !this.basename.match(/^\./) &&
+ !this.basename.match(/^node_modules$/) &&
+ !this.basename.match(/^deep-copy$/) &&
+ !this.basename.match(/^filter-copy$/)
+ }
+})
+
+// this writer will only write directories
+var w = fstream.Writer({
+ path: path.resolve(__dirname, 'filter-copy'),
+ type: 'Directory',
+ filter: function () {
+ return this.type === 'Directory'
+ }
+})
+
+var indent = ''
+
+r.on('entry', appears)
+r.on('ready', function () {
+ console.error('ready to begin!', r.path)
+})
+
+function appears (entry) {
+ console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename)
+ if (foggy) {
+ console.error('FOGGY!')
+ var p = entry
+ do {
+ console.error(p.depth, p.path, p._paused)
+ p = p.parent
+ } while (p)
+
+ throw new Error('\u001b[mshould not have entries while foggy')
+ }
+ indent += '\t'
+ entry.on('data', missile(entry))
+ entry.on('end', runaway(entry))
+ entry.on('entry', appears)
+}
+
+var foggy
+function missile (entry) {
+ function liftFog (who) {
+ if (!foggy) return
+ if (who) {
+ console.error('%s breaks the spell!', who && who.path)
+ } else {
+ console.error('the spell expires!')
+ }
+ console.error('\u001b[mthe fog lifts!\n')
+ clearTimeout(foggy)
+ foggy = null
+ if (entry._paused) entry.resume()
+ }
+
+ if (entry.type === 'Directory') {
+ var ended = false
+ entry.once('end', function () { ended = true })
+ return function (c) {
+ // throw in some pathological pause()/resume() behavior
+ // just for extra fun.
+ process.nextTick(function () {
+ if (!foggy && !ended) { // && Math.random() < 0.3) {
+ console.error(indent + '%s casts a spell', entry.basename)
+ console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
+ entry.pause()
+ entry.once('resume', liftFog)
+ foggy = setTimeout(liftFog, 1000)
+ }
+ })
+ }
+ }
+
+ return function (c) {
+ var e = Math.random() < 0.5
+ console.error(indent + '%s %s for %d damage!',
+ entry.basename,
+ e ? 'is struck' : 'fires a chunk',
+ c.length)
+ }
+}
+
+function runaway (entry) {
+ return function () {
+ var e = Math.random() < 0.5
+ console.error(indent + '%s %s',
+ entry.basename,
+ e ? 'turns to flee' : 'is vanquished!')
+ indent = indent.slice(0, -1)
+ }
+}
+
+w.on('entry', attacks)
+// w.on('ready', function () { attacks(w) })
+function attacks (entry) {
+ console.error(indent + '%s %s!', entry.basename,
+ entry.type === 'Directory' ? 'calls for backup' : 'attacks')
+ entry.on('entry', attacks)
+}
+
+var ended = false
+var i = 1
+r.on('end', function () {
+ if (foggy) clearTimeout(foggy)
+ console.error("\u001b[mIT'S OVER!!")
+ console.error('A WINNAR IS YOU!')
+
+ console.log('ok ' + (i++) + ' A WINNAR IS YOU')
+ ended = true
+ // now go through and verify that everything in there is a dir.
+ var p = path.resolve(__dirname, 'filter-copy')
+ var checker = fstream.Reader({ path: p })
+ checker.checker = true
+ checker.on('child', function (e) {
+ var ok = e.type === 'Directory'
+ console.log((ok ? '' : 'not ') + 'ok ' + (i++) +
+ ' should be a dir: ' +
+ e.path.substr(checker.path.length + 1))
+ })
+})
+
+process.on('exit', function () {
+ console.log((ended ? '' : 'not ') + 'ok ' + (i) + ' ended')
+ console.log('1..' + i)
+})
+
+r.pipe(w)
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/pipe.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/pipe.js
new file mode 100644
index 0000000000..3de42ef32b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/pipe.js
@@ -0,0 +1,118 @@
+var fstream = require('../fstream.js')
+var path = require('path')
+
+var r = fstream.Reader({
+ path: path.dirname(__dirname),
+ filter: function () {
+ return !this.basename.match(/^\./) &&
+ !this.basename.match(/^node_modules$/) &&
+ !this.basename.match(/^deep-copy$/)
+ }
+})
+
+var w = fstream.Writer({
+ path: path.resolve(__dirname, 'deep-copy'),
+ type: 'Directory'
+})
+
+var indent = ''
+
+r.on('entry', appears)
+r.on('ready', function () {
+ console.error('ready to begin!', r.path)
+})
+
+function appears (entry) {
+ console.error(indent + 'a %s appears!', entry.type, entry.basename, typeof entry.basename, entry)
+ if (foggy) {
+ console.error('FOGGY!')
+ var p = entry
+ do {
+ console.error(p.depth, p.path, p._paused)
+ p = p.parent
+ } while (p)
+
+ throw new Error('\u001b[mshould not have entries while foggy')
+ }
+ indent += '\t'
+ entry.on('data', missile(entry))
+ entry.on('end', runaway(entry))
+ entry.on('entry', appears)
+}
+
+var foggy
+function missile (entry) {
+ function liftFog (who) {
+ if (!foggy) return
+ if (who) {
+ console.error('%s breaks the spell!', who && who.path)
+ } else {
+ console.error('the spell expires!')
+ }
+ console.error('\u001b[mthe fog lifts!\n')
+ clearTimeout(foggy)
+ foggy = null
+ if (entry._paused) entry.resume()
+ }
+
+ if (entry.type === 'Directory') {
+ var ended = false
+ entry.once('end', function () { ended = true })
+ return function (c) {
+ // throw in some pathological pause()/resume() behavior
+ // just for extra fun.
+ process.nextTick(function () {
+ if (!foggy && !ended) { // && Math.random() < 0.3) {
+ console.error(indent + '%s casts a spell', entry.basename)
+ console.error('\na slowing fog comes over the battlefield...\n\u001b[32m')
+ entry.pause()
+ entry.once('resume', liftFog)
+ foggy = setTimeout(liftFog, 10)
+ }
+ })
+ }
+ }
+
+ return function (c) {
+ var e = Math.random() < 0.5
+ console.error(indent + '%s %s for %d damage!',
+ entry.basename,
+ e ? 'is struck' : 'fires a chunk',
+ c.length)
+ }
+}
+
+function runaway (entry) {
+ return function () {
+ var e = Math.random() < 0.5
+ console.error(indent + '%s %s',
+ entry.basename,
+ e ? 'turns to flee' : 'is vanquished!')
+ indent = indent.slice(0, -1)
+ }
+}
+
+w.on('entry', attacks)
+// w.on('ready', function () { attacks(w) })
+function attacks (entry) {
+ console.error(indent + '%s %s!', entry.basename,
+ entry.type === 'Directory' ? 'calls for backup' : 'attacks')
+ entry.on('entry', attacks)
+}
+
+var ended = false
+r.on('end', function () {
+ if (foggy) clearTimeout(foggy)
+ console.error("\u001b[mIT'S OVER!!")
+ console.error('A WINNAR IS YOU!')
+
+ console.log('ok 1 A WINNAR IS YOU')
+ ended = true
+})
+
+process.on('exit', function () {
+ console.log((ended ? '' : 'not ') + 'ok 2 ended')
+ console.log('1..2')
+})
+
+r.pipe(w)
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/reader.js
new file mode 100644
index 0000000000..19affbe7e6
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/reader.js
@@ -0,0 +1,68 @@
+var fstream = require('../fstream.js')
+var tap = require('tap')
+var fs = require('fs')
+var path = require('path')
+var dir = path.dirname(__dirname)
+
+tap.test('reader test', function (t) {
+ var children = -1
+ var gotReady = false
+ var ended = false
+
+ var r = fstream.Reader({
+ path: dir,
+ filter: function () {
+ // return this.parent === r
+ return this.parent === r || this === r
+ }
+ })
+
+ r.on('ready', function () {
+ gotReady = true
+ children = fs.readdirSync(dir).length
+ console.error('Setting expected children to ' + children)
+ t.equal(r.type, 'Directory', 'should be a directory')
+ })
+
+ r.on('entry', function (entry) {
+ children--
+ if (!gotReady) {
+ t.fail('children before ready!')
+ }
+ t.equal(entry.dirname, r.path, 'basename is parent dir')
+ })
+
+ r.on('error', function (er) {
+ t.fail(er)
+ t.end()
+ process.exit(1)
+ })
+
+ r.on('end', function () {
+ t.equal(children, 0, 'should have seen all children')
+ ended = true
+ })
+
+ var closed = false
+ r.on('close', function () {
+ t.ok(ended, 'saw end before close')
+ t.notOk(closed, 'close should only happen once')
+ closed = true
+ t.end()
+ })
+})
+
+tap.test('reader error test', function (t) {
+ // assumes non-root on a *nix system
+ var r = fstream.Reader({ path: '/etc/shadow' })
+
+ r.once('error', function (er) {
+ t.ok(true)
+ t.end()
+ })
+
+ r.on('end', function () {
+ t.fail('reader ended without error')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js
new file mode 100644
index 0000000000..19e81eea9f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/examples/symlink-write.js
@@ -0,0 +1,27 @@
+var fstream = require('../fstream.js')
+var notOpen = false
+process.chdir(__dirname)
+
+fstream
+ .Writer({
+ path: 'path/to/symlink',
+ linkpath: './file',
+ isSymbolicLink: true,
+ mode: '0755' // octal strings supported
+ })
+ .on('close', function () {
+ notOpen = true
+ var fs = require('fs')
+ var s = fs.lstatSync('path/to/symlink')
+ var isSym = s.isSymbolicLink()
+ console.log((isSym ? '' : 'not ') + 'ok 1 should be symlink')
+ var t = fs.readlinkSync('path/to/symlink')
+ var isTarget = t === './file'
+ console.log((isTarget ? '' : 'not ') + 'ok 2 should link to ./file')
+ })
+ .end()
+
+process.on('exit', function () {
+ console.log((notOpen ? '' : 'not ') + 'ok 3 should be closed')
+ console.log('1..3')
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/fstream.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/fstream.js
new file mode 100644
index 0000000000..c0eb3bea78
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/fstream.js
@@ -0,0 +1,35 @@
+exports.Abstract = require('./lib/abstract.js')
+exports.Reader = require('./lib/reader.js')
+exports.Writer = require('./lib/writer.js')
+
+exports.File = {
+ Reader: require('./lib/file-reader.js'),
+ Writer: require('./lib/file-writer.js')
+}
+
+exports.Dir = {
+ Reader: require('./lib/dir-reader.js'),
+ Writer: require('./lib/dir-writer.js')
+}
+
+exports.Link = {
+ Reader: require('./lib/link-reader.js'),
+ Writer: require('./lib/link-writer.js')
+}
+
+exports.Proxy = {
+ Reader: require('./lib/proxy-reader.js'),
+ Writer: require('./lib/proxy-writer.js')
+}
+
+exports.Reader.Dir = exports.DirReader = exports.Dir.Reader
+exports.Reader.File = exports.FileReader = exports.File.Reader
+exports.Reader.Link = exports.LinkReader = exports.Link.Reader
+exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader
+
+exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer
+exports.Writer.File = exports.FileWriter = exports.File.Writer
+exports.Writer.Link = exports.LinkWriter = exports.Link.Writer
+exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer
+
+exports.collect = require('./lib/collect.js')
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/abstract.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/abstract.js
new file mode 100644
index 0000000000..97c120e1d5
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/abstract.js
@@ -0,0 +1,85 @@
+// the parent class for all fstreams.
+
+module.exports = Abstract
+
+var Stream = require('stream').Stream
+var inherits = require('inherits')
+
+function Abstract () {
+ Stream.call(this)
+}
+
+inherits(Abstract, Stream)
+
+Abstract.prototype.on = function (ev, fn) {
+ if (ev === 'ready' && this.ready) {
+ process.nextTick(fn.bind(this))
+ } else {
+ Stream.prototype.on.call(this, ev, fn)
+ }
+ return this
+}
+
+Abstract.prototype.abort = function () {
+ this._aborted = true
+ this.emit('abort')
+}
+
+Abstract.prototype.destroy = function () {}
+
+Abstract.prototype.warn = function (msg, code) {
+ var self = this
+ var er = decorate(msg, code, self)
+ if (!self.listeners('warn')) {
+ console.error('%s %s\n' +
+ 'path = %s\n' +
+ 'syscall = %s\n' +
+ 'fstream_type = %s\n' +
+ 'fstream_path = %s\n' +
+ 'fstream_unc_path = %s\n' +
+ 'fstream_class = %s\n' +
+ 'fstream_stack =\n%s\n',
+ code || 'UNKNOWN',
+ er.stack,
+ er.path,
+ er.syscall,
+ er.fstream_type,
+ er.fstream_path,
+ er.fstream_unc_path,
+ er.fstream_class,
+ er.fstream_stack.join('\n'))
+ } else {
+ self.emit('warn', er)
+ }
+}
+
+Abstract.prototype.info = function (msg, code) {
+ this.emit('info', msg, code)
+}
+
+Abstract.prototype.error = function (msg, code, th) {
+ var er = decorate(msg, code, this)
+ if (th) throw er
+ else this.emit('error', er)
+}
+
+function decorate (er, code, self) {
+ if (!(er instanceof Error)) er = new Error(er)
+ er.code = er.code || code
+ er.path = er.path || self.path
+ er.fstream_type = er.fstream_type || self.type
+ er.fstream_path = er.fstream_path || self.path
+ if (self._path !== self.path) {
+ er.fstream_unc_path = er.fstream_unc_path || self._path
+ }
+ if (self.linkpath) {
+ er.fstream_linkpath = er.fstream_linkpath || self.linkpath
+ }
+ er.fstream_class = er.fstream_class || self.constructor.name
+ er.fstream_stack = er.fstream_stack ||
+ new Error().stack.split(/\n/).slice(3).map(function (s) {
+ return s.replace(/^ {4}at /, '')
+ })
+
+ return er
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/collect.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/collect.js
new file mode 100644
index 0000000000..e5d4f35833
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/collect.js
@@ -0,0 +1,70 @@
+module.exports = collect
+
+function collect (stream) {
+ if (stream._collected) return
+
+ if (stream._paused) return stream.on('resume', collect.bind(null, stream))
+
+ stream._collected = true
+ stream.pause()
+
+ stream.on('data', save)
+ stream.on('end', save)
+ var buf = []
+ function save (b) {
+ if (typeof b === 'string') b = new Buffer(b)
+ if (Buffer.isBuffer(b) && !b.length) return
+ buf.push(b)
+ }
+
+ stream.on('entry', saveEntry)
+ var entryBuffer = []
+ function saveEntry (e) {
+ collect(e)
+ entryBuffer.push(e)
+ }
+
+ stream.on('proxy', proxyPause)
+ function proxyPause (p) {
+ p.pause()
+ }
+
+ // replace the pipe method with a new version that will
+ // unlock the buffered stuff. if you just call .pipe()
+ // without a destination, then it'll re-play the events.
+ stream.pipe = (function (orig) {
+ return function (dest) {
+ // console.error(' === open the pipes', dest && dest.path)
+
+ // let the entries flow through one at a time.
+ // Once they're all done, then we can resume completely.
+ var e = 0
+ ;(function unblockEntry () {
+ var entry = entryBuffer[e++]
+ // console.error(" ==== unblock entry", entry && entry.path)
+ if (!entry) return resume()
+ entry.on('end', unblockEntry)
+ if (dest) dest.add(entry)
+ else stream.emit('entry', entry)
+ })()
+
+ function resume () {
+ stream.removeListener('entry', saveEntry)
+ stream.removeListener('data', save)
+ stream.removeListener('end', save)
+
+ stream.pipe = orig
+ if (dest) stream.pipe(dest)
+
+ buf.forEach(function (b) {
+ if (b) stream.emit('data', b)
+ else stream.emit('end')
+ })
+
+ stream.resume()
+ }
+
+ return dest
+ }
+ })(stream.pipe)
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js
new file mode 100644
index 0000000000..820cdc85a8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-reader.js
@@ -0,0 +1,252 @@
// A thing that emits "entry" events with Reader objects
// Pausing it causes it to stop emitting entry events, and also
// pauses the current entry if there is one.

module.exports = DirReader

var fs = require('graceful-fs')
var inherits = require('inherits')
var path = require('path')
var Reader = require('./reader.js')
var assert = require('assert').ok

inherits(DirReader, Reader)

// Reader subclass for directories. `props` must already be typed as a
// Directory (props.type === 'Directory' and props.Directory truthy);
// Reader's polymorphic constructor arranges that before dispatching here.
function DirReader (props) {
  var self = this
  if (!(self instanceof DirReader)) {
    throw new Error('DirReader must be called as constructor.')
  }

  // should already be established as a Directory type
  if (props.type !== 'Directory' || !props.Directory) {
    throw new Error('Non-directory type ' + props.type)
  }

  self.entries = null // names from fs.readdir, filled in by _getEntries
  self._index = -1 // index of the entry currently being walked
  self._paused = false
  self._length = -1 // entries.length once known

  // optional comparator applied to the directory listing
  if (props.sort) {
    this.sort = props.sort
  }

  Reader.call(this, props)
}
+
// Read the directory listing (at most once), emit 'entries' with the
// raw name list, then start the walk via _read(). If we're paused when
// the listing arrives, processing is deferred until 'resume'.
DirReader.prototype._getEntries = function () {
  var self = this

  // race condition. might pause() before calling _getEntries,
  // and then resume, and try to get them a second time.
  if (self._gotEntries) return
  self._gotEntries = true

  fs.readdir(self._path, function (er, entries) {
    if (er) return self.error(er)

    self.entries = entries

    self.emit('entries', entries)
    if (self._paused) self.once('resume', processEntries)
    else processEntries()

    function processEntries () {
      self._length = self.entries.length
      // apply the user-supplied comparator, if any
      if (typeof self.sort === 'function') {
        self.entries = self.entries.sort(self.sort.bind(self))
      }
      self._read()
    }
  })
}
+
+// start walking the dir, and emit an "entry" event for each one.
+DirReader.prototype._read = function () {
+ var self = this
+
+ if (!self.entries) return self._getEntries()
+
+ if (self._paused || self._currentEntry || self._aborted) {
+ // console.error('DR paused=%j, current=%j, aborted=%j', self._paused, !!self._currentEntry, self._aborted)
+ return
+ }
+
+ self._index++
+ if (self._index >= self.entries.length) {
+ if (!self._ended) {
+ self._ended = true
+ self.emit('end')
+ self.emit('close')
+ }
+ return
+ }
+
+ // ok, handle this one, then.
+
+ // save creating a proxy, by stat'ing the thing now.
+ var p = path.resolve(self._path, self.entries[self._index])
+ assert(p !== self._path)
+ assert(self.entries[self._index])
+
+ // set this to prevent trying to _read() again in the stat time.
+ self._currentEntry = p
+ fs[ self.props.follow ? 'stat' : 'lstat' ](p, function (er, stat) {
+ if (er) return self.error(er)
+
+ var who = self._proxy || self
+
+ stat.path = p
+ stat.basename = path.basename(p)
+ stat.dirname = path.dirname(p)
+ var childProps = self.getChildProps.call(who, stat)
+ childProps.path = p
+ childProps.basename = path.basename(p)
+ childProps.dirname = path.dirname(p)
+
+ var entry = Reader(childProps, stat)
+
+ // console.error("DR Entry", p, stat.size)
+
+ self._currentEntry = entry
+
+ // "entry" events are for direct entries in a specific dir.
+ // "child" events are for any and all children at all levels.
+ // This nomenclature is not completely final.
+
+ entry.on('pause', function (who) {
+ if (!self._paused && !entry._disowned) {
+ self.pause(who)
+ }
+ })
+
+ entry.on('resume', function (who) {
+ if (self._paused && !entry._disowned) {
+ self.resume(who)
+ }
+ })
+
+ entry.on('stat', function (props) {
+ self.emit('_entryStat', entry, props)
+ if (entry._aborted) return
+ if (entry._paused) {
+ entry.once('resume', function () {
+ self.emit('entryStat', entry, props)
+ })
+ } else self.emit('entryStat', entry, props)
+ })
+
+ entry.on('ready', function EMITCHILD () {
+ // console.error("DR emit child", entry._path)
+ if (self._paused) {
+ // console.error(" DR emit child - try again later")
+ // pause the child, and emit the "entry" event once we drain.
+ // console.error("DR pausing child entry")
+ entry.pause(self)
+ return self.once('resume', EMITCHILD)
+ }
+
+ // skip over sockets. they can't be piped around properly,
+ // so there's really no sense even acknowledging them.
+ // if someone really wants to see them, they can listen to
+ // the "socket" events.
+ if (entry.type === 'Socket') {
+ self.emit('socket', entry)
+ } else {
+ self.emitEntry(entry)
+ }
+ })
+
+ var ended = false
+ entry.on('close', onend)
+ entry.on('disown', onend)
+ function onend () {
+ if (ended) return
+ ended = true
+ self.emit('childEnd', entry)
+ self.emit('entryEnd', entry)
+ self._currentEntry = null
+ if (!self._paused) {
+ self._read()
+ }
+ }
+
+ // XXX Remove this. Works in node as of 0.6.2 or so.
+ // Long filenames should not break stuff.
+ entry.on('error', function (er) {
+ if (entry._swallowErrors) {
+ self.warn(er)
+ entry.emit('end')
+ entry.emit('close')
+ } else {
+ self.emit('error', er)
+ }
+ })
+
+ // proxy up some events.
+ ;[
+ 'child',
+ 'childEnd',
+ 'warn'
+ ].forEach(function (ev) {
+ entry.on(ev, self.emit.bind(self, ev))
+ })
+ })
+}
+
// Detach `entry` from this reader: announce the detachment, sever its
// parent/root links, and clear it as the in-flight entry if it is one.
DirReader.prototype.disown = function (entry) {
  var self = this
  entry.emit('beforeDisown')
  entry._disowned = true
  entry.parent = null
  entry.root = null
  if (self._currentEntry === entry) self._currentEntry = null
  entry.emit('disown')
}
+
// Build the props object handed to each child entry created while
// walking this dir. Children inherit follow/filter/sort/hardlinks
// behavior, and point back at this reader as parent (and as root,
// if this reader is itself the root).
DirReader.prototype.getChildProps = function () {
  var child = {}
  child.depth = this.depth + 1
  child.root = this.root || this
  child.parent = this
  child.follow = this.follow
  child.filter = this.filter
  child.sort = this.props.sort
  child.hardlinks = this.props.hardlinks
  return child
}
+
// Pause the walk. The entry currently being emitted (if any) is
// paused as well, so nothing flows until resume() is called.
DirReader.prototype.pause = function (who) {
  if (this._paused) return
  var source = who || this
  this._paused = true
  var current = this._currentEntry
  if (current && current.pause) current.pause(source)
  this.emit('pause', source)
}
+
// Resume the walk. A 'resume' listener may synchronously pause us
// again, so _paused is re-checked after the emit before continuing.
DirReader.prototype.resume = function (who) {
  if (!this._paused) return
  var source = who || this

  this._paused = false
  // console.error('DR Emit Resume', this._path)
  this.emit('resume', source)
  if (this._paused) {
    // a listener re-paused us; bail out
    return
  }

  var current = this._currentEntry
  if (!current) return this._read()
  if (current.resume) current.resume(source)
}
+
// Announce a child entry: 'entry' for direct children of this dir,
// 'child' for children at any depth (subdir readers proxy it up).
DirReader.prototype.emitEntry = function (entry) {
  var self = this
  self.emit('entry', entry)
  self.emit('child', entry)
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js
new file mode 100644
index 0000000000..ec50dca900
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/dir-writer.js
@@ -0,0 +1,174 @@
// It is expected that, when .add() returns false, the consumer
// of the DirWriter will pause until a "drain" event occurs. Note
// that this is *almost always going to be the case*, unless the
// thing being written is some sort of unsupported type, and thus
// skipped over.

module.exports = DirWriter

var Writer = require('./writer.js')
var inherits = require('inherits')
var mkdir = require('mkdirp')
var path = require('path')
var collect = require('./collect.js')

inherits(DirWriter, Writer)

// Writer subclass for directories. `props` must already be typed as a
// Directory; Writer's polymorphic constructor arranges that before
// dispatching here.
function DirWriter (props) {
  var self = this
  if (!(self instanceof DirWriter)) {
    self.error('DirWriter must be called as constructor.', null, true)
  }

  // should already be established as a Directory type
  if (props.type !== 'Directory' || !props.Directory) {
    self.error('Non-directory type ' + props.type + ' ' +
      JSON.stringify(props), null, true)
  }

  Writer.call(this, props)
}
+
// Make the directory (and any missing parents), then open the gates:
// mark ready and start draining anything buffered by add().
DirWriter.prototype._create = function () {
  var self = this
  var onMade = function (er) {
    if (er) return self.error(er)
    // ready to start getting entries!
    self.ready = true
    self.emit('ready')
    self._process()
  }
  mkdir(self._path, Writer.dirmode, onMade)
}
+
// a DirWriter has an add(entry) method, but its .write() doesn't
// do anything. Why a no-op rather than a throw? Because this
// leaves open the door for writing directory metadata for
// gnu/solaris style dumpdirs.
// Always returns true, so piped sources never see backpressure here.
DirWriter.prototype.write = function () {
  return true
}
+
// Mark the stream ended; _process() takes care of finishing once the
// buffered entries have all been dealt with.
DirWriter.prototype.end = function () {
  var self = this
  self._ended = true
  self._process()
}
+
// Queue a child entry to be written into this directory. The entry's
// events start being collected immediately so nothing is lost while
// it waits its turn. Returns false when the caller should wait for a
// 'drain' event before adding more.
DirWriter.prototype.add = function (entry) {
  var self = this

  // console.error('\tadd', entry._path, '->', self._path)
  collect(entry)

  // not ready yet, or busy with another entry: just queue it up
  if (!self.ready || self._currentEntry) {
    self._buffer.push(entry)
    return false
  }

  // create a new writer, and pipe the incoming entry into it.
  if (self._ended) return self.error('add after end')

  self._buffer.push(entry)
  self._process()

  return self._buffer.length === 0
}
+
// Work through the buffered entries one at a time. For each entry:
// refuse recursive copies (an entry whose path contains this dir),
// build child props rooted under this directory, create a Writer of
// the matching type, and pipe the entry into it. Emits 'drain' when
// the buffer empties, and finishes if end() was already called.
DirWriter.prototype._process = function () {
  var self = this

  // console.error('DW Process p=%j', self._processing, self.basename)

  // only one child is written at a time
  if (self._processing) return

  var entry = self._buffer.shift()
  if (!entry) {
    // console.error("DW Drain")
    self.emit('drain')
    if (self._ended) self._finish()
    return
  }

  self._processing = true
  // console.error("DW Entry", entry._path)

  self.emit('entry', entry)

  // ok, add this entry
  //
  // don't allow recursive copying
  // walk up the entry's parent chain: if any ancestor's path is (or
  // contains) our own path, writing it would recurse forever.
  var p = entry
  var pp
  do {
    pp = p._path || p.path
    if (pp === self.root._path || pp === self._path ||
      (pp && pp.indexOf(self._path) === 0)) {
      // console.error('DW Exit (recursive)', entry.basename, self._path)
      self._processing = false
      // discard the entry's buffered events before skipping it
      if (entry._collected) entry.pipe()
      return self._process()
    }
    p = p.parent
  } while (p)

  // console.error("DW not recursive")

  // chop off the entry's root dir, replace with ours
  var props = {
    parent: self,
    root: self.root || self,
    type: entry.type,
    depth: self.depth + 1
  }

  pp = entry._path || entry.path || entry.props.path
  if (entry.parent) {
    pp = pp.substr(entry.parent._path.length + 1)
  }
  // get rid of any ../../ shenanigans
  props.path = path.join(self.path, path.join('/', pp))

  // if i have a filter, the child should inherit it.
  props.filter = self.filter

  // all the rest of the stuff, copy over from the source.
  Object.keys(entry.props).forEach(function (k) {
    if (!props.hasOwnProperty(k)) {
      props[k] = entry.props[k]
    }
  })

  // not sure at this point what kind of writer this is.
  var child = self._currentChild = new Writer(props)
  child.on('ready', function () {
    // console.error("DW Child Ready", child.type, child._path)
    // console.error("  resuming", entry._path)
    entry.pipe(child)
    entry.resume()
  })

  // XXX Make this work in node.
  // Long filenames should not break stuff.
  child.on('error', function (er) {
    if (child._swallowErrors) {
      self.warn(er)
      child.emit('end')
      child.emit('close')
    } else {
      self.emit('error', er)
    }
  })

  // we fire _end internally *after* end, so that we don't move on
  // until any "end" listeners have had their chance to do stuff.
  child.on('close', onend)
  var ended = false
  function onend () {
    if (ended) return
    ended = true
    // console.error("* DW Child end", child.basename)
    self._currentChild = null
    self._processing = false
    self._process()
  }
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js
new file mode 100644
index 0000000000..baa01f4b3d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-reader.js
@@ -0,0 +1,150 @@
// Basically just a wrapper around an fs.ReadStream

module.exports = FileReader

var fs = require('graceful-fs')
var inherits = require('inherits')
var Reader = require('./reader.js')
// sentinel objects pushed into the pause buffer to mark where the
// underlying stream's 'end' and 'close' events occurred
var EOF = {EOF: true}
var CLOSE = {CLOSE: true}

inherits(FileReader, Reader)

// Reader subclass for regular files (and tarball-style hardlinks,
// which read like plain files).
function FileReader (props) {
  // console.error("    FR create", props.path, props.size, new Error().stack)
  var self = this
  if (!(self instanceof FileReader)) {
    throw new Error('FileReader must be called as constructor.')
  }

  // should already be established as a File type
  // XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
  // with a HardLinkReader class.
  if (!((props.type === 'Link' && props.Link) ||
    (props.type === 'File' && props.File))) {
    throw new Error('Non-file type ' + props.type)
  }

  self._buffer = [] // chunks (plus EOF/CLOSE markers) held while paused
  self._bytesEmitted = 0 // running total, checked against props.size at end
  Reader.call(self, props)
}
+
// Open the underlying fs.ReadStream and translate its events into
// ours, routing chunks through the pause buffer whenever we are
// paused or already have buffered content (to preserve ordering).
FileReader.prototype._getStream = function () {
  var self = this
  var stream = self._stream = fs.createReadStream(self._path, self.props)

  // honor the source filesystem's preferred block size, if known
  if (self.props.blksize) {
    stream.bufferSize = self.props.blksize
  }

  stream.on('open', self.emit.bind(self, 'open'))

  stream.on('data', function (c) {
    // console.error('\t\t%d %s', c.length, self.basename)
    self._bytesEmitted += c.length
    // no point saving empty chunks
    if (!c.length) {
      return
    } else if (self._paused || self._buffer.length) {
      self._buffer.push(c)
      self._read()
    } else self.emit('data', c)
  })

  stream.on('end', function () {
    if (self._paused || self._buffer.length) {
      // console.error('FR Buffering End', self._path)
      self._buffer.push(EOF)
      self._read()
    } else {
      self.emit('end')
    }

    // sanity-check: we should have seen exactly as many bytes as the
    // stat said the file contained
    if (self._bytesEmitted !== self.props.size) {
      self.error("Didn't get expected byte count\n" +
        'expect: ' + self.props.size + '\n' +
        'actual: ' + self._bytesEmitted)
    }
  })

  stream.on('close', function () {
    if (self._paused || self._buffer.length) {
      // console.error('FR Buffering Close', self._path)
      self._buffer.push(CLOSE)
      self._read()
    } else {
      // console.error('FR close 1', self._path)
      self.emit('close')
    }
  })

  stream.on('error', function (e) {
    self.emit('error', e)
  })

  self._read()
}
+
// Drain the pause buffer, re-emitting buffered chunks (and the EOF /
// CLOSE markers) in order. Stops mid-drain if a listener pauses us
// again, keeping the unsent remainder buffered.
FileReader.prototype._read = function () {
  var self = this
  // console.error('FR _read', self._path)
  if (self._paused) {
    // console.error('FR _read paused', self._path)
    return
  }

  if (!self._stream) {
    // console.error('FR _getStream calling', self._path)
    return self._getStream()
  }

  // clear out the buffer, if there is one.
  if (self._buffer.length) {
    // console.error('FR _read has buffer', self._buffer.length, self._path)
    var buf = self._buffer
    for (var i = 0, l = buf.length; i < l; i++) {
      var c = buf[i]
      if (c === EOF) {
        // console.error('FR Read emitting buffered end', self._path)
        self.emit('end')
      } else if (c === CLOSE) {
        // console.error('FR Read emitting buffered close', self._path)
        self.emit('close')
      } else {
        // console.error('FR Read emitting buffered data', self._path)
        self.emit('data', c)
      }

      // a listener may have paused us; keep the rest for later.
      // NOTE(review): slice(i) retains buf[i], which was emitted just
      // above — on resume it is emitted a second time. Looks like it
      // should be slice(i + 1); confirm before changing, since the
      // pause/resume callers may depend on the existing behavior.
      if (self._paused) {
        // console.error('FR Read Re-pausing at '+i, self._path)
        self._buffer = buf.slice(i)
        return
      }
    }
    self._buffer.length = 0
  }
// console.error("FR _read done")
// that's about all there is to it.
}
+
// Stop emitting data; the underlying read stream is paused too.
FileReader.prototype.pause = function (who) {
  // console.error('FR Pause', this._path)
  if (this._paused) return
  var source = who || this
  this._paused = true
  if (this._stream) this._stream.pause()
  this.emit('pause', source)
}
+
// Restart the flow, draining anything that was buffered while paused.
// Note: 'resume' is emitted before the paused flag clears, matching
// the rest of the fstream readers.
FileReader.prototype.resume = function (who) {
  // console.error('FR Resume', this._path)
  if (!this._paused) return
  var source = who || this
  this.emit('resume', source)
  this._paused = false
  if (this._stream) this._stream.resume()
  this._read()
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js
new file mode 100644
index 0000000000..4c803d8d68
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/file-writer.js
@@ -0,0 +1,107 @@
module.exports = FileWriter

var fs = require('graceful-fs')
var Writer = require('./writer.js')
var inherits = require('inherits')
// sentinel queued in _buffer to record that end() arrived before the
// underlying stream had opened
var EOF = {}

inherits(FileWriter, Writer)

// Writer subclass for regular files: wraps an fs.WriteStream and
// buffers writes that arrive before the stream is open.
function FileWriter (props) {
  var self = this
  if (!(self instanceof FileWriter)) {
    throw new Error('FileWriter must be called as constructor.')
  }

  // should already be established as a File type
  if (props.type !== 'File' || !props.File) {
    throw new Error('Non-file type ' + props.type)
  }

  self._buffer = [] // chunks written before 'open' (may end with EOF)
  self._bytesWritten = 0 // running total, checked against size in _finish

  Writer.call(this, props)
}
+
// Open the underlying fs.WriteStream, flush anything buffered while
// waiting for it, and wire the stream's events up to ours.
FileWriter.prototype._create = function () {
  var self = this
  if (self._stream) return

  var so = {}
  if (self.props.flags) so.flags = self.props.flags
  so.mode = Writer.filemode
  // match the source file's block size, when known
  if (self._old && self._old.blksize) so.bufferSize = self._old.blksize

  self._stream = fs.createWriteStream(self._path, so)

  self._stream.on('open', function () {
    // console.error("FW open", self._buffer, self._path)
    self.ready = true
    // replay buffered writes (and the EOF marker, if end() already came)
    self._buffer.forEach(function (c) {
      if (c === EOF) self._stream.end()
      else self._stream.write(c)
    })
    self.emit('ready')
    // give this a kick just in case it needs it.
    self.emit('drain')
  })

  self._stream.on('error', function (er) { self.emit('error', er) })

  self._stream.on('drain', function () { self.emit('drain') })

  self._stream.on('close', function () {
    // console.error('\n\nFW Stream Close', self._path, self.size)
    self._finish()
  })
}
+
// Write a chunk. Before the stream is open, chunks are buffered and
// false is returned to request backpressure. Once open, returns true
// while the stream (or its small internal queue) can absorb more.
FileWriter.prototype.write = function (c) {
  var self = this

  self._bytesWritten += c.length

  if (!self.ready) {
    if (!Buffer.isBuffer(c) && typeof c !== 'string') {
      throw new Error('invalid write data')
    }
    self._buffer.push(c)
    return false
  }

  var ret = self._stream.write(c)
  // console.error('\t-- fw wrote, _stream says', ret, self._stream._queue.length)

  // allow 2 buffered writes, because otherwise there's just too
  // much stop and go bs.
  // NOTE(review): _stream._queue is an internal of the old pre-
  // streams2 fs.WriteStream; on modern Node there is no _queue, so
  // this branch is skipped and ret is returned as-is — confirm.
  if (ret === false && self._stream._queue) {
    return self._stream._queue.length <= 2
  } else {
    return ret
  }
}
+
// Finish the file. Any final chunk is written first; if the stream
// isn't open yet, an EOF marker is queued for the 'open' handler in
// _create to act on.
FileWriter.prototype.end = function (c) {
  if (c) this.write(c)

  if (!this.ready) {
    this._buffer.push(EOF)
    return false
  }

  return this._stream.end()
}
+
// Verify the written byte count against the expected size, then hand
// off to the base Writer's finish logic.
FileWriter.prototype._finish = function () {
  var self = this
  var expected = self.size
  if (typeof expected === 'number' && self._bytesWritten !== expected) {
    self.error(
      'Did not get expected byte count.\n' +
      'expect: ' + expected + '\n' +
      'actual: ' + self._bytesWritten)
  }
  Writer.prototype._finish.call(self)
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/get-type.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/get-type.js
new file mode 100644
index 0000000000..19f6a657db
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/get-type.js
@@ -0,0 +1,33 @@
// Export the type-detection helper as this module's sole entry point.
module.exports = getType
+
// Determine the fstream type of `st`, which may be an fs.Stats object
// (isFile()/isDirectory()/... methods), a props object with boolean
// flags (st.File, st.Directory, ...), or one with an explicit st.type.
// Side effect: sets st.type and the matching boolean flag on st.
// Returns the type name, or null if it cannot be determined.
function getType (st) {
  var types = [
    'Directory',
    'File',
    'SymbolicLink',
    'Link', // special for hardlinks from tarballs
    'BlockDevice',
    'CharacterDevice',
    'FIFO',
    'Socket'
  ]

  // an explicit, recognized st.type wins outright
  if (st.type && types.indexOf(st.type) !== -1) {
    st[st.type] = true
    return st.type
  }

  // otherwise probe each candidate: a boolean flag, or an isX() method
  for (var i = 0; i < types.length; i++) {
    var candidate = types[i]
    var flag = st[candidate] || st['is' + candidate]
    if (typeof flag === 'function') flag = flag.call(st)
    if (!flag) continue
    st[candidate] = true
    st.type = candidate
    return candidate
  }

  return null
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js
new file mode 100644
index 0000000000..fb4cc67a98
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-reader.js
@@ -0,0 +1,53 @@
// Basically just a wrapper around an fs.readlink
//
// XXX: Enhance this to support the Link type, by keeping
// a lookup table of {<dev+inode>:<path>}, so that hardlinks
// can be preserved in tarballs.

module.exports = LinkReader

var fs = require('graceful-fs')
var inherits = require('inherits')
var Reader = require('./reader.js')

inherits(LinkReader, Reader)

// Reader subclass for symlinks (and tarball-style hardlinks). The
// interesting work happens in _stat, which captures the link target.
function LinkReader (props) {
  var self = this
  if (!(self instanceof LinkReader)) {
    throw new Error('LinkReader must be called as constructor.')
  }

  if (!((props.type === 'Link' && props.Link) ||
    (props.type === 'SymbolicLink' && props.SymbolicLink))) {
    throw new Error('Non-link type ' + props.type)
  }

  Reader.call(self, props)
}
+
// When piping a LinkReader into a LinkWriter, we have to
// already have the linkpath property set, so that has to
// happen *before* the "ready" event, which means we need to
// override the _stat method.
LinkReader.prototype._stat = function (currentStat) {
  var self = this
  fs.readlink(self._path, function (er, linkpath) {
    if (er) return self.error(er)
    // record the link target before the base class emits 'ready'
    self.linkpath = self.props.linkpath = linkpath
    self.emit('linkpath', linkpath)
    Reader.prototype._stat.call(self, currentStat)
  })
}
+
// There is no data to stream for a link: _stat already captured the
// linkpath. So reading is just signaling completion, exactly once.
LinkReader.prototype._read = function () {
  if (this._paused) return
  if (this._ended) return
  this.emit('end')
  this.emit('close')
  this._ended = true
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js
new file mode 100644
index 0000000000..af54284008
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/link-writer.js
@@ -0,0 +1,95 @@
module.exports = LinkWriter

var fs = require('graceful-fs')
var Writer = require('./writer.js')
var inherits = require('inherits')
var path = require('path')
var rimraf = require('rimraf')

inherits(LinkWriter, Writer)

// Writer subclass for symlinks and hardlinks. Requires a `linkpath`
// prop naming the link target; the creation itself happens in _create.
function LinkWriter (props) {
  var self = this
  if (!(self instanceof LinkWriter)) {
    throw new Error('LinkWriter must be called as constructor.')
  }

  // should already be established as a Link type
  if (!((props.type === 'Link' && props.Link) ||
    (props.type === 'SymbolicLink' && props.SymbolicLink))) {
    throw new Error('Non-link type ' + props.type)
  }

  // an empty linkpath is treated as "link to the current directory"
  if (props.linkpath === '') props.linkpath = '.'
  if (!props.linkpath) {
    self.error('Need linkpath property to create ' + props.type)
  }

  Writer.call(this, props)
}
+
// Create the link. Hardlinks (and every link on win32) use fs.link
// against a target resolved relative to our dirname; symlinks keep
// the raw linkpath. An existing symlink pointing at the exact same
// target is left alone; anything else at the path is clobbered.
LinkWriter.prototype._create = function () {
  // console.error(" LW _create")
  var self = this
  var hard = self.type === 'Link' || process.platform === 'win32'
  var link = hard ? 'link' : 'symlink'
  var lp = hard ? path.resolve(self.dirname, self.linkpath) : self.linkpath

  // can only change the link path by clobbering
  // For hard links, let's just assume that's always the case, since
  // there's no good way to read them if we don't already know.
  if (hard) return clobber(self, lp, link)

  fs.readlink(self._path, function (er, p) {
    // only skip creation if it's exactly the same link
    if (p && p === lp) return finish(self)
    clobber(self, lp, link)
  })
}
+
// Remove whatever currently occupies the target path, then create
// the link in its place.
function clobber (self, linkpath, linktype) {
  rimraf(self._path, function (er) {
    if (er) return self.error(er)
    create(self, linkpath, linktype)
  })
}
+
// Actually make the link with fs.link or fs.symlink.
function create (self, lp, link) {
  fs[link](lp, self._path, function (er) {
    // if this is a hard link, and we're in the process of writing out a
    // directory, it's very possible that the thing we're linking to
    // doesn't exist yet (especially if it was intended as a symlink),
    // so swallow ENOENT errors here and just soldier in.
    // Additionally, an EPERM or EACCES can happen on win32 if it's trying
    // to make a link to a directory. Again, just skip it.
    // A better solution would be to have fs.symlink be supported on
    // windows in some nice fashion.
    if (er) {
      if ((er.code === 'ENOENT' ||
        er.code === 'EACCES' ||
        er.code === 'EPERM') && process.platform === 'win32') {
        // pretend success, and neuter end/_finish so that nothing
        // further runs for this writer
        self.ready = true
        self.emit('ready')
        self.emit('end')
        self.emit('close')
        self.end = self._finish = function () {}
      } else return self.error(er)
    }
    finish(self)
  })
}
+
// Mark the writer usable; if end() already arrived, complete it now.
function finish (self) {
  self.ready = true
  self.emit('ready')
  var pendingEnd = self._ended && !self._finished
  if (pendingEnd) self._finish()
}
+
// End the writer. If creation already completed we can finish right
// away; otherwise finish() does it once the link has been made.
LinkWriter.prototype.end = function () {
  // console.error("LW finish in end")
  var self = this
  self._ended = true
  if (!self.ready) return
  self._finished = true
  self._finish()
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js
new file mode 100644
index 0000000000..4f431c9d9e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-reader.js
@@ -0,0 +1,95 @@
// A reader for when we don't yet know what kind of thing
// the thing is.

module.exports = ProxyReader

var Reader = require('./reader.js')
var getType = require('./get-type.js')
var inherits = require('inherits')
var fs = require('graceful-fs')

inherits(ProxyReader, Reader)

// Stand-in Reader used until the path has been stat'ed. Delegates
// everything to the real reader created in _stat/_addProxy.
function ProxyReader (props) {
  var self = this
  if (!(self instanceof ProxyReader)) {
    throw new Error('ProxyReader must be called as constructor.')
  }

  self.props = props
  self._buffer = [] // [method, args] calls recorded before the real reader exists
  self.ready = false

  Reader.call(self, props)
}
+
// Stat the path to find out what it really is, then construct the
// matching Reader subclass and adopt it as our proxy target.
// Missing or unreadable paths are treated as 'File'.
ProxyReader.prototype._stat = function () {
  var self = this
  var props = self.props
  // stat the thing to see what the proxy should be.
  var stat = props.follow ? 'stat' : 'lstat'

  fs[stat](props.path, function (er, current) {
    var type
    if (er || !current) {
      type = 'File'
    } else {
      type = getType(current)
    }

    props[type] = true
    props.type = self.type = type

    self._old = current
    self._addProxy(Reader(props, current))
  })
}
+
// Adopt the real reader: re-emit its events as our own, go ready when
// it does, and replay any method calls buffered before it existed.
ProxyReader.prototype._addProxy = function (proxy) {
  var self = this
  if (self._proxyTarget) {
    return self.error('proxy already set')
  }

  self._proxyTarget = proxy
  proxy._proxy = self

  // forward everything interesting from the real reader
  ;[
    'error',
    'data',
    'end',
    'close',
    'linkpath',
    'entry',
    'entryEnd',
    'child',
    'childEnd',
    'warn',
    'stat'
  ].forEach(function (ev) {
    // console.error('~~ proxy event', ev, self.path)
    proxy.on(ev, self.emit.bind(self, ev))
  })

  self.emit('proxy', proxy)

  proxy.on('ready', function () {
    // console.error("~~ proxy is ready!", self.path)
    self.ready = true
    self.emit('ready')
  })

  // replay buffered method calls on the real reader.
  // BUG FIX: `calls` previously aliased self._buffer, so clearing the
  // buffer before the forEach emptied `calls` too and the replay never
  // ran. Copy first, then clear (matches ProxyWriter._addProxy).
  var calls = self._buffer.slice()
  self._buffer.length = 0
  calls.forEach(function (c) {
    proxy[c[0]].apply(proxy, c[1])
  })
}
+
// Delegate pause to the real reader once it exists; no-op (false) before.
ProxyReader.prototype.pause = function () {
  var target = this._proxyTarget
  return target ? target.pause() : false
}
+
// Delegate resume to the real reader once it exists; no-op (false) before.
ProxyReader.prototype.resume = function () {
  var target = this._proxyTarget
  return target ? target.resume() : false
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js
new file mode 100644
index 0000000000..a6544621bf
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/proxy-writer.js
@@ -0,0 +1,111 @@
// A writer for when we don't know what kind of thing
// the thing is. That is, it's not explicitly set,
// so we're going to make it whatever the thing already
// is, or "File"
//
// Until then, collect all events.

module.exports = ProxyWriter

var Writer = require('./writer.js')
var getType = require('./get-type.js')
var inherits = require('inherits')
var collect = require('./collect.js')
var fs = require('fs')

inherits(ProxyWriter, Writer)

// Stand-in Writer used until the destination path has been stat'ed.
// add/write/end calls are buffered and replayed on the real writer.
function ProxyWriter (props) {
  var self = this
  if (!(self instanceof ProxyWriter)) {
    throw new Error('ProxyWriter must be called as constructor.')
  }

  self.props = props
  self._needDrain = false // set when a caller got `false` and awaits 'drain'

  Writer.call(self, props)
}
+
// Stat the destination to decide what kind of writer to become, then
// adopt it as the proxy. Missing or unreadable paths default to 'File'.
ProxyWriter.prototype._stat = function () {
  var self = this
  var props = self.props
  // stat the thing to see what the proxy should be.
  var stat = props.follow ? 'stat' : 'lstat'

  fs[stat](props.path, function (er, current) {
    var type
    if (er || !current) {
      type = 'File'
    } else {
      type = getType(current)
    }

    props[type] = true
    props.type = self.type = type

    self._old = current
    self._addProxy(Writer(props, current))
  })
}
+
// Adopt the real writer: re-emit its events as our own, replay the
// add/write/end calls buffered while we waited on the stat, and emit
// the deferred 'drain' if any caller was told to wait for one.
ProxyWriter.prototype._addProxy = function (proxy) {
  // console.error("~~ set proxy", this.path)
  var self = this
  if (self._proxy) {
    return self.error('proxy already set')
  }

  self._proxy = proxy
  ;[
    'ready',
    'error',
    'close',
    'pipe',
    'drain',
    'warn'
  ].forEach(function (ev) {
    proxy.on(ev, self.emit.bind(self, ev))
  })

  self.emit('proxy', proxy)

  // flush the buffered calls into the real writer, in arrival order
  var calls = self._buffer
  calls.forEach(function (c) {
    // console.error("~~ ~~ proxy buffered call", c[0], c[1])
    proxy[c[0]].apply(proxy, c[1])
  })
  self._buffer.length = 0

  // BUG FIX: the flag is set as `_needDrain` in the constructor and in
  // add()/write(); the original checked `_needsDrain` (with an s), so
  // the deferred 'drain' event never fired.
  if (self._needDrain) self.emit('drain')
}
+
// Buffer add() calls until the real writer exists, then delegate.
// collect() starts capturing the entry's events right away so none
// are lost while we wait on the stat.
ProxyWriter.prototype.add = function (entry) {
  // console.error("~~ proxy add")
  var self = this
  collect(entry)

  if (self._proxy) return self._proxy.add(entry)

  self._buffer.push(['add', [entry]])
  self._needDrain = true
  return false
}
+
// Buffer write() calls until the real writer exists, then delegate.
ProxyWriter.prototype.write = function (c) {
  // console.error('~~ proxy write')
  var self = this
  if (self._proxy) return self._proxy.write(c)

  self._buffer.push(['write', [c]])
  self._needDrain = true
  return false
}
+
// Buffer end() until the real writer exists, then delegate.
ProxyWriter.prototype.end = function (c) {
  // console.error('~~ proxy end')
  var self = this
  if (self._proxy) return self._proxy.end(c)

  self._buffer.push(['end', [c]])
  return false
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/reader.js
new file mode 100644
index 0000000000..be4f570eeb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/reader.js
@@ -0,0 +1,255 @@
+module.exports = Reader
+
+var fs = require('graceful-fs')
+var Stream = require('stream').Stream
+var inherits = require('inherits')
+var path = require('path')
+var getType = require('./get-type.js')
+var hardLinks = Reader.hardLinks = {}
+var Abstract = require('./abstract.js')
+
+// Must do this *before* loading the child classes
+inherits(Reader, Abstract)
+
+var LinkReader = require('./link-reader.js')
+
// Polymorphic factory/constructor for filesystem readers.
// Reader('/some/path') (or Reader({ path: ... })) returns the concrete
// subclass for the entry's type: DirReader, FileReader, LinkReader,
// SocketReader — or a ProxyReader when the type is unknown until the
// path has been statted.
function Reader (props, currentStat) {
  var self = this
  if (!(self instanceof Reader)) return new Reader(props, currentStat)

  // Accept a bare path string as shorthand for { path: path }.
  if (typeof props === 'string') {
    props = { path: props }
  }

  // polymorphism.
  // call fstream.Reader(dir) to get a DirReader object, etc.
  // Note that, unlike in the Writer case, ProxyReader is going
  // to be the *normal* state of affairs, since we rarely know
  // the type of a file prior to reading it.

  var type
  var ClassType

  if (props.type && typeof props.type === 'function') {
    // Caller supplied a constructor directly; use it as-is.
    type = props.type
    ClassType = type
  } else {
    type = getType(props)
    ClassType = Reader
  }

  // A caller-provided stat result lets us pick the class without
  // hitting the filesystem again.
  if (currentStat && !type) {
    type = getType(currentStat)
    props[type] = true
    props.type = type
  }

  switch (type) {
    case 'Directory':
      ClassType = require('./dir-reader.js')
      break

    case 'Link':
      // XXX hard links are just files.
      // However, it would be good to keep track of files' dev+inode
      // and nlink values, and create a HardLinkReader that emits
      // a linkpath value of the original copy, so that the tar
      // writer can preserve them.
      // ClassType = HardLinkReader
      // break

    case 'File':
      ClassType = require('./file-reader.js')
      break

    case 'SymbolicLink':
      ClassType = LinkReader
      break

    case 'Socket':
      ClassType = require('./socket-reader.js')
      break

    case null:
      // Type unknown: stat first, then morph into the right reader.
      ClassType = require('./proxy-reader.js')
      break
  }

  // Re-dispatch into the chosen subclass if we aren't already it.
  if (!(self instanceof ClassType)) {
    return new ClassType(props)
  }

  Abstract.call(self)

  if (!props.path) {
    self.error('Must provide a path', null, true)
  }

  self.readable = true
  self.writable = false

  self.type = type
  self.props = props
  self.depth = props.depth = props.depth || 0
  self.parent = props.parent || null
  self.root = props.root || (props.parent && props.parent.root) || self

  self._path = self.path = path.resolve(props.path)
  if (process.platform === 'win32') {
    // '?' is illegal in Windows paths; paths >= 260 chars get the \\?\
    // long-path prefix, and errors on them are swallowed best-effort.
    self.path = self._path = self.path.replace(/\?/g, '_')
    if (self._path.length >= 260) {
      // how DOES one create files on the moon?
      // if the path has spaces in it, then UNC will fail.
      self._swallowErrors = true
      // if (self._path.indexOf(" ") === -1) {
      self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
      // }
    }
  }
  self.basename = props.basename = path.basename(self.path)
  self.dirname = props.dirname = path.dirname(self.path)

  // these have served their purpose, and are now just noisy clutter
  props.parent = props.root = null

  // console.error("\n\n\n%s setting size to", props.path, props.size)
  self.size = props.size
  self.filter = typeof props.filter === 'function' ? props.filter : null
  if (props.sort === 'alpha') props.sort = alphasort

  // start the ball rolling.
  // this will stat the thing, and then call self._read()
  // to start reading whatever it is.
  // console.error("calling stat", props.path, currentStat)
  self._stat(currentStat)
}
+
// Case-insensitive comparator for { sort: 'alpha' } directory listings.
// Ties in the case-insensitive comparison fall back to a case-sensitive
// comparison so the ordering is total. (Improvement: the original
// recomputed toLowerCase() up to four times per comparison.)
function alphasort (a, b) {
  if (a === b) return 0
  var la = a.toLowerCase()
  var lb = b.toLowerCase()
  if (la > lb) return 1
  if (la < lb) return -1
  return a > b ? 1 : -1
}
+
// Stat the target (or reuse a caller-provided stat), merge the results
// into self.props, detect hardlinks, apply the filter, then emit the
// '_stat' / 'stat' / 'ready' sequence and kick off _read().
Reader.prototype._stat = function (currentStat) {
  var self = this
  var props = self.props
  var stat = props.follow ? 'stat' : 'lstat'
  // console.error("Reader._stat", self._path, currentStat)
  // Defer the cached-stat case so callers always see async behavior.
  if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))
  else fs[stat](self._path, statCb)

  function statCb (er, props_) {
    // console.error("Reader._stat, statCb", self._path, props_, props_.nlink)
    if (er) return self.error(er)

    // Merge every stat field (size, mode, nlink, ...) into props.
    Object.keys(props_).forEach(function (k) {
      props[k] = props_[k]
    })

    // if it's not the expected size, then abort here.
    if (undefined !== self.size && props.size !== self.size) {
      return self.error('incorrect size')
    }
    self.size = props.size

    var type = getType(props)
    var handleHardlinks = props.hardlinks !== false

    // special little thing for handling hardlinks.
    // The first path seen for a dev:ino pair is recorded; any later
    // path with the same pair is emitted as a Link back to it.
    if (handleHardlinks && type !== 'Directory' && props.nlink && props.nlink > 1) {
      var k = props.dev + ':' + props.ino
      // console.error("Reader has nlink", self._path, k)
      if (hardLinks[k] === self._path || !hardLinks[k]) {
        hardLinks[k] = self._path
      } else {
        // switch into hardlink mode.
        type = self.type = self.props.type = 'Link'
        self.Link = self.props.Link = true
        self.linkpath = self.props.linkpath = hardLinks[k]
        // console.error("Hardlink detected, switching mode", self._path, self.linkpath)
        // Setting __proto__ would arguably be the "correct"
        // approach here, but that just seems too wrong.
        self._stat = self._read = LinkReader.prototype._read
      }
    }

    // A pre-declared type that disagrees with what's on disk is an error.
    if (self.type && self.type !== type) {
      self.error('Unexpected type: ' + type)
    }

    // if the filter doesn't pass, then just skip over this one.
    // still have to emit end so that dir-walking can move on.
    if (self.filter) {
      var who = self._proxy || self
      // special handling for ProxyReaders
      if (!self.filter.call(who, who, props)) {
        if (!self._disowned) {
          self.abort()
          self.emit('end')
          self.emit('close')
        }
        return
      }
    }

    // last chance to abort or disown before the flow starts!
    // Emit '_stat', 'stat', 'ready' in order, checking for abort/pause
    // between each, then start reading.
    var events = ['_stat', 'stat', 'ready']
    var e = 0
    ;(function go () {
      if (self._aborted) {
        self.emit('end')
        self.emit('close')
        return
      }

      // Directories manage their own pause state entry-by-entry.
      if (self._paused && self.type !== 'Directory') {
        self.once('resume', go)
        return
      }

      var ev = events[e++]
      if (!ev) {
        return self._read()
      }
      self.emit(ev, props)
      go()
    })()
  }
}
+
// Pipe into a destination. If the destination is "multi-compatible"
// (exposes add(), like a tar pack stream or a DirWriter), forward
// directory entries to it and pause when it signals backpressure.
Reader.prototype.pipe = function (dest) {
  var self = this
  if (typeof dest.add === 'function') {
    self.on('entry', function (entry) {
      if (dest.add(entry) === false) self.pause()
    })
  }

  // Delegate the byte-level plumbing to the core Stream implementation.
  return Stream.prototype.pipe.apply(this, arguments)
}
+
// Pause the flow. `who` identifies the stream requesting the pause
// (defaults to this reader) and is forwarded to listeners and to any
// underlying file stream.
Reader.prototype.pause = function (who) {
  var source = who || this
  this._paused = true
  this.emit('pause', source)
  if (this._stream) this._stream.pause(source)
}
+
// Resume the flow and immediately attempt another read.
Reader.prototype.resume = function (who) {
  var source = who || this
  this._paused = false
  this.emit('resume', source)
  if (this._stream) this._stream.resume(source)
  this._read()
}
+
// Base-class fallback: every concrete reader overrides _read, so
// reaching this means the type was never resolved.
Reader.prototype._read = function () {
  var msg = 'Cannot read unknown type: ' + this.type
  this.error(msg)
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js
new file mode 100644
index 0000000000..e0456ba890
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/socket-reader.js
@@ -0,0 +1,36 @@
+// Just get the stats, and then don't do anything.
+// You can't really "read" from a socket. You "connect" to it.
+// Mostly, this is here so that reading a dir with a socket in it
+// doesn't blow up.
+
+module.exports = SocketReader
+
+var inherits = require('inherits')
+var Reader = require('./reader.js')
+
+inherits(SocketReader, Reader)
+
// Reader for socket entries. Sockets cannot actually be read — you
// connect to them — so this exists purely so that directory walks do
// not blow up when they encounter one.
function SocketReader (props) {
  var self = this
  if (!(self instanceof SocketReader)) {
    throw new Error('SocketReader must be called as constructor.')
  }

  if (props.type !== 'Socket' || !props.Socket) {
    throw new Error('Non-socket type ' + props.type)
  }

  Reader.call(self, props)
}
+
// Nothing to read: all the useful information came from _stat.
// Just signal completion exactly once (unless paused).
SocketReader.prototype._read = function () {
  var self = this
  if (self._paused) return
  if (self._ended) return

  self.emit('end')
  self.emit('close')
  self._ended = true
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/writer.js b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/writer.js
new file mode 100644
index 0000000000..140e449e06
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/lib/writer.js
@@ -0,0 +1,390 @@
+module.exports = Writer
+
+var fs = require('graceful-fs')
+var inherits = require('inherits')
+var rimraf = require('rimraf')
+var mkdir = require('mkdirp')
+var path = require('path')
+var umask = process.platform === 'win32' ? 0 : process.umask()
+var getType = require('./get-type.js')
+var Abstract = require('./abstract.js')
+
+// Must do this *before* loading the child classes
+inherits(Writer, Abstract)
+
+Writer.dirmode = parseInt('0777', 8) & (~umask)
+Writer.filemode = parseInt('0666', 8) & (~umask)
+
+var DirWriter = require('./dir-writer.js')
+var LinkWriter = require('./link-writer.js')
+var FileWriter = require('./file-writer.js')
+var ProxyWriter = require('./proxy-writer.js')
+
// props is the desired state. current is optionally the current stat,
// provided here so that subclasses can avoid statting the target
// more than necessary.
// Polymorphic factory/constructor: returns a DirWriter, FileWriter,
// LinkWriter, or ProxyWriter depending on the declared type.
function Writer (props, current) {
  var self = this

  // Accept a bare path string as shorthand for { path: path }.
  if (typeof props === 'string') {
    props = { path: props }
  }

  // polymorphism.
  // call fstream.Writer(dir) to get a DirWriter object, etc.
  var type = getType(props)
  var ClassType = Writer

  switch (type) {
    case 'Directory':
      ClassType = DirWriter
      break
    case 'File':
      ClassType = FileWriter
      break
    case 'Link':
    case 'SymbolicLink':
      ClassType = LinkWriter
      break
    case null:
    default:
      // Don't know yet what type to create, so we wrap in a proxy.
      ClassType = ProxyWriter
      break
  }

  // Re-dispatch into the chosen subclass if we aren't already it.
  if (!(self instanceof ClassType)) return new ClassType(props)

  // now get down to business.

  Abstract.call(self)

  if (!props.path) self.error('Must provide a path', null, true)

  // props is what we want to set.
  // set some convenience properties as well.
  self.type = props.type
  self.props = props
  self.depth = props.depth || 0
  // clobber defaults to true; only an explicit `false` disables it.
  self.clobber = props.clobber === false ? props.clobber : true
  self.parent = props.parent || null
  self.root = props.root || (props.parent && props.parent.root) || self

  self._path = self.path = path.resolve(props.path)
  if (process.platform === 'win32') {
    // '?' is illegal in Windows paths; paths >= 260 chars get the \\?\
    // long-path prefix, and errors on them are swallowed best-effort.
    self.path = self._path = self.path.replace(/\?/g, '_')
    if (self._path.length >= 260) {
      self._swallowErrors = true
      self._path = '\\\\?\\' + self.path.replace(/\//g, '\\')
    }
  }
  self.basename = path.basename(props.path)
  self.dirname = path.dirname(props.path)
  self.linkpath = props.linkpath || null

  // these have served their purpose; drop the back-references.
  props.parent = props.root = null

  // console.error("\n\n\n%s setting size to", props.path, props.size)
  self.size = props.size

  // Octal string modes (e.g. '0755') are normalized to numbers.
  if (typeof props.mode === 'string') {
    props.mode = parseInt(props.mode, 8)
  }

  self.readable = false
  self.writable = true

  // buffer until ready, or while handling another entry
  self._buffer = []
  self.ready = false

  self.filter = typeof props.filter === 'function' ? props.filter : null

  // start the ball rolling.
  // this checks what's there already, and then calls
  // self._create() to call the impl-specific creation stuff.
  self._stat(current)
}
+
// Calling this means that it's something we can't create (fifos,
// devices, etc.). If something already exists at the target, finish
// normally; otherwise warn that the type is unsupported.
Writer.prototype._create = function () {
  var self = this
  var statMethod = self.props.follow ? 'stat' : 'lstat'
  fs[statMethod](self._path, function (er) {
    if (!er) return self._finish()
    self.warn('Cannot create ' + self._path + '\n' +
      'Unsupported type: ' + self.type, 'ENOTSUP')
  })
}
+
// Check what currently exists at the target path. If nothing is there,
// create it. If something of a *different* type is there, rimraf it
// first. Otherwise leave reconciliation to the subclass's _create().
Writer.prototype._stat = function (current) {
  var self = this
  var props = self.props
  var stat = props.follow ? 'stat' : 'lstat'
  var who = self._proxy || self

  // Reuse a caller-provided stat to avoid a redundant syscall.
  if (current) statCb(null, current)
  else fs[stat](self._path, statCb)

  function statCb (er, current) {
    // NOTE(review): the filter runs even when the stat failed, so
    // `current` may be undefined here — filters must tolerate that.
    if (self.filter && !self.filter.call(who, who, current)) {
      self._aborted = true
      self.emit('end')
      self.emit('close')
      return
    }

    // if it's not there, great. We'll just create it.
    // if it is there, then we'll need to change whatever differs
    if (er || !current) {
      return create(self)
    }

    self._old = current
    var currentType = getType(current)

    // if it's a type change, then we need to clobber or error.
    // if it's not a type change, then let the impl take care of it.
    if (currentType !== self.type) {
      return rimraf(self._path, function (er) {
        if (er) return self.error(er)
        self._old = null
        create(self)
      })
    }

    // otherwise, just handle in the app-specific way
    // this creates a fs.WriteStream, or mkdir's, or whatever
    create(self)
  }
}
+
// Ensure the target's parent directory exists, then hand off to the
// type-specific _create() implementation.
function create (self) {
  // XXX Need to clobber non-dirs that are in the way,
  // unless { clobber: false } in the props.
  mkdir(path.dirname(self._path), Writer.dirmode, function (er, made) {
    if (er) return self.error(er)

    // Remember the topmost directory mkdirp created so its mode and
    // ownership can be fixed up when the write finishes.
    self._madeDir = made
    return self._create()
  })
}
+
// Bring the target's permission bits in line with want.mode, skipping
// the syscall entirely when nothing would change or when the platform
// lacks the needed chmod/lchmod variant.
function endChmod (self, want, current, path, cb) {
  var followLink = want.follow || self.type !== 'SymbolicLink'
  var chmod = followLink ? 'chmod' : 'lchmod'

  if (!fs[chmod]) return cb()

  var wantMode = want.mode
  if (typeof wantMode !== 'number') return cb()

  var permMask = parseInt('0777', 8)
  wantMode = wantMode & permMask
  if (wantMode === (current.mode & permMask)) return cb()

  fs[chmod](path, wantMode, cb)
}
+
// Adjust ownership to match want.uid / want.gid. Only attempted when
// running as root, since chown reliably fails with EPERM otherwise.
function endChown (self, want, current, path, cb) {
  if (process.platform === 'win32') return cb()
  if (!process.getuid || process.getuid() !== 0) return cb()

  var hasUid = typeof want.uid === 'number'
  var hasGid = typeof want.gid === 'number'
  if (!hasUid && !hasGid) return cb()

  if (current.uid === want.uid &&
    current.gid === want.gid) return cb()

  var chown = (self.props.follow || self.type !== 'SymbolicLink')
    ? 'chown' : 'lchown'
  if (!fs[chown]) return cb()

  // Fill in whichever half the caller didn't specify.
  if (!hasUid) want.uid = current.uid
  if (!hasGid) want.gid = current.gid

  fs[chown](path, want.uid, want.gid, cb)
}
+
// Sync the target's atime/mtime with the desired values, skipping the
// syscall when they already match. Falls back from lutimes to utimes
// on platforms without lutimes; does nothing at all on Windows.
function endUtimes (self, want, current, path, cb) {
  if (!fs.utimes || process.platform === 'win32') return cb()

  var utimes = (want.follow || self.type !== 'SymbolicLink')
    ? 'utimes' : 'lutimes'

  if (utimes === 'lutimes' && !fs[utimes]) {
    utimes = 'utimes'
  }

  if (!fs[utimes]) return cb()

  var curA = current.atime
  var curM = current.mtime
  var meA = want.atime
  var meM = want.mtime

  // Missing desired times default to the current ones (a no-op below).
  if (meA === undefined) meA = curA
  if (meM === undefined) meM = curM

  if (!isDate(meA)) meA = new Date(meA)
  // Bug fix: this previously assigned to meA, leaving a non-Date meM
  // to throw on the .getTime() comparison below.
  if (!isDate(meM)) meM = new Date(meM)

  if (meA.getTime() === curA.getTime() &&
    meM.getTime() === curM.getTime()) return cb()

  fs[utimes](path, meA, meM, cb)
}
+
// XXX This function is beastly. Break it up!
// Finalize the write: once content is on disk, reconcile mode, owner,
// and timestamps with the desired props, fix up any directories that
// mkdirp created along the way, then emit 'end' and 'close'.
Writer.prototype._finish = function () {
  var self = this

  // _finish can be triggered from multiple code paths; run only once.
  if (self._finishing) return
  self._finishing = true

  // console.error(" W Finish", self._path, self.size)

  // set up all the things.
  // At this point, we're already done writing whatever we've gotta write,
  // adding files to the dir, etc.
  var todo = 0
  var errState = null
  var done = false

  if (self._old) {
    // the times will almost *certainly* have changed.
    // adds the utimes syscall, but remove another stat.
    self._old.atime = new Date(0)
    self._old.mtime = new Date(0)
    // console.error(" W Finish Stale Stat", self._path, self.size)
    setProps(self._old)
  } else {
    var stat = self.props.follow ? 'stat' : 'lstat'
    // console.error(" W Finish Stating", self._path, self.size)
    fs[stat](self._path, function (er, current) {
      // console.error(" W Finish Stated", self._path, self.size, current)
      if (er) {
        // if we're in the process of writing out a
        // directory, it's very possible that the thing we're linking to
        // doesn't exist yet (especially if it was intended as a symlink),
        // so swallow ENOENT errors here and just soldier on.
        if (er.code === 'ENOENT' &&
          (self.type === 'Link' || self.type === 'SymbolicLink') &&
          process.platform === 'win32') {
          self.ready = true
          self.emit('ready')
          self.emit('end')
          self.emit('close')
          // Neuter further calls so a late end() cannot re-finish.
          self.end = self._finish = function () {}
          return
        } else return self.error(er)
      }
      setProps(self._old = current)
    })
  }

  return

  // Kick off the three prop-fixup syscalls; next() collects results.
  function setProps (current) {
    todo += 3
    endChmod(self, self.props, current, self._path, next('chmod'))
    endChown(self, self.props, current, self._path, next('chown'))
    endUtimes(self, self.props, current, self._path, next('utimes'))
  }

  function next (what) {
    return function (er) {
      // console.error(" W Finish", what, todo)
      // First error wins; later callbacks become no-ops.
      if (errState) return
      if (er) {
        // Tag the error with the syscall that failed, for diagnostics.
        er.fstream_finish_call = what
        return self.error(errState = er)
      }
      if (--todo > 0) return
      if (done) return
      done = true

      // we may still need to set the mode/etc. on some parent dirs
      // that were created previously. delay end/close until then.
      if (!self._madeDir) return end()
      else endMadeDir(self, self._path, end)

      function end (er) {
        if (er) {
          er.fstream_finish_call = 'setupMadeDir'
          return self.error(er)
        }
        // all the props have been set, so we're completely done.
        self.emit('end')
        self.emit('close')
      }
    }
  }
}
+
// Walk upward from p, fixing up mode/owner/times on every directory
// that mkdirp created, stopping once the topmost created dir is done.
function endMadeDir (self, p, cb) {
  var made = self._madeDir
  // everything *between* made and path.dirname(self._path)
  // needs to be set up. Note that this may just be one dir.
  var parent = path.dirname(p)

  endMadeDir_(self, parent, function (er) {
    if (er) return cb(er)
    return parent === made ? cb() : endMadeDir(self, parent, cb)
  })
}
+
// Apply mode/owner/time fixups to one created directory. When this
// writer is not itself a directory, force the execute bits on so the
// created directory stays traversable.
function endMadeDir_ (self, p, cb) {
  var dirProps = {}
  Object.keys(self.props).forEach(function (k) {
    dirProps[k] = self.props[k]

    // only make non-readable dirs if explicitly requested.
    if (k === 'mode' && self.type !== 'Directory') {
      dirProps[k] = dirProps[k] | parseInt('0111', 8)
    }
  })

  var remaining = 3
  var errState = null

  fs.stat(p, function (er, current) {
    if (er) return cb(errState = er)
    endChmod(self, dirProps, current, p, next)
    endChown(self, dirProps, current, p, next)
    endUtimes(self, dirProps, current, p, next)
  })

  function next (er) {
    if (errState) return
    if (er) return cb(errState = er)
    if (--remaining === 0) return cb()
  }
}
+
// A Writer is a sink; it can never be a pipe source.
Writer.prototype.pipe = function () {
  var self = this
  self.error("Can't pipe from writable stream")
}
+
// Only DirWriter supports add(); everything else rejects it.
Writer.prototype.add = function () {
  var self = this
  self.error("Can't add to non-Directory type")
}
+
// Base-class write discards data and reports success; subclasses that
// actually persist bytes (FileWriter) override this.
Writer.prototype.write = function () {
  return true
}
+
// Reliable cross-realm type tag, e.g. '[object Date]'.
function objectToString (d) {
  var toString = Object.prototype.toString
  return toString.call(d)
}
+
// True only for genuine Date objects (including cross-realm ones).
function isDate (d) {
  if (typeof d !== 'object') return false
  return objectToString(d) === '[object Date]'
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/fstream/package.json b/deps/npm/node_modules/node-gyp/node_modules/fstream/package.json
new file mode 100644
index 0000000000..5ac16f546b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/fstream/package.json
@@ -0,0 +1,62 @@
+{
+ "_from": "fstream@^1.0.0",
+ "_id": "fstream@1.0.11",
+ "_inBundle": false,
+ "_integrity": "sha1-XB+x8RdHcRTwYyoOtLcbPLD9MXE=",
+ "_location": "/node-gyp/fstream",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "fstream@^1.0.0",
+ "name": "fstream",
+ "escapedName": "fstream",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/node-gyp",
+ "/node-gyp/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.11.tgz",
+ "_shasum": "5c1fb1f117477114f0632a0eb4b71b3cb0fd3171",
+ "_spec": "fstream@^1.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/node-gyp",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/fstream/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "graceful-fs": "^4.1.2",
+ "inherits": "~2.0.0",
+ "mkdirp": ">=0.5 0",
+ "rimraf": "2"
+ },
+ "deprecated": false,
+ "description": "Advanced file system stream things",
+ "devDependencies": {
+ "standard": "^4.0.0",
+ "tap": "^1.2.0"
+ },
+ "engines": {
+ "node": ">=0.6"
+ },
+ "homepage": "https://github.com/npm/fstream#readme",
+ "license": "ISC",
+ "main": "fstream.js",
+ "name": "fstream",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/fstream.git"
+ },
+ "scripts": {
+ "test": "standard && tap examples/*.js"
+ },
+ "version": "1.0.11"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minimatch/LICENSE
new file mode 100644
index 0000000000..19129e315f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/README.md b/deps/npm/node_modules/node-gyp/node_modules/minimatch/README.md
new file mode 100644
index 0000000000..ad72b8133e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/README.md
@@ -0,0 +1,209 @@
+# minimatch
+
+A minimal matching utility.
+
+[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.svg)](http://travis-ci.org/isaacs/minimatch)
+
+
+This is the matching library used internally by npm.
+
+It works by converting glob expressions into JavaScript `RegExp`
+objects.
+
+## Usage
+
+```javascript
+var minimatch = require("minimatch")
+
+minimatch("bar.foo", "*.foo") // true!
+minimatch("bar.foo", "*.bar") // false!
+minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
+```
+
+## Features
+
+Supports these glob features:
+
+* Brace Expansion
+* Extended glob matching
+* "Globstar" `**` matching
+
+See:
+
+* `man sh`
+* `man bash`
+* `man 3 fnmatch`
+* `man 5 gitignore`
+
+## Minimatch Class
+
+Create a minimatch object by instantiating the `minimatch.Minimatch` class.
+
+```javascript
+var Minimatch = require("minimatch").Minimatch
+var mm = new Minimatch(pattern, options)
+```
+
+### Properties
+
+* `pattern` The original pattern the minimatch object represents.
+* `options` The options supplied to the constructor.
+* `set` A 2-dimensional array of regexp or string expressions.
+ Each row in the
+ array corresponds to a brace-expanded pattern. Each item in the row
+ corresponds to a single path-part. For example, the pattern
+ `{a,b/c}/d` would expand to a set of patterns like:
+
+ [ [ a, d ]
+ , [ b, c, d ] ]
+
+ If a portion of the pattern doesn't have any "magic" in it
+ (that is, it's something like `"foo"` rather than `fo*o?`), then it
+ will be left as a string rather than converted to a regular
+ expression.
+
+* `regexp` Created by the `makeRe` method. A single regular expression
+ expressing the entire pattern. This is useful in cases where you wish
+  to use the pattern somewhat like `fnmatch(3)` with `FNM_PATHNAME` enabled.
+* `negate` True if the pattern is negated.
+* `comment` True if the pattern is a comment.
+* `empty` True if the pattern is `""`.
+
+### Methods
+
+* `makeRe` Generate the `regexp` member if necessary, and return it.
+ Will return `false` if the pattern is invalid.
+* `match(fname)` Return true if the filename matches the pattern, or
+ false otherwise.
+* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
+ filename, and match it against a single row in the `regExpSet`. This
+ method is mainly for internal use, but is exposed so that it can be
+ used by a glob-walker that needs to avoid excessive filesystem calls.
+
+All other methods are internal, and will be called as necessary.
+
+### minimatch(path, pattern, options)
+
+Main export. Tests a path against the pattern using the options.
+
+```javascript
+var isJS = minimatch(file, "*.js", { matchBase: true })
+```
+
+### minimatch.filter(pattern, options)
+
+Returns a function that tests its
+supplied argument, suitable for use with `Array.filter`. Example:
+
+```javascript
+var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
+```
+
+### minimatch.match(list, pattern, options)
+
+Match against the list of
+files, in the style of fnmatch or glob. If nothing is matched, and
+options.nonull is set, then return a list containing the pattern itself.
+
+```javascript
+var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
+```
+
+### minimatch.makeRe(pattern, options)
+
+Make a regular expression object from the pattern.
+
+## Options
+
+All options are `false` by default.
+
+### debug
+
+Dump a ton of stuff to stderr.
+
+### nobrace
+
+Do not expand `{a,b}` and `{1..3}` brace sets.
+
+### noglobstar
+
+Disable `**` matching against multiple folder names.
+
+### dot
+
+Allow patterns to match filenames starting with a period, even if
+the pattern does not explicitly have a period in that spot.
+
+Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
+is set.
+
+### noext
+
+Disable "extglob" style patterns like `+(a|b)`.
+
+### nocase
+
+Perform a case-insensitive match.
+
+### nonull
+
+When a match is not found by `minimatch.match`, return a list containing
+the pattern itself if this option is set. When not set, an empty list
+is returned if there are no matches.
+
+### matchBase
+
+If set, then patterns without slashes will be matched
+against the basename of the path if it contains slashes. For example,
+`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
+
+### nocomment
+
+Suppress the behavior of treating `#` at the start of a pattern as a
+comment.
+
+### nonegate
+
+Suppress the behavior of treating a leading `!` character as negation.
+
+### flipNegate
+
+Returns from negate expressions the same as if they were not negated.
+(Ie, true on a hit, false on a miss.)
+
+
+## Comparisons to other fnmatch/glob implementations
+
+While strict compliance with the existing standards is a worthwhile
+goal, some discrepancies exist between minimatch and other
+implementations, and are intentional.
+
+If the pattern starts with a `!` character, then it is negated. Set the
+`nonegate` flag to suppress this behavior, and treat leading `!`
+characters normally. This is perhaps relevant if you wish to start the
+pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
+characters at the start of a pattern will negate the pattern multiple
+times.
+
+If a pattern starts with `#`, then it is treated as a comment, and
+will not match anything. Use `\#` to match a literal `#` at the
+start of a line, or set the `nocomment` flag to suppress this behavior.
+
+The double-star character `**` is supported by default, unless the
+`noglobstar` flag is set. This is supported in the manner of bsdglob
+and bash 4.1, where `**` only has special significance if it is the only
+thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
+`a/**b` will not.
+
+If an escaped pattern has no matches, and the `nonull` flag is set,
+then minimatch.match returns the pattern as-provided, rather than
+interpreting the character escapes. For example,
+`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
+`"*a?"`. This is akin to setting the `nullglob` option in bash, except
+that it does not resolve escaped pattern characters.
+
+If brace expansion is not disabled, then it is performed before any
+other interpretation of the glob pattern. Thus, a pattern like
+`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
+**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
+checked for validity. Since those two are valid, matching proceeds.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/minimatch.js
new file mode 100644
index 0000000000..5b5f8cf444
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/minimatch.js
@@ -0,0 +1,923 @@
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
+
+var path = { sep: '/' }
+try {
+ path = require('path')
+} catch (er) {}
+
+var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+var expand = require('brace-expansion')
+
+var plTypes = {
+ '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
+ '?': { open: '(?:', close: ')?' },
+ '+': { open: '(?:', close: ')+' },
+ '*': { open: '(?:', close: ')*' },
+ '@': { open: '(?:', close: ')' }
+}
+
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
+
+// * => any number of characters
+var star = qmark + '*?'
+
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
+
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+ return s.split('').reduce(function (set, c) {
+ set[c] = true
+ return set
+ }, {})
+}
+
+// normalizes slashes.
+var slashSplit = /\/+/
+
+minimatch.filter = filter
+function filter (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
+ }
+}
+
+function ext (a, b) {
+ a = a || {}
+ b = b || {}
+ var t = {}
+ Object.keys(b).forEach(function (k) {
+ t[k] = b[k]
+ })
+ Object.keys(a).forEach(function (k) {
+ t[k] = a[k]
+ })
+ return t
+}
+
+minimatch.defaults = function (def) {
+ if (!def || !Object.keys(def).length) return minimatch
+
+ var orig = minimatch
+
+ var m = function minimatch (p, pattern, options) {
+ return orig.minimatch(p, pattern, ext(def, options))
+ }
+
+ m.Minimatch = function Minimatch (pattern, options) {
+ return new orig.Minimatch(pattern, ext(def, options))
+ }
+
+ return m
+}
+
+Minimatch.defaults = function (def) {
+ if (!def || !Object.keys(def).length) return Minimatch
+ return minimatch.defaults(def).Minimatch
+}
+
+function minimatch (p, pattern, options) {
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
+ }
+
+ if (!options) options = {}
+
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ return false
+ }
+
+ // "" only matches ""
+ if (pattern.trim() === '') return p === ''
+
+ return new Minimatch(pattern, options).match(p)
+}
+
+function Minimatch (pattern, options) {
+ if (!(this instanceof Minimatch)) {
+ return new Minimatch(pattern, options)
+ }
+
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
+ }
+
+ if (!options) options = {}
+ pattern = pattern.trim()
+
+ // windows support: need to use /, not \
+ if (path.sep !== '/') {
+ pattern = pattern.split(path.sep).join('/')
+ }
+
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
+
+ // make the set of regexps etc.
+ this.make()
+}
+
+Minimatch.prototype.debug = function () {}
+
+Minimatch.prototype.make = make
+function make () {
+ // don't do it more than once.
+ if (this._made) return
+
+ var pattern = this.pattern
+ var options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
+ }
+
+ // step 1: figure out negation, etc.
+ this.parseNegate()
+
+ // step 2: expand braces
+ var set = this.globSet = this.braceExpand()
+
+ if (options.debug) this.debug = console.error
+
+ this.debug(this.pattern, set)
+
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is
+ // set to the GLOBSTAR object for globstar behavior,
+ // and will not contain any / characters
+ set = this.globParts = set.map(function (s) {
+ return s.split(slashSplit)
+ })
+
+ this.debug(this.pattern, set)
+
+ // glob --> regexps
+ set = set.map(function (s, si, set) {
+ return s.map(this.parse, this)
+ }, this)
+
+ this.debug(this.pattern, set)
+
+ // filter out everything that didn't compile properly.
+ set = set.filter(function (s) {
+ return s.indexOf(false) === -1
+ })
+
+ this.debug(this.pattern, set)
+
+ this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+ var pattern = this.pattern
+ var negate = false
+ var options = this.options
+ var negateOffset = 0
+
+ if (options.nonegate) return
+
+ for (var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === '!'
+ ; i++) {
+ negate = !negate
+ negateOffset++
+ }
+
+ if (negateOffset) this.pattern = pattern.substr(negateOffset)
+ this.negate = negate
+}
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+ return braceExpand(pattern, options)
+}
+
+Minimatch.prototype.braceExpand = braceExpand
+
+function braceExpand (pattern, options) {
+ if (!options) {
+ if (this instanceof Minimatch) {
+ options = this.options
+ } else {
+ options = {}
+ }
+ }
+
+ pattern = typeof pattern === 'undefined'
+ ? this.pattern : pattern
+
+ if (typeof pattern === 'undefined') {
+ throw new TypeError('undefined pattern')
+ }
+
+ if (options.nobrace ||
+ !pattern.match(/\{.*\}/)) {
+ // shortcut. no need to expand.
+ return [pattern]
+ }
+
+ return expand(pattern)
+}
+
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion. Otherwise, any series
+// of * is equivalent to a single *. Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+Minimatch.prototype.parse = parse
+var SUBPARSE = {}
+function parse (pattern, isSub) {
+ if (pattern.length > 1024 * 64) {
+ throw new TypeError('pattern is too long')
+ }
+
+ var options = this.options
+
+ // shortcuts
+ if (!options.noglobstar && pattern === '**') return GLOBSTAR
+ if (pattern === '') return ''
+
+ var re = ''
+ var hasMagic = !!options.nocase
+ var escaping = false
+ // ? => one single character
+ var patternListStack = []
+ var negativeLists = []
+ var stateChar
+ var inClass = false
+ var reClassStart = -1
+ var classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ var patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+ var self = this
+
+ function clearStateChar () {
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case '*':
+ re += star
+ hasMagic = true
+ break
+ case '?':
+ re += qmark
+ hasMagic = true
+ break
+ default:
+ re += '\\' + stateChar
+ break
+ }
+ self.debug('clearStateChar %j %j', stateChar, re)
+ stateChar = false
+ }
+ }
+
+ for (var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
+
+ // skip over any that are escaped.
+ if (escaping && reSpecials[c]) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ switch (c) {
+ case '/':
+ // completely not allowed, even escaped.
+ // Should already be path-split by now.
+ return false
+
+ case '\\':
+ clearStateChar()
+ escaping = true
+ continue
+
+ // the various stateChar values
+ // for the "extglob" stuff.
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+
+ // all of those are literals inside a class, except that
+ // the glob [!a] means [^a] in regexp
+ if (inClass) {
+ this.debug(' in class')
+ if (c === '!' && i === classStart + 1) c = '^'
+ re += c
+ continue
+ }
+
+ // if we already have a stateChar, then it means
+ // that there was something like ** or +? in there.
+ // Handle the stateChar, then proceed with this one.
+ self.debug('call clearStateChar %j', stateChar)
+ clearStateChar()
+ stateChar = c
+ // if extglob is disabled, then +(asdf|foo) isn't a thing.
+ // just clear the statechar *now*, rather than even diving into
+ // the patternList stuff.
+ if (options.noext) clearStateChar()
+ continue
+
+ case '(':
+ if (inClass) {
+ re += '('
+ continue
+ }
+
+ if (!stateChar) {
+ re += '\\('
+ continue
+ }
+
+ patternListStack.push({
+ type: stateChar,
+ start: i - 1,
+ reStart: re.length,
+ open: plTypes[stateChar].open,
+ close: plTypes[stateChar].close
+ })
+ // negation is (?:(?!js)[^/]*)
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
+ this.debug('plType %j %j', stateChar, re)
+ stateChar = false
+ continue
+
+ case ')':
+ if (inClass || !patternListStack.length) {
+ re += '\\)'
+ continue
+ }
+
+ clearStateChar()
+ hasMagic = true
+ var pl = patternListStack.pop()
+ // negation is (?:(?!js)[^/]*)
+ // The others are (?:<pattern>)<type>
+ re += pl.close
+ if (pl.type === '!') {
+ negativeLists.push(pl)
+ }
+ pl.reEnd = re.length
+ continue
+
+ case '|':
+ if (inClass || !patternListStack.length || escaping) {
+ re += '\\|'
+ escaping = false
+ continue
+ }
+
+ clearStateChar()
+ re += '|'
+ continue
+
+ // these are mostly the same in regexp and glob
+ case '[':
+ // swallow any state-tracking char before the [
+ clearStateChar()
+
+ if (inClass) {
+ re += '\\' + c
+ continue
+ }
+
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ continue
+
+ case ']':
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1 || !inClass) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ // handle the case where we left a class open.
+ // "[z-a]" is valid, equivalent to "\[z-a\]"
+ if (inClass) {
+ // split where the last [ was, make sure we don't have
+ // an invalid re. if so, re-walk the contents of the
+ // would-be class to re-translate any characters that
+ // were passed through as-is
+ // TODO: It would probably be faster to determine this
+ // without a try/catch and a new RegExp, but it's tricky
+ // to do safely. For now, this is safe and works.
+ var cs = pattern.substring(classStart + 1, i)
+ try {
+ RegExp('[' + cs + ']')
+ } catch (er) {
+ // not a valid class!
+ var sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
+ hasMagic = hasMagic || sp[1]
+ inClass = false
+ continue
+ }
+ }
+
+ // finish up the class.
+ hasMagic = true
+ inClass = false
+ re += c
+ continue
+
+ default:
+ // swallow any state char that wasn't consumed
+ clearStateChar()
+
+ if (escaping) {
+ // no need
+ escaping = false
+ } else if (reSpecials[c]
+ && !(c === '^' && inClass)) {
+ re += '\\'
+ }
+
+ re += c
+
+ } // switch
+ } // for
+
+ // handle the case where we left a class open.
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ cs = pattern.substr(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0]
+ hasMagic = hasMagic || sp[1]
+ }
+
+ // handle the case where we had a +( thing at the *end*
+ // of the pattern.
+ // each pattern list stack adds 3 chars, and we need to go through
+ // and escape any | chars that were passed through as-is for the regexp.
+ // Go through and escape them, taking care not to double-escape any
+ // | chars that were already escaped.
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ var tail = re.slice(pl.reStart + pl.open.length)
+ this.debug('setting tail', re, pl)
+ // maybe some even number of \, then maybe 1 \, followed by a |
+ tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
+ if (!$2) {
+ // the | isn't already escaped, so escape it.
+ $2 = '\\'
+ }
+
+ // need to escape all those slashes *again*, without escaping the
+ // one that we need for escaping the | character. As it works out,
+ // escaping an even number of slashes can be done by simply repeating
+ // it exactly after itself. That's why this trick works.
+ //
+ // I am sorry that you have to see this.
+ return $1 + $1 + $2 + '|'
+ })
+
+ this.debug('tail=%j\n %s', tail, tail, pl, re)
+ var t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
+
+ hasMagic = true
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
+ }
+
+ // handle trailing things that only matter at the very end.
+ clearStateChar()
+ if (escaping) {
+ // trailing \\
+ re += '\\\\'
+ }
+
+ // only need to apply the nodot start if the re starts with
+ // something that could conceivably capture a dot
+ var addPatternStart = false
+ switch (re.charAt(0)) {
+ case '.':
+ case '[':
+ case '(': addPatternStart = true
+ }
+
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (var n = negativeLists.length - 1; n > -1; n--) {
+ var nl = negativeLists[n]
+
+ var nlBefore = re.slice(0, nl.reStart)
+ var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+ var nlAfter = re.slice(nl.reEnd)
+
+ nlLast += nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ var openParensBefore = nlBefore.split('(').length - 1
+ var cleanAfter = nlAfter
+ for (i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+ }
+ nlAfter = cleanAfter
+
+ var dollar = ''
+ if (nlAfter === '' && isSub !== SUBPARSE) {
+ dollar = '$'
+ }
+ var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ re = newRe
+ }
+
+ // if the re is not "" at this point, then we need to make sure
+ // it doesn't match against an empty path part.
+ // Otherwise a/* will match a/, which it should not.
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
+
+ if (addPatternStart) {
+ re = patternStart + re
+ }
+
+ // parsing just a piece of a larger pattern.
+ if (isSub === SUBPARSE) {
+ return [re, hasMagic]
+ }
+
+ // skip the regexp for non-magical patterns
+ // unescape anything in it, though, so that it'll be
+ // an exact match against a file etc.
+ if (!hasMagic) {
+ return globUnescape(pattern)
+ }
+
+ var flags = options.nocase ? 'i' : ''
+ try {
+ var regExp = new RegExp('^' + re + '$', flags)
+ } catch (er) {
+ // If it was an invalid regular expression, then it can't match
+ // anything. This trick looks for a character after the end of
+ // the string, which is of course impossible, except in multi-line
+ // mode, but it's not a /m regex.
+ return new RegExp('$.')
+ }
+
+ regExp._glob = pattern
+ regExp._src = re
+
+ return regExp
+}
+
+minimatch.makeRe = function (pattern, options) {
+ return new Minimatch(pattern, options || {}).makeRe()
+}
+
+Minimatch.prototype.makeRe = makeRe
+function makeRe () {
+ if (this.regexp || this.regexp === false) return this.regexp
+
+ // at this point, this.set is a 2d array of partial
+ // pattern strings, or "**".
+ //
+ // It's better to use .match(). This function shouldn't
+ // be used, really, but it's pretty convenient sometimes,
+ // when you just want to work with a regex.
+ var set = this.set
+
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
+ var options = this.options
+
+ var twoStar = options.noglobstar ? star
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ var flags = options.nocase ? 'i' : ''
+
+ var re = set.map(function (pattern) {
+ return pattern.map(function (p) {
+ return (p === GLOBSTAR) ? twoStar
+ : (typeof p === 'string') ? regExpEscape(p)
+ : p._src
+ }).join('\\\/')
+ }).join('|')
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = '^(?:' + re + ')$'
+
+ // can match anything, as long as it's not this.
+ if (this.negate) re = '^(?!' + re + ').*$'
+
+ try {
+ this.regexp = new RegExp(re, flags)
+ } catch (ex) {
+ this.regexp = false
+ }
+ return this.regexp
+}
+
+minimatch.match = function (list, pattern, options) {
+ options = options || {}
+ var mm = new Minimatch(pattern, options)
+ list = list.filter(function (f) {
+ return mm.match(f)
+ })
+ if (mm.options.nonull && !list.length) {
+ list.push(pattern)
+ }
+ return list
+}
+
+Minimatch.prototype.match = match
+function match (f, partial) {
+ this.debug('match', f, this.pattern)
+ // short-circuit in the case of busted things.
+ // comments, etc.
+ if (this.comment) return false
+ if (this.empty) return f === ''
+
+ if (f === '/' && partial) return true
+
+ var options = this.options
+
+ // windows: need to use /, not \
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
+
+ // treat the test path as a set of pathparts.
+ f = f.split(slashSplit)
+ this.debug(this.pattern, 'split', f)
+
+ // just ONE of the pattern sets in this.set needs to match
+ // in order for it to be valid. If negating, then just one
+ // match means that we have failed.
+ // Either way, return on the first hit.
+
+ var set = this.set
+ this.debug(this.pattern, 'set', set)
+
+ // Find the basename of the path by looking for the last non-empty segment
+ var filename
+ var i
+ for (i = f.length - 1; i >= 0; i--) {
+ filename = f[i]
+ if (filename) break
+ }
+
+ for (i = 0; i < set.length; i++) {
+ var pattern = set[i]
+ var file = f
+ if (options.matchBase && pattern.length === 1) {
+ file = [filename]
+ }
+ var hit = this.matchOne(file, pattern, partial)
+ if (hit) {
+ if (options.flipNegate) return true
+ return !this.negate
+ }
+ }
+
+ // didn't get any hits. this is success if it's a negative
+ // pattern, failure otherwise.
+ if (options.flipNegate) return false
+ return this.negate
+}
+
+// set partial to true to test if, for example,
+// "/a/b" matches the start of "/*/b/*/d"
+// Partial means, if you run out of file before you run
+// out of pattern, then that's fine, as long as all
+// the parts match.
+Minimatch.prototype.matchOne = function (file, pattern, partial) {
+ var options = this.options
+
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
+
+ this.debug('matchOne', file.length, pattern.length)
+
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
+ ; (fi < fl) && (pi < pl)
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
+ var p = pattern[pi]
+ var f = file[fi]
+
+ this.debug(pattern, p, f)
+
+ // should be impossible.
+ // some invalid regexp stuff in the set.
+ if (p === false) return false
+
+ if (p === GLOBSTAR) {
+ this.debug('GLOBSTAR', [pattern, p, f])
+
+ // "**"
+ // a/**/b/**/c would match the following:
+ // a/b/x/y/z/c
+ // a/x/y/z/b/c
+ // a/b/x/b/x/c
+ // a/b/c
+ // To do this, take the rest of the pattern after
+ // the **, and see if it would match the file remainder.
+ // If so, return success.
+ // If not, the ** "swallows" a segment, and try again.
+ // This is recursively awful.
+ //
+ // a/**/b/**/c matching a/b/x/y/z/c
+ // - a matches a
+ // - doublestar
+ // - matchOne(b/x/y/z/c, b/**/c)
+ // - b matches b
+ // - doublestar
+ // - matchOne(x/y/z/c, c) -> no
+ // - matchOne(y/z/c, c) -> no
+ // - matchOne(z/c, c) -> no
+ // - matchOne(c, c) yes, hit
+ var fr = fi
+ var pr = pi + 1
+ if (pr === pl) {
+ this.debug('** at the end')
+ // a ** at the end will just swallow the rest.
+ // We have found a match.
+ // however, it will not swallow /.x, unless
+ // options.dot is set.
+ // . and .. are *never* matched by **, for explosively
+ // exponential reasons.
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
+ }
+ return true
+ }
+
+ // ok, let's see if we can swallow whatever we can.
+ while (fr < fl) {
+ var swallowee = file[fr]
+
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+
+ // XXX remove this slice. Just pass the start index.
+ if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+ this.debug('globstar found match!', fr, fl, swallowee)
+ // found a match.
+ return true
+ } else {
+ // can't swallow "." or ".." ever.
+ // can only swallow ".foo" when explicitly asked.
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
+ }
+
+ // ** swallows a segment, and continue.
+ this.debug('globstar swallow a segment, and continue')
+ fr++
+ }
+ }
+
+ // no match was found.
+ // However, in partial mode, we can't say this is necessarily over.
+ // If there's more *pattern* left, then
+ if (partial) {
+ // ran out of file
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
+ if (fr === fl) return true
+ }
+ return false
+ }
+
+ // something other than **
+ // non-magic patterns just have to match exactly
+ // patterns with magic have been turned into regexps.
+ var hit
+ if (typeof p === 'string') {
+ if (options.nocase) {
+ hit = f.toLowerCase() === p.toLowerCase()
+ } else {
+ hit = f === p
+ }
+ this.debug('string match', p, f, hit)
+ } else {
+ hit = f.match(p)
+ this.debug('pattern match', p, f, hit)
+ }
+
+ if (!hit) return false
+ }
+
+ // Note: ending in / means that we'll get a final ""
+ // at the end of the pattern. This can only match a
+ // corresponding "" at the end of the file.
+ // If the file ends in /, then it can only match a
+ // a pattern that ends in /, unless the pattern just
+ // doesn't have any more for it. But, a/b/ should *not*
+ // match "a/b/*", even though "" matches against the
+ // [^/]*? pattern, except in partial mode, where it might
+ // simply not be reached yet.
+ // However, a/b/ should still satisfy a/*
+
+ // now either we fell off the end of the pattern, or we're done.
+ if (fi === fl && pi === pl) {
+ // ran out of pattern and filename at the same time.
+ // an exact hit!
+ return true
+ } else if (fi === fl) {
+ // ran out of file, but still had pattern left.
+ // this is ok if we're doing the match as part of
+ // a glob fs traversal.
+ return partial
+ } else if (pi === pl) {
+ // ran out of pattern, still have file left.
+ // this is only acceptable if we're on the very last
+ // empty segment of a file with a trailing slash.
+ // a/* should match a/b/
+ var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
+ return emptyFileEnd
+ }
+
+ // should be unreachable.
+ throw new Error('wtf?')
+}
+
+// replace stuff like \* with *
+function globUnescape (s) {
+ return s.replace(/\\(.)/g, '$1')
+}
+
+function regExpEscape (s) {
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md
new file mode 100644
index 0000000000..778a1c3c1d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/README.md
@@ -0,0 +1,123 @@
+# brace-expansion
+
+[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
+as known from sh/bash, in JavaScript.
+
+[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
+[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
+[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
+
+[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
+
+## Example
+
+```js
+var expand = require('brace-expansion');
+
+expand('file-{a,b,c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('-v{,,}')
+// => ['-v', '-v', '-v']
+
+expand('file{0..2}.jpg')
+// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
+
+expand('file-{a..c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('file{2..0}.jpg')
+// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
+
+expand('file{0..4..2}.jpg')
+// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
+
+expand('file-{a..e..2}.jpg')
+// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
+
+expand('file{00..10..5}.jpg')
+// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
+
+expand('{{A..C},{a..c}}')
+// => ['A', 'B', 'C', 'a', 'b', 'c']
+
+expand('ppp{,config,oe{,conf}}')
+// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
+```
+
+## API
+
+```js
+var expand = require('brace-expansion');
+```
+
+### var expanded = expand(str)
+
+Return an array of all possible and valid expansions of `str`. If none are
+found, `[str]` is returned.
+
+Valid expansions are:
+
+```js
+/^(.*,)+(.+)?$/
+// {a,b,...}
+```
+
+A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
+
+```js
+/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+A numeric sequence from `x` to `y` inclusive, with optional increment.
+If `x` or `y` start with a leading `0`, all the numbers will be padded
+to have equal length. Negative numbers and backwards iteration work too.
+
+```js
+/^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+An alphabetic sequence from `x` to `y` inclusive, with optional increment.
+`x` and `y` must be exactly one character, and if given, `incr` must be a
+number.
+
+For compatibility reasons, the string `${` is not eligible for brace expansion.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install brace-expansion
+```
+
+## Contributors
+
+- [Julian Gruber](https://github.com/juliangruber)
+- [Isaac Z. Schlueter](https://github.com/isaacs)
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber &lt;julian@juliangruber.com&gt;
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js
new file mode 100644
index 0000000000..2b6f4f85c9
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/index.js
@@ -0,0 +1,200 @@
+var concatMap = require('concat-map');
+var balanced = require('balanced-match');
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
new file mode 100644
index 0000000000..ae5d8c36ac
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/.npmignore
@@ -0,0 +1,5 @@
+test
+.gitignore
+.travis.yml
+Makefile
+example.js
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
new file mode 100644
index 0000000000..2cdc8e4148
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/LICENSE.md
@@ -0,0 +1,21 @@
+(MIT)
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
new file mode 100644
index 0000000000..08e918c0db
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/README.md
@@ -0,0 +1,91 @@
+# balanced-match
+
+Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
+
+[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match)
+[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match)
+
+[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match)
+
+## Example
+
+Get the first matching pair of braces:
+
+```js
+var balanced = require('balanced-match');
+
+console.log(balanced('{', '}', 'pre{in{nested}}post'));
+console.log(balanced('{', '}', 'pre{first}between{second}post'));
+console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'));
+```
+
+The matches are:
+
+```bash
+$ node example.js
+{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
+{ start: 3,
+ end: 9,
+ pre: 'pre',
+ body: 'first',
+ post: 'between{second}post' }
+{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
+```
+
+## API
+
+### var m = balanced(a, b, str)
+
+For the first non-nested matching pair of `a` and `b` in `str`, return an
+object with those keys:
+
+* **start** the index of the first match of `a`
+* **end** the index of the matching `b`
+* **pre** the preamble, `a` and `b` not included
+* **body** the match, `a` and `b` not included
+* **post** the postscript, `a` and `b` not included
+
+If there's no match, `undefined` will be returned.
+
+If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
+
+### var r = balanced.range(a, b, str)
+
+For the first non-nested matching pair of `a` and `b` in `str`, return an
+array with indexes: `[ <a index>, <b index> ]`.
+
+If there's no match, `undefined` will be returned.
+
+If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install balanced-match
+```
+
+## License
+
+(MIT)
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
new file mode 100644
index 0000000000..1685a76293
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/index.js
@@ -0,0 +1,59 @@
+'use strict';
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
new file mode 100644
index 0000000000..3e66f4f7df
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/balanced-match/package.json
@@ -0,0 +1,77 @@
+{
+ "_from": "balanced-match@^1.0.0",
+ "_id": "balanced-match@1.0.0",
+ "_inBundle": false,
+ "_integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
+ "_location": "/node-gyp/minimatch/brace-expansion/balanced-match",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "balanced-match@^1.0.0",
+ "name": "balanced-match",
+ "escapedName": "balanced-match",
+ "rawSpec": "^1.0.0",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.0"
+ },
+ "_requiredBy": [
+ "/node-gyp/minimatch/brace-expansion"
+ ],
+ "_resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
+ "_shasum": "89b4d199ab2bee49de164ea02b89ce462d71b767",
+ "_spec": "balanced-match@^1.0.0",
+ "_where": "/Users/rebecca/code/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion",
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "bugs": {
+ "url": "https://github.com/juliangruber/balanced-match/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "Match balanced character pairs, like \"{\" and \"}\"",
+ "devDependencies": {
+ "matcha": "^0.7.0",
+ "tape": "^4.6.0"
+ },
+ "homepage": "https://github.com/juliangruber/balanced-match",
+ "keywords": [
+ "match",
+ "regexp",
+ "test",
+ "balanced",
+ "parse"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "balanced-match",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/balanced-match.git"
+ },
+ "scripts": {
+ "bench": "make bench",
+ "test": "make test"
+ },
+ "testling": {
+ "files": "test/*.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/20..latest",
+ "firefox/nightly",
+ "chrome/25..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ },
+ "version": "1.0.0"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
new file mode 100644
index 0000000000..f1d0f13c8a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - 0.4
+ - 0.6
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
new file mode 100644
index 0000000000..ee27ba4b44
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/LICENSE
@@ -0,0 +1,18 @@
+This software is released under the MIT license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
new file mode 100644
index 0000000000..408f70a1be
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/README.markdown
@@ -0,0 +1,62 @@
+concat-map
+==========
+
+Concatenative mapdashery.
+
+[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map)
+
+[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map)
+
+example
+=======
+
+``` js
+var concatMap = require('concat-map');
+var xs = [ 1, 2, 3, 4, 5, 6 ];
+var ys = concatMap(xs, function (x) {
+ return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
+});
+console.dir(ys);
+```
+
+***
+
+```
+[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]
+```
+
+methods
+=======
+
+``` js
+var concatMap = require('concat-map')
+```
+
+concatMap(xs, fn)
+-----------------
+
+Return an array of concatenated elements by calling `fn(x, i)` for each element
+`x` and each index `i` in the array `xs`.
+
+When `fn(x, i)` returns an array, its result will be concatenated with the
+result array. If `fn(x, i)` returns anything else, that value will be pushed
+onto the end of the result array.
+
+install
+=======
+
+With [npm](http://npmjs.org) do:
+
+```
+npm install concat-map
+```
+
+license
+=======
+
+MIT
+
+notes
+=====
+
+This module was written while sitting high above the ground in a tree.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
new file mode 100644
index 0000000000..33656217b6
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/example/map.js
@@ -0,0 +1,6 @@
+var concatMap = require('../');
+var xs = [ 1, 2, 3, 4, 5, 6 ];
+var ys = concatMap(xs, function (x) {
+ return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
+});
+console.dir(ys);
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
new file mode 100644
index 0000000000..b29a7812e5
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/index.js
@@ -0,0 +1,13 @@
+module.exports = function (xs, fn) {
+ var res = [];
+ for (var i = 0; i < xs.length; i++) {
+ var x = fn(xs[i], i);
+ if (isArray(x)) res.push.apply(res, x);
+ else res.push(x);
+ }
+ return res;
+};
+
+var isArray = Array.isArray || function (xs) {
+ return Object.prototype.toString.call(xs) === '[object Array]';
+};
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
new file mode 100644
index 0000000000..76e10c8948
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/package.json
@@ -0,0 +1,92 @@
+{
+ "_from": "concat-map@0.0.1",
+ "_id": "concat-map@0.0.1",
+ "_integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
+ "_location": "/node-gyp/minimatch/brace-expansion/concat-map",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "concat-map@0.0.1",
+ "name": "concat-map",
+ "escapedName": "concat-map",
+ "rawSpec": "0.0.1",
+ "saveSpec": null,
+ "fetchSpec": "0.0.1"
+ },
+ "_requiredBy": [
+ "/node-gyp/minimatch/brace-expansion"
+ ],
+ "_resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "_shasum": "d8a96bd77fd68df7793a73036a3ba0d5405d477b",
+ "_shrinkwrap": null,
+ "_spec": "concat-map@0.0.1",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion",
+ "author": {
+ "name": "James Halliday",
+ "email": "mail@substack.net",
+ "url": "http://substack.net"
+ },
+ "bin": null,
+ "bugs": {
+ "url": "https://github.com/substack/node-concat-map/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "concatenative mapdashery",
+ "devDependencies": {
+ "tape": "~2.4.0"
+ },
+ "directories": {
+ "example": "example",
+ "test": "test"
+ },
+ "homepage": "https://github.com/substack/node-concat-map#readme",
+ "keywords": [
+ "concat",
+ "concatMap",
+ "map",
+ "functional",
+ "higher-order"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "concat-map",
+ "optionalDependencies": {},
+ "peerDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/substack/node-concat-map.git"
+ },
+ "scripts": {
+ "test": "tape test/*.js"
+ },
+ "testling": {
+ "files": "test/*.js",
+ "browsers": {
+ "ie": [
+ 6,
+ 7,
+ 8,
+ 9
+ ],
+ "ff": [
+ 3.5,
+ 10,
+ 15
+ ],
+ "chrome": [
+ 10,
+ 22
+ ],
+ "safari": [
+ 5.1
+ ],
+ "opera": [
+ 12
+ ]
+ }
+ },
+ "version": "0.0.1"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
new file mode 100644
index 0000000000..fdbd7022f6
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/node_modules/concat-map/test/map.js
@@ -0,0 +1,39 @@
+var concatMap = require('../');
+var test = require('tape');
+
+test('empty or not', function (t) {
+ var xs = [ 1, 2, 3, 4, 5, 6 ];
+ var ixes = [];
+ var ys = concatMap(xs, function (x, ix) {
+ ixes.push(ix);
+ return x % 2 ? [ x - 0.1, x, x + 0.1 ] : [];
+ });
+ t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]);
+ t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]);
+ t.end();
+});
+
+test('always something', function (t) {
+ var xs = [ 'a', 'b', 'c', 'd' ];
+ var ys = concatMap(xs, function (x) {
+ return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ];
+ });
+ t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]);
+ t.end();
+});
+
+test('scalars', function (t) {
+ var xs = [ 'a', 'b', 'c', 'd' ];
+ var ys = concatMap(xs, function (x) {
+ return x === 'b' ? [ 'B', 'B', 'B' ] : x;
+ });
+ t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]);
+ t.end();
+});
+
+test('undefs', function (t) {
+ var xs = [ 'a', 'b', 'c', 'd' ];
+ var ys = concatMap(xs, function () {});
+ t.same(ys, [ undefined, undefined, undefined, undefined ]);
+ t.end();
+});
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json
new file mode 100644
index 0000000000..10ec0bbe52
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/node_modules/brace-expansion/package.json
@@ -0,0 +1,75 @@
+{
+ "_from": "brace-expansion@^1.1.7",
+ "_id": "brace-expansion@1.1.8",
+ "_inBundle": false,
+ "_integrity": "sha1-wHshHHyVLsH479Uad+8NHTmQopI=",
+ "_location": "/node-gyp/minimatch/brace-expansion",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "brace-expansion@^1.1.7",
+ "name": "brace-expansion",
+ "escapedName": "brace-expansion",
+ "rawSpec": "^1.1.7",
+ "saveSpec": null,
+ "fetchSpec": "^1.1.7"
+ },
+ "_requiredBy": [
+ "/node-gyp/minimatch"
+ ],
+ "_resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.8.tgz",
+ "_shasum": "c07b211c7c952ec1f8efd51a77ef0d1d3990a292",
+ "_spec": "brace-expansion@^1.1.7",
+ "_where": "/Users/rebecca/code/npm/node_modules/node-gyp/node_modules/minimatch",
+ "author": {
+ "name": "Julian Gruber",
+ "email": "mail@juliangruber.com",
+ "url": "http://juliangruber.com"
+ },
+ "bugs": {
+ "url": "https://github.com/juliangruber/brace-expansion/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ },
+ "deprecated": false,
+ "description": "Brace expansion as known from sh/bash",
+ "devDependencies": {
+ "matcha": "^0.7.0",
+ "tape": "^4.6.0"
+ },
+ "homepage": "https://github.com/juliangruber/brace-expansion",
+ "keywords": [],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "brace-expansion",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/juliangruber/brace-expansion.git"
+ },
+ "scripts": {
+ "bench": "matcha test/perf/bench.js",
+ "gentest": "bash test/generate.sh",
+ "test": "tape test/*.js"
+ },
+ "testling": {
+ "files": "test/*.js",
+ "browsers": [
+ "ie/8..latest",
+ "firefox/20..latest",
+ "firefox/nightly",
+ "chrome/25..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest",
+ "android-browser/4.2..latest"
+ ]
+ },
+ "version": "1.1.8"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minimatch/package.json b/deps/npm/node_modules/node-gyp/node_modules/minimatch/package.json
new file mode 100644
index 0000000000..3cdb284428
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minimatch/package.json
@@ -0,0 +1,63 @@
+{
+ "_from": "minimatch@^3.0.2",
+ "_id": "minimatch@3.0.4",
+ "_inBundle": false,
+ "_integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+ "_location": "/node-gyp/minimatch",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minimatch@^3.0.2",
+ "name": "minimatch",
+ "escapedName": "minimatch",
+ "rawSpec": "^3.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.2"
+ },
+ "_requiredBy": [
+ "/node-gyp"
+ ],
+ "_resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
+ "_shasum": "5166e286457f03306064be5497e8dbb0c3d32083",
+ "_spec": "minimatch@^3.0.2",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/node-gyp",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minimatch/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "deprecated": false,
+ "description": "a glob matcher in javascript",
+ "devDependencies": {
+ "tap": "^10.3.2"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "files": [
+ "minimatch.js"
+ ],
+ "homepage": "https://github.com/isaacs/minimatch#readme",
+ "license": "ISC",
+ "main": "minimatch.js",
+ "name": "minimatch",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/minimatch.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --cov"
+ },
+ "version": "3.0.4"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/nopt/.npmignore
new file mode 100644
index 0000000000..3c3629e647
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/.npmignore
@@ -0,0 +1 @@
+node_modules
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/nopt/.travis.yml
new file mode 100644
index 0000000000..99f2bbf506
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/.travis.yml
@@ -0,0 +1,9 @@
+language: node_js
+language: node_js
+node_js:
+ - '0.8'
+ - '0.10'
+ - '0.12'
+ - 'iojs'
+before_install:
+ - npm install -g npm@latest
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE
new file mode 100644
index 0000000000..19129e315f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md b/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md
new file mode 100644
index 0000000000..f21a4b31c5
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md
@@ -0,0 +1,211 @@
+If you want to write an option parser, and have it be good, there are
+two ways to do it. The Right Way, and the Wrong Way.
+
+The Wrong Way is to sit down and write an option parser. We've all done
+that.
+
+The Right Way is to write some complex configurable program with so many
+options that you hit the limit of your frustration just trying to
+manage them all, and defer it with duct-tape solutions until you see
+exactly to the core of the problem, and finally snap and write an
+awesome option parser.
+
+If you want to write an option parser, don't write an option parser.
+Write a package manager, or a source control system, or a service
+restarter, or an operating system. You probably won't end up with a
+good one of those, but if you don't give up, and you are relentless and
+diligent enough in your procrastination, you may just end up with a very
+nice option parser.
+
+## USAGE
+
+ // my-program.js
+ var nopt = require("nopt")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , knownOpts = { "foo" : [String, null]
+ , "bar" : [Stream, Number]
+ , "baz" : path
+ , "bloo" : [ "big", "medium", "small" ]
+ , "flag" : Boolean
+ , "pick" : Boolean
+ , "many1" : [String, Array]
+ , "many2" : [path]
+ }
+ , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
+ , "b7" : ["--bar", "7"]
+ , "m" : ["--bloo", "medium"]
+ , "p" : ["--pick"]
+ , "f" : ["--flag"]
+ }
+ // everything is optional.
+ // knownOpts and shorthands default to {}
+ // arg list defaults to process.argv
+ // slice defaults to 2
+ , parsed = nopt(knownOpts, shortHands, process.argv, 2)
+ console.log(parsed)
+
+This would give you support for any of the following:
+
+```bash
+$ node my-program.js --foo "blerp" --no-flag
+{ "foo" : "blerp", "flag" : false }
+
+$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
+{ bar: 7, foo: "Mr. Hand", flag: true }
+
+$ node my-program.js --foo "blerp" -f -----p
+{ foo: "blerp", flag: true, pick: true }
+
+$ node my-program.js -fp --foofoo
+{ foo: "Mr. Foo", flag: true, pick: true }
+
+$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
+{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }
+
+$ node my-program.js --blatzk -fp # unknown opts are ok.
+{ blatzk: true, flag: true, pick: true }
+
+$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value
+{ blatzk: 1000, flag: true, pick: true }
+
+$ node my-program.js --no-blatzk -fp # unless they start with "no-"
+{ blatzk: false, flag: true, pick: true }
+
+$ node my-program.js --baz b/a/z # known paths are resolved.
+{ baz: "/Users/isaacs/b/a/z" }
+
+# if Array is one of the types, then it can take many
+# values, and will always be an array. The other types provided
+# specify what types are allowed in the list.
+
+$ node my-program.js --many1 5 --many1 null --many1 foo
+{ many1: ["5", "null", "foo"] }
+
+$ node my-program.js --many2 foo --many2 bar
+{ many2: ["/path/to/foo", "path/to/bar"] }
+```
+
+Read the tests at the bottom of `lib/nopt.js` for more examples of
+what this puppy can do.
+
+## Types
+
+The following types are supported, and defined on `nopt.typeDefs`
+
+* String: A normal string. No parsing is done.
+* path: A file system path. Gets resolved against cwd if not absolute.
+* url: A url. If it doesn't parse, it isn't accepted.
+* Number: Must be numeric.
+* Date: Must parse as a date. If it does, and `Date` is one of the options,
+ then it will return a Date object, not a string.
+* Boolean: Must be either `true` or `false`. If an option is a boolean,
+ then it does not need a value, and its presence will imply `true` as
+ the value. To negate boolean flags, do `--no-whatever` or `--whatever
+ false`
+* NaN: Means that the option is strictly not allowed. Any value will
+ fail.
+* Stream: An object matching the "Stream" class in node. Valuable
+ for use when validating programmatically. (npm uses this to let you
+ supply any WriteStream on the `outfd` and `logfd` config options.)
+* Array: If `Array` is specified as one of the types, then the value
+ will be parsed as a list of options. This means that multiple values
+ can be specified, and that the value will always be an array.
+
+If a type is an array of values not on this list, then those are
+considered valid values. For instance, in the example above, the
+`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
+and any other value will be rejected.
+
+When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
+interpreted as their JavaScript equivalents.
+
+You can also mix types and values, or multiple types, in a list. For
+instance `{ blah: [Number, null] }` would allow a value to be set to
+either a Number or null. When types are ordered, this implies a
+preference, and the first type that can be used to properly interpret
+the value will be used.
+
+To define a new type, add it to `nopt.typeDefs`. Each item in that
+hash is an object with a `type` member and a `validate` method. The
+`type` member is an object that matches what goes in the type list. The
+`validate` method is a function that gets called with `validate(data,
+key, val)`. Validate methods should assign `data[key]` to the valid
+value of `val` if it can be handled properly, or return boolean
+`false` if it cannot.
+
+You can also call `nopt.clean(data, types, typeDefs)` to clean up a
+config object and remove its invalid properties.
+
+## Error Handling
+
+By default, nopt outputs a warning to standard error when invalid values for
+known options are found. You can change this behavior by assigning a method
+to `nopt.invalidHandler`. This method will be called with
+the offending `nopt.invalidHandler(key, val, types)`.
+
+If no `nopt.invalidHandler` is assigned, then it will console.error
+its whining. If it is assigned to boolean `false` then the warning is
+suppressed.
+
+## Abbreviations
+
+Yes, they are supported. If you define options like this:
+
+```javascript
+{ "foolhardyelephants" : Boolean
+, "pileofmonkeys" : Boolean }
+```
+
+Then this will work:
+
+```bash
+node program.js --foolhar --pil
+node program.js --no-f --pileofmon
+# etc.
+```
+
+## Shorthands
+
+Shorthands are a hash of shorter option names to a snippet of args that
+they expand to.
+
+If multiple one-character shorthands are all combined, and the
+combination does not unambiguously match any other option or shorthand,
+then they will be broken up into their constituent parts. For example:
+
+```json
+{ "s" : ["--loglevel", "silent"]
+, "g" : "--global"
+, "f" : "--force"
+, "p" : "--parseable"
+, "l" : "--long"
+}
+```
+
+```bash
+npm ls -sgflp
+# just like doing this:
+npm ls --loglevel silent --global --force --long --parseable
+```
+
+## The Rest of the args
+
+The config object returned by nopt is given a special member called
+`argv`, which is an object with the following fields:
+
+* `remain`: The remaining args after all the parsing has occurred.
+* `original`: The args as they originally appeared.
+* `cooked`: The args after flags and shorthands are expanded.
+
+## Slicing
+
+Node programs are called with more or less the exact argv as it appears
+in C land, after the v8 and node-specific options have been plucked off.
+As such, `argv[0]` is always `node` and `argv[1]` is always the
+JavaScript program being run.
+
+That's usually not very useful to you. So they're sliced off by
+default. If you want them, then you can pass in `0` as the last
+argument, or any other number that you'd like to slice off the start of
+the list.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js
new file mode 100755
index 0000000000..3232d4c570
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js
@@ -0,0 +1,54 @@
+#!/usr/bin/env node
+var nopt = require("../lib/nopt")
+ , path = require("path")
+ , types = { num: Number
+ , bool: Boolean
+ , help: Boolean
+ , list: Array
+ , "num-list": [Number, Array]
+ , "str-list": [String, Array]
+ , "bool-list": [Boolean, Array]
+ , str: String
+ , clear: Boolean
+ , config: Boolean
+ , length: Number
+ , file: path
+ }
+ , shorthands = { s: [ "--str", "astring" ]
+ , b: [ "--bool" ]
+ , nb: [ "--no-bool" ]
+ , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
+ , "?": ["--help"]
+ , h: ["--help"]
+ , H: ["--help"]
+ , n: [ "--num", "125" ]
+ , c: ["--config"]
+ , l: ["--length"]
+ , f: ["--file"]
+ }
+ , parsed = nopt( types
+ , shorthands
+ , process.argv
+ , 2 )
+
+console.log("parsed", parsed)
+
+if (parsed.help) {
+ console.log("")
+ console.log("nopt cli tester")
+ console.log("")
+ console.log("types")
+ console.log(Object.keys(types).map(function M (t) {
+ var type = types[t]
+ if (Array.isArray(type)) {
+ return [t, type.map(function (type) { return type.name })]
+ }
+ return [t, type && type.name]
+ }).reduce(function (s, i) {
+ s[i[0]] = i[1]
+ return s
+ }, {}))
+ console.log("")
+ console.log("shorthands")
+ console.log(shorthands)
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/examples/my-program.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/examples/my-program.js
new file mode 100755
index 0000000000..142447e18e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/examples/my-program.js
@@ -0,0 +1,30 @@
+#!/usr/bin/env node
+
+//process.env.DEBUG_NOPT = 1
+
+// my-program.js
+var nopt = require("../lib/nopt")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , knownOpts = { "foo" : [String, null]
+ , "bar" : [Stream, Number]
+ , "baz" : path
+ , "bloo" : [ "big", "medium", "small" ]
+ , "flag" : Boolean
+ , "pick" : Boolean
+ }
+ , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
+ , "b7" : ["--bar", "7"]
+ , "m" : ["--bloo", "medium"]
+ , "p" : ["--pick"]
+ , "f" : ["--flag", "true"]
+ , "g" : ["--flag"]
+ , "s" : "--flag"
+ }
+ // everything is optional.
+ // knownOpts and shorthands default to {}
+ // arg list defaults to process.argv
+ // slice defaults to 2
+ , parsed = nopt(knownOpts, shortHands, process.argv, 2)
+
+console.log("parsed =\n"+ require("util").inspect(parsed))
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js
new file mode 100644
index 0000000000..97707e7842
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js
@@ -0,0 +1,415 @@
+// info about each config option.
+
+var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
+ ? function () { console.error.apply(console, arguments) }
+ : function () {}
+
+var url = require("url")
+ , path = require("path")
+ , Stream = require("stream").Stream
+ , abbrev = require("abbrev")
+
+module.exports = exports = nopt
+exports.clean = clean
+
+exports.typeDefs =
+ { String : { type: String, validate: validateString }
+ , Boolean : { type: Boolean, validate: validateBoolean }
+ , url : { type: url, validate: validateUrl }
+ , Number : { type: Number, validate: validateNumber }
+ , path : { type: path, validate: validatePath }
+ , Stream : { type: Stream, validate: validateStream }
+ , Date : { type: Date, validate: validateDate }
+ }
+
+function nopt (types, shorthands, args, slice) {
+ args = args || process.argv
+ types = types || {}
+ shorthands = shorthands || {}
+ if (typeof slice !== "number") slice = 2
+
+ debug(types, shorthands, args, slice)
+
+ args = args.slice(slice)
+ var data = {}
+ , key
+ , remain = []
+ , cooked = args
+ , original = args.slice(0)
+
+ parse(args, data, remain, types, shorthands)
+ // now data is full
+ clean(data, types, exports.typeDefs)
+ data.argv = {remain:remain,cooked:cooked,original:original}
+ Object.defineProperty(data.argv, 'toString', { value: function () {
+ return this.original.map(JSON.stringify).join(" ")
+ }, enumerable: false })
+ return data
+}
+
+function clean (data, types, typeDefs) {
+ typeDefs = typeDefs || exports.typeDefs
+ var remove = {}
+ , typeDefault = [false, true, null, String, Array]
+
+ Object.keys(data).forEach(function (k) {
+ if (k === "argv") return
+ var val = data[k]
+ , isArray = Array.isArray(val)
+ , type = types[k]
+ if (!isArray) val = [val]
+ if (!type) type = typeDefault
+ if (type === Array) type = typeDefault.concat(Array)
+ if (!Array.isArray(type)) type = [type]
+
+ debug("val=%j", val)
+ debug("types=", type)
+ val = val.map(function (val) {
+ // if it's an unknown value, then parse false/true/null/numbers/dates
+ if (typeof val === "string") {
+ debug("string %j", val)
+ val = val.trim()
+ if ((val === "null" && ~type.indexOf(null))
+ || (val === "true" &&
+ (~type.indexOf(true) || ~type.indexOf(Boolean)))
+ || (val === "false" &&
+ (~type.indexOf(false) || ~type.indexOf(Boolean)))) {
+ val = JSON.parse(val)
+ debug("jsonable %j", val)
+ } else if (~type.indexOf(Number) && !isNaN(val)) {
+ debug("convert to number", val)
+ val = +val
+ } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
+ debug("convert to date", val)
+ val = new Date(val)
+ }
+ }
+
+ if (!types.hasOwnProperty(k)) {
+ return val
+ }
+
+ // allow `--no-blah` to set 'blah' to null if null is allowed
+ if (val === false && ~type.indexOf(null) &&
+ !(~type.indexOf(false) || ~type.indexOf(Boolean))) {
+ val = null
+ }
+
+ var d = {}
+ d[k] = val
+ debug("prevalidated val", d, val, types[k])
+ if (!validate(d, k, val, types[k], typeDefs)) {
+ if (exports.invalidHandler) {
+ exports.invalidHandler(k, val, types[k], data)
+ } else if (exports.invalidHandler !== false) {
+ debug("invalid: "+k+"="+val, types[k])
+ }
+ return remove
+ }
+ debug("validated val", d, val, types[k])
+ return d[k]
+ }).filter(function (val) { return val !== remove })
+
+ if (!val.length) delete data[k]
+ else if (isArray) {
+ debug(isArray, data[k], val)
+ data[k] = val
+ } else data[k] = val[0]
+
+ debug("k=%s val=%j", k, val, data[k])
+ })
+}
+
+function validateString (data, k, val) {
+ data[k] = String(val)
+}
+
+function validatePath (data, k, val) {
+ if (val === true) return false
+ if (val === null) return true
+
+ val = String(val)
+ var homePattern = process.platform === 'win32' ? /^~(\/|\\)/ : /^~\//
+ if (val.match(homePattern) && process.env.HOME) {
+ val = path.resolve(process.env.HOME, val.substr(2))
+ }
+ data[k] = path.resolve(String(val))
+ return true
+}
+
+function validateNumber (data, k, val) {
+ debug("validate Number %j %j %j", k, val, isNaN(val))
+ if (isNaN(val)) return false
+ data[k] = +val
+}
+
+function validateDate (data, k, val) {
+ debug("validate Date %j %j %j", k, val, Date.parse(val))
+ var s = Date.parse(val)
+ if (isNaN(s)) return false
+ data[k] = new Date(val)
+}
+
+function validateBoolean (data, k, val) {
+ if (val instanceof Boolean) val = val.valueOf()
+ else if (typeof val === "string") {
+ if (!isNaN(val)) val = !!(+val)
+ else if (val === "null" || val === "false") val = false
+ else val = true
+ } else val = !!val
+ data[k] = val
+}
+
+function validateUrl (data, k, val) {
+ val = url.parse(String(val))
+ if (!val.host) return false
+ data[k] = val.href
+}
+
+function validateStream (data, k, val) {
+ if (!(val instanceof Stream)) return false
+ data[k] = val
+}
+
+function validate (data, k, val, type, typeDefs) {
+ // arrays are lists of types.
+ if (Array.isArray(type)) {
+ for (var i = 0, l = type.length; i < l; i ++) {
+ if (type[i] === Array) continue
+ if (validate(data, k, val, type[i], typeDefs)) return true
+ }
+ delete data[k]
+ return false
+ }
+
+ // an array of anything?
+ if (type === Array) return true
+
+ // NaN is poisonous. Means that something is not allowed.
+ if (type !== type) {
+ debug("Poison NaN", k, val, type)
+ delete data[k]
+ return false
+ }
+
+ // explicit list of values
+ if (val === type) {
+ debug("Explicitly allowed %j", val)
+ // if (isArray) (data[k] = data[k] || []).push(val)
+ // else data[k] = val
+ data[k] = val
+ return true
+ }
+
+ // now go through the list of typeDefs, validate against each one.
+ var ok = false
+ , types = Object.keys(typeDefs)
+ for (var i = 0, l = types.length; i < l; i ++) {
+ debug("test type %j %j %j", k, val, types[i])
+ var t = typeDefs[types[i]]
+ if (t &&
+ ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) {
+ var d = {}
+ ok = false !== t.validate(d, k, val)
+ val = d[k]
+ if (ok) {
+ // if (isArray) (data[k] = data[k] || []).push(val)
+ // else data[k] = val
+ data[k] = val
+ break
+ }
+ }
+ }
+ debug("OK? %j (%j %j %j)", ok, k, val, types[i])
+
+ if (!ok) delete data[k]
+ return ok
+}
+
+function parse (args, data, remain, types, shorthands) {
+ debug("parse", args, data, remain)
+
+ var key = null
+ , abbrevs = abbrev(Object.keys(types))
+ , shortAbbr = abbrev(Object.keys(shorthands))
+
+ for (var i = 0; i < args.length; i ++) {
+ var arg = args[i]
+ debug("arg", arg)
+
+ if (arg.match(/^-{2,}$/)) {
+ // done with keys.
+ // the rest are args.
+ remain.push.apply(remain, args.slice(i + 1))
+ args[i] = "--"
+ break
+ }
+ var hadEq = false
+ if (arg.charAt(0) === "-" && arg.length > 1) {
+ if (arg.indexOf("=") !== -1) {
+ hadEq = true
+ var v = arg.split("=")
+ arg = v.shift()
+ v = v.join("=")
+ args.splice.apply(args, [i, 1].concat([arg, v]))
+ }
+
+ // see if it's a shorthand
+ // if so, splice and back up to re-parse it.
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
+ debug("arg=%j shRes=%j", arg, shRes)
+ if (shRes) {
+ debug(arg, shRes)
+ args.splice.apply(args, [i, 1].concat(shRes))
+ if (arg !== shRes[0]) {
+ i --
+ continue
+ }
+ }
+ arg = arg.replace(/^-+/, "")
+ var no = null
+ while (arg.toLowerCase().indexOf("no-") === 0) {
+ no = !no
+ arg = arg.substr(3)
+ }
+
+ if (abbrevs[arg]) arg = abbrevs[arg]
+
+ var isArray = types[arg] === Array ||
+ Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1
+
+ // allow unknown things to be arrays if specified multiple times.
+ if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) {
+ if (!Array.isArray(data[arg]))
+ data[arg] = [data[arg]]
+ isArray = true
+ }
+
+ var val
+ , la = args[i + 1]
+
+ var isBool = typeof no === 'boolean' ||
+ types[arg] === Boolean ||
+ Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 ||
+ (typeof types[arg] === 'undefined' && !hadEq) ||
+ (la === "false" &&
+ (types[arg] === null ||
+ Array.isArray(types[arg]) && ~types[arg].indexOf(null)))
+
+ if (isBool) {
+ // just set and move along
+ val = !no
+ // however, also support --bool true or --bool false
+ if (la === "true" || la === "false") {
+ val = JSON.parse(la)
+ la = null
+ if (no) val = !val
+ i ++
+ }
+
+ // also support "foo":[Boolean, "bar"] and "--foo bar"
+ if (Array.isArray(types[arg]) && la) {
+ if (~types[arg].indexOf(la)) {
+ // an explicit type
+ val = la
+ i ++
+ } else if ( la === "null" && ~types[arg].indexOf(null) ) {
+ // null allowed
+ val = null
+ i ++
+ } else if ( !la.match(/^-{2,}[^-]/) &&
+ !isNaN(la) &&
+ ~types[arg].indexOf(Number) ) {
+ // number
+ val = +la
+ i ++
+ } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) {
+ // string
+ val = la
+ i ++
+ }
+ }
+
+ if (isArray) (data[arg] = data[arg] || []).push(val)
+ else data[arg] = val
+
+ continue
+ }
+
+ if (types[arg] === String && la === undefined)
+ la = ""
+
+ if (la && la.match(/^-{2,}$/)) {
+ la = undefined
+ i --
+ }
+
+ val = la === undefined ? true : la
+ if (isArray) (data[arg] = data[arg] || []).push(val)
+ else data[arg] = val
+
+ i ++
+ continue
+ }
+ remain.push(arg)
+ }
+}
+
+function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
+ // handle single-char shorthands glommed together, like
+ // npm ls -glp, but only if there is one dash, and only if
+ // all of the chars are single-char shorthands, and it's
+ // not a match to some other abbrev.
+ arg = arg.replace(/^-+/, '')
+
+ // if it's an exact known option, then don't go any further
+ if (abbrevs[arg] === arg)
+ return null
+
+ // if it's an exact known shortopt, same deal
+ if (shorthands[arg]) {
+ // make it an array, if it's a list of words
+ if (shorthands[arg] && !Array.isArray(shorthands[arg]))
+ shorthands[arg] = shorthands[arg].split(/\s+/)
+
+ return shorthands[arg]
+ }
+
+ // first check to see if this arg is a set of single-char shorthands
+ var singles = shorthands.___singles
+ if (!singles) {
+ singles = Object.keys(shorthands).filter(function (s) {
+ return s.length === 1
+ }).reduce(function (l,r) {
+ l[r] = true
+ return l
+ }, {})
+ shorthands.___singles = singles
+ debug('shorthand singles', singles)
+ }
+
+ var chrs = arg.split("").filter(function (c) {
+ return singles[c]
+ })
+
+ if (chrs.join("") === arg) return chrs.map(function (c) {
+ return shorthands[c]
+ }).reduce(function (l, r) {
+ return l.concat(r)
+ }, [])
+
+
+ // if it's an arg abbrev, and not a literal shorthand, then prefer the arg
+ if (abbrevs[arg] && !shorthands[arg])
+ return null
+
+ // if it's an abbr for a shorthand, then use that
+ if (shortAbbr[arg])
+ arg = shortAbbr[arg]
+
+ // make it an array, if it's a list of words
+ if (shorthands[arg] && !Array.isArray(shorthands[arg]))
+ shorthands[arg] = shorthands[arg].split(/\s+/)
+
+ return shorthands[arg]
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json b/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json
new file mode 100644
index 0000000000..011e21e0ab
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json
@@ -0,0 +1,59 @@
+{
+ "_from": "nopt@2 || 3",
+ "_id": "nopt@3.0.6",
+ "_integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=",
+ "_location": "/node-gyp/nopt",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "nopt@2 || 3",
+ "name": "nopt",
+ "escapedName": "nopt",
+ "rawSpec": "2 || 3",
+ "saveSpec": null,
+ "fetchSpec": "2 || 3"
+ },
+ "_requiredBy": [
+ "/node-gyp"
+ ],
+ "_resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz",
+ "_shasum": "c6465dbf08abcd4db359317f79ac68a646b28ff9",
+ "_shrinkwrap": null,
+ "_spec": "nopt@2 || 3",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/node-gyp",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bin": {
+ "nopt": "./bin/nopt.js"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/nopt/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "abbrev": "1"
+ },
+ "deprecated": false,
+ "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.",
+ "devDependencies": {
+ "tap": "^1.2.0"
+ },
+ "homepage": "https://github.com/npm/nopt#readme",
+ "license": "ISC",
+ "main": "lib/nopt.js",
+ "name": "nopt",
+ "optionalDependencies": {},
+ "peerDependencies": {},
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/nopt.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "3.0.6"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/test/basic.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/test/basic.js
new file mode 100644
index 0000000000..d399de9209
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/test/basic.js
@@ -0,0 +1,273 @@
+var nopt = require("../")
+ , test = require('tap').test
+
+
+test("passing a string results in a string", function (t) {
+ var parsed = nopt({ key: String }, {}, ["--key", "myvalue"], 0)
+ t.same(parsed.key, "myvalue")
+ t.end()
+})
+
+// https://github.com/npm/nopt/issues/31
+test("Empty String results in empty string, not true", function (t) {
+ var parsed = nopt({ empty: String }, {}, ["--empty"], 0)
+ t.same(parsed.empty, "")
+ t.end()
+})
+
+test("~ path is resolved to $HOME", function (t) {
+ var path = require("path")
+ if (!process.env.HOME) process.env.HOME = "/tmp"
+ var parsed = nopt({key: path}, {}, ["--key=~/val"], 0)
+ t.same(parsed.key, path.resolve(process.env.HOME, "val"))
+ t.end()
+})
+
+// https://github.com/npm/nopt/issues/24
+test("Unknown options are not parsed as numbers", function (t) {
+ var parsed = nopt({"parse-me": Number}, null, ['--leave-as-is=1.20', '--parse-me=1.20'], 0)
+ t.equal(parsed['leave-as-is'], '1.20')
+ t.equal(parsed['parse-me'], 1.2)
+ t.end()
+});
+
+// https://github.com/npm/nopt/issues/48
+test("Check types based on name of type", function (t) {
+ var parsed = nopt({"parse-me": {name: "Number"}}, null, ['--parse-me=1.20'], 0)
+ t.equal(parsed['parse-me'], 1.2)
+ t.end()
+})
+
+
+test("Missing types are not parsed", function (t) {
+ var parsed = nopt({"parse-me": {}}, null, ['--parse-me=1.20'], 0)
+ //should only contain argv
+ t.equal(Object.keys(parsed).length, 1)
+ t.end()
+})
+
+test("Types passed without a name are not parsed", function (t) {
+ var parsed = nopt({"parse-me": {}}, {}, ['--parse-me=1.20'], 0)
+ //should only contain argv
+ t.equal(Object.keys(parsed).length, 1)
+ t.end()
+})
+
+test("other tests", function (t) {
+
+ var util = require("util")
+ , Stream = require("stream")
+ , path = require("path")
+ , url = require("url")
+
+ , shorthands =
+ { s : ["--loglevel", "silent"]
+ , d : ["--loglevel", "info"]
+ , dd : ["--loglevel", "verbose"]
+ , ddd : ["--loglevel", "silly"]
+ , noreg : ["--no-registry"]
+ , reg : ["--registry"]
+ , "no-reg" : ["--no-registry"]
+ , silent : ["--loglevel", "silent"]
+ , verbose : ["--loglevel", "verbose"]
+ , h : ["--usage"]
+ , H : ["--usage"]
+ , "?" : ["--usage"]
+ , help : ["--usage"]
+ , v : ["--version"]
+ , f : ["--force"]
+ , desc : ["--description"]
+ , "no-desc" : ["--no-description"]
+ , "local" : ["--no-global"]
+ , l : ["--long"]
+ , p : ["--parseable"]
+ , porcelain : ["--parseable"]
+ , g : ["--global"]
+ }
+
+ , types =
+ { aoa: Array
+ , nullstream: [null, Stream]
+ , date: Date
+ , str: String
+ , browser : String
+ , cache : path
+ , color : ["always", Boolean]
+ , depth : Number
+ , description : Boolean
+ , dev : Boolean
+ , editor : path
+ , force : Boolean
+ , global : Boolean
+ , globalconfig : path
+ , group : [String, Number]
+ , gzipbin : String
+ , logfd : [Number, Stream]
+ , loglevel : ["silent","win","error","warn","info","verbose","silly"]
+ , long : Boolean
+ , "node-version" : [false, String]
+ , npaturl : url
+ , npat : Boolean
+ , "onload-script" : [false, String]
+ , outfd : [Number, Stream]
+ , parseable : Boolean
+ , pre: Boolean
+ , prefix: path
+ , proxy : url
+ , "rebuild-bundle" : Boolean
+ , registry : url
+ , searchopts : String
+ , searchexclude: [null, String]
+ , shell : path
+ , t: [Array, String]
+ , tag : String
+ , tar : String
+ , tmp : path
+ , "unsafe-perm" : Boolean
+ , usage : Boolean
+ , user : String
+ , username : String
+ , userconfig : path
+ , version : Boolean
+ , viewer: path
+ , _exit : Boolean
+ , path: path
+ }
+
+ ; [["-v", {version:true}, []]
+ ,["---v", {version:true}, []]
+ ,["ls -s --no-reg connect -d",
+ {loglevel:"info",registry:null},["ls","connect"]]
+ ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]]
+ ,["ls --registry blargle", {}, ["ls"]]
+ ,["--no-registry", {registry:null}, []]
+ ,["--no-color true", {color:false}, []]
+ ,["--no-color false", {color:true}, []]
+ ,["--no-color", {color:false}, []]
+ ,["--color false", {color:false}, []]
+ ,["--color --logfd 7", {logfd:7,color:true}, []]
+ ,["--color=true", {color:true}, []]
+ ,["--logfd=10", {logfd:10}, []]
+ ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]]
+ ,["--tmp=tmp -tar=gtar",
+ {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]]
+ ,["--logfd x", {}, []]
+ ,["a -true -- -no-false", {true:true},["a","-no-false"]]
+ ,["a -no-false", {false:false},["a"]]
+ ,["a -no-no-true", {true:true}, ["a"]]
+ ,["a -no-no-no-false", {false:false}, ["a"]]
+ ,["---NO-no-No-no-no-no-nO-no-no"+
+ "-No-no-no-no-no-no-no-no-no"+
+ "-no-no-no-no-NO-NO-no-no-no-no-no-no"+
+ "-no-body-can-do-the-boogaloo-like-I-do"
+ ,{"body-can-do-the-boogaloo-like-I-do":false}, []]
+ ,["we are -no-strangers-to-love "+
+ "--you-know=the-rules --and=so-do-i "+
+ "---im-thinking-of=a-full-commitment "+
+ "--no-you-would-get-this-from-any-other-guy "+
+ "--no-gonna-give-you-up "+
+ "-no-gonna-let-you-down=true "+
+ "--no-no-gonna-run-around false "+
+ "--desert-you=false "+
+ "--make-you-cry false "+
+ "--no-tell-a-lie "+
+ "--no-no-and-hurt-you false"
+ ,{"strangers-to-love":false
+ ,"you-know":"the-rules"
+ ,"and":"so-do-i"
+ ,"you-would-get-this-from-any-other-guy":false
+ ,"gonna-give-you-up":false
+ ,"gonna-let-you-down":false
+ ,"gonna-run-around":false
+ ,"desert-you":false
+ ,"make-you-cry":false
+ ,"tell-a-lie":false
+ ,"and-hurt-you":false
+ },["we", "are"]]
+ ,["-t one -t two -t three"
+ ,{t: ["one", "two", "three"]}
+ ,[]]
+ ,["-t one -t null -t three four five null"
+ ,{t: ["one", "null", "three"]}
+ ,["four", "five", "null"]]
+ ,["-t foo"
+ ,{t:["foo"]}
+ ,[]]
+ ,["--no-t"
+ ,{t:["false"]}
+ ,[]]
+ ,["-no-no-t"
+ ,{t:["true"]}
+ ,[]]
+ ,["-aoa one -aoa null -aoa 100"
+ ,{aoa:["one", null, '100']}
+ ,[]]
+ ,["-str 100"
+ ,{str:"100"}
+ ,[]]
+ ,["--color always"
+ ,{color:"always"}
+ ,[]]
+ ,["--no-nullstream"
+ ,{nullstream:null}
+ ,[]]
+ ,["--nullstream false"
+ ,{nullstream:null}
+ ,[]]
+ ,["--notadate=2011-01-25"
+ ,{notadate: "2011-01-25"}
+ ,[]]
+ ,["--date 2011-01-25"
+ ,{date: new Date("2011-01-25")}
+ ,[]]
+ ,["-cl 1"
+ ,{config: true, length: 1}
+ ,[]
+ ,{config: Boolean, length: Number, clear: Boolean}
+ ,{c: "--config", l: "--length"}]
+ ,["--acount bla"
+ ,{"acount":true}
+ ,["bla"]
+ ,{account: Boolean, credentials: Boolean, options: String}
+ ,{a:"--account", c:"--credentials",o:"--options"}]
+ ,["--clear"
+ ,{clear:true}
+ ,[]
+ ,{clear:Boolean,con:Boolean,len:Boolean,exp:Boolean,add:Boolean,rep:Boolean}
+ ,{c:"--con",l:"--len",e:"--exp",a:"--add",r:"--rep"}]
+ ,["--file -"
+ ,{"file":"-"}
+ ,[]
+ ,{file:String}
+ ,{}]
+ ,["--file -"
+ ,{"file":true}
+ ,["-"]
+ ,{file:Boolean}
+ ,{}]
+ ,["--path"
+ ,{"path":null}
+ ,[]]
+ ,["--path ."
+ ,{"path":process.cwd()}
+ ,[]]
+ ].forEach(function (test) {
+ var argv = test[0].split(/\s+/)
+ , opts = test[1]
+ , rem = test[2]
+ , actual = nopt(test[3] || types, test[4] || shorthands, argv, 0)
+ , parsed = actual.argv
+ delete actual.argv
+ for (var i in opts) {
+ var e = JSON.stringify(opts[i])
+ , a = JSON.stringify(actual[i] === undefined ? null : actual[i])
+ if (e && typeof e === "object") {
+ t.deepEqual(e, a)
+ } else {
+ t.equal(e, a)
+ }
+ }
+ t.deepEqual(rem, parsed.remain)
+ })
+ t.end()
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE
new file mode 100644
index 0000000000..19129e315f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/README.md b/deps/npm/node_modules/node-gyp/node_modules/semver/README.md
new file mode 100644
index 0000000000..cbd956549d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/README.md
@@ -0,0 +1,350 @@
+semver(1) -- The semantic versioner for npm
+===========================================
+
+## Usage
+
+ $ npm install semver
+ $ node
+ var semver = require('semver')
+
+ semver.valid('1.2.3') // '1.2.3'
+ semver.valid('a.b.c') // null
+ semver.clean(' =v1.2.3 ') // '1.2.3'
+ semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+ semver.gt('1.2.3', '9.8.7') // false
+ semver.lt('1.2.3', '9.8.7') // true
+
+As a command-line utility:
+
+ $ semver -h
+
+ SemVer 5.1.0
+
+ A JavaScript implementation of the http://semver.org/ specification
+ Copyright Isaac Z. Schlueter
+
+ Usage: semver [options] <version> [<version> [...]]
+ Prints valid versions sorted by SemVer precedence
+
+ Options:
+ -r --range <range>
+ Print versions that match the specified range.
+
+ -i --increment [<level>]
+ Increment a version by the specified level. Level can
+ be one of: major, minor, patch, premajor, preminor,
+ prepatch, or prerelease. Default level is 'patch'.
+ Only one version may be specified.
+
+ --preid <identifier>
+ Identifier to be used to prefix premajor, preminor,
+ prepatch or prerelease version increments.
+
+ -l --loose
+ Interpret versions and ranges loosely
+
+ Program exits successfully if any valid version satisfies
+ all supplied ranges, and prints all satisfying versions.
+
+ If no satisfying versions are found, then exits failure.
+
+ Versions are printed in ascending order, so supplying
+ multiple versions to the utility will just sort them.
+
+## Versions
+
+A "version" is described by the `v2.0.0` specification found at
+<http://semver.org/>.
+
+A leading `"="` or `"v"` character is stripped off and ignored.
+
+## Ranges
+
+A `version range` is a set of `comparators` which specify versions
+that satisfy the range.
+
+A `comparator` is composed of an `operator` and a `version`. The set
+of primitive `operators` is:
+
+* `<` Less than
+* `<=` Less than or equal to
+* `>` Greater than
+* `>=` Greater than or equal to
+* `=` Equal. If no operator is specified, then equality is assumed,
+ so this operator is optional, but MAY be included.
+
+For example, the comparator `>=1.2.7` would match the versions
+`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6`
+or `1.1.0`.
+
+Comparators can be joined by whitespace to form a `comparator set`,
+which is satisfied by the **intersection** of all of the comparators
+it includes.
+
+A range is composed of one or more comparator sets, joined by `||`. A
+version matches a range if and only if every comparator in at least
+one of the `||`-separated comparator sets is satisfied by the version.
+
+For example, the range `>=1.2.7 <1.3.0` would match the versions
+`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`,
+or `1.1.0`.
+
+The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`,
+`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`.
+
+### Prerelease Tags
+
+If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then
+it will only be allowed to satisfy comparator sets if at least one
+comparator with the same `[major, minor, patch]` tuple also has a
+prerelease tag.
+
+For example, the range `>1.2.3-alpha.3` would be allowed to match the
+version `1.2.3-alpha.7`, but it would *not* be satisfied by
+`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater
+than" `1.2.3-alpha.3` according to the SemVer sort rules. The version
+range only accepts prerelease tags on the `1.2.3` version. The
+version `3.4.5` *would* satisfy the range, because it does not have a
+prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`.
+
+The purpose for this behavior is twofold. First, prerelease versions
+frequently are updated very quickly, and contain many breaking changes
+that are (by the author's design) not yet fit for public consumption.
+Therefore, by default, they are excluded from range matching
+semantics.
+
+Second, a user who has opted into using a prerelease version has
+clearly indicated the intent to use *that specific* set of
+alpha/beta/rc versions. By including a prerelease tag in the range,
+the user is indicating that they are aware of the risk. However, it
+is still not appropriate to assume that they have opted into taking a
+similar risk on the *next* set of prerelease versions.
+
+#### Prerelease Identifiers
+
+The method `.inc` takes an additional `identifier` string argument that
+will append the value of the string as a prerelease identifier:
+
+```javascript
+> semver.inc('1.2.3', 'prerelease', 'beta')
+'1.2.4-beta.0'
+```
+
+command-line example:
+
+```shell
+$ semver 1.2.3 -i prerelease --preid beta
+1.2.4-beta.0
+```
+
+Which then can be used to increment further:
+
+```shell
+$ semver 1.2.4-beta.0 -i prerelease
+1.2.4-beta.1
+```
+
+### Advanced Range Syntax
+
+Advanced range syntax desugars to primitive comparators in
+deterministic ways.
+
+Advanced ranges may be combined in the same way as primitive
+comparators using white space or `||`.
+
+#### Hyphen Ranges `X.Y.Z - A.B.C`
+
+Specifies an inclusive set.
+
+* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
+
+If a partial version is provided as the first version in the inclusive
+range, then the missing pieces are replaced with zeroes.
+
+* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4`
+
+If a partial version is provided as the second version in the
+inclusive range, then all versions that start with the supplied parts
+of the tuple are accepted, but nothing that would be greater than the
+provided tuple parts.
+
+* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0`
+* `1.2.3 - 2` := `>=1.2.3 <3.0.0`
+
+#### X-Ranges `1.2.x` `1.X` `1.2.*` `*`
+
+Any of `X`, `x`, or `*` may be used to "stand in" for one of the
+numeric values in the `[major, minor, patch]` tuple.
+
+* `*` := `>=0.0.0` (Any version satisfies)
+* `1.x` := `>=1.0.0 <2.0.0` (Matching major version)
+* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions)
+
+A partial version range is treated as an X-Range, so the special
+character is in fact optional.
+
+* `""` (empty string) := `*` := `>=0.0.0`
+* `1` := `1.x.x` := `>=1.0.0 <2.0.0`
+* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0`
+
+#### Tilde Ranges `~1.2.3` `~1.2` `~1`
+
+Allows patch-level changes if a minor version is specified on the
+comparator. Allows minor-level changes if not.
+
+* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0`
+* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`)
+* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`)
+* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0`
+* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`)
+* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`)
+* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in
+ the `1.2.3` version will be allowed, if they are greater than or
+ equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
+ `1.2.4-beta.2` would not, because it is a prerelease of a
+ different `[major, minor, patch]` tuple.
+
+#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4`
+
+Allows changes that do not modify the left-most non-zero digit in the
+`[major, minor, patch]` tuple. In other words, this allows patch and
+minor updates for versions `1.0.0` and above, patch updates for
+versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`.
+
+Many authors treat a `0.x` version as if the `x` were the major
+"breaking-change" indicator.
+
+Caret ranges are ideal when an author may make breaking changes
+between `0.2.4` and `0.3.0` releases, which is a common practice.
+However, it presumes that there will *not* be breaking changes between
+`0.2.4` and `0.2.5`. It allows for changes that are presumed to be
+additive (but non-breaking), according to commonly observed practices.
+
+* `^1.2.3` := `>=1.2.3 <2.0.0`
+* `^0.2.3` := `>=0.2.3 <0.3.0`
+* `^0.0.3` := `>=0.0.3 <0.0.4`
+* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in
+ the `1.2.3` version will be allowed, if they are greater than or
+ equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but
+ `1.2.4-beta.2` would not, because it is a prerelease of a
+ different `[major, minor, patch]` tuple.
+* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the
+ `0.0.3` version *only* will be allowed, if they are greater than or
+ equal to `beta`. So, `0.0.3-pr.2` would be allowed.
+
+When parsing caret ranges, a missing `patch` value desugars to the
+number `0`, but will allow flexibility within that value, even if the
+major and minor versions are both `0`.
+
+* `^1.2.x` := `>=1.2.0 <2.0.0`
+* `^0.0.x` := `>=0.0.0 <0.1.0`
+* `^0.0` := `>=0.0.0 <0.1.0`
+
+Missing `minor` and `patch` values will desugar to zero, but also
+allow flexibility within those values, even if the major version is
+zero.
+
+* `^1.x` := `>=1.0.0 <2.0.0`
+* `^0.x` := `>=0.0.0 <1.0.0`
+
+### Range Grammar
+
+Putting all this together, here is a Backus-Naur grammar for ranges,
+for the benefit of parser authors:
+
+```bnf
+range-set ::= range ( logical-or range ) *
+logical-or ::= ( ' ' ) * '||' ( ' ' ) *
+range ::= hyphen | simple ( ' ' simple ) * | ''
+hyphen ::= partial ' - ' partial
+simple ::= primitive | partial | tilde | caret
+primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial
+partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
+xr ::= 'x' | 'X' | '*' | nr
+nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
+tilde ::= '~' partial
+caret ::= '^' partial
+qualifier ::= ( '-' pre )? ( '+' build )?
+pre ::= parts
+build ::= parts
+parts ::= part ( '.' part ) *
+part ::= nr | [-0-9A-Za-z]+
+```
+
+## Functions
+
+All methods and classes take a final `loose` boolean argument that, if
+true, will be more forgiving about not-quite-valid semver strings.
+The resulting output will always be 100% strict, of course.
+
+Strict-mode Comparators and Ranges will be strict about the SemVer
+strings that they parse.
+
+* `valid(v)`: Return the parsed version, or null if it's not valid.
+* `inc(v, release)`: Return the version incremented by the release
+ type (`major`, `premajor`, `minor`, `preminor`, `patch`,
+ `prepatch`, or `prerelease`), or null if it's not valid
+ * `premajor` in one call will bump the version up to the next major
+ version and down to a prerelease of that major version.
+ `preminor`, and `prepatch` work the same way.
+ * If called from a non-prerelease version, the `prerelease` will work the
+ same as `prepatch`. It increments the patch version, then makes a
+ prerelease. If the input version is already a prerelease it simply
+ increments it.
+* `prerelease(v)`: Returns an array of prerelease components, or null
+ if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]`
+* `major(v)`: Return the major version number.
+* `minor(v)`: Return the minor version number.
+* `patch(v)`: Return the patch version number.
+
+### Comparison
+
+* `gt(v1, v2)`: `v1 > v2`
+* `gte(v1, v2)`: `v1 >= v2`
+* `lt(v1, v2)`: `v1 < v2`
+* `lte(v1, v2)`: `v1 <= v2`
+* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent,
+ even if they're not the exact same string. You already know how to
+ compare strings.
+* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`.
+* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call
+ the corresponding function above. `"==="` and `"!=="` do simple
+ string comparison, but are included for completeness. Throws if an
+ invalid comparison string is provided.
+* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if
+ `v2` is greater. Sorts in ascending order if passed to `Array.sort()`.
+* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions
+ in descending order when passed to `Array.sort()`.
+* `diff(v1, v2)`: Returns difference between two versions by the release type
+ (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`),
+ or null if the versions are the same.
+
+
+### Ranges
+
+* `validRange(range)`: Return the valid range or null if it's not valid
+* `satisfies(version, range)`: Return true if the version satisfies the
+ range.
+* `maxSatisfying(versions, range)`: Return the highest version in the list
+ that satisfies the range, or `null` if none of them do.
+* `minSatisfying(versions, range)`: Return the lowest version in the list
+ that satisfies the range, or `null` if none of them do.
+* `gtr(version, range)`: Return `true` if version is greater than all the
+ versions possible in the range.
+* `ltr(version, range)`: Return `true` if version is less than all the
+ versions possible in the range.
+* `outside(version, range, hilo)`: Return true if the version is outside
+ the bounds of the range in either the high or low direction. The
+ `hilo` argument must be either the string `'>'` or `'<'`. (This is
+ the function called by `gtr` and `ltr`.)
+
+Note that, since ranges may be non-contiguous, a version might not be
+greater than a range, less than a range, *or* satisfy a range! For
+example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9`
+until `2.0.0`, so the version `1.2.10` would not be greater than the
+range (because `2.0.1` satisfies, which is higher), nor less than the
+range (since `1.2.8` satisfies, which is lower), and it also does not
+satisfy the range.
+
+If you want to know if a version satisfies or does not satisfy a
+range, use the `satisfies(version, range)` function.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver b/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver
new file mode 100755
index 0000000000..c5f2e857e8
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/bin/semver
@@ -0,0 +1,133 @@
+#!/usr/bin/env node
+// Standalone semver comparison program.
+// Exits successfully and prints matching version(s) if
+// any supplied version is valid and passes all tests.
+
+var argv = process.argv.slice(2)
+ , versions = []
+ , range = []
+ , gt = []
+ , lt = []
+ , eq = []
+ , inc = null
+ , version = require("../package.json").version
+ , loose = false
+ , identifier = undefined
+ , semver = require("../semver")
+ , reverse = false
+
+main()
+
+function main () {
+ if (!argv.length) return help()
+ while (argv.length) {
+ var a = argv.shift()
+ var i = a.indexOf('=')
+ if (i !== -1) {
+ a = a.slice(0, i)
+ argv.unshift(a.slice(i + 1))
+ }
+ switch (a) {
+ case "-rv": case "-rev": case "--rev": case "--reverse":
+ reverse = true
+ break
+ case "-l": case "--loose":
+ loose = true
+ break
+ case "-v": case "--version":
+ versions.push(argv.shift())
+ break
+ case "-i": case "--inc": case "--increment":
+ switch (argv[0]) {
+ case "major": case "minor": case "patch": case "prerelease":
+ case "premajor": case "preminor": case "prepatch":
+ inc = argv.shift()
+ break
+ default:
+ inc = "patch"
+ break
+ }
+ break
+ case "--preid":
+ identifier = argv.shift()
+ break
+ case "-r": case "--range":
+ range.push(argv.shift())
+ break
+ case "-h": case "--help": case "-?":
+ return help()
+ default:
+ versions.push(a)
+ break
+ }
+ }
+
+ versions = versions.filter(function (v) {
+ return semver.valid(v, loose)
+ })
+ if (!versions.length) return fail()
+ if (inc && (versions.length !== 1 || range.length))
+ return failInc()
+
+ for (var i = 0, l = range.length; i < l ; i ++) {
+ versions = versions.filter(function (v) {
+ return semver.satisfies(v, range[i], loose)
+ })
+ if (!versions.length) return fail()
+ }
+ return success(versions)
+}
+
+function failInc () {
+ console.error("--inc can only be used on a single version with no range")
+ fail()
+}
+
+function fail () { process.exit(1) }
+
+function success () {
+ var compare = reverse ? "rcompare" : "compare"
+ versions.sort(function (a, b) {
+ return semver[compare](a, b, loose)
+ }).map(function (v) {
+ return semver.clean(v, loose)
+ }).map(function (v) {
+ return inc ? semver.inc(v, inc, loose, identifier) : v
+ }).forEach(function (v,i,_) { console.log(v) })
+}
+
+function help () {
+ console.log(["SemVer " + version
+ ,""
+ ,"A JavaScript implementation of the http://semver.org/ specification"
+ ,"Copyright Isaac Z. Schlueter"
+ ,""
+ ,"Usage: semver [options] <version> [<version> [...]]"
+ ,"Prints valid versions sorted by SemVer precedence"
+ ,""
+ ,"Options:"
+ ,"-r --range <range>"
+ ," Print versions that match the specified range."
+ ,""
+ ,"-i --increment [<level>]"
+ ," Increment a version by the specified level. Level can"
+ ," be one of: major, minor, patch, premajor, preminor,"
+ ," prepatch, or prerelease. Default level is 'patch'."
+ ," Only one version may be specified."
+ ,""
+ ,"--preid <identifier>"
+ ," Identifier to be used to prefix premajor, preminor,"
+ ," prepatch or prerelease version increments."
+ ,""
+ ,"-l --loose"
+ ," Interpret versions and ranges loosely"
+ ,""
+ ,"Program exits successfully if any valid version satisfies"
+ ,"all supplied ranges, and prints all satisfying versions."
+ ,""
+ ,"If no satisfying versions are found, then exits failure."
+ ,""
+ ,"Versions are printed in ascending order, so supplying"
+ ,"multiple versions to the utility will just sort them."
+ ].join("\n"))
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/package.json b/deps/npm/node_modules/node-gyp/node_modules/semver/package.json
new file mode 100644
index 0000000000..80f00fe575
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/package.json
@@ -0,0 +1,54 @@
+{
+ "_from": "semver@~5.3.0",
+ "_id": "semver@5.3.0",
+ "_inBundle": false,
+ "_integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=",
+ "_location": "/node-gyp/semver",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "semver@~5.3.0",
+ "name": "semver",
+ "escapedName": "semver",
+ "rawSpec": "~5.3.0",
+ "saveSpec": null,
+ "fetchSpec": "~5.3.0"
+ },
+ "_requiredBy": [
+ "/node-gyp"
+ ],
+ "_resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz",
+ "_shasum": "9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f",
+ "_spec": "semver@~5.3.0",
+ "_where": "/Users/zkat/Documents/code/npm/node_modules/node-gyp",
+ "bin": {
+ "semver": "./bin/semver"
+ },
+ "bugs": {
+ "url": "https://github.com/npm/node-semver/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "The semantic version parser used by npm.",
+ "devDependencies": {
+ "tap": "^2.0.0"
+ },
+ "files": [
+ "bin",
+ "range.bnf",
+ "semver.js"
+ ],
+ "homepage": "https://github.com/npm/node-semver#readme",
+ "license": "ISC",
+ "main": "semver.js",
+ "name": "semver",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/npm/node-semver.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "5.3.0"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf b/deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf
new file mode 100644
index 0000000000..25ebd5c832
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/range.bnf
@@ -0,0 +1,16 @@
+range-set ::= range ( logical-or range ) *
+logical-or ::= ( ' ' ) * '||' ( ' ' ) *
+range ::= hyphen | simple ( ' ' simple ) * | ''
+hyphen ::= partial ' - ' partial
+simple ::= primitive | partial | tilde | caret
+primitive ::= ( '<' | '>' | '>=' | '<=' | '=' | ) partial
+partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )?
+xr ::= 'x' | 'X' | '*' | nr
+nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) *
+tilde ::= '~' partial
+caret ::= '^' partial
+qualifier ::= ( '-' pre )? ( '+' build )?
+pre ::= parts
+build ::= parts
+parts ::= part ( '.' part ) *
+part ::= nr | [-0-9A-Za-z]+
diff --git a/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js
new file mode 100644
index 0000000000..5f1a3c5c9e
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/semver/semver.js
@@ -0,0 +1,1203 @@
+exports = module.exports = SemVer;
+
+// The debug function is excluded entirely from the minified version.
+/* nomin */ var debug;
+/* nomin */ if (typeof process === 'object' &&
+ /* nomin */ process.env &&
+ /* nomin */ process.env.NODE_DEBUG &&
+ /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG))
+ /* nomin */ debug = function() {
+ /* nomin */ var args = Array.prototype.slice.call(arguments, 0);
+ /* nomin */ args.unshift('SEMVER');
+ /* nomin */ console.log.apply(console, args);
+ /* nomin */ };
+/* nomin */ else
+ /* nomin */ debug = function() {};
+
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0';
+
+var MAX_LENGTH = 256;
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
+
+// The actual regexps go on exports.re
+var re = exports.re = [];
+var src = exports.src = [];
+var R = 0;
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+var NUMERICIDENTIFIER = R++;
+src[NUMERICIDENTIFIER] = '0|[1-9]\\d*';
+var NUMERICIDENTIFIERLOOSE = R++;
+src[NUMERICIDENTIFIERLOOSE] = '[0-9]+';
+
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+var NONNUMERICIDENTIFIER = R++;
+src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*';
+
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+var MAINVERSION = R++;
+src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')';
+
+var MAINVERSIONLOOSE = R++;
+src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')';
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+var PRERELEASEIDENTIFIER = R++;
+src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')';
+
+var PRERELEASEIDENTIFIERLOOSE = R++;
+src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')';
+
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+var PRERELEASE = R++;
+src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))';
+
+var PRERELEASELOOSE = R++;
+src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))';
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+var BUILDIDENTIFIER = R++;
+src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+';
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+var BUILD = R++;
+src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
+ '(?:\\.' + src[BUILDIDENTIFIER] + ')*))';
+
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that the only major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
+
+var FULL = R++;
+var FULLPLAIN = 'v?' + src[MAINVERSION] +
+ src[PRERELEASE] + '?' +
+ src[BUILD] + '?';
+
+src[FULL] = '^' + FULLPLAIN + '$';
+
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
+ src[PRERELEASELOOSE] + '?' +
+ src[BUILD] + '?';
+
+var LOOSE = R++;
+src[LOOSE] = '^' + LOOSEPLAIN + '$';
+
+var GTLT = R++;
+src[GTLT] = '((?:<|>)?=?)';
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifier, meaning "any version"
+// Only the first item is strictly required.
+var XRANGEIDENTIFIERLOOSE = R++;
+src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*';
+var XRANGEIDENTIFIER = R++;
+src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*';
+
+var XRANGEPLAIN = R++;
+src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:' + src[PRERELEASE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?';
+
+var XRANGEPLAINLOOSE = R++;
+src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:' + src[PRERELEASELOOSE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?';
+
+var XRANGE = R++;
+src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$';
+var XRANGELOOSE = R++;
+src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$';
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+var LONETILDE = R++;
+src[LONETILDE] = '(?:~>?)';
+
+var TILDETRIM = R++;
+src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+';
+re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g');
+var tildeTrimReplace = '$1~';
+
+var TILDE = R++;
+src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$';
+var TILDELOOSE = R++;
+src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$';
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+var LONECARET = R++;
+src[LONECARET] = '(?:\\^)';
+
+var CARETTRIM = R++;
+src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+';
+re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g');
+var caretTrimReplace = '$1^';
+
+var CARET = R++;
+src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$';
+var CARETLOOSE = R++;
+src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$';
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+var COMPARATORLOOSE = R++;
+src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$';
+var COMPARATOR = R++;
+src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$';
+
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+var COMPARATORTRIM = R++;
+src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
+ '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')';
+
+// this one has to use the /g flag
+re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g');
+var comparatorTrimReplace = '$1$2$3';
+
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+var HYPHENRANGE = R++;
+src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAIN] + ')' +
+ '\\s*$';
+
+var HYPHENRANGELOOSE = R++;
+src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s*$';
+
+// Star ranges basically just allow anything at all.
+var STAR = R++;
+src[STAR] = '(<|>)?=?\\s*\\*';
+
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+ debug(i, src[i]);
+ if (!re[i])
+ re[i] = new RegExp(src[i]);
+}
+
+exports.parse = parse;
+function parse(version, loose) {
+ if (version instanceof SemVer)
+ return version;
+
+ if (typeof version !== 'string')
+ return null;
+
+ if (version.length > MAX_LENGTH)
+ return null;
+
+ var r = loose ? re[LOOSE] : re[FULL];
+ if (!r.test(version))
+ return null;
+
+ try {
+ return new SemVer(version, loose);
+ } catch (er) {
+ return null;
+ }
+}
+
+exports.valid = valid;
+function valid(version, loose) {
+ var v = parse(version, loose);
+ return v ? v.version : null;
+}
+
+
+exports.clean = clean;
+function clean(version, loose) {
+ var s = parse(version.trim().replace(/^[=v]+/, ''), loose);
+ return s ? s.version : null;
+}
+
+exports.SemVer = SemVer;
+
+function SemVer(version, loose) {
+ if (version instanceof SemVer) {
+ if (version.loose === loose)
+ return version;
+ else
+ version = version.version;
+ } else if (typeof version !== 'string') {
+ throw new TypeError('Invalid Version: ' + version);
+ }
+
+ if (version.length > MAX_LENGTH)
+ throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
+
+ if (!(this instanceof SemVer))
+ return new SemVer(version, loose);
+
+ debug('SemVer', version, loose);
+ this.loose = loose;
+ var m = version.trim().match(loose ? re[LOOSE] : re[FULL]);
+
+ if (!m)
+ throw new TypeError('Invalid Version: ' + version);
+
+ this.raw = version;
+
+ // these are actually numbers
+ this.major = +m[1];
+ this.minor = +m[2];
+ this.patch = +m[3];
+
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0)
+ throw new TypeError('Invalid major version')
+
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0)
+ throw new TypeError('Invalid minor version')
+
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0)
+ throw new TypeError('Invalid patch version')
+
+ // numberify any prerelease numeric ids
+ if (!m[4])
+ this.prerelease = [];
+ else
+ this.prerelease = m[4].split('.').map(function(id) {
+ if (/^[0-9]+$/.test(id)) {
+ var num = +id;
+ if (num >= 0 && num < MAX_SAFE_INTEGER)
+ return num;
+ }
+ return id;
+ });
+
+ this.build = m[5] ? m[5].split('.') : [];
+ this.format();
+}
+
+SemVer.prototype.format = function() {
+ this.version = this.major + '.' + this.minor + '.' + this.patch;
+ if (this.prerelease.length)
+ this.version += '-' + this.prerelease.join('.');
+ return this.version;
+};
+
+SemVer.prototype.toString = function() {
+ return this.version;
+};
+
+SemVer.prototype.compare = function(other) {
+ debug('SemVer.compare', this.version, this.loose, other);
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ return this.compareMain(other) || this.comparePre(other);
+};
+
+SemVer.prototype.compareMain = function(other) {
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ return compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch);
+};
+
+SemVer.prototype.comparePre = function(other) {
+ if (!(other instanceof SemVer))
+ other = new SemVer(other, this.loose);
+
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length)
+ return -1;
+ else if (!this.prerelease.length && other.prerelease.length)
+ return 1;
+ else if (!this.prerelease.length && !other.prerelease.length)
+ return 0;
+
+ var i = 0;
+ do {
+ var a = this.prerelease[i];
+ var b = other.prerelease[i];
+ debug('prerelease compare', i, a, b);
+ if (a === undefined && b === undefined)
+ return 0;
+ else if (b === undefined)
+ return 1;
+ else if (a === undefined)
+ return -1;
+ else if (a === b)
+ continue;
+ else
+ return compareIdentifiers(a, b);
+ } while (++i);
+};
+
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function(release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0;
+ this.patch = 0;
+ this.minor = 0;
+ this.major++;
+ this.inc('pre', identifier);
+ break;
+ case 'preminor':
+ this.prerelease.length = 0;
+ this.patch = 0;
+ this.minor++;
+ this.inc('pre', identifier);
+ break;
+ case 'prepatch':
+ // If this is already a prerelease, it will bump to the next version
+ // drop any prereleases that might already exist, since they are not
+ // relevant at this point.
+ this.prerelease.length = 0;
+ this.inc('patch', identifier);
+ this.inc('pre', identifier);
+ break;
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0)
+ this.inc('patch', identifier);
+ this.inc('pre', identifier);
+ break;
+
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0)
+ this.major++;
+ this.minor = 0;
+ this.patch = 0;
+ this.prerelease = [];
+ break;
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0)
+ this.minor++;
+ this.patch = 0;
+ this.prerelease = [];
+ break;
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0)
+ this.patch++;
+ this.prerelease = [];
+ break;
+ // This probably shouldn't be used publicly.
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0)
+ this.prerelease = [0];
+ else {
+ var i = this.prerelease.length;
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++;
+ i = -2;
+ }
+ }
+ if (i === -1) // didn't increment anything
+ this.prerelease.push(0);
+ }
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1]))
+ this.prerelease = [identifier, 0];
+ } else
+ this.prerelease = [identifier, 0];
+ }
+ break;
+
+ default:
+ throw new Error('invalid increment argument: ' + release);
+ }
+ this.format();
+ this.raw = this.version;
+ return this;
+};
+
+exports.inc = inc;
+function inc(version, release, loose, identifier) {
+ if (typeof(loose) === 'string') {
+ identifier = loose;
+ loose = undefined;
+ }
+
+ try {
+ return new SemVer(version, loose).inc(release, identifier).version;
+ } catch (er) {
+ return null;
+ }
+}
+
+exports.diff = diff;
+function diff(version1, version2) {
+ if (eq(version1, version2)) {
+ return null;
+ } else {
+ var v1 = parse(version1);
+ var v2 = parse(version2);
+ if (v1.prerelease.length || v2.prerelease.length) {
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return 'pre'+key;
+ }
+ }
+ }
+ return 'prerelease';
+ }
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return key;
+ }
+ }
+ }
+ }
+}
+
+exports.compareIdentifiers = compareIdentifiers;
+
+var numeric = /^[0-9]+$/;
+function compareIdentifiers(a, b) {
+ var anum = numeric.test(a);
+ var bnum = numeric.test(b);
+
+ if (anum && bnum) {
+ a = +a;
+ b = +b;
+ }
+
+ return (anum && !bnum) ? -1 :
+ (bnum && !anum) ? 1 :
+ a < b ? -1 :
+ a > b ? 1 :
+ 0;
+}
+
+exports.rcompareIdentifiers = rcompareIdentifiers;
+function rcompareIdentifiers(a, b) {
+ return compareIdentifiers(b, a);
+}
+
+exports.major = major;
+function major(a, loose) {
+ return new SemVer(a, loose).major;
+}
+
+exports.minor = minor;
+function minor(a, loose) {
+ return new SemVer(a, loose).minor;
+}
+
+exports.patch = patch;
+function patch(a, loose) {
+ return new SemVer(a, loose).patch;
+}
+
+exports.compare = compare;
+function compare(a, b, loose) {
+ return new SemVer(a, loose).compare(b);
+}
+
+exports.compareLoose = compareLoose;
+function compareLoose(a, b) {
+ return compare(a, b, true);
+}
+
+exports.rcompare = rcompare;
+function rcompare(a, b, loose) {
+ return compare(b, a, loose);
+}
+
+exports.sort = sort;
+function sort(list, loose) {
+ return list.sort(function(a, b) {
+ return exports.compare(a, b, loose);
+ });
+}
+
+exports.rsort = rsort;
+function rsort(list, loose) {
+ return list.sort(function(a, b) {
+ return exports.rcompare(a, b, loose);
+ });
+}
+
+exports.gt = gt;
+function gt(a, b, loose) {
+ return compare(a, b, loose) > 0;
+}
+
+exports.lt = lt;
+function lt(a, b, loose) {
+ return compare(a, b, loose) < 0;
+}
+
+exports.eq = eq;
+function eq(a, b, loose) {
+ return compare(a, b, loose) === 0;
+}
+
+exports.neq = neq;
+function neq(a, b, loose) {
+ return compare(a, b, loose) !== 0;
+}
+
+exports.gte = gte;
+function gte(a, b, loose) {
+ return compare(a, b, loose) >= 0;
+}
+
+exports.lte = lte;
+function lte(a, b, loose) {
+ return compare(a, b, loose) <= 0;
+}
+
+exports.cmp = cmp;
+function cmp(a, op, b, loose) {
+ var ret;
+ switch (op) {
+ case '===':
+ if (typeof a === 'object') a = a.version;
+ if (typeof b === 'object') b = b.version;
+ ret = a === b;
+ break;
+ case '!==':
+ if (typeof a === 'object') a = a.version;
+ if (typeof b === 'object') b = b.version;
+ ret = a !== b;
+ break;
+ case '': case '=': case '==': ret = eq(a, b, loose); break;
+ case '!=': ret = neq(a, b, loose); break;
+ case '>': ret = gt(a, b, loose); break;
+ case '>=': ret = gte(a, b, loose); break;
+ case '<': ret = lt(a, b, loose); break;
+ case '<=': ret = lte(a, b, loose); break;
+ default: throw new TypeError('Invalid operator: ' + op);
+ }
+ return ret;
+}
+
+exports.Comparator = Comparator;
+function Comparator(comp, loose) {
+ if (comp instanceof Comparator) {
+ if (comp.loose === loose)
+ return comp;
+ else
+ comp = comp.value;
+ }
+
+ if (!(this instanceof Comparator))
+ return new Comparator(comp, loose);
+
+ debug('comparator', comp, loose);
+ this.loose = loose;
+ this.parse(comp);
+
+ if (this.semver === ANY)
+ this.value = '';
+ else
+ this.value = this.operator + this.semver.version;
+
+ debug('comp', this);
+}
+
+var ANY = {};
+Comparator.prototype.parse = function(comp) {
+ var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
+ var m = comp.match(r);
+
+ if (!m)
+ throw new TypeError('Invalid comparator: ' + comp);
+
+ this.operator = m[1];
+ if (this.operator === '=')
+ this.operator = '';
+
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2])
+ this.semver = ANY;
+ else
+ this.semver = new SemVer(m[2], this.loose);
+};
+
+Comparator.prototype.toString = function() {
+ return this.value;
+};
+
+Comparator.prototype.test = function(version) {
+ debug('Comparator.test', version, this.loose);
+
+ if (this.semver === ANY)
+ return true;
+
+ if (typeof version === 'string')
+ version = new SemVer(version, this.loose);
+
+ return cmp(version, this.operator, this.semver, this.loose);
+};
+
+
+exports.Range = Range;
+function Range(range, loose) {
+ if ((range instanceof Range) && range.loose === loose)
+ return range;
+
+ if (!(this instanceof Range))
+ return new Range(range, loose);
+
+ this.loose = loose;
+
+ // First, split based on boolean or ||
+ this.raw = range;
+ this.set = range.split(/\s*\|\|\s*/).map(function(range) {
+ return this.parseRange(range.trim());
+ }, this).filter(function(c) {
+ // throw out any that are not relevant for whatever reason
+ return c.length;
+ });
+
+ if (!this.set.length) {
+ throw new TypeError('Invalid SemVer Range: ' + range);
+ }
+
+ this.format();
+}
+
+Range.prototype.format = function() {
+ this.range = this.set.map(function(comps) {
+ return comps.join(' ').trim();
+ }).join('||').trim();
+ return this.range;
+};
+
+Range.prototype.toString = function() {
+ return this.range;
+};
+
+Range.prototype.parseRange = function(range) {
+ var loose = this.loose;
+ range = range.trim();
+ debug('range', range, loose);
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE];
+ range = range.replace(hr, hyphenReplace);
+ debug('hyphen replace', range);
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace);
+ debug('comparator trim', range, re[COMPARATORTRIM]);
+
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[TILDETRIM], tildeTrimReplace);
+
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[CARETTRIM], caretTrimReplace);
+
+ // normalize spaces
+ range = range.split(/\s+/).join(' ');
+
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
+
+ var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR];
+ var set = range.split(' ').map(function(comp) {
+ return parseComparator(comp, loose);
+ }).join(' ').split(/\s+/);
+ if (this.loose) {
+ // in loose mode, throw out any that are not valid comparators
+ set = set.filter(function(comp) {
+ return !!comp.match(compRe);
+ });
+ }
+ set = set.map(function(comp) {
+ return new Comparator(comp, loose);
+ });
+
+ return set;
+};
+
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators;
+function toComparators(range, loose) {
+ return new Range(range, loose).set.map(function(comp) {
+ return comp.map(function(c) {
+ return c.value;
+ }).join(' ').trim().split(' ');
+ });
+}
+
+// comprised of xranges, tildes, stars, and gtlt's at this point.
+// already replaced the hyphen ranges
+// turn into a set of JUST comparators.
+function parseComparator(comp, loose) {
+ debug('comp', comp);
+ comp = replaceCarets(comp, loose);
+ debug('caret', comp);
+ comp = replaceTildes(comp, loose);
+ debug('tildes', comp);
+ comp = replaceXRanges(comp, loose);
+ debug('xrange', comp);
+ comp = replaceStars(comp, loose);
+ debug('stars', comp);
+ return comp;
+}
+
+function isX(id) {
+ return !id || id.toLowerCase() === 'x' || id === '*';
+}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes(comp, loose) {
+ return comp.trim().split(/\s+/).map(function(comp) {
+ return replaceTilde(comp, loose);
+ }).join(' ');
+}
+
+function replaceTilde(comp, loose) {
+ var r = loose ? re[TILDELOOSE] : re[TILDE];
+ return comp.replace(r, function(_, M, m, p, pr) {
+ debug('tilde', comp, _, M, m, p, pr);
+ var ret;
+
+ if (isX(M))
+ ret = '';
+ else if (isX(m))
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ else if (isX(p))
+ // ~1.2 == >=1.2.0 <1.3.0
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ else if (pr) {
+ debug('replaceTilde pr', pr);
+ if (pr.charAt(0) !== '-')
+ pr = '-' + pr;
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ // ~1.2.3 == >=1.2.3 <1.3.0
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0';
+
+ debug('tilde return', ret);
+ return ret;
+ });
+}
+
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets(comp, loose) {
+ return comp.trim().split(/\s+/).map(function(comp) {
+ return replaceCaret(comp, loose);
+ }).join(' ');
+}
+
+function replaceCaret(comp, loose) {
+ debug('caret', comp, loose);
+ var r = loose ? re[CARETLOOSE] : re[CARET];
+ return comp.replace(r, function(_, M, m, p, pr) {
+ debug('caret', comp, _, M, m, p, pr);
+ var ret;
+
+ if (isX(M))
+ ret = '';
+ else if (isX(m))
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ else if (isX(p)) {
+ if (M === '0')
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ else
+ ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0';
+ } else if (pr) {
+ debug('replaceCaret pr', pr);
+ if (pr.charAt(0) !== '-')
+ pr = '-' + pr;
+ if (M === '0') {
+ if (m === '0')
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + m + '.' + (+p + 1);
+ else
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ ret = '>=' + M + '.' + m + '.' + p + pr +
+ ' <' + (+M + 1) + '.0.0';
+ } else {
+ debug('no pr');
+ if (M === '0') {
+ if (m === '0')
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + m + '.' + (+p + 1);
+ else
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0';
+ } else
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + (+M + 1) + '.0.0';
+ }
+
+ debug('caret return', ret);
+ return ret;
+ });
+}
+
+function replaceXRanges(comp, loose) {
+ debug('replaceXRanges', comp, loose);
+ return comp.split(/\s+/).map(function(comp) {
+ return replaceXRange(comp, loose);
+ }).join(' ');
+}
+
+function replaceXRange(comp, loose) {
+ comp = comp.trim();
+ var r = loose ? re[XRANGELOOSE] : re[XRANGE];
+ return comp.replace(r, function(ret, gtlt, M, m, p, pr) {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr);
+ var xM = isX(M);
+ var xm = xM || isX(m);
+ var xp = xm || isX(p);
+ var anyX = xp;
+
+ if (gtlt === '=' && anyX)
+ gtlt = '';
+
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0';
+ } else {
+ // nothing is forbidden
+ ret = '*';
+ }
+ } else if (gtlt && anyX) {
+ // replace X with 0
+ if (xm)
+ m = 0;
+ if (xp)
+ p = 0;
+
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+ // >1.2.3 => >= 1.2.4
+ gtlt = '>=';
+ if (xm) {
+ M = +M + 1;
+ m = 0;
+ p = 0;
+ } else if (xp) {
+ m = +m + 1;
+ p = 0;
+ }
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<';
+ if (xm)
+ M = +M + 1;
+ else
+ m = +m + 1;
+ }
+
+ ret = gtlt + M + '.' + m + '.' + p;
+ } else if (xm) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0';
+ } else if (xp) {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0';
+ }
+
+ debug('xRange return', ret);
+
+ return ret;
+ });
+}
+
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars(comp, loose) {
+ debug('replaceStars', comp, loose);
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[STAR], '');
+}
+
+// This function is passed to string.replace(re[HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) {
+
+ if (isX(fM))
+ from = '';
+ else if (isX(fm))
+ from = '>=' + fM + '.0.0';
+ else if (isX(fp))
+ from = '>=' + fM + '.' + fm + '.0';
+ else
+ from = '>=' + from;
+
+ if (isX(tM))
+ to = '';
+ else if (isX(tm))
+ to = '<' + (+tM + 1) + '.0.0';
+ else if (isX(tp))
+ to = '<' + tM + '.' + (+tm + 1) + '.0';
+ else if (tpr)
+ to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr;
+ else
+ to = '<=' + to;
+
+ return (from + ' ' + to).trim();
+}
+
+
+// if ANY of the sets match ALL of its comparators, then pass
+Range.prototype.test = function(version) {
+ if (!version)
+ return false;
+
+ if (typeof version === 'string')
+ version = new SemVer(version, this.loose);
+
+ for (var i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version))
+ return true;
+ }
+ return false;
+};
+
+function testSet(set, version) {
+ for (var i = 0; i < set.length; i++) {
+ if (!set[i].test(version))
+ return false;
+ }
+
+ if (version.prerelease.length) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (var i = 0; i < set.length; i++) {
+ debug(set[i].semver);
+ if (set[i].semver === ANY)
+ continue;
+
+ if (set[i].semver.prerelease.length > 0) {
+ var allowed = set[i].semver;
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch)
+ return true;
+ }
+ }
+
+ // Version has a -pre, but it's not one of the ones we like.
+ return false;
+ }
+
+ return true;
+}
+
+exports.satisfies = satisfies;
+function satisfies(version, range, loose) {
+ try {
+ range = new Range(range, loose);
+ } catch (er) {
+ return false;
+ }
+ return range.test(version);
+}
+
+exports.maxSatisfying = maxSatisfying;
+function maxSatisfying(versions, range, loose) {
+ return versions.filter(function(version) {
+ return satisfies(version, range, loose);
+ }).sort(function(a, b) {
+ return rcompare(a, b, loose);
+ })[0] || null;
+}
+
+exports.minSatisfying = minSatisfying;
+function minSatisfying(versions, range, loose) {
+ return versions.filter(function(version) {
+ return satisfies(version, range, loose);
+ }).sort(function(a, b) {
+ return compare(a, b, loose);
+ })[0] || null;
+}
+
+exports.validRange = validRange;
+function validRange(range, loose) {
+ try {
+ // Return '*' instead of '' so that truthiness works.
+ // This will throw if it's invalid anyway
+ return new Range(range, loose).range || '*';
+ } catch (er) {
+ return null;
+ }
+}
+
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr;
+function ltr(version, range, loose) {
+ return outside(version, range, '<', loose);
+}
+
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr;
+function gtr(version, range, loose) {
+ return outside(version, range, '>', loose);
+}
+
+exports.outside = outside;
+function outside(version, range, hilo, loose) {
+ version = new SemVer(version, loose);
+ range = new Range(range, loose);
+
+ var gtfn, ltefn, ltfn, comp, ecomp;
+ switch (hilo) {
+ case '>':
+ gtfn = gt;
+ ltefn = lte;
+ ltfn = lt;
+ comp = '>';
+ ecomp = '>=';
+ break;
+ case '<':
+ gtfn = lt;
+ ltefn = gte;
+ ltfn = gt;
+ comp = '<';
+ ecomp = '<=';
+ break;
+ default:
+ throw new TypeError('Must provide a hilo val of "<" or ">"');
+ }
+
+  // If it satisfies the range it is not outside
+ if (satisfies(version, range, loose)) {
+ return false;
+ }
+
+ // From now on, variable terms are as if we're in "gtr" mode.
+ // but note that everything is flipped for the "ltr" function.
+
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i];
+
+ var high = null;
+ var low = null;
+
+ comparators.forEach(function(comparator) {
+ if (comparator.semver === ANY) {
+ comparator = new Comparator('>=0.0.0')
+ }
+ high = high || comparator;
+ low = low || comparator;
+ if (gtfn(comparator.semver, high.semver, loose)) {
+ high = comparator;
+ } else if (ltfn(comparator.semver, low.semver, loose)) {
+ low = comparator;
+ }
+ });
+
+    // If the edge version comparator has an operator then our version
+    // isn't outside it
+ if (high.operator === comp || high.operator === ecomp) {
+ return false;
+ }
+
+ // If the lowest version comparator has an operator and our version
+ // is less than it then it isn't higher than the range
+ if ((!low.operator || low.operator === comp) &&
+ ltefn(version, low.semver)) {
+ return false;
+ } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+ return false;
+ }
+ }
+ return true;
+}
+
+exports.prerelease = prerelease;
+function prerelease(version, loose) {
+ var parsed = parse(version, loose);
+ return (parsed && parsed.prerelease.length) ? parsed.prerelease : null;
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/.npmignore b/deps/npm/node_modules/node-gyp/node_modules/tar/.npmignore
new file mode 100644
index 0000000000..c167ad5b1c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/.npmignore
@@ -0,0 +1,5 @@
+.*.swp
+node_modules
+examples/extract/
+test/tmp/
+test/fixtures/
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/.travis.yml b/deps/npm/node_modules/node-gyp/node_modules/tar/.travis.yml
new file mode 100644
index 0000000000..fca8ef0194
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/.travis.yml
@@ -0,0 +1,4 @@
+language: node_js
+node_js:
+ - 0.10
+ - 0.11
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE
new file mode 100644
index 0000000000..019b7e40ea
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/LICENSE
@@ -0,0 +1,12 @@
+The ISC License
+Copyright (c) Isaac Z. Schlueter and Contributors
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/README.md b/deps/npm/node_modules/node-gyp/node_modules/tar/README.md
new file mode 100644
index 0000000000..cfda2ac180
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/README.md
@@ -0,0 +1,50 @@
+# node-tar
+
+Tar for Node.js.
+
+[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/)
+
+## API
+
+See `examples/` for usage examples.
+
+### var tar = require('tar')
+
+Returns an object with `.Pack`, `.Extract` and `.Parse` methods.
+
+### tar.Pack([properties])
+
+Returns a through stream. Use
+[fstream](https://npmjs.org/package/fstream) to write files into the
+pack stream and you will receive tar archive data from the pack
+stream.
+
+This only works with directories, it does not work with individual files.
+
+The optional `properties` object are used to set properties in the tar
+'Global Extended Header'. If the `fromBase` property is set to true,
+the tar will contain files relative to the path passed, and not with
+the path included.
+
+### tar.Extract([options])
+
+Returns a through stream. Write tar data to the stream and the files
+in the tarball will be extracted onto the filesystem.
+
+`options` can be:
+
+```js
+{
+ path: '/path/to/extract/tar/into',
+ strip: 0, // how many path segments to strip from the root when extracting
+}
+```
+
+`options` also get passed to the `fstream.Writer` instance that `tar`
+uses internally.
+
+### tar.Parse()
+
+Returns a writable stream. Write tar data to it and it will emit
+`entry` events for each entry parsed from the tarball. This is used by
+`tar.Extract`.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/examples/extracter.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/extracter.js
new file mode 100644
index 0000000000..f6253a72c5
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/extracter.js
@@ -0,0 +1,19 @@
+var tar = require("../tar.js")
+ , fs = require("fs")
+
+
+function onError(err) {
+ console.error('An error occurred:', err)
+}
+
+function onEnd() {
+ console.log('Extracted!')
+}
+
+var extractor = tar.Extract({path: __dirname + "/extract"})
+ .on('error', onError)
+ .on('end', onEnd);
+
+fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
+ .on('error', onError)
+ .pipe(extractor);
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/examples/packer.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/packer.js
new file mode 100644
index 0000000000..039969ce30
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/packer.js
@@ -0,0 +1,24 @@
+var tar = require("../tar.js")
+ , fstream = require("fstream")
+ , fs = require("fs")
+
+var dirDest = fs.createWriteStream('dir.tar')
+
+
+function onError(err) {
+ console.error('An error occurred:', err)
+}
+
+function onEnd() {
+ console.log('Packed!')
+}
+
+var packer = tar.Pack({ noProprietary: true })
+ .on('error', onError)
+ .on('end', onEnd);
+
+// This must be a "directory"
+fstream.Reader({ path: __dirname, type: "Directory" })
+ .on('error', onError)
+ .pipe(packer)
+ .pipe(dirDest)
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
new file mode 100644
index 0000000000..8d113ad30d
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/examples/reader.js
@@ -0,0 +1,35 @@
+var tar = require("../tar.js")
+ , fs = require("fs")
+
+fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
+ .pipe(tar.Parse())
+ .on("extendedHeader", function (e) {
+ console.error("extended pax header", e.props)
+ e.on("end", function () {
+ console.error("extended pax fields:", e.fields)
+ })
+ })
+ .on("ignoredEntry", function (e) {
+ console.error("ignoredEntry?!?", e.props)
+ })
+ .on("longLinkpath", function (e) {
+ console.error("longLinkpath entry", e.props)
+ e.on("end", function () {
+ console.error("value=%j", e.body.toString())
+ })
+ })
+ .on("longPath", function (e) {
+ console.error("longPath entry", e.props)
+ e.on("end", function () {
+ console.error("value=%j", e.body.toString())
+ })
+ })
+ .on("entry", function (e) {
+ console.error("entry", e.props)
+ e.on("data", function (c) {
+ console.error(" >>>" + c.toString().replace(/\n/g, "\\n"))
+ })
+ e.on("end", function () {
+ console.error(" <<<EOF")
+ })
+ })
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js
new file mode 100644
index 0000000000..6c1da2373a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/buffer-entry.js
@@ -0,0 +1,30 @@
+// just like the Entry class, but it buffers the contents
+//
+// XXX It would be good to set a maximum BufferEntry filesize,
+// since it eats up memory. In normal operation,
+// these are only for long filenames or link names, which are
+// rarely very big.
+
+module.exports = BufferEntry
+
+var inherits = require("inherits")
+ , Entry = require("./entry.js")
+
+function BufferEntry () {
+ Entry.apply(this, arguments)
+ this._buffer = new Buffer(this.props.size)
+ this._offset = 0
+ this.body = ""
+ this.on("end", function () {
+ this.body = this._buffer.toString().slice(0, -1)
+ })
+}
+
+inherits(BufferEntry, Entry)
+
+// collect the bytes as they come in.
+BufferEntry.prototype.write = function (c) {
+ c.copy(this._buffer, this._offset)
+ this._offset += c.length
+ Entry.prototype.write.call(this, c)
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js
new file mode 100644
index 0000000000..8e09042d01
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry-writer.js
@@ -0,0 +1,169 @@
+module.exports = EntryWriter
+
+var tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , inherits = require("inherits")
+ , BlockStream = require("block-stream")
+ , ExtendedHeaderWriter
+ , Stream = require("stream").Stream
+ , EOF = {}
+
+inherits(EntryWriter, Stream)
+
+function EntryWriter (props) {
+ var me = this
+
+ if (!(me instanceof EntryWriter)) {
+ return new EntryWriter(props)
+ }
+
+ Stream.apply(this)
+
+ me.writable = true
+ me.readable = true
+
+ me._stream = new BlockStream(512)
+
+ me._stream.on("data", function (c) {
+ me.emit("data", c)
+ })
+
+ me._stream.on("drain", function () {
+ me.emit("drain")
+ })
+
+ me._stream.on("end", function () {
+ me.emit("end")
+ me.emit("close")
+ })
+
+ me.props = props
+ if (props.type === "Directory") {
+ props.size = 0
+ }
+ props.ustar = "ustar\0"
+ props.ustarver = "00"
+ me.path = props.path
+
+ me._buffer = []
+ me._didHeader = false
+ me._meta = false
+
+ me.on("pipe", function () {
+ me._process()
+ })
+}
+
+EntryWriter.prototype.write = function (c) {
+ // console.error(".. ew write")
+ if (this._ended) return this.emit("error", new Error("write after end"))
+ this._buffer.push(c)
+ this._process()
+ this._needDrain = this._buffer.length > 0
+ return !this._needDrain
+}
+
+EntryWriter.prototype.end = function (c) {
+ // console.error(".. ew end")
+ if (c) this._buffer.push(c)
+ this._buffer.push(EOF)
+ this._ended = true
+ this._process()
+ this._needDrain = this._buffer.length > 0
+}
+
+EntryWriter.prototype.pause = function () {
+ // console.error(".. ew pause")
+ this._paused = true
+ this.emit("pause")
+}
+
+EntryWriter.prototype.resume = function () {
+ // console.error(".. ew resume")
+ this._paused = false
+ this.emit("resume")
+ this._process()
+}
+
+EntryWriter.prototype.add = function (entry) {
+ // console.error(".. ew add")
+ if (!this.parent) return this.emit("error", new Error("no parent"))
+
+ // make sure that the _header and such is emitted, and clear out
+ // the _currentEntry link on the parent.
+ if (!this._ended) this.end()
+
+ return this.parent.add(entry)
+}
+
+EntryWriter.prototype._header = function () {
+ // console.error(".. ew header")
+ if (this._didHeader) return
+ this._didHeader = true
+
+ var headerBlock = TarHeader.encode(this.props)
+
+ if (this.props.needExtended && !this._meta) {
+ var me = this
+
+ ExtendedHeaderWriter = ExtendedHeaderWriter ||
+ require("./extended-header-writer.js")
+
+ ExtendedHeaderWriter(this.props)
+ .on("data", function (c) {
+ me.emit("data", c)
+ })
+ .on("error", function (er) {
+ me.emit("error", er)
+ })
+ .end()
+ }
+
+ // console.error(".. .. ew headerBlock emitting")
+ this.emit("data", headerBlock)
+ this.emit("header")
+}
+
+EntryWriter.prototype._process = function () {
+ // console.error(".. .. ew process")
+ if (!this._didHeader && !this._meta) {
+ this._header()
+ }
+
+ if (this._paused || this._processing) {
+ // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing)
+ return
+ }
+
+ this._processing = true
+
+ var buf = this._buffer
+ for (var i = 0; i < buf.length; i ++) {
+ // console.error(".. .. .. i=%d", i)
+
+ var c = buf[i]
+
+ if (c === EOF) this._stream.end()
+ else this._stream.write(c)
+
+ if (this._paused) {
+ // console.error(".. .. .. paused mid-emission")
+ this._processing = false
+ if (i < buf.length) {
+ this._needDrain = true
+ this._buffer = buf.slice(i + 1)
+ }
+ return
+ }
+ }
+
+ // console.error(".. .. .. emitted")
+ this._buffer.length = 0
+ this._processing = false
+
+ // console.error(".. .. .. emitting drain")
+ this.emit("drain")
+}
+
+EntryWriter.prototype.destroy = function () {}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry.js
new file mode 100644
index 0000000000..5f5dd3c268
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/entry.js
@@ -0,0 +1,220 @@
+// A passthrough read/write stream that sets its properties
+// based on a header, extendedHeader, and globalHeader
+//
+// Can be either a file system object of some sort, or
+// a pax/ustar metadata entry.
+
+module.exports = Entry
+
+var TarHeader = require("./header.js")
+ , tar = require("../tar")
+ , assert = require("assert").ok
+ , Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , fstream = require("fstream").Abstract
+
+function Entry (header, extended, global) {
+ Stream.call(this)
+ this.readable = true
+ this.writable = true
+
+ this._needDrain = false
+ this._paused = false
+ this._reading = false
+ this._ending = false
+ this._ended = false
+ this._remaining = 0
+ this._abort = false
+ this._queue = []
+ this._index = 0
+ this._queueLen = 0
+
+ this._read = this._read.bind(this)
+
+ this.props = {}
+ this._header = header
+ this._extended = extended || {}
+
+ // globals can change throughout the course of
+ // a file parse operation. Freeze it at its current state.
+ this._global = {}
+ var me = this
+ Object.keys(global || {}).forEach(function (g) {
+ me._global[g] = global[g]
+ })
+
+ this._setProps()
+}
+
+inherits(Entry, Stream)
+
+Entry.prototype.write = function (c) {
+ if (this._ending) this.error("write() after end()", null, true)
+ if (this._remaining === 0) {
+ this.error("invalid bytes past eof")
+ }
+
+ // often we'll get a bunch of \0 at the end of the last write,
+ // since chunks will always be 512 bytes when reading a tarball.
+ if (c.length > this._remaining) {
+ c = c.slice(0, this._remaining)
+ }
+ this._remaining -= c.length
+
+ // put it on the stack.
+ var ql = this._queueLen
+ this._queue.push(c)
+ this._queueLen ++
+
+ this._read()
+
+ // either paused, or buffered
+ if (this._paused || ql > 0) {
+ this._needDrain = true
+ return false
+ }
+
+ return true
+}
+
+Entry.prototype.end = function (c) {
+ if (c) this.write(c)
+ this._ending = true
+ this._read()
+}
+
+Entry.prototype.pause = function () {
+ this._paused = true
+ this.emit("pause")
+}
+
+Entry.prototype.resume = function () {
+ // console.error(" Tar Entry resume", this.path)
+ this.emit("resume")
+ this._paused = false
+ this._read()
+ return this._queueLen - this._index > 1
+}
+
+ // This is bound to the instance
+Entry.prototype._read = function () {
+ // console.error(" Tar Entry _read", this.path)
+
+ if (this._paused || this._reading || this._ended) return
+
+ // set this flag so that event handlers don't inadvertently
+ // get multiple _read() calls running.
+ this._reading = true
+
+ // have any data to emit?
+ while (this._index < this._queueLen && !this._paused) {
+ var chunk = this._queue[this._index ++]
+ this.emit("data", chunk)
+ }
+
+ // check if we're drained
+ if (this._index >= this._queueLen) {
+ this._queue.length = this._queueLen = this._index = 0
+ if (this._needDrain) {
+ this._needDrain = false
+ this.emit("drain")
+ }
+ if (this._ending) {
+ this._ended = true
+ this.emit("end")
+ }
+ }
+
+ // if the queue gets too big, then pluck off whatever we can.
+ // this should be fairly rare.
+ var mql = this._maxQueueLen
+ if (this._queueLen > mql && this._index > 0) {
+ mql = Math.min(this._index, mql)
+ this._index -= mql
+ this._queueLen -= mql
+ this._queue = this._queue.slice(mql)
+ }
+
+ this._reading = false
+}
+
+Entry.prototype._setProps = function () {
+ // props = extended->global->header->{}
+ var header = this._header
+ , extended = this._extended
+ , global = this._global
+ , props = this.props
+
+ // first get the values from the normal header.
+ var fields = tar.fields
+ for (var f = 0; fields[f] !== null; f ++) {
+ var field = fields[f]
+ , val = header[field]
+ if (typeof val !== "undefined") props[field] = val
+ }
+
+ // next, the global header for this file.
+ // numeric values, etc, will have already been parsed.
+ ;[global, extended].forEach(function (p) {
+ Object.keys(p).forEach(function (f) {
+ if (typeof p[f] !== "undefined") props[f] = p[f]
+ })
+ })
+
+ // no nulls allowed in path or linkpath
+ ;["path", "linkpath"].forEach(function (p) {
+ if (props.hasOwnProperty(p)) {
+ props[p] = props[p].split("\0")[0]
+ }
+ })
+
+
+ // set date fields to be a proper date
+ ;["mtime", "ctime", "atime"].forEach(function (p) {
+ if (props.hasOwnProperty(p)) {
+ props[p] = new Date(props[p] * 1000)
+ }
+ })
+
+ // set the type so that we know what kind of file to create
+ var type
+ switch (tar.types[props.type]) {
+ case "OldFile":
+ case "ContiguousFile":
+ type = "File"
+ break
+
+ case "GNUDumpDir":
+ type = "Directory"
+ break
+
+ case undefined:
+ type = "Unknown"
+ break
+
+ case "Link":
+ case "SymbolicLink":
+ case "CharacterDevice":
+ case "BlockDevice":
+ case "Directory":
+ case "FIFO":
+ default:
+ type = tar.types[props.type]
+ }
+
+ this.type = type
+ this.path = props.path
+ this.size = props.size
+
+ // size is special, since it signals when the file needs to end.
+ this._remaining = props.size
+}
+
+// the parser may not call write if _abort is true.
+// useful for skipping data from some files quickly.
+Entry.prototype.abort = function(){
+ this._abort = true
+}
+
+Entry.prototype.warn = fstream.warn
+Entry.prototype.error = fstream.error
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js
new file mode 100644
index 0000000000..1728c4583a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header-writer.js
@@ -0,0 +1,191 @@
+
+module.exports = ExtendedHeaderWriter
+
+var inherits = require("inherits")
+ , EntryWriter = require("./entry-writer.js")
+
+inherits(ExtendedHeaderWriter, EntryWriter)
+
+var tar = require("../tar.js")
+ , path = require("path")
+ , TarHeader = require("./header.js")
+
+// props is the props of the thing we need to write an
+// extended header for.
+// Don't be shy with it. Just encode everything.
+function ExtendedHeaderWriter (props) {
+ // console.error(">> ehw ctor")
+ var me = this
+
+ if (!(me instanceof ExtendedHeaderWriter)) {
+ return new ExtendedHeaderWriter(props)
+ }
+
+ me.fields = props
+
+ var p =
+ { path : ("PaxHeader" + path.join("/", props.path || ""))
+ .replace(/\\/g, "/").substr(0, 100)
+ , mode : props.mode || 0666
+ , uid : props.uid || 0
+ , gid : props.gid || 0
+ , size : 0 // will be set later
+ , mtime : props.mtime || Date.now() / 1000
+ , type : "x"
+ , linkpath : ""
+ , ustar : "ustar\0"
+ , ustarver : "00"
+ , uname : props.uname || ""
+ , gname : props.gname || ""
+ , devmaj : props.devmaj || 0
+ , devmin : props.devmin || 0
+ }
+
+
+ EntryWriter.call(me, p)
+ // console.error(">> ehw props", me.props)
+ me.props = p
+
+ me._meta = true
+}
+
+ExtendedHeaderWriter.prototype.end = function () {
+ // console.error(">> ehw end")
+ var me = this
+
+ if (me._ended) return
+ me._ended = true
+
+ me._encodeFields()
+
+ if (me.props.size === 0) {
+ // nothing to write!
+ me._ready = true
+ me._stream.end()
+ return
+ }
+
+ me._stream.write(TarHeader.encode(me.props))
+ me.body.forEach(function (l) {
+ me._stream.write(l)
+ })
+ me._ready = true
+
+ // console.error(">> ehw _process calling end()", me.props)
+ this._stream.end()
+}
+
+ExtendedHeaderWriter.prototype._encodeFields = function () {
+ // console.error(">> ehw _encodeFields")
+ this.body = []
+ if (this.fields.prefix) {
+ this.fields.path = this.fields.prefix + "/" + this.fields.path
+ this.fields.prefix = ""
+ }
+ encodeFields(this.fields, "", this.body, this.fields.noProprietary)
+ var me = this
+ this.body.forEach(function (l) {
+ me.props.size += l.length
+ })
+}
+
+function encodeFields (fields, prefix, body, nop) {
+ // console.error(">> >> ehw encodeFields")
+ // "%d %s=%s\n", <length>, <keyword>, <value>
+ // The length is a decimal number, and includes itself and the \n
+ // Numeric values are decimal strings.
+
+ Object.keys(fields).forEach(function (k) {
+ var val = fields[k]
+ , numeric = tar.numeric[k]
+
+ if (prefix) k = prefix + "." + k
+
+ // already including NODETAR.type, don't need File=true also
+ if (k === fields.type && val === true) return
+
+ switch (k) {
+ // don't include anything that's always handled just fine
+ // in the normal header, or only meaningful in the context
+ // of nodetar
+ case "mode":
+ case "cksum":
+ case "ustar":
+ case "ustarver":
+ case "prefix":
+ case "basename":
+ case "dirname":
+ case "needExtended":
+ case "block":
+ case "filter":
+ return
+
+ case "rdev":
+ if (val === 0) return
+ break
+
+ case "nlink":
+ case "dev": // Truly a hero among men, Creator of Star!
+ case "ino": // Speak his name with reverent awe! It is:
+ k = "SCHILY." + k
+ break
+
+ default: break
+ }
+
+ if (val && typeof val === "object" &&
+ !Buffer.isBuffer(val)) encodeFields(val, k, body, nop)
+ else if (val === null || val === undefined) return
+ else body.push.apply(body, encodeField(k, val, nop))
+ })
+
+ return body
+}
+
+function encodeField (k, v, nop) {
+  // lowercase keys must be valid, otherwise prefix with
+  // "NODETAR."
+  if (k.charAt(0) === k.charAt(0).toLowerCase()) {
+    var m = k.split(".")[0]
+    if (!tar.knownExtended[m]) k = "NODETAR." + k
+  }
+
+  // no proprietary
+  if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) {
+    return []
+  }
+
+  if (typeof v === "number") v = v.toString(10)
+
+  var s = new Buffer(" " + k + "=" + v + "\n")
+    , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1
+
+  // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
+
+  // if adding that many digits will make it go over that length,
+  // then add one to it. For example, if the string is:
+  // " foo=bar\n"
+  // then that's 9 characters.  With the "9", that bumps the length
+  // up to 10.  However, this is invalid:
+  // "10 foo=bar\n"
+  // but, since that's actually 11 characters, since 10 adds another
+  // character to the length, and the length includes the number
+  // itself.  In that case, just bump it up again.
+  if (s.length + digits >= Math.pow(10, digits)) digits += 1
+  // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
+
+  var len = digits + s.length
+  // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len)
+  var lenBuf = new Buffer("" + len)
+  if (lenBuf.length + s.length !== len) {
+    throw new Error("Bad length calculation\n"+
+                    "len="+len+"\n"+
+                    "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+
+                    "lenBuf.length="+lenBuf.length+"\n"+
+                    "digits="+digits+"\n"+
+                    "s="+JSON.stringify(s.toString())+"\n"+
+                    "s.length="+s.length)
+  }
+
+  return [lenBuf, s]
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header.js
new file mode 100644
index 0000000000..74f432ceee
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extended-header.js
@@ -0,0 +1,140 @@
// An Entry consisting of:
//
// "%d %s=%s\n", <length>, <keyword>, <value>
//
// The length is a decimal number, and includes itself and the \n
// \0 does not terminate anything. Only the length terminates the string.
// Numeric values are decimal strings.

module.exports = ExtendedHeader

var Entry = require("./entry.js")
  , inherits = require("inherits")
  , tar = require("../tar.js")
  , numeric = tar.numeric
  // Translate Schily extended keywords onto the plain stat field names.
  , keyTrans = { "SCHILY.dev": "dev"
               , "SCHILY.ino": "ino"
               , "SCHILY.nlink": "nlink" }

// A pax extended-header entry. It consumes its own body through the
// "data" listener (see parse below) and accumulates the decoded
// keyword/value pairs in this.fields.
function ExtendedHeader () {
  Entry.apply(this, arguments)
  this.on("data", this._parse)
  this.fields = {}      // decoded keyword -> value
  this._position = 0    // absolute byte offset within the entry body
  this._fieldPos = 0    // byte offset within the current record
  this._state = SIZE    // current scanner state (SIZE/KEY/VAL/ERR)
  this._sizeBuf = []    // accumulates decimal length digits
  this._keyBuf = []     // accumulates keyword bytes
  this._valBuf = []     // accumulates value bytes
  this._size = -1       // declared record length (-1 = not yet known)
  this._key = ""        // keyword of the record being read
}
+
inherits(ExtendedHeader, Entry)
ExtendedHeader.prototype._parse = parse

// Scanner state ids for the "%d %s=%s\n" record parser.
var s = 0
  , states = ExtendedHeader.states = {}
  , SIZE = states.SIZE = s++   // reading the decimal length
  , KEY = states.KEY = s++     // reading the keyword (up to "=")
  , VAL = states.VAL = s++     // reading the value (up to the \n)
  , ERR = states.ERR = s++     // unrecoverable parse error

// Also index each numeric state id under itself (states[id] === id).
Object.keys(states).forEach(function (s) {
  states[states[s]] = states[s]
})

// Sentinel one past the last valid state id.
states[s] = null
+
// char code values for comparison
// FIX: `a` was declared twice in this list; the first occurrence was
// evidently meant to be uppercase "A" (pairing with Z, as a pairs
// with z).
var _0 = "0".charCodeAt(0)
  , _9 = "9".charCodeAt(0)
  , point = ".".charCodeAt(0)
  , A = "A".charCodeAt(0)
  , Z = "Z".charCodeAt(0)
  , a = "a".charCodeAt(0)
  , z = "z".charCodeAt(0)
  , space = " ".charCodeAt(0)
  , eq = "=".charCodeAt(0)
  , cr = "\n".charCodeAt(0)
+
// Incremental scanner for the pax extended-header body. Invoked via
// the "data" listener installed in the constructor, one buffer at a
// time. Each record is "%d %s=%s\n": a decimal length (which counts
// the whole record including itself and the \n), a space, the key,
// "=", and the value.
function parse (c) {
  // Once an error has been reported, ignore all further input.
  if (this._state === ERR) return

  for ( var i = 0, l = c.length
      ; i < l
      ; this._position++, this._fieldPos++, i++) {
    // console.error("top of loop, size="+this._size)

    var b = c[i]

    // A known record length bounds the record; overrunning it is fatal.
    if (this._size >= 0 && this._fieldPos > this._size) {
      error(this, "field exceeds length="+this._size)
      return
    }

    switch (this._state) {
      case ERR: return

      case SIZE:
        // Decimal digits up to the first space form the record length.
        if (b === space) {
          this._state = KEY
          this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
          this._sizeBuf.length = 0
          continue
        }
        if (b < _0 || b > _9) {
          error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
          return
        }
        this._sizeBuf.push(b)
        continue

      case KEY:
        // can be any char except =, not > size.
        // SCHILY.* aliases are translated via keyTrans.
        if (b === eq) {
          this._state = VAL
          this._key = new Buffer(this._keyBuf).toString()
          if (keyTrans[this._key]) this._key = keyTrans[this._key]
          this._keyBuf.length = 0
          continue
        }
        this._keyBuf.push(b)
        continue

      case VAL:
        // field must end with cr. Only the declared length (never \0)
        // terminates the value.
        if (this._fieldPos === this._size - 1) {
          if (b !== cr) {
            error(this, "expected \\n at end of field")
            return
          }
          var val = new Buffer(this._valBuf).toString()
          // Keys listed in tar.numeric are stored as decimal numbers.
          if (numeric[this._key]) {
            val = parseFloat(val)
          }
          this.fields[this._key] = val

          // Reset the scanner for the next "%d %s=%s\n" record.
          this._valBuf.length = 0
          this._state = SIZE
          this._size = -1
          this._fieldPos = -1
          continue
        }
        this._valBuf.push(b)
        continue
    }
  }
}
+
// Report a fatal parse failure: annotate the message with stream
// coordinates, surface it through the entry's error(), and latch the
// ERR state so parse() ignores all subsequent input.
function error (me, msg) {
  msg = "invalid header: " + msg
      + "\nposition=" + me._position
      + "\nfield position=" + me._fieldPos

  me.error(msg)
  // FIX: was `me.state = ERR`, which set an unused property — parse()
  // checks `this._state`, so the error state never actually latched.
  me._state = ERR
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extract.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extract.js
new file mode 100644
index 0000000000..fe1bb976eb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/extract.js
@@ -0,0 +1,94 @@
+// give it a tarball and a path, and it'll dump the contents
+
+module.exports = Extract
+
+var tar = require("../tar.js")
+ , fstream = require("fstream")
+ , inherits = require("inherits")
+ , path = require("path")
+
// Streaming extractor: a tar.Parse whose entries are piped into an
// fstream directory Writer rooted at opts.path.
function Extract (opts) {
  if (!(this instanceof Extract)) return new Extract(opts)
  tar.Parse.apply(this)

  // Accept either an options object or a bare destination path.
  if (typeof opts !== "object") {
    opts = { path: opts }
  }

  // better to drop in cwd? seems more standard.
  opts.path = opts.path || path.resolve("node-tar-extract")
  opts.type = "Directory"
  opts.Directory = true

  // similar to --strip or --strip-components
  opts.strip = +opts.strip
  if (!opts.strip || opts.strip <= 0) opts.strip = 0

  // The fstream writer that materializes entries on disk.
  this._fst = fstream.Writer(opts)

  // Buffer incoming tarball data until the writer is ready.
  this.pause()
  var me = this

  // Hardlinks in tarballs are relative to the root
  // of the tarball. So, they need to be resolved against
  // the target directory in order to be created properly.
  me.on("entry", function (entry) {
    // if there's a "strip" argument, then strip off that many
    // path components.
    if (opts.strip) {
      var p = entry.path.split("/").slice(opts.strip).join("/")
      entry.path = entry.props.path = p
      if (entry.linkpath) {
        var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
        entry.linkpath = entry.props.linkpath = lp
      }
    }
    if (entry.type === "Link") {
      // Re-root hardlink targets under the extraction directory.
      entry.linkpath = entry.props.linkpath =
        path.join(opts.path, path.join("/", entry.props.linkpath))
    }

    if (entry.type === "SymbolicLink") {
      // Symlink targets that would resolve outside the extraction
      // root are re-rooted under opts.path; in-tree relative targets
      // are left alone.
      var dn = path.dirname(entry.path) || ""
      var linkpath = entry.props.linkpath
      var target = path.resolve(opts.path, dn, linkpath)
      if (target.indexOf(opts.path) !== 0) {
        linkpath = path.join(opts.path, path.join("/", linkpath))
      }
      entry.linkpath = entry.props.linkpath = linkpath
    }
  })

  this._fst.on("ready", function () {
    // Start streaming parsed entries into the writer. "end" is
    // withheld here (see _streamEnd) until the writer closes.
    me.pipe(me._fst, { end: false })
    me.resume()
  })

  this._fst.on('error', function(err) {
    me.emit('error', err)
  })

  this._fst.on('drain', function() {
    me.emit('drain')
  })

  // Our own terminal events fire only once the writer has flushed
  // everything to disk.
  this._fst.on("close", function () {
    me.emit("finish")
    me.emit("end")
    me.emit("close")
  })
}
+
+inherits(Extract, tar.Parse)
+
Extract.prototype._streamEnd = function () {
  // Tarball input is done; close the fstream writer. Our own
  // "end"/"close" events fire later, from the writer's "close" handler.
  if (!this._ended || this._entry) this.error("unexpected eof")
  this._fst.end()
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js
new file mode 100644
index 0000000000..0bfc7b80aa
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/global-header-writer.js
@@ -0,0 +1,14 @@
+module.exports = GlobalHeaderWriter
+
+var ExtendedHeaderWriter = require("./extended-header-writer.js")
+ , inherits = require("inherits")
+
+inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
+
// An ExtendedHeaderWriter whose record applies to the whole tarball.
function GlobalHeaderWriter (props) {
  // Support plain-function invocation as well as `new`.
  if (!(this instanceof GlobalHeaderWriter)) return new GlobalHeaderWriter(props)
  ExtendedHeaderWriter.call(this, props)
  // "g" is the type flag for a pax global extended header record.
  this.props.type = "g"
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/header.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/header.js
new file mode 100644
index 0000000000..3741d5d3f2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/header.js
@@ -0,0 +1,384 @@
+// parse a 512-byte header block to a data object, or vice-versa
+// If the data won't fit nicely in a simple header, then generate
+// the appropriate extended header file, and return that.
+
+module.exports = TarHeader
+
+var tar = require("../tar.js")
+ , fields = tar.fields
+ , fieldOffs = tar.fieldOffs
+ , fieldEnds = tar.fieldEnds
+ , fieldSize = tar.fieldSize
+ , numeric = tar.numeric
+ , assert = require("assert").ok
+ , space = " ".charCodeAt(0)
+ , slash = "/".charCodeAt(0)
+ , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
+
// Header object for one 512-byte tar header block.
// Decodes eagerly when a block buffer is supplied.
function TarHeader (block) {
  if (this instanceof TarHeader) {
    if (block) this.decode(block)
  } else {
    // Called without `new`: delegate to a proper construction.
    return new TarHeader(block)
  }
}
+
// Instance API: decode() populates fields from this.block, encode()
// serializes back into a 512-byte block, and calcSum()/checkSum()
// handle the header checksum field.
TarHeader.prototype =
  { decode : decode
  , encode: encode
  , calcSum: calcSum
  , checkSum: checkSum
  }

// Static conveniences mirroring the instance methods.
TarHeader.parseNumeric = parseNumeric
TarHeader.encode = encode
TarHeader.decode = decode
+
// note that this will only do the normal ustar header, not any kind
// of extended posix header file. If something doesn't fit comfortably,
// then it will set obj.needExtended = true, and set the block to
// the closest approximation.
function encode (obj) {
  if (!obj && !(this instanceof TarHeader)) throw new Error(
    "encode must be called on a TarHeader, or supplied an object")

  obj = obj || this
  var block = obj.block = new Buffer(512)

  // if the object has a "prefix", then that's actually an extension of
  // the path field.
  if (obj.prefix) {
    obj.path = obj.prefix + "/" + obj.path
    obj.prefix = ""
  }

  obj.needExtended = false

  // Normalize mode to the low 9 permission bits.
  if (obj.mode) {
    if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
    obj.mode = obj.mode & 511 // 0777
  }

  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , off = fieldOffs[f]
      , end = fieldEnds[f]
      , ret

    switch (field) {
      case "cksum":
        // special, done below, after all the others
        break

      case "prefix":
        // special, this is an extension of the "path" field.
        break

      case "type":
        // convert from long name to a single char.
        var type = obj.type || "0"
        if (type.length > 1) {
          type = tar.types[obj.type]
          if (!type) type = "0"
        }
        writeText(block, off, end, type)
        break

      case "path":
        // uses the "prefix" field if > 100 bytes, but <= 255
        var pathLen = Buffer.byteLength(obj.path)
          , pathFSize = fieldSize[fields.path]
          , prefFSize = fieldSize[fields.prefix]

        // paths between 100 and 255 should use the prefix field.
        // longer than 255
        if (pathLen > pathFSize &&
            pathLen <= pathFSize + prefFSize) {
          // need to find a slash somewhere in the middle so that
          // path and prefix both fit in their respective fields
          var searchStart = pathLen - 1 - pathFSize
            , searchEnd = prefFSize
            , found = false
            , pathBuf = new Buffer(obj.path)

          for ( var s = searchStart
              ; (s <= searchEnd)
              ; s ++ ) {
            if (pathBuf[s] === slash || pathBuf[s] === bslash) {
              found = s
              break
            }
          }

          if (found !== false) {
            // FIX: prefix and path were assigned without `var`,
            // leaking implicit globals (and shadow-prone names).
            var prefix = pathBuf.slice(0, found).toString("utf8")
            var path = pathBuf.slice(found + 1).toString("utf8")

            ret = writeText(block, off, end, path)
            off = fieldOffs[fields.prefix]
            end = fieldEnds[fields.prefix]
            ret = writeText(block, off, end, prefix) || ret
            break
          }
        }

        // paths less than 100 chars don't need a prefix
        // and paths longer than 255 need an extended header and will fail
        // on old implementations no matter what we do here.
        // Null out the prefix, and fallthrough to default.
        var poff = fieldOffs[fields.prefix]
          , pend = fieldEnds[fields.prefix]
        writeText(block, poff, pend, "")
        // fallthrough

      // all other fields are numeric or text
      default:
        ret = numeric[field]
            ? writeNumeric(block, off, end, obj[field])
            : writeText(block, off, end, obj[field] || "")
        break
    }
    // Any field that didn't fit cleanly flags the need for a pax
    // extended header.
    obj.needExtended = obj.needExtended || ret
  }

  // Checksum is computed last, over the otherwise-complete block.
  var off = fieldOffs[fields.cksum]
    , end = fieldEnds[fields.cksum]

  writeNumeric(block, off, end, calcSum.call(this, block))

  return block
}
+
// if it's a negative number, or greater than will fit,
// then use write256.
// (All-sevens octal field limits, expressed in decimal so the file
// also parses under strict mode.)
var MAXNUM = { 12: 8589934591  // 077777777777
             , 11: 1073741823 // 07777777777
             , 8 : 2097151    // 07777777
             , 7 : 262143 }   // 0777777

// Write `num` into block[off..end) as a zero-padded, NUL-terminated
// octal ASCII field. Falls back to base-256 (write256) for negative
// or oversized values, returning true to request an extended header.
function writeNumeric (block, off, end, num) {
  var fieldLen = end - off
  var limit = MAXNUM[fieldLen] || 0

  num = num || 0

  // Timestamps may arrive as Date objects; store epoch seconds.
  var isDate = num instanceof Date ||
               Object.prototype.toString.call(num) === "[object Date]"
  if (isDate) num = num.getTime() / 1000

  if (num > limit || num < 0) {
    write256(block, off, end, num)
    // need an extended header if negative or too big.
    return true
  }

  // Octal quirk: append a trailing space before the NUL, but only
  // when the digit count leaves room for it.
  var octal = Math.floor(num).toString(8)
  if (num < MAXNUM[fieldLen - 1]) octal += " "

  // left-pad with "0" characters up to the field width (minus the NUL)
  while (octal.length < fieldLen - 1) {
    octal = "0" + octal
  }

  if (octal.length !== fieldLen - 1) {
    throw new Error("invalid length: " + JSON.stringify(octal) + "\n" +
                    "expected: " + fieldLen)
  }
  block.write(octal, off, fieldLen, "utf8")
  block[end - 1] = 0
}
+
// Write `num` into block[off..end) in base-256 form: a marker byte
// (0x80 positive, 0xFF negative two's-complement) followed by the
// value, most-significant byte first, LSB at the end of the field —
// the layout parse256 reads back.
function write256 (block, off, end, num) {
  var buf = block.slice(off, end)
  var positive = num >= 0
  buf[0] = positive ? 0x80 : 0xFF

  // get the number as a base-256 tuple, least-significant byte first
  if (!positive) num *= -1
  var tuple = []
  do {
    var n = num % 256
    tuple.push(n)
    num = (num - n) / 256
  } while (num)

  var bytes = tuple.length

  // pad between the marker byte and the value
  var fill = buf.length - bytes
  for (var i = 1; i < fill; i ++) {
    buf[i] = positive ? 0 : 0xFF
  }

  // tuple is a base256 number, with [0] as the *least* significant byte
  // if it's negative, then we need to flip all the bits once we hit the
  // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's-
  // complement is (0xFF - n).
  // FIX: the target index was `buf[fill + i]`, which put the least
  // significant byte at buf[buf.length] (silently dropped) and left
  // buf[buf.length - 1] — where parse256 reads the LSB — unwritten.
  var zero = true
  for (i = bytes; i > 0; i --) {
    var byte = tuple[bytes - i]
    if (positive) buf[fill + i - 1] = byte
    else if (zero && byte === 0) buf[fill + i - 1] = 0
    else if (zero) {
      zero = false
      buf[fill + i - 1] = 0x100 - byte
    } else buf[fill + i - 1] = 0xFF - byte
  }
}
+
// UTF-8 encode `str` into block[off..end), zero-padding the remainder.
// Returns true when an extended header is needed: either the string is
// non-ASCII (byte length differs from char count) or it was truncated.
function writeText (block, off, end, str) {
  var capacity = end - off
  var byteLen = Buffer.byteLength(str)
  var writeLen = byteLen < capacity ? byteLen : capacity

  if (writeLen > 0) block.write(str, off, writeLen, "utf8")

  // null-pad whatever is left of the field
  var i = off + writeLen
  while (i < end) {
    block[i] = 0
    i++
  }

  return byteLen !== str.length || byteLen > writeLen
}
+
// Compute the header checksum: the sum of all 512 header bytes, with
// the cksum field itself counted as if filled with ASCII spaces.
function calcSum (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  if (!block) throw new Error("Need block to checksum")

  var start = fieldOffs[fields.cksum]
    , end = fieldEnds[fields.cksum]
    , sum = 0

  for (var i = 0; i < 512; i ++) {
    sum += (i >= start && i < end) ? space : block[i]
  }

  return sum
}
+
+
// Validate the stored cksum field against a fresh calculation.
function checkSum (block) {
  var expected = calcSum.call(this, block)
  block = block || this.block

  var stored = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
  return parseNumeric(stored) === expected
}
+
// Populate this header's fields from a 512-byte block. Sets
// this.cksumValid, and this.ustar to the magic string (or false, in
// which case decoding stops early).
function decode (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  this.block = block
  this.cksumValid = this.checkSum()

  var prefix = null

  // slice off each field.
  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , val = block.slice(fieldOffs[f], fieldEnds[f])

    switch (field) {
      case "ustar":
        // if not ustar, then everything after that is just padding.
        if (val.toString() !== "ustar\0") {
          this.ustar = false
          return
        } else {
          this.ustar = val.toString()
        }
        break

      // prefix is special, since it might signal the xstar header
      case "prefix":
        // NOTE(review): the probes at offsets 130/131 appear to match
        // the xstar layout, which stores atime/ctime inside the prefix
        // field — confirm against the xstar/star header spec.
        var atime = parseNumeric(val.slice(131, 131 + 12))
          , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
        if ((val[130] === 0 || val[130] === space) &&
            typeof atime === "number" &&
            typeof ctime === "number" &&
            val[131 + 12] === space &&
            val[131 + 12 + 12] === space) {
          this.atime = atime
          this.ctime = ctime
          // Only the first 130 bytes hold the actual prefix then.
          val = val.slice(0, 130)
        }
        prefix = val.toString("utf8").replace(/\0+$/, "")
        break

      // all other fields are null-padding text
      // or a number.
      default:
        if (numeric[field]) {
          this[field] = parseNumeric(val)
        } else {
          this[field] = val.toString("utf8").replace(/\0+$/, "")
        }
        break
    }
  }

  // if we got a prefix, then prepend it to the path.
  if (prefix) {
    this.path = prefix + "/" + this.path
  }
}
+
// Parse a base-256 numeric field (used for values that are negative
// or too large for octal ASCII).
// First byte MUST be either 0x80 (positive) or 0xFF (2's-complement
// negative); anything else is not base-256 and yields null.
function parse256 (buf) {
  var positive
  if (buf[0] === 0x80) positive = true
  else if (buf[0] === 0xFF) positive = false
  else return null

  // build up a base-256 tuple from the least sig to the highest
  // FIX: `zero` must start true — it tracks "still inside the trailing
  // zero bytes" while undoing the 2's complement from the LSB upward,
  // mirroring write256. Starting it false left the first two branches
  // below dead and 1's-complemented every byte, so e.g. an all-0xFF
  // field decoded as -0 instead of -1.
  var zero = true
    , tuple = []
  for (var i = buf.length - 1; i > 0; i --) {
    var byte = buf[i]
    if (positive) tuple.push(byte)
    else if (zero && byte === 0) tuple.push(0)
    else if (zero) {
      zero = false
      tuple.push(0x100 - byte)
    } else tuple.push(0xFF - byte)
  }

  // Accumulate, treating tuple[i] as the 256^i digit.
  for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
    sum += tuple[i] * Math.pow(256, i)
  }

  return positive ? sum : -1 * sum
}
+
// Decode a numeric header field. A set high bit in the first byte
// flags base-256 (binary) encoding; otherwise the field is
// NUL-terminated, possibly space-padded octal ASCII.
function parseNumeric (f) {
  if (f[0] & 0x80) return parse256(f)

  var text = f.toString("utf8").split("\0")[0].trim()
  var value = parseInt(text, 8)
  return isNaN(value) ? null : value
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js
new file mode 100644
index 0000000000..5a3bb95a12
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/pack.js
@@ -0,0 +1,236 @@
+// pipe in an fstream, and it'll make a tarball.
+// key-value pair argument is global extended header props.
+
+module.exports = Pack
+
+var EntryWriter = require("./entry-writer.js")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , inherits = require("inherits")
+ , GlobalHeaderWriter = require("./global-header-writer.js")
+ , collect = require("fstream").collect
+ , eof = new Buffer(512)
+
+for (var i = 0; i < 512; i ++) eof[i] = 0
+
+inherits(Pack, Stream)
+
// Tarball producer: pipe an fstream Reader in, tar blocks come out as
// "data" events. `props` doubles as the global extended-header values.
function Pack (props) {
  // console.error("-- p ctor")
  var me = this
  if (!(me instanceof Pack)) return new Pack(props)

  // noProprietary suppresses NODETAR.* extended-header fields on the
  // entries this pack writes.
  if (props) me._noProprietary = props.noProprietary
  else me._noProprietary = false

  // Kept to be flushed lazily as a global extended header by add().
  me._global = props

  me.readable = true
  me.writable = true
  // FIFO of fstream entries (plus the eof sentinel) awaiting _process.
  me._buffer = []
  // console.error("-- -- set current to null in ctor")
  me._currentEntry = null
  me._processing = false

  // Remember the stream piped in as the tarball root so its own
  // entries/data aren't added twice (see _process).
  me._pipeRoot = null
  me.on("pipe", function (src) {
    if (src.root === me._pipeRoot) return
    me._pipeRoot = src
    src.on("end", function () {
      me._pipeRoot = null
    })
    me.add(src)
  })
}
+
Pack.prototype.addGlobal = function (props) {
  // Emit the pax global extended header at most once.
  if (this._didGlobal) return
  this._didGlobal = true

  var self = this
  var writer = GlobalHeaderWriter(props)
  writer.on("data", function (c) {
    self.emit("data", c)
  })
  writer.end()
}
+
Pack.prototype.add = function (stream) {
  // The global extended header must precede the first entry.
  if (this._global && !this._didGlobal) this.addGlobal(this._global)

  if (this._ended) return this.emit("error", new Error("add after end"))

  // Buffer the entry's data so it can be replayed once its turn comes.
  collect(stream)
  this._buffer.push(stream)
  this._process()

  var backlogged = this._buffer.length > 0
  this._needDrain = backlogged
  return !backlogged
}
+
Pack.prototype.pause = function () {
  // Halt processing and propagate to the entry currently streaming.
  this._paused = true
  var entry = this._currentEntry
  if (entry) entry.pause()
  this.emit("pause")
}
+
Pack.prototype.resume = function () {
  // Clear the pause flag, wake the in-flight entry, and kick the queue.
  this._paused = false
  var entry = this._currentEntry
  if (entry) entry.resume()
  this.emit("resume")
  this._process()
}
+
Pack.prototype.end = function () {
  // Queue the terminating null-block sentinel and kick processing.
  this._ended = true
  var queue = this._buffer
  queue.push(eof)
  this._process()
}
+
// Serialize the next queued fstream entry into tar blocks.
// Exactly one entry is in flight at a time; completion (nextEntry)
// re-enters this method to drain the queue.
Pack.prototype._process = function () {
  var me = this
  // Re-entrancy / pause guard.
  if (me._paused || me._processing) {
    return
  }

  var entry = me._buffer.shift()

  if (!entry) {
    // Queue drained; let add() callers resume writing.
    if (me._needDrain) {
      me.emit("drain")
    }
    return
  }

  if (entry.ready === false) {
    // fstream entry hasn't finished stat'ing yet; retry when ready.
    me._buffer.unshift(entry)
    entry.on("ready", function () {
      me._process()
    })
    return
  }

  me._processing = true

  if (entry === eof) {
    // need 2 ending null blocks.
    me.emit("data", eof)
    me.emit("data", eof)
    me.emit("end")
    me.emit("close")
    return
  }

  // Change the path to be relative to the root dir that was
  // added to the tarball.
  //
  // XXX This should be more like how -C works, so you can
  // explicitly set a root dir, and also explicitly set a pathname
  // in the tarball to use. That way we can skip a lot of extra
  // work when resolving symlinks for bundled dependencies in npm.

  var root = path.dirname((entry.root || entry).path);
  if (me._global && me._global.fromBase && entry.root && entry.root.path) {
    // user set 'fromBase: true' indicating tar root should be directory itself
    root = entry.root.path;
  }

  var wprops = {}

  Object.keys(entry.props || {}).forEach(function (k) {
    wprops[k] = entry.props[k]
  })

  if (me._noProprietary) wprops.noProprietary = true

  wprops.path = path.relative(root, entry.path || '')

  // actually not a matter of opinion or taste.
  if (process.platform === "win32") {
    wprops.path = wprops.path.replace(/\\/g, "/")
  }

  if (!wprops.type)
    wprops.type = 'Directory'

  switch (wprops.type) {
    // sockets not supported
    case "Socket":
      // FIX: a bare `return` here left _processing set forever, so a
      // single socket entry stalled the whole pack. Clear the flag
      // and continue with the next queued entry instead.
      me._processing = false
      me._process()
      return

    case "Directory":
      wprops.path += "/"
      wprops.size = 0
      break

    case "Link":
      // hardlink targets are recorded relative to the tarball root
      var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
      wprops.linkpath = path.relative(root, lp) || "."
      wprops.size = 0
      break

    case "SymbolicLink":
      // symlink targets stay relative to the entry's own directory
      var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
      wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
      wprops.size = 0
      break
  }

  var writer = me._currentEntry = EntryWriter(wprops)

  writer.parent = me

  writer.on("data", function (c) {
    me.emit("data", c)
  })

  writer.on("header", function () {
    // FIX: removed leftover debug code that overwrote the global
    // Buffer.prototype.toJSON — mutating a native prototype leaks
    // into every other Buffer consumer in the process. It only fed a
    // commented-out console.error.
    // Entries with no body (dirs, links) finish at the header.
    if (writer.props.size === 0) nextEntry()
  })
  writer.on("close", nextEntry)

  // nextEntry can be reached via both "header" (empty entries) and
  // "close"; the flag ensures the pipeline advances only once.
  var ended = false
  function nextEntry () {
    if (ended) return
    ended = true

    me._currentEntry = null
    me._processing = false
    me._process()
  }

  writer.on("error", function (er) {
    me.emit("error", er)
  })

  // if it's the root, then there's no need to add its entries,
  // or data, since they'll be added directly.
  if (entry === me._pipeRoot) {
    writer.add = null
  }

  entry.pipe(writer)
}
+
// Pack is driven by add()/pipe with whole fstream entries, not raw
// chunk writes, so destroy() and direct write() calls are
// intentional no-ops.
Pack.prototype.destroy = function () {}
Pack.prototype.write = function () {}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js
new file mode 100644
index 0000000000..600ad782f0
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/lib/parse.js
@@ -0,0 +1,275 @@
+
+// A writable stream.
+// It emits "entry" events, which provide a readable stream that has
+// header info attached.
+
+module.exports = Parse.create = Parse
+
+var stream = require("stream")
+ , Stream = stream.Stream
+ , BlockStream = require("block-stream")
+ , tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , BufferEntry = require("./buffer-entry.js")
+ , ExtendedHeader = require("./extended-header.js")
+ , assert = require("assert").ok
+ , inherits = require("inherits")
+ , fstream = require("fstream")
+
+// reading a tar is a lot like reading a directory
+// However, we're actually not going to run the ctor,
+// since it does a stat and various other stuff.
+// This inheritance gives us the pause/resume/pipe
+// behavior that is desired.
+inherits(Parse, fstream.Reader)
+
// Writable tar parser: raw tarball bytes go in, 512-byte blocks are
// re-chunked by a BlockStream, and _process turns them into "entry"
// (and related) events.
function Parse () {
  var me = this
  if (!(me instanceof Parse)) return new Parse()

  // doesn't apply fstream.Reader ctor?
  // no, because we don't want to stat/etc, we just
  // want to get the entry/add logic from .pipe()
  Stream.apply(me)

  me.writable = true
  me.readable = true
  // Re-chunk arbitrary writes into the tar block size.
  me._stream = new BlockStream(512)
  // Byte offset into the tarball; advanced 512 at a time in _process.
  me.position = 0
  me._ended = false

  me._stream.on("error", function (e) {
    me.emit("error", e)
  })

  // Every complete 512-byte block is dispatched to _process.
  me._stream.on("data", function (c) {
    me._process(c)
  })

  me._stream.on("end", function () {
    me._streamEnd()
  })

  me._stream.on("drain", function () {
    me.emit("drain")
  })
}
+
// overridden in Extract class, since it needs to
// wait for its DirWriter part to finish before
// emitting "end"
Parse.prototype._streamEnd = function () {
  // EOF is only valid after end() was called and with no entry
  // mid-stream.
  if (!this._ended || this._entry) this.error("unexpected eof")
  this.emit("end")
}
+
// a tar reader is actually a filter, not just a readable stream.
// So, you should pipe a tarball stream into it, and it needs these
// write/end methods to do that.
Parse.prototype.write = function (c) {
  if (this._ended) {
    // gnutar puts a LOT of nulls at the end.
    // you can keep writing these things forever.
    // Just ignore them.
    // FIX: the scan's condition was `i > l` (starting from i = 0), so
    // the loop never executed and any post-end write — null or not —
    // was silently accepted instead of raising "write() after end()".
    for (var i = 0, l = c.length; i < l; i ++) {
      if (c[i] !== 0) return this.error("write() after end()")
    }
    return
  }
  return this._stream.write(c)
}
+
Parse.prototype.end = function (c) {
  // Mark EOF, then flush any final partial chunk through the
  // block stream (which zero-pads it to a full block).
  this._ended = true
  var blocks = this._stream
  return blocks.end(c)
}
+
// don't need to do anything, since we're just
// proxying the data up from the _stream.
// Just need to override the parent's (fstream.Reader's)
// "Not Implemented" error-thrower.
Parse.prototype._read = function () {}
+
// Dispatch one 512-byte block: body data for the current entry, a new
// header, or part of the trailing null-block EOF marker.
Parse.prototype._process = function (c) {
  assert(c && c.length === 512, "block size should be 512")

  // one of three cases.
  // 1. A new header
  // 2. A part of a file/extended header
  // 3. One of two or more EOF null blocks

  if (this._entry) {
    var entry = this._entry
    // Aborted entries still consume their data blocks; the bytes are
    // just discarded instead of written.
    if(!entry._abort) entry.write(c)
    else {
      entry._remaining -= c.length
      if(entry._remaining < 0) entry._remaining = 0
    }
    if (entry._remaining === 0) {
      entry.end()
      this._entry = null
    }
  } else {
    // either zeroes or a header
    var zero = true
    for (var i = 0; i < 512 && zero; i ++) {
      zero = c[i] === 0
    }

    // eof is *at least* 2 blocks of nulls, and then the end of the
    // file. you can put blocks of nulls between entries anywhere,
    // so appending one tarball to another is technically valid.
    // ending without the eof null blocks is not allowed, however.
    if (zero) {
      // Two consecutive null blocks mark a valid EOF.
      if (this._eofStarted)
        this._ended = true
      this._eofStarted = true
    } else {
      this._eofStarted = false
      this._startEntry(c)
    }
  }

  this.position += 512
}
+
// take a header chunk, start the right kind of entry.
Parse.prototype._startEntry = function (c) {
  var header = new TarHeader(c)
    , self = this
    , entry
    , ev          // event name emitted for this entry
    , EntryType   // constructor chosen for this entry type
    , onend       // run when a meta entry's body is fully parsed
    , meta = false  // true for header-only entries (pax, long path, ...)

  // An unparseable size or bad checksum means we're not positioned at
  // a valid header; surface the location to help debugging.
  if (null === header.size || !header.cksumValid) {
    var e = new Error("invalid tar file")
    e.header = header
    e.tar_file_offset = this.position
    e.tar_block = this.position / 512
    return this.emit("error", e)
  }

  switch (tar.types[header.type]) {
    case "File":
    case "OldFile":
    case "Link":
    case "SymbolicLink":
    case "CharacterDevice":
    case "BlockDevice":
    case "Directory":
    case "FIFO":
    case "ContiguousFile":
    case "GNUDumpDir":
      // start a file.
      // pass in any extended headers
      // These ones consumers are typically most interested in.
      EntryType = Entry
      ev = "entry"
      break

    case "GlobalExtendedHeader":
      // extended headers that apply to the rest of the tarball
      EntryType = ExtendedHeader
      onend = function () {
        self._global = self._global || {}
        Object.keys(entry.fields).forEach(function (k) {
          self._global[k] = entry.fields[k]
        })
      }
      ev = "globalExtendedHeader"
      meta = true
      break

    case "ExtendedHeader":
    case "OldExtendedHeader":
      // extended headers that apply to the next entry
      EntryType = ExtendedHeader
      onend = function () {
        self._extended = entry.fields
      }
      ev = "extendedHeader"
      meta = true
      break

    case "NextFileHasLongLinkpath":
      // set linkpath=<contents> in extended header
      EntryType = BufferEntry
      onend = function () {
        self._extended = self._extended || {}
        self._extended.linkpath = entry.body
      }
      ev = "longLinkpath"
      meta = true
      break

    case "NextFileHasLongPath":
    case "OldGnuLongPath":
      // set path=<contents> in file-extended header
      EntryType = BufferEntry
      onend = function () {
        self._extended = self._extended || {}
        self._extended.path = entry.body
      }
      ev = "longPath"
      meta = true
      break

    default:
      // all the rest we skip, but still set the _entry
      // member, so that we can skip over their data appropriately.
      // emit an event to say that this is an ignored entry type?
      EntryType = Entry
      ev = "ignoredEntry"
      break
  }

  var global, extended
  if (meta) {
    // Meta entries describe other entries; they carry no headers of
    // their own to merge.
    global = extended = null
  } else {
    // (The `var` re-declarations below bind the same hoisted names;
    // they are harmless.)
    var global = this._global
    var extended = this._extended

    // extendedHeader only applies to one entry, so once we start
    // an entry, it's over.
    this._extended = null
  }
  entry = new EntryType(header, extended, global)
  entry.meta = meta

  // only proxy data events of normal files.
  // (`me` is declared below; var hoisting makes it visible here, and
  // this listener only fires after it has been assigned.)
  if (!meta) {
    entry.on("data", function (c) {
      me.emit("data", c)
    })
  }

  if (onend) entry.on("end", onend)

  this._entry = entry
  var me = this

  entry.on("pause", function () {
    me.pause()
  })

  entry.on("resume", function () {
    me.resume()
  })

  // "*" listeners get every event with its name, for generic consumers.
  if (this.listeners("*").length) {
    this.emit("*", ev, entry)
  }

  this.emit(ev, entry)

  // Zero-byte entry. End immediately.
  if (entry.props.size === 0) {
    entry.end()
    this._entry = null
  }
}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE
new file mode 100644
index 0000000000..74489e2e26
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENCE
@@ -0,0 +1,25 @@
+Copyright (c) Isaac Z. Schlueter
+All rights reserved.
+
+The BSD License
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE
new file mode 100644
index 0000000000..19129e315f
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md
new file mode 100644
index 0000000000..c16e9c4688
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/README.md
@@ -0,0 +1,14 @@
+# block-stream
+
+A stream of blocks.
+
+Write data into it, and it'll output data in buffer blocks the size you
+specify, padding with zeroes if necessary.
+
+```javascript
+var block = new BlockStream(512)
+fs.createReadStream("some-file").pipe(block)
+block.pipe(fs.createWriteStream("block-file"))
+```
+
+When `.end()` or `.flush()` is called, it'll pad the block with zeroes.
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js
new file mode 100644
index 0000000000..008de035c2
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/block-stream.js
@@ -0,0 +1,209 @@
+// write data to it, and it'll emit data in 512 byte blocks.
+// if you .end() or .flush(), it'll emit whatever it's got,
+// padded with nulls to 512 bytes.
+
+module.exports = BlockStream
+
+var Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , assert = require("assert").ok
+ , debug = process.env.DEBUG ? console.error : function () {}
+
+function BlockStream (size, opt) {
+ this.writable = this.readable = true
+ this._opt = opt || {}
+ this._chunkSize = size || 512
+ this._offset = 0
+ this._buffer = []
+ this._bufferLength = 0
+ if (this._opt.nopad) this._zeroes = false
+ else {
+ this._zeroes = new Buffer(this._chunkSize)
+ for (var i = 0; i < this._chunkSize; i ++) {
+ this._zeroes[i] = 0
+ }
+ }
+}
+
+inherits(BlockStream, Stream)
+
+BlockStream.prototype.write = function (c) {
+ // debug(" BS write", c)
+ if (this._ended) throw new Error("BlockStream: write after end")
+ if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "")
+ if (c.length) {
+ this._buffer.push(c)
+ this._bufferLength += c.length
+ }
+ // debug("pushed onto buffer", this._bufferLength)
+ if (this._bufferLength >= this._chunkSize) {
+ if (this._paused) {
+ // debug(" BS paused, return false, need drain")
+ this._needDrain = true
+ return false
+ }
+ this._emitChunk()
+ }
+ return true
+}
+
+BlockStream.prototype.pause = function () {
+ // debug(" BS pausing")
+ this._paused = true
+}
+
+BlockStream.prototype.resume = function () {
+ // debug(" BS resume")
+ this._paused = false
+ return this._emitChunk()
+}
+
+BlockStream.prototype.end = function (chunk) {
+ // debug("end", chunk)
+ if (typeof chunk === "function") cb = chunk, chunk = null
+ if (chunk) this.write(chunk)
+ this._ended = true
+ this.flush()
+}
+
+BlockStream.prototype.flush = function () {
+ this._emitChunk(true)
+}
+
+BlockStream.prototype._emitChunk = function (flush) {
+ // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused)
+
+ // emit a <chunkSize> chunk
+ if (flush && this._zeroes) {
+ // debug(" BS push zeroes", this._bufferLength)
+ // push a chunk of zeroes
+ var padBytes = (this._bufferLength % this._chunkSize)
+ if (padBytes !== 0) padBytes = this._chunkSize - padBytes
+ if (padBytes > 0) {
+ // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes))
+ this._buffer.push(this._zeroes.slice(0, padBytes))
+ this._bufferLength += padBytes
+ // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength)
+ }
+ }
+
+ if (this._emitting || this._paused) return
+ this._emitting = true
+
+ // debug(" BS entering loops")
+ var bufferIndex = 0
+ while (this._bufferLength >= this._chunkSize &&
+ (flush || !this._paused)) {
+ // debug(" BS data emission loop", this._bufferLength)
+
+ var out
+ , outOffset = 0
+ , outHas = this._chunkSize
+
+ while (outHas > 0 && (flush || !this._paused) ) {
+ // debug(" BS data inner emit loop", this._bufferLength)
+ var cur = this._buffer[bufferIndex]
+ , curHas = cur.length - this._offset
+ // debug("cur=", cur)
+ // debug("curHas=%j", curHas)
+ // If it's not big enough to fill the whole thing, then we'll need
+ // to copy multiple buffers into one. However, if it is big enough,
+ // then just slice out the part we want, to save unnecessary copying.
+ // Also, need to copy if we've already done some copying, since buffers
+ // can't be joined like cons strings.
+ if (out || curHas < outHas) {
+ out = out || new Buffer(this._chunkSize)
+ cur.copy(out, outOffset,
+ this._offset, this._offset + Math.min(curHas, outHas))
+ } else if (cur.length === outHas && this._offset === 0) {
+ // shortcut -- cur is exactly long enough, and no offset.
+ out = cur
+ } else {
+ // slice out the piece of cur that we need.
+ out = cur.slice(this._offset, this._offset + outHas)
+ }
+
+ if (curHas > outHas) {
+ // means that the current buffer couldn't be completely output
+ // update this._offset to reflect how much WAS written
+ this._offset += outHas
+ outHas = 0
+ } else {
+ // output the entire current chunk.
+ // toss it away
+ outHas -= curHas
+ outOffset += curHas
+ bufferIndex ++
+ this._offset = 0
+ }
+ }
+
+ this._bufferLength -= this._chunkSize
+ assert(out.length === this._chunkSize)
+ // debug("emitting data", out)
+ // debug(" BS emitting, paused=%j", this._paused, this._bufferLength)
+ this.emit("data", out)
+ out = null
+ }
+ // debug(" BS out of loops", this._bufferLength)
+
+ // whatever is left, it's not enough to fill up a block, or we're paused
+ this._buffer = this._buffer.slice(bufferIndex)
+ if (this._paused) {
+ // debug(" BS paused, leaving", this._bufferLength)
+ this._needsDrain = true
+ this._emitting = false
+ return
+ }
+
+ // if flushing, and not using null-padding, then need to emit the last
+ // chunk(s) sitting in the queue. We know that it's not enough to
+ // fill up a whole block, because otherwise it would have been emitted
+ // above, but there may be some offset.
+ var l = this._buffer.length
+ if (flush && !this._zeroes && l) {
+ if (l === 1) {
+ if (this._offset) {
+ this.emit("data", this._buffer[0].slice(this._offset))
+ } else {
+ this.emit("data", this._buffer[0])
+ }
+ } else {
+ var outHas = this._bufferLength
+ , out = new Buffer(outHas)
+ , outOffset = 0
+ for (var i = 0; i < l; i ++) {
+ var cur = this._buffer[i]
+ , curHas = cur.length - this._offset
+ cur.copy(out, outOffset, this._offset)
+ this._offset = 0
+ outOffset += curHas
+ this._bufferLength -= curHas
+ }
+ this.emit("data", out)
+ }
+ // truncate
+ this._buffer.length = 0
+ this._bufferLength = 0
+ this._offset = 0
+ }
+
+ // now either drained or ended
+ // debug("either draining, or ended", this._bufferLength, this._ended)
+ // means that we've flushed out all that we can so far.
+ if (this._needDrain) {
+ // debug("emitting drain", this._bufferLength)
+ this._needDrain = false
+ this.emit("drain")
+ }
+
+ if ((this._bufferLength === 0) && this._ended && !this._endEmitted) {
+ // debug("emitting end", this._bufferLength)
+ this._endEmitted = true
+ this.emit("end")
+ }
+
+ this._emitting = false
+
+ // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize)
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json
new file mode 100644
index 0000000000..bf449e9633
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/node_modules/block-stream/package.json
@@ -0,0 +1,63 @@
+{
+ "_args": [
+ [
+ "block-stream@0.0.9",
+ "/Users/rebecca/code/npm"
+ ]
+ ],
+ "_from": "block-stream@0.0.9",
+ "_id": "block-stream@0.0.9",
+ "_inBundle": false,
+ "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=",
+ "_location": "/node-gyp/tar/block-stream",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "block-stream@0.0.9",
+ "name": "block-stream",
+ "escapedName": "block-stream",
+ "rawSpec": "0.0.9",
+ "saveSpec": null,
+ "fetchSpec": "0.0.9"
+ },
+ "_requiredBy": [
+ "/node-gyp/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz",
+ "_spec": "0.0.9",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/block-stream/issues"
+ },
+ "dependencies": {
+ "inherits": "~2.0.0"
+ },
+ "description": "a stream of blocks",
+ "devDependencies": {
+ "tap": "^5.7.1"
+ },
+ "engines": {
+ "node": "0.4 || >=0.5.8"
+ },
+ "files": [
+ "block-stream.js"
+ ],
+ "homepage": "https://github.com/isaacs/block-stream#readme",
+ "license": "ISC",
+ "main": "block-stream.js",
+ "name": "block-stream",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/block-stream.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js --cov"
+ },
+ "version": "0.0.9"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/package.json b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
new file mode 100644
index 0000000000..0cb9624034
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/package.json
@@ -0,0 +1,64 @@
+{
+ "_args": [
+ [
+ "tar@2.2.1",
+ "/Users/rebecca/code/npm"
+ ]
+ ],
+ "_from": "tar@2.2.1",
+ "_id": "tar@2.2.1",
+ "_inBundle": false,
+ "_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=",
+ "_location": "/node-gyp/tar",
+ "_phantomChildren": {
+ "inherits": "2.0.3"
+ },
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "tar@2.2.1",
+ "name": "tar",
+ "escapedName": "tar",
+ "rawSpec": "2.2.1",
+ "saveSpec": null,
+ "fetchSpec": "2.2.1"
+ },
+ "_requiredBy": [
+ "/node-gyp"
+ ],
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz",
+ "_spec": "2.2.1",
+ "_where": "/Users/rebecca/code/npm",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/node-tar/issues"
+ },
+ "dependencies": {
+ "block-stream": "*",
+ "fstream": "^1.0.2",
+ "inherits": "2"
+ },
+ "description": "tar for node",
+ "devDependencies": {
+ "graceful-fs": "^4.1.2",
+ "mkdirp": "^0.5.0",
+ "rimraf": "1.x",
+ "tap": "0.x"
+ },
+ "homepage": "https://github.com/isaacs/node-tar#readme",
+ "license": "ISC",
+ "main": "tar.js",
+ "name": "tar",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-tar.git"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ },
+ "version": "2.2.1"
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/tar.js b/deps/npm/node_modules/node-gyp/node_modules/tar/tar.js
new file mode 100644
index 0000000000..a81298b9a0
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/tar.js
@@ -0,0 +1,173 @@
+// field paths that every tar file must have.
+// header is padded to 512 bytes.
+var f = 0
+ , fields = {}
+ , path = fields.path = f++
+ , mode = fields.mode = f++
+ , uid = fields.uid = f++
+ , gid = fields.gid = f++
+ , size = fields.size = f++
+ , mtime = fields.mtime = f++
+ , cksum = fields.cksum = f++
+ , type = fields.type = f++
+ , linkpath = fields.linkpath = f++
+ , headerSize = 512
+ , blockSize = 512
+ , fieldSize = []
+
+fieldSize[path] = 100
+fieldSize[mode] = 8
+fieldSize[uid] = 8
+fieldSize[gid] = 8
+fieldSize[size] = 12
+fieldSize[mtime] = 12
+fieldSize[cksum] = 8
+fieldSize[type] = 1
+fieldSize[linkpath] = 100
+
+// "ustar\0" may introduce another bunch of headers.
+// these are optional, and will be nulled out if not present.
+
+var ustar = fields.ustar = f++
+ , ustarver = fields.ustarver = f++
+ , uname = fields.uname = f++
+ , gname = fields.gname = f++
+ , devmaj = fields.devmaj = f++
+ , devmin = fields.devmin = f++
+ , prefix = fields.prefix = f++
+ , fill = fields.fill = f++
+
+// terminate fields.
+fields[f] = null
+
+fieldSize[ustar] = 6
+fieldSize[ustarver] = 2
+fieldSize[uname] = 32
+fieldSize[gname] = 32
+fieldSize[devmaj] = 8
+fieldSize[devmin] = 8
+fieldSize[prefix] = 155
+fieldSize[fill] = 12
+
+// nb: prefix field may in fact be 130 bytes of prefix,
+// a null char, 12 bytes for atime, 12 bytes for ctime.
+//
+// To recognize this format:
+// 1. prefix[130] === ' ' or '\0'
+// 2. atime and ctime are octal numeric values
+// 3. atime and ctime have ' ' in their last byte
+
+var fieldEnds = {}
+ , fieldOffs = {}
+ , fe = 0
+for (var i = 0; i < f; i ++) {
+ fieldOffs[i] = fe
+ fieldEnds[i] = (fe += fieldSize[i])
+}
+
+// build a translation table of field paths.
+Object.keys(fields).forEach(function (f) {
+ if (fields[f] !== null) fields[fields[f]] = f
+})
+
+// different values of the 'type' field
+// paths match the values of Stats.isX() functions, where appropriate
+var types =
+ { 0: "File"
+ , "\0": "OldFile" // like 0
+ , "": "OldFile"
+ , 1: "Link"
+ , 2: "SymbolicLink"
+ , 3: "CharacterDevice"
+ , 4: "BlockDevice"
+ , 5: "Directory"
+ , 6: "FIFO"
+ , 7: "ContiguousFile" // like 0
+ // posix headers
+ , g: "GlobalExtendedHeader" // k=v for the rest of the archive
+ , x: "ExtendedHeader" // k=v for the next file
+ // vendor-specific stuff
+ , A: "SolarisACL" // skip
+ , D: "GNUDumpDir" // like 5, but with data, which should be skipped
+ , I: "Inode" // metadata only, skip
+ , K: "NextFileHasLongLinkpath" // data = link path of next file
+ , L: "NextFileHasLongPath" // data = path of next file
+ , M: "ContinuationFile" // skip
+ , N: "OldGnuLongPath" // like L
+ , S: "SparseFile" // skip
+ , V: "TapeVolumeHeader" // skip
+ , X: "OldExtendedHeader" // like x
+ }
+
+Object.keys(types).forEach(function (t) {
+ types[types[t]] = types[types[t]] || t
+})
+
+// values for the mode field
+var modes =
+ { suid: 04000 // set uid on extraction
+ , sgid: 02000 // set gid on extraction
+ , svtx: 01000 // set restricted deletion flag on dirs on extraction
+ , uread: 0400
+ , uwrite: 0200
+ , uexec: 0100
+ , gread: 040
+ , gwrite: 020
+ , gexec: 010
+ , oread: 4
+ , owrite: 2
+ , oexec: 1
+ , all: 07777
+ }
+
+var numeric =
+ { mode: true
+ , uid: true
+ , gid: true
+ , size: true
+ , mtime: true
+ , devmaj: true
+ , devmin: true
+ , cksum: true
+ , atime: true
+ , ctime: true
+ , dev: true
+ , ino: true
+ , nlink: true
+ }
+
+Object.keys(modes).forEach(function (t) {
+ modes[modes[t]] = modes[modes[t]] || t
+})
+
+var knownExtended =
+ { atime: true
+ , charset: true
+ , comment: true
+ , ctime: true
+ , gid: true
+ , gname: true
+ , linkpath: true
+ , mtime: true
+ , path: true
+ , realtime: true
+ , security: true
+ , size: true
+ , uid: true
+ , uname: true }
+
+
+exports.fields = fields
+exports.fieldSize = fieldSize
+exports.fieldOffs = fieldOffs
+exports.fieldEnds = fieldEnds
+exports.types = types
+exports.modes = modes
+exports.numeric = numeric
+exports.headerSize = headerSize
+exports.blockSize = blockSize
+exports.knownExtended = knownExtended
+
+exports.Pack = require("./lib/pack.js")
+exports.Parse = require("./lib/parse.js")
+exports.Extract = require("./lib/extract.js")
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js
new file mode 100644
index 0000000000..1524ff7af0
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/00-setup-fixtures.js
@@ -0,0 +1,53 @@
+// the fixtures have some weird stuff that is painful
+// to include directly in the repo for various reasons.
+//
+// So, unpack the fixtures with the system tar first.
+//
+// This means, of course, that it'll only work if you
+// already have a tar implementation, and some of them
+// will not properly unpack the fixtures anyway.
+//
+// But, since usually those tests will fail on Windows
+// and other systems with less capable filesystems anyway,
+// at least this way we don't cause inconveniences by
+// merely cloning the repo or installing the package.
+
+var tap = require("tap")
+, child_process = require("child_process")
+, rimraf = require("rimraf")
+, test = tap.test
+, path = require("path")
+
+test("clean fixtures", function (t) {
+ rimraf(path.resolve(__dirname, "fixtures"), function (er) {
+ t.ifError(er, "rimraf ./fixtures/")
+ t.end()
+ })
+})
+
+test("clean tmp", function (t) {
+ rimraf(path.resolve(__dirname, "tmp"), function (er) {
+ t.ifError(er, "rimraf ./tmp/")
+ t.end()
+ })
+})
+
+test("extract fixtures", function (t) {
+ var c = child_process.spawn("tar"
+ ,["xzvf", "fixtures.tgz"]
+ ,{ cwd: __dirname })
+
+ c.stdout.on("data", errwrite)
+ c.stderr.on("data", errwrite)
+ function errwrite (chunk) {
+ process.stderr.write(chunk)
+ }
+
+ c.on("exit", function (code) {
+ t.equal(code, 0, "extract fixtures should exit with 0")
+ if (code) {
+ t.comment("Note, all tests from here on out will fail because of this.")
+ }
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz b/deps/npm/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz
new file mode 100644
index 0000000000..9e7014d85a
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/cb-never-called-1.0.1.tgz
Binary files differ
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js
new file mode 100644
index 0000000000..9719c42f35
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.js
@@ -0,0 +1,177 @@
+// Set the umask, so that it works the same everywhere.
+process.umask(parseInt('22', 8))
+
+var fs = require('fs')
+var path = require('path')
+
+var fstream = require('fstream')
+var test = require('tap').test
+
+var tar = require('../tar.js')
+var file = path.resolve(__dirname, 'dir-normalization.tar')
+var target = path.resolve(__dirname, 'tmp/dir-normalization-test')
+var ee = 0
+
+var expectEntries = [
+ { path: 'fixtures/',
+ mode: '755',
+ type: '5',
+ linkpath: ''
+ },
+ { path: 'fixtures/a/',
+ mode: '755',
+ type: '5',
+ linkpath: ''
+ },
+ { path: 'fixtures/the-chumbler',
+ mode: '755',
+ type: '2',
+ linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'),
+ },
+ { path: 'fixtures/a/b/',
+ mode: '755',
+ type: '5',
+ linkpath: ''
+ },
+ { path: 'fixtures/a/x',
+ mode: '644',
+ type: '0',
+ linkpath: ''
+ },
+ { path: 'fixtures/a/b/c/',
+ mode: '755',
+ type: '5',
+ linkpath: ''
+ },
+ { path: 'fixtures/a/b/c/y',
+ mode: '755',
+ type: '2',
+ linkpath: '../../x',
+ }
+]
+
+var ef = 0
+var expectFiles = [
+ { path: '',
+ mode: '40755',
+ type: 'Directory',
+ depth: 0,
+ linkpath: undefined
+ },
+ { path: '/fixtures',
+ mode: '40755',
+ type: 'Directory',
+ depth: 1,
+ linkpath: undefined
+ },
+ { path: '/fixtures/a',
+ mode: '40755',
+ type: 'Directory',
+ depth: 2,
+ linkpath: undefined
+ },
+ { path: '/fixtures/a/b',
+ mode: '40755',
+ type: 'Directory',
+ depth: 3,
+ linkpath: undefined
+ },
+ { path: '/fixtures/a/b/c',
+ mode: '40755',
+ type: 'Directory',
+ depth: 4,
+ linkpath: undefined
+ },
+ { path: '/fixtures/a/b/c/y',
+ mode: '120755',
+ type: 'SymbolicLink',
+ depth: 5,
+ linkpath: '../../x'
+ },
+ { path: '/fixtures/a/x',
+ mode: '100644',
+ type: 'File',
+ depth: 3,
+ linkpath: undefined
+ },
+ { path: '/fixtures/the-chumbler',
+ mode: '120755',
+ type: 'SymbolicLink',
+ depth: 2,
+ linkpath: path.resolve(target, 'a/b/c/d/the-chumbler')
+ }
+]
+
+test('preclean', function (t) {
+ require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test'))
+ t.pass('cleaned!')
+ t.end()
+})
+
+test('extract test', function (t) {
+ var extract = tar.Extract(target)
+ var inp = fs.createReadStream(file)
+
+ inp.pipe(extract)
+
+ extract.on('end', function () {
+ t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries')
+
+ // should get no more entries after end
+ extract.removeAllListeners('entry')
+ extract.on('entry', function (e) {
+ t.fail('Should not get entries after end!')
+ })
+
+ next()
+ })
+
+ extract.on('entry', function (entry) {
+ var mode = entry.props.mode & (~parseInt('22', 8))
+ var found = {
+ path: entry.path,
+ mode: mode.toString(8),
+ type: entry.props.type,
+ linkpath: entry.props.linkpath,
+ }
+
+ var wanted = expectEntries[ee++]
+ t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path))
+ })
+
+ function next () {
+ var r = fstream.Reader({
+ path: target,
+ type: 'Directory',
+ sort: 'alpha'
+ })
+
+ r.on('ready', function () {
+ foundEntry(r)
+ })
+
+ r.on('end', finish)
+
+ function foundEntry (entry) {
+ var p = entry.path.substr(target.length)
+ var mode = entry.props.mode & (~parseInt('22', 8))
+ var found = {
+ path: p,
+ mode: mode.toString(8),
+ type: entry.props.type,
+ depth: entry.props.depth,
+ linkpath: entry.props.linkpath
+ }
+
+ var wanted = expectFiles[ef++]
+ t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path))
+
+ entry.on('entry', foundEntry)
+ }
+
+ function finish () {
+ t.equal(ef, expectFiles.length, 'should have ' + ef + ' items')
+ t.end()
+ }
+ }
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar b/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar
new file mode 100644
index 0000000000..3c4845356c
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/dir-normalization.tar
Binary files differ
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js
new file mode 100644
index 0000000000..e484920fd9
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/error-on-broken.js
@@ -0,0 +1,33 @@
+var fs = require('fs')
+var path = require('path')
+var zlib = require('zlib')
+
+var tap = require('tap')
+
+var tar = require('../tar.js')
+
+var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz')
+var target = path.join(__dirname, 'tmp/extract-test')
+
+tap.test('preclean', function (t) {
+ require('rimraf').sync(__dirname + '/tmp/extract-test')
+ t.pass('cleaned!')
+ t.end()
+})
+
+tap.test('extract test', function (t) {
+ var extract = tar.Extract(target)
+ var inp = fs.createReadStream(file)
+
+ inp.pipe(zlib.createGunzip()).pipe(extract)
+
+ extract.on('error', function (er) {
+ t.equal(er.message, 'unexpected eof', 'error noticed')
+ t.end()
+ })
+
+ extract.on('end', function () {
+ t.fail('shouldn\'t reach this point due to errors')
+ t.end()
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract-move.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract-move.js
new file mode 100644
index 0000000000..45400cd9bb
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract-move.js
@@ -0,0 +1,132 @@
+// Set the umask, so that it works the same everywhere.
+process.umask(parseInt('22', 8))
+
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , fs = require("fs")
+ , gfs = require("graceful-fs")
+ , path = require("path")
+ , file = path.resolve(__dirname, "fixtures/dir.tar")
+ , target = path.resolve(__dirname, "tmp/extract-test")
+ , index = 0
+ , fstream = require("fstream")
+ , rimraf = require("rimraf")
+ , mkdirp = require("mkdirp")
+
+ , ee = 0
+ , expectEntries = [
+ {
+ "path" : "dir/",
+ "mode" : "750",
+ "type" : "5",
+ "depth" : undefined,
+ "size" : 0,
+ "linkpath" : "",
+ "nlink" : undefined,
+ "dev" : undefined,
+ "ino" : undefined
+ },
+ {
+ "path" : "dir/sub/",
+ "mode" : "750",
+ "type" : "5",
+ "depth" : undefined,
+ "size" : 0,
+ "linkpath" : "",
+ "nlink" : undefined,
+ "dev" : undefined,
+ "ino" : undefined
+ } ]
+
+function slow (fs, method, t1, t2) {
+ var orig = fs[method]
+ if (!orig) return null
+ fs[method] = function () {
+ var args = [].slice.call(arguments)
+ console.error("slow", method, args[0])
+ var cb = args.pop()
+
+ setTimeout(function () {
+ orig.apply(fs, args.concat(function(er, data) {
+ setTimeout(function() {
+ cb(er, data)
+ }, t2)
+ }))
+ }, t1)
+ }
+}
+
+// Make sure we get the graceful-fs that fstream is using.
+var gfs2
+try {
+ gfs2 = require("fstream/node_modules/graceful-fs")
+} catch (er) {}
+
+var slowMethods = ["chown", "chmod", "utimes", "lutimes"]
+slowMethods.forEach(function (method) {
+ var t1 = 500
+ var t2 = 0
+ slow(fs, method, t1, t2)
+ slow(gfs, method, t1, t2)
+ if (gfs2) {
+ slow(gfs2, method, t1, t2)
+ }
+})
+
+
+
+// The extract class basically just pipes the input
+// to a Reader, and then to a fstream.DirWriter
+
+// So, this is as much a test of fstream.Reader and fstream.Writer
+// as it is of tar.Extract, but it sort of makes sense.
+
+tap.test("preclean", function (t) {
+ rimraf.sync(target)
+ /mkdirp.sync(target)
+ t.pass("cleaned!")
+ t.end()
+})
+
+tap.test("extract test", function (t) {
+ var extract = tar.Extract(target)
+ var inp = fs.createReadStream(file)
+
+ // give it a weird buffer size to try to break in odd places
+ inp.bufferSize = 1234
+
+ inp.pipe(extract)
+
+ extract.on("end", function () {
+ rimraf.sync(target)
+
+ t.equal(ee, expectEntries.length, "should see "+ee+" entries")
+
+ // should get no more entries after end
+ extract.removeAllListeners("entry")
+ extract.on("entry", function (e) {
+ t.fail("Should not get entries after end!")
+ })
+
+ t.end()
+ })
+
+
+ extract.on("entry", function (entry) {
+ var found =
+ { path: entry.path
+ , mode: entry.props.mode.toString(8)
+ , type: entry.props.type
+ , depth: entry.props.depth
+ , size: entry.props.size
+ , linkpath: entry.props.linkpath
+ , nlink: entry.props.nlink
+ , dev: entry.props.dev
+ , ino: entry.props.ino
+ }
+
+ var wanted = expectEntries[ee ++]
+
+ t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract.js
new file mode 100644
index 0000000000..eca4e7cc96
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/extract.js
@@ -0,0 +1,367 @@
+// Set the umask, so that it works the same everywhere.
+process.umask(parseInt('22', 8))
+
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , fs = require("fs")
+ , path = require("path")
+ , file = path.resolve(__dirname, "fixtures/c.tar")
+ , target = path.resolve(__dirname, "tmp/extract-test")
+ , index = 0
+ , fstream = require("fstream")
+
+ , ee = 0
+ , expectEntries =
+[ { path: 'c.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 513,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'cc.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 513,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 100,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'Ω.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 2,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'Ω.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 2,
+ linkpath: '',
+ nlink: 1,
+ dev: 234881026,
+ ino: 51693379 },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 200,
+ linkpath: '',
+ nlink: 1,
+ dev: 234881026,
+ ino: 51681874 },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 201,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
+ mode: '777',
+ type: '2',
+ depth: undefined,
+ size: 0,
+ linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: '200-hard',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 200,
+ linkpath: '',
+ nlink: 2,
+ dev: 234881026,
+ ino: 51681874 },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '1',
+ depth: undefined,
+ size: 0,
+ linkpath: path.resolve(target, '200-hard'),
+ nlink: 2,
+ dev: 234881026,
+ ino: 51681874 } ]
+
+ , ef = 0
+ , expectFiles =
+[ { path: '',
+ mode: '40755',
+ type: 'Directory',
+ depth: 0,
+ linkpath: undefined },
+ { path: '/200-hard',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 200,
+ linkpath: undefined,
+ nlink: 2 },
+ { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
+ mode: '120777',
+ type: 'SymbolicLink',
+ depth: 1,
+ size: 200,
+ linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ nlink: 1 },
+ { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '100644',
+ type: 'Link',
+ depth: 1,
+ size: 200,
+ linkpath: path.join(target, '200-hard'),
+ nlink: 2 },
+ { path: '/c.txt',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 513,
+ linkpath: undefined,
+ nlink: 1 },
+ { path: '/cc.txt',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 513,
+ linkpath: undefined,
+ nlink: 1 },
+ { path: '/r',
+ mode: '40755',
+ type: 'Directory',
+ depth: 1,
+ linkpath: undefined },
+ { path: '/r/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 2,
+ linkpath: undefined },
+ { path: '/r/e/a',
+ mode: '40755',
+ type: 'Directory',
+ depth: 3,
+ linkpath: undefined },
+ { path: '/r/e/a/l',
+ mode: '40755',
+ type: 'Directory',
+ depth: 4,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l',
+ mode: '40755',
+ type: 'Directory',
+ depth: 5,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y',
+ mode: '40755',
+ type: 'Directory',
+ depth: 6,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-',
+ mode: '40755',
+ type: 'Directory',
+ depth: 7,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d',
+ mode: '40755',
+ type: 'Directory',
+ depth: 8,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 9,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 10,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p',
+ mode: '40755',
+ type: 'Directory',
+ depth: 11,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-',
+ mode: '40755',
+ type: 'Directory',
+ depth: 12,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f',
+ mode: '40755',
+ type: 'Directory',
+ depth: 13,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o',
+ mode: '40755',
+ type: 'Directory',
+ depth: 14,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l',
+ mode: '40755',
+ type: 'Directory',
+ depth: 15,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d',
+ mode: '40755',
+ type: 'Directory',
+ depth: 16,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 17,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r',
+ mode: '40755',
+ type: 'Directory',
+ depth: 18,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-',
+ mode: '40755',
+ type: 'Directory',
+ depth: 19,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p',
+ mode: '40755',
+ type: 'Directory',
+ depth: 20,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a',
+ mode: '40755',
+ type: 'Directory',
+ depth: 21,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t',
+ mode: '40755',
+ type: 'Directory',
+ depth: 22,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h',
+ mode: '40755',
+ type: 'Directory',
+ depth: 23,
+ linkpath: undefined },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '100644',
+ type: 'File',
+ depth: 24,
+ size: 100,
+ linkpath: undefined,
+ nlink: 1 },
+ { path: '/Ω.txt',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 2,
+ linkpath: undefined,
+ nlink: 1 } ]
+
+
+
+// The extract class basically just pipes the input
+// to a Reader, and then to a fstream.DirWriter
+
+// So, this is as much a test of fstream.Reader and fstream.Writer
+// as it is of tar.Extract, but it sort of makes sense.
+
+tap.test("preclean", function (t) {
+ require("rimraf").sync(__dirname + "/tmp/extract-test")
+ t.pass("cleaned!")
+ t.end()
+})
+
+tap.test("extract test", function (t) {
+ var extract = tar.Extract(target)
+ var inp = fs.createReadStream(file)
+
+ // give it a weird buffer size to try to break in odd places
+ inp.bufferSize = 1234
+
+ inp.pipe(extract)
+
+ extract.on("end", function () {
+ t.equal(ee, expectEntries.length, "should see "+ee+" entries")
+
+ // should get no more entries after end
+ extract.removeAllListeners("entry")
+ extract.on("entry", function (e) {
+ t.fail("Should not get entries after end!")
+ })
+
+ next()
+ })
+
+ extract.on("entry", function (entry) {
+ var found =
+ { path: entry.path
+ , mode: entry.props.mode.toString(8)
+ , type: entry.props.type
+ , depth: entry.props.depth
+ , size: entry.props.size
+ , linkpath: entry.props.linkpath
+ , nlink: entry.props.nlink
+ , dev: entry.props.dev
+ , ino: entry.props.ino
+ }
+
+ var wanted = expectEntries[ee ++]
+
+ t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
+ })
+
+ function next () {
+ var r = fstream.Reader({ path: target
+ , type: "Directory"
+ // this is just to encourage consistency
+ , sort: "alpha" })
+
+ r.on("ready", function () {
+ foundEntry(r)
+ })
+
+ r.on("end", finish)
+
+ function foundEntry (entry) {
+ var p = entry.path.substr(target.length)
+ var found =
+ { path: p
+ , mode: entry.props.mode.toString(8)
+ , type: entry.props.type
+ , depth: entry.props.depth
+ , size: entry.props.size
+ , linkpath: entry.props.linkpath
+ , nlink: entry.props.nlink
+ }
+
+ var wanted = expectFiles[ef ++]
+
+ t.has(found, wanted, "unpacked file " + ef + " " + wanted.path)
+
+ entry.on("entry", foundEntry)
+ }
+
+ function finish () {
+ t.equal(ef, expectFiles.length, "should have "+ef+" items")
+ t.end()
+ }
+ }
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz b/deps/npm/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz
new file mode 100644
index 0000000000..f1676023af
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/fixtures.tgz
Binary files differ
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/header.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/header.js
new file mode 100644
index 0000000000..8ea6f79500
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/header.js
@@ -0,0 +1,183 @@
+var tap = require("tap")
+var TarHeader = require("../lib/header.js")
+var tar = require("../tar.js")
+var fs = require("fs")
+
+
+var headers =
+ { "a.txt file header":
+ [ "612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'a.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 257
+ , mtime: 1319493851
+ , cksum: 5417
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+
+ , "omega pax": // the extended header from omega tar.
+ [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'PaxHeader/Ω.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 120
+ , mtime: 1301254537
+ , cksum: 6697
+ , type: 'x'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' } ]
+
+ , "omega file header":
+ [ "cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'Ω.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 2
+ , mtime: 1301254537
+ , cksum: 5690
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' } ]
+
+ , "foo.js file header":
+ [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'foo.js'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 4
+ , mtime: 1301246433
+ , cksum: 5519
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+
+ , "b.txt file header":
+ [ "622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'b.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 512
+ , mtime: 1319494079
+ , cksum: 5425
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+
+ , "deep nested file":
+ [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true,
+ path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 100
+ , mtime: 1319687003
+ , cksum: 18124
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+ }
+
+tap.test("parsing", function (t) {
+ Object.keys(headers).forEach(function (name) {
+ var h = headers[name]
+ , header = new Buffer(h[0], "hex")
+ , expect = h[1]
+ , parsed = new TarHeader(header)
+
+ // console.error(parsed)
+ t.has(parsed, expect, "parse " + name)
+ })
+ t.end()
+})
+
+tap.test("encoding", function (t) {
+ Object.keys(headers).forEach(function (name) {
+ var h = headers[name]
+ , expect = new Buffer(h[0], "hex")
+ , encoded = TarHeader.encode(h[1])
+
+ // might have slightly different bytes, since the standard
+ // isn't very strict, but should have the same semantics
+ // checkSum will be different, but cksumValid will be true
+
+ var th = new TarHeader(encoded)
+ delete h[1].block
+ delete h[1].needExtended
+ delete h[1].cksum
+ t.has(th, h[1], "fields "+name)
+ })
+ t.end()
+})
+
+// test these manually. they're a bit rare to find in the wild
+tap.test("parseNumeric tests", function (t) {
+ var parseNumeric = TarHeader.parseNumeric
+ , numbers =
+ { "303737373737373700": 2097151
+ , "30373737373737373737373700": 8589934591
+ , "303030303036343400": 420
+ , "800000ffffffffffff": 281474976710655
+ , "ffffff000000000001": -281474976710654
+ , "ffffff000000000000": -281474976710655
+ , "800000000000200000": 2097152
+ , "8000000000001544c5": 1393861
+ , "ffffffffffff1544c5": -15383354 }
+ Object.keys(numbers).forEach(function (n) {
+ var b = new Buffer(n, "hex")
+ t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n])
+ })
+ t.end()
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js
new file mode 100644
index 0000000000..d4b03a1fe9
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack-no-proprietary.js
@@ -0,0 +1,886 @@
+// This is exactly like test/pack.js, except that it's excluding
+// any proprietary headers.
+//
+// This loses some information about the filesystem, but creates
+// tarballs that are supported by more versions of tar, especially
+// old non-spec-compliant copies of gnutar.
+
+// the symlink file is excluded from git, because it makes
+// windows freak the hell out.
+var fs = require("fs")
+ , path = require("path")
+ , symlink = path.resolve(__dirname, "fixtures/symlink")
+try { fs.unlinkSync(symlink) } catch (e) {}
+fs.symlinkSync("./hardlink-1", symlink)
+process.on("exit", function () {
+ fs.unlinkSync(symlink)
+})
+
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , pkg = require("../package.json")
+ , Pack = tar.Pack
+ , fstream = require("fstream")
+ , Reader = fstream.Reader
+ , Writer = fstream.Writer
+ , input = path.resolve(__dirname, "fixtures/")
+ , target = path.resolve(__dirname, "tmp/pack.tar")
+ , uid = process.getuid ? process.getuid() : 0
+ , gid = process.getgid ? process.getgid() : 0
+
+ , entries =
+
+ // the global header and root fixtures/ dir are going to get
+ // a different date each time, so omit that bit.
+ // Also, dev/ino values differ across machines, so that's not
+ // included.
+ [ [ 'entry',
+ { path: 'fixtures/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ uid: uid,
+ gid: gid,
+ size: 200 } ]
+
+ , [ 'entry',
+ { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/a.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 257,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/b.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 512,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/c.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 513,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/cc.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 513,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/dir/',
+ mode: 488,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/dir/sub/',
+ mode: 488,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/foo.js',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 4,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/hardlink-1',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/hardlink-2',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '1',
+ linkpath: 'fixtures/hardlink-1',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/omega.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/omega.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/star.4.html',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 54081,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/packtest/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'fixtures/packtest/Ω.txt',
+ uid: uid,
+ gid: gid,
+ size: 2 } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 100,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/symlink',
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '2',
+ linkpath: 'hardlink-1',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: "fixtures/Ω.txt"
+ , uid: uid
+ , gid: gid
+ , size: 2 } ]
+
+ , [ 'entry',
+ { path: 'fixtures/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+ ]
+
+
+// first, make sure that the hardlinks are actually hardlinks, or this
+// won't work. Git has a way of replacing them with a copy.
+var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
+ , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
+ , fs = require("fs")
+
+try { fs.unlinkSync(hard2) } catch (e) {}
+fs.linkSync(hard1, hard2)
+
+tap.test("with global header", { timeout: 10000 }, function (t) {
+ runTest(t, true)
+})
+
+tap.test("without global header", { timeout: 10000 }, function (t) {
+ runTest(t, false)
+})
+
+function alphasort (a, b) {
+ return a === b ? 0
+ : a.toLowerCase() > b.toLowerCase() ? 1
+ : a.toLowerCase() < b.toLowerCase() ? -1
+ : a > b ? 1
+ : -1
+}
+
+
+function runTest (t, doGH) {
+ var reader = Reader({ path: input
+ , filter: function () {
+ return !this.path.match(/\.(tar|hex)$/)
+ }
+ , sort: alphasort
+ })
+
+ var props = doGH ? pkg : {}
+ props.noProprietary = true
+ var pack = Pack(props)
+ var writer = Writer(target)
+
+ // global header should be skipped regardless, since it has no content.
+ var entry = 0
+
+ t.ok(reader, "reader ok")
+ t.ok(pack, "pack ok")
+ t.ok(writer, "writer ok")
+
+ pack.pipe(writer)
+
+ var parse = tar.Parse()
+ t.ok(parse, "parser should be ok")
+
+ pack.on("data", function (c) {
+ // console.error("PACK DATA")
+ if (c.length !== 512) {
+ // this one is too noisy, only assert if it'll be relevant
+ t.equal(c.length, 512, "parser should emit data in 512byte blocks")
+ }
+ parse.write(c)
+ })
+
+ pack.on("end", function () {
+ // console.error("PACK END")
+ t.pass("parser ends")
+ parse.end()
+ })
+
+ pack.on("error", function (er) {
+ t.fail("pack error", er)
+ })
+
+ parse.on("error", function (er) {
+ t.fail("parse error", er)
+ })
+
+ writer.on("error", function (er) {
+ t.fail("writer error", er)
+ })
+
+ reader.on("error", function (er) {
+ t.fail("reader error", er)
+ })
+
+ parse.on("*", function (ev, e) {
+ var wanted = entries[entry++]
+ if (!wanted) {
+ t.fail("unexpected event: "+ev)
+ return
+ }
+ t.equal(ev, wanted[0], "event type should be "+wanted[0])
+
+ if (ev !== wanted[0] || e.path !== wanted[1].path) {
+ console.error("wanted", wanted)
+ console.error([ev, e.props])
+ e.on("end", function () {
+ console.error(e.fields)
+ throw "break"
+ })
+ }
+
+ t.has(e.props, wanted[1], "properties "+wanted[1].path)
+ if (wanted[2]) {
+ e.on("end", function () {
+ if (!e.fields) {
+ t.ok(e.fields, "should get fields")
+ } else {
+ t.has(e.fields, wanted[2], "should get expected fields")
+ }
+ })
+ }
+ })
+
+ reader.pipe(pack)
+
+ writer.on("close", function () {
+ t.equal(entry, entries.length, "should get all expected entries")
+ t.pass("it finished")
+ t.end()
+ })
+
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack.js
new file mode 100644
index 0000000000..0f16c07bb0
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/pack.js
@@ -0,0 +1,952 @@
+
+// the symlink file is excluded from git, because it makes
+// windows freak the hell out.
+var fs = require("fs")
+ , path = require("path")
+ , symlink = path.resolve(__dirname, "fixtures/symlink")
+try { fs.unlinkSync(symlink) } catch (e) {}
+fs.symlinkSync("./hardlink-1", symlink)
+process.on("exit", function () {
+ fs.unlinkSync(symlink)
+})
+
+
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , pkg = require("../package.json")
+ , Pack = tar.Pack
+ , fstream = require("fstream")
+ , Reader = fstream.Reader
+ , Writer = fstream.Writer
+ , input = path.resolve(__dirname, "fixtures/")
+ , target = path.resolve(__dirname, "tmp/pack.tar")
+ , uid = process.getuid ? process.getuid() : 0
+ , gid = process.getgid ? process.getgid() : 0
+
+ , entries =
+
+ // the global header and root fixtures/ dir are going to get
+ // a different date each time, so omit that bit.
+ // Also, dev/ino values differ across machines, so that's not
+ // included.
+ [ [ 'globalExtendedHeader',
+ { path: 'PaxHeader/',
+ mode: 438,
+ uid: 0,
+ gid: 0,
+ type: 'g',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { "NODETAR.author": pkg.author,
+ "NODETAR.name": pkg.name,
+ "NODETAR.description": pkg.description,
+ "NODETAR.version": pkg.version,
+ "NODETAR.repository.type": pkg.repository.type,
+ "NODETAR.repository.url": pkg.repository.url,
+ "NODETAR.main": pkg.main,
+ "NODETAR.scripts.test": pkg.scripts.test } ]
+
+ , [ 'entry',
+ { path: 'fixtures/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ 'NODETAR.depth': '1',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ 'NODETAR.depth': '1',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/a.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 257,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/b.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 512,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/c.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 513,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/cc.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 513,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/dir/',
+ mode: 488,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/dir/sub/',
+ mode: 488,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+
+ , [ 'entry',
+ { path: 'fixtures/foo.js',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 4,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/hardlink-1',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/hardlink-2',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '1',
+ linkpath: 'fixtures/hardlink-1',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/omega.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/omega.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/star.4.html',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 54081,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/packtest/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'fixtures/packtest/Ω.txt',
+ 'NODETAR.depth': '2',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ 'NODETAR.depth': '2',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 100,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/symlink',
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '2',
+ linkpath: 'hardlink-1',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: "fixtures/Ω.txt"
+ , "NODETAR.depth": "1"
+ , "NODETAR.type": "File"
+ , nlink: 1
+ , uid: uid
+ , gid: gid
+ , size: 2
+ , "NODETAR.blksize": "4096"
+ , "NODETAR.blocks": "8" } ]
+
+ , [ 'entry',
+ { path: 'fixtures/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ 'NODETAR.depth': '1',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+ ]
+
+
+// first, make sure that the hardlinks are actually hardlinks, or this
+// won't work. Git has a way of replacing them with a copy.
+var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
+ , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
+ , fs = require("fs")
+
+try { fs.unlinkSync(hard2) } catch (e) {}
+fs.linkSync(hard1, hard2)
+
+tap.test("with global header", { timeout: 10000 }, function (t) {
+ runTest(t, true)
+})
+
+tap.test("without global header", { timeout: 10000 }, function (t) {
+ runTest(t, false)
+})
+
+tap.test("with from base", { timeout: 10000 }, function (t) {
+ runTest(t, true, true)
+})
+
+function alphasort (a, b) {
+ return a === b ? 0
+ : a.toLowerCase() > b.toLowerCase() ? 1
+ : a.toLowerCase() < b.toLowerCase() ? -1
+ : a > b ? 1
+ : -1
+}
+
+
+function runTest (t, doGH, doFromBase) {
+ var reader = Reader({ path: input
+ , filter: function () {
+ return !this.path.match(/\.(tar|hex)$/)
+ }
+ , sort: alphasort
+ })
+
+ var props = doGH ? pkg : {}
+ if(doFromBase) props.fromBase = true;
+
+ var pack = Pack(props)
+ var writer = Writer(target)
+
+ // skip the global header if we're not doing that.
+ var entry = doGH ? 0 : 1
+
+ t.ok(reader, "reader ok")
+ t.ok(pack, "pack ok")
+ t.ok(writer, "writer ok")
+
+ pack.pipe(writer)
+
+ var parse = tar.Parse()
+ t.ok(parse, "parser should be ok")
+
+ pack.on("data", function (c) {
+ // console.error("PACK DATA")
+ if (c.length !== 512) {
+ // this one is too noisy, only assert if it'll be relevant
+ t.equal(c.length, 512, "parser should emit data in 512byte blocks")
+ }
+ parse.write(c)
+ })
+
+ pack.on("end", function () {
+ // console.error("PACK END")
+ t.pass("parser ends")
+ parse.end()
+ })
+
+ pack.on("error", function (er) {
+ t.fail("pack error", er)
+ })
+
+ parse.on("error", function (er) {
+ t.fail("parse error", er)
+ })
+
+ writer.on("error", function (er) {
+ t.fail("writer error", er)
+ })
+
+ reader.on("error", function (er) {
+ t.fail("reader error", er)
+ })
+
+ parse.on("*", function (ev, e) {
+ var wanted = entries[entry++]
+ if (!wanted) {
+ t.fail("unexpected event: "+ev)
+ return
+ }
+ t.equal(ev, wanted[0], "event type should be "+wanted[0])
+
+ if(doFromBase) {
+ if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100)
+ wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc'
+
+ if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/')
+ if(wanted[1].path == '') wanted[1].path = '/'
+ if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '')
+
+ wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '')
+ }
+
+ if (ev !== wanted[0] || e.path !== wanted[1].path) {
+ console.error("wanted", wanted)
+ console.error([ev, e.props])
+ e.on("end", function () {
+ console.error(e.fields)
+ throw "break"
+ })
+ }
+
+
+ t.has(e.props, wanted[1], "properties "+wanted[1].path)
+ if (wanted[2]) {
+ e.on("end", function () {
+ if (!e.fields) {
+ t.ok(e.fields, "should get fields")
+ } else {
+ t.has(e.fields, wanted[2], "should get expected fields")
+ }
+ })
+ }
+ })
+
+ reader.pipe(pack)
+
+ writer.on("close", function () {
+ t.equal(entry, entries.length, "should get all expected entries")
+ t.pass("it finished")
+ t.end()
+ })
+
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
new file mode 100644
index 0000000000..79408c274b
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse-discard.js
@@ -0,0 +1,29 @@
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , fs = require("fs")
+ , path = require("path")
+ , file = path.resolve(__dirname, "fixtures/c.tar")
+
+tap.test("parser test", function (t) {
+ var parser = tar.Parse()
+ var total = 0
+ var dataTotal = 0
+
+ parser.on("end", function () {
+
+ t.equals(total-513,dataTotal,'should have discarded only c.txt')
+
+ t.end()
+ })
+
+ fs.createReadStream(file)
+ .pipe(parser)
+ .on('entry',function(entry){
+ if(entry.path === 'c.txt') entry.abort()
+
+ total += entry.size;
+ entry.on('data',function(data){
+ dataTotal += data.length
+ })
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse.js
new file mode 100644
index 0000000000..f765a50129
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/parse.js
@@ -0,0 +1,359 @@
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , fs = require("fs")
+ , path = require("path")
+ , file = path.resolve(__dirname, "fixtures/c.tar")
+ , index = 0
+
+ , expect =
+[ [ 'entry',
+ { path: 'c.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 513,
+ mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'),
+ cksum: 5422,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'entry',
+ { path: 'cc.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 513,
+ mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'),
+ cksum: 5525,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'entry',
+ { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 100,
+ mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
+ cksum: 18124,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'entry',
+ { path: 'Ω.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 2,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 5695,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/Ω.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 120,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 6702,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'Ω.txt',
+ ctime: 1319737909,
+ atime: 1319739061,
+ dev: 234881026,
+ ino: 51693379,
+ nlink: 1 } ],
+ [ 'entry',
+ { path: 'Ω.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 2,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 5695,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'),
+ dev: 234881026,
+ ino: 51693379,
+ nlink: 1 },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 353,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 14488,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ ctime: 1319686868,
+ atime: 1319741254,
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 1 } ],
+ [ 'entry',
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 200,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 14570,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'),
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 1 },
+ undefined ],
+ [ 'longPath',
+ { path: '././@LongLink',
+ mode: 0,
+ uid: 0,
+ gid: 0,
+ size: 201,
+ mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
+ cksum: 4976,
+ type: 'L',
+ linkpath: '',
+ ustar: false },
+ '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
+ [ 'entry',
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 1000,
+ gid: 1000,
+ size: 201,
+ mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'),
+ cksum: 14086,
+ type: '0',
+ linkpath: '',
+ ustar: false },
+ undefined ],
+ [ 'longLinkpath',
+ { path: '././@LongLink',
+ mode: 0,
+ uid: 0,
+ gid: 0,
+ size: 201,
+ mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
+ cksum: 4975,
+ type: 'K',
+ linkpath: '',
+ ustar: false },
+ '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
+ [ 'longPath',
+ { path: '././@LongLink',
+ mode: 0,
+ uid: 0,
+ gid: 0,
+ size: 201,
+ mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
+ cksum: 4976,
+ type: 'L',
+ linkpath: '',
+ ustar: false },
+ '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ],
+ [ 'entry',
+ { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
+ mode: 511,
+ uid: 1000,
+ gid: 1000,
+ size: 0,
+ mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'),
+ cksum: 21603,
+ type: '2',
+ linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ ustar: false },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/200-hard',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 143,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 6533,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { ctime: 1320617144,
+ atime: 1320617232,
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 } ],
+ [ 'entry',
+ { path: '200-hard',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 200,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 5526,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
+ atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'),
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 353,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 14488,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ ctime: 1320617144,
+ atime: 1320617406,
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 } ],
+ [ 'entry',
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 15173,
+ type: '1',
+ linkpath: '200-hard',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
+ atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'),
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 },
+ undefined ] ]
+
+
+tap.test("parser test", function (t) {
+ var parser = tar.Parse()
+
+ parser.on("end", function () {
+ t.equal(index, expect.length, "saw all expected events")
+ t.end()
+ })
+
+ fs.createReadStream(file)
+ .pipe(parser)
+ .on("*", function (ev, entry) {
+ var wanted = expect[index]
+ if (!wanted) {
+ return t.fail("Unexpected event: " + ev)
+ }
+ var result = [ev, entry.props]
+ entry.on("end", function () {
+ result.push(entry.fields || entry.body)
+
+ t.equal(ev, wanted[0], index + " event type")
+ t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties")
+ if (wanted[2]) {
+ t.equivalent(result[2], wanted[2], "metadata values")
+ }
+ index ++
+ })
+ })
+})
diff --git a/deps/npm/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js b/deps/npm/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js
new file mode 100644
index 0000000000..a00ff7faa0
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/tar/test/zz-cleanup.js
@@ -0,0 +1,20 @@
+// clean up the fixtures
+
+var tap = require("tap")
+, rimraf = require("rimraf")
+, test = tap.test
+, path = require("path")
+
+test("clean fixtures", function (t) {
+ rimraf(path.resolve(__dirname, "fixtures"), function (er) {
+ t.ifError(er, "rimraf ./fixtures/")
+ t.end()
+ })
+})
+
+test("clean tmp", function (t) {
+ rimraf(path.resolve(__dirname, "tmp"), function (er) {
+ t.ifError(er, "rimraf ./tmp/")
+ t.end()
+ })
+})