path: root/deps/npm/node_modules/tar
Diffstat (limited to 'deps/npm/node_modules/tar')
-rw-r--r-- deps/npm/node_modules/tar/.npmignore | 5
-rw-r--r-- deps/npm/node_modules/tar/.travis.yml | 4
-rw-r--r-- deps/npm/node_modules/tar/LICENSE | 3
-rw-r--r-- deps/npm/node_modules/tar/README.md | 893
-rw-r--r-- deps/npm/node_modules/tar/examples/extracter.js | 19
-rw-r--r-- deps/npm/node_modules/tar/examples/packer.js | 24
-rw-r--r-- deps/npm/node_modules/tar/examples/reader.js | 36
-rw-r--r-- deps/npm/node_modules/tar/index.js | 18
-rw-r--r-- deps/npm/node_modules/tar/lib/buffer-entry.js | 30
-rw-r--r-- deps/npm/node_modules/tar/lib/create.js | 110
-rw-r--r-- deps/npm/node_modules/tar/lib/entry-writer.js | 169
-rw-r--r-- deps/npm/node_modules/tar/lib/entry.js | 220
-rw-r--r-- deps/npm/node_modules/tar/lib/extended-header-writer.js | 191
-rw-r--r-- deps/npm/node_modules/tar/lib/extended-header.js | 140
-rw-r--r-- deps/npm/node_modules/tar/lib/extract.js | 189
-rw-r--r-- deps/npm/node_modules/tar/lib/global-header-writer.js | 14
-rw-r--r-- deps/npm/node_modules/tar/lib/header.js | 583
-rw-r--r-- deps/npm/node_modules/tar/lib/high-level-opt.js | 29
-rw-r--r-- deps/npm/node_modules/tar/lib/large-numbers.js | 92
-rw-r--r-- deps/npm/node_modules/tar/lib/list.js | 132
-rw-r--r-- deps/npm/node_modules/tar/lib/mkdir.js | 207
-rw-r--r-- deps/npm/node_modules/tar/lib/pack.js | 537
-rw-r--r-- deps/npm/node_modules/tar/lib/parse.js | 632
-rw-r--r-- deps/npm/node_modules/tar/lib/pax.js | 145
-rw-r--r-- deps/npm/node_modules/tar/lib/read-entry.js | 94
-rw-r--r-- deps/npm/node_modules/tar/lib/replace.js | 211
-rw-r--r-- deps/npm/node_modules/tar/lib/types.js | 44
-rw-r--r-- deps/npm/node_modules/tar/lib/unpack.js | 481
-rw-r--r-- deps/npm/node_modules/tar/lib/update.js | 36
-rw-r--r-- deps/npm/node_modules/tar/lib/warn-mixin.js | 14
-rw-r--r-- deps/npm/node_modules/tar/lib/winchars.js | 23
-rw-r--r-- deps/npm/node_modules/tar/lib/write-entry.js | 395
-rw-r--r-- deps/npm/node_modules/tar/node_modules/block-stream/LICENCE | 25
-rw-r--r-- deps/npm/node_modules/tar/node_modules/block-stream/README.md | 14
-rw-r--r-- deps/npm/node_modules/tar/node_modules/block-stream/block-stream.js | 209
-rw-r--r-- deps/npm/node_modules/tar/node_modules/block-stream/package.json | 63
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/.npmignore | 4
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/.travis.yml | 7
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/README.md | 46
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/b.js | 12
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js | 11
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js | 12
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js | 11
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js | 12
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js | 41
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js | 15
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/bench/test.js | 160
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/d.js | 7
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/e.js | 17
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/eos.js | 12
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/foo | 0
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/index.js | 295
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx | bin 0 -> 54935 bytes
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/package.json | 64
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/test/basic.js | 438
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js | 38
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minizlib/LICENSE | 26
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minizlib/README.md | 44
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minizlib/constants.js | 46
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minizlib/index.js | 333
-rw-r--r-- deps/npm/node_modules/tar/node_modules/minizlib/package.json | 71
-rw-r--r-- deps/npm/node_modules/tar/node_modules/yallist/LICENSE (renamed from deps/npm/node_modules/tar/node_modules/block-stream/LICENSE) | 0
-rw-r--r-- deps/npm/node_modules/tar/node_modules/yallist/README.md | 204
-rw-r--r-- deps/npm/node_modules/tar/node_modules/yallist/iterator.js | 8
-rw-r--r-- deps/npm/node_modules/tar/node_modules/yallist/package.json | 63
-rw-r--r-- deps/npm/node_modules/tar/node_modules/yallist/yallist.js | 376
-rw-r--r-- deps/npm/node_modules/tar/package.json | 75
-rw-r--r-- deps/npm/node_modules/tar/tar.js | 173
-rw-r--r-- deps/npm/node_modules/tar/test/00-setup-fixtures.js | 53
-rw-r--r-- deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz | bin 4096 -> 0 bytes
-rw-r--r-- deps/npm/node_modules/tar/test/dir-normalization.js | 177
-rw-r--r-- deps/npm/node_modules/tar/test/dir-normalization.tar | bin 4608 -> 0 bytes
-rw-r--r-- deps/npm/node_modules/tar/test/error-on-broken.js | 33
-rw-r--r-- deps/npm/node_modules/tar/test/extract-move.js | 132
-rw-r--r-- deps/npm/node_modules/tar/test/extract.js | 367
-rw-r--r-- deps/npm/node_modules/tar/test/fixtures.tgz | bin 19352 -> 0 bytes
-rw-r--r-- deps/npm/node_modules/tar/test/header.js | 183
-rw-r--r-- deps/npm/node_modules/tar/test/pack-no-proprietary.js | 886
-rw-r--r-- deps/npm/node_modules/tar/test/pack.js | 952
-rw-r--r-- deps/npm/node_modules/tar/test/parse-discard.js | 29
-rw-r--r-- deps/npm/node_modules/tar/test/parse.js | 359
-rw-r--r-- deps/npm/node_modules/tar/test/zz-cleanup.js | 20
82 files changed, 6396 insertions, 5447 deletions
diff --git a/deps/npm/node_modules/tar/.npmignore b/deps/npm/node_modules/tar/.npmignore
deleted file mode 100644
index c167ad5b1c..0000000000
--- a/deps/npm/node_modules/tar/.npmignore
+++ /dev/null
@@ -1,5 +0,0 @@
-.*.swp
-node_modules
-examples/extract/
-test/tmp/
-test/fixtures/
diff --git a/deps/npm/node_modules/tar/.travis.yml b/deps/npm/node_modules/tar/.travis.yml
deleted file mode 100644
index fca8ef0194..0000000000
--- a/deps/npm/node_modules/tar/.travis.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-language: node_js
-node_js:
- - 0.10
- - 0.11
diff --git a/deps/npm/node_modules/tar/LICENSE b/deps/npm/node_modules/tar/LICENSE
index 019b7e40ea..19129e315f 100644
--- a/deps/npm/node_modules/tar/LICENSE
+++ b/deps/npm/node_modules/tar/LICENSE
@@ -1,8 +1,11 @@
The ISC License
+
Copyright (c) Isaac Z. Schlueter and Contributors
+
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
+
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
diff --git a/deps/npm/node_modules/tar/README.md b/deps/npm/node_modules/tar/README.md
index cfda2ac180..a356a78da2 100644
--- a/deps/npm/node_modules/tar/README.md
+++ b/deps/npm/node_modules/tar/README.md
@@ -1,50 +1,883 @@
# node-tar
-Tar for Node.js.
+[![Build Status](https://travis-ci.org/npm/node-tar.svg?branch=master)](https://travis-ci.org/npm/node-tar)
-[![NPM](https://nodei.co/npm/tar.png)](https://nodei.co/npm/tar/)
+[Fast](./benchmarks) and full-featured Tar for Node.js
-## API
+The API is designed to mimic the behavior of `tar(1)` on unix systems.
+If you are familiar with how tar works, most of this will hopefully be
+straightforward for you. If not, then hopefully this module can teach
+you useful unix skills that may come in handy someday :)
-See `examples/` for usage examples.
+## Background
-### var tar = require('tar')
+A "tar file" or "tarball" is an archive of file system entries
+(directories, files, links, etc.). The name comes from "tape archive".
+If you run `man tar` on almost any Unix command line, you'll learn
+quite a bit about what it can do, and its history.
-Returns an object with `.Pack`, `.Extract` and `.Parse` methods.
+Tar has 5 main top-level commands:
-### tar.Pack([properties])
+* `c` Create an archive
+* `r` Replace entries within an archive
+* `u` Update entries within an archive (ie, replace if they're newer)
+* `t` List out the contents of an archive
+* `x` Extract an archive to disk
-Returns a through stream. Use
-[fstream](https://npmjs.org/package/fstream) to write files into the
-pack stream and you will receive tar archive data from the pack
-stream.
+The other flags and options modify how this top level function works.
-This only works with directories, it does not work with individual files.
+## High-Level API
-The optional `properties` object are used to set properties in the tar
-'Global Extended Header'. If the `fromBase` property is set to true,
-the tar will contain files relative to the path passed, and not with
-the path included.
+These 5 functions are the high-level API. All of them have a
+single-character name (for unix nerds familiar with `tar(1)`) as well
+as a long name (for everyone else).
-### tar.Extract([options])
+All the high-level functions take the following arguments, all three
+of which are optional and may be omitted.
-Returns a through stream. Write tar data to the stream and the files
-in the tarball will be extracted onto the filesystem.
+1. `options` - An optional object specifying various options
+2. `paths` - An array of paths to add or extract
+3. `callback` - Called when the command is completed, if async. (If
+ sync or no file specified, providing a callback throws a
+ `TypeError`.)
-`options` can be:
+If the command is sync (ie, if `options.sync=true`), then the
+callback is not allowed, since the action will be completed immediately.
+
+If a `file` argument is specified, and the command is async, then a
+`Promise` is returned. In this case, a callback may also be provided;
+it is called when the command is completed.
+
+If a `file` option is not specified, then a stream is returned. For
+`create`, this is a readable stream of the generated archive. For
+`list` and `extract` this is a writable stream that an archive should
+be written into. If a file is not specified, then a callback is not
+allowed, because you're already getting a stream to work with.
+
+`replace` and `update` only work on existing archives, and so require
+a `file` argument.
+
+Sync commands without a file argument return a stream that acts on its
+input immediately in the same tick. For readable streams, this means
+that all of the data is immediately available by calling
+`stream.read()`. For writable streams, the input is acted upon as
+soon as it is written, which can happen at any time.
+
+### Warnings
+
+Some things cause tar to emit a warning, but should usually not cause
+the entire operation to fail. There are three ways to handle
+warnings:
+
+1. **Ignore them** (default) Invalid entries won't be put in the
+ archive, and invalid entries won't be unpacked. This is usually
+ fine, but can hide failures that you might care about.
+2. **Notice them** Add an `onwarn` function to the options, or listen
+ to the `'warn'` event on any tar stream. The function will get
+ called as `onwarn(message, data)`. Handle as appropriate.
+3. **Explode them.** Set `strict: true` in the options object, and
+ `warn` messages will be emitted as `'error'` events instead. If
+ there's no `error` handler, this causes the program to crash. If
+ used with a promise-returning/callback-taking method, then it'll
+ send the error to the promise/callback.
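+
+As a minimal sketch of option 2 (the filename here is illustrative),
+warnings can be observed without failing the whole operation:
+
+```js
+tar.x({
+  file: 'my-tarball.tgz',
+  onwarn: (message, data) => console.error('tar warning:', message)
+}).then(_ => { .. extracted, warnings logged along the way .. })
+```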
+
+### Examples
+
+The API mimics the `tar(1)` command line functionality, with aliases
+for more human-readable option and function names. The goal is that
+if you know how to use `tar(1)` in Unix, then you know how to use
+`require('tar')` in JavaScript.
+
+To replicate `tar czf my-tarball.tgz files and folders`, you'd do:
+
+```js
+tar.c(
+ {
+ gzip: <true|gzip options>,
+ file: 'my-tarball.tgz'
+ },
+ ['some', 'files', 'and', 'folders']
+).then(_ => { .. tarball has been created .. })
+```
+
+To replicate `tar cz files and folders > my-tarball.tgz`, you'd do:
+
+```js
+tar.c( // or tar.create
+ {
+ gzip: <true|gzip options>
+ },
+ ['some', 'files', 'and', 'folders']
+).pipe(fs.createWriteStream('my-tarball.tgz'))
+```
+
+To replicate `tar xf my-tarball.tgz` you'd do:
+
+```js
+tar.x( // or tar.extract
+ {
+ file: 'my-tarball.tgz'
+ }
+).then(_ => { .. tarball has been dumped in cwd .. })
+```
+
+To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`:
```js
-{
- path: '/path/to/extract/tar/into',
- strip: 0, // how many path segments to strip from the root when extracting
-}
+fs.createReadStream('my-tarball.tgz').pipe(
+ tar.x({
+ strip: 1,
+ C: 'some-dir' // alias for cwd:'some-dir', also ok
+ })
+)
```
-`options` also get passed to the `fstream.Writer` instance that `tar`
-uses internally.
+To replicate `tar tf my-tarball.tgz`, do this:
+
+```js
+tar.t({
+ file: 'my-tarball.tgz',
+ onentry: entry => { .. do whatever with it .. }
+})
+```
+
+To replicate `cat my-tarball.tgz | tar t` do:
+
+```js
+fs.createReadStream('my-tarball.tgz')
+ .pipe(tar.t())
+ .on('entry', entry => { .. do whatever with it .. })
+```
+
+To do anything synchronous, add `sync: true` to the options. Note
+that sync functions don't take a callback and don't return a promise.
+When the function returns, it's already done. Sync methods without a
+file argument return a sync stream, which flushes immediately. But,
+of course, it still won't be done until you `.end()` it.
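+
+For instance, a sketch of a sync create without a `file` option (the
+filename is illustrative):
+
+```js
+const stream = tar.c({ sync: true }, ['some-file.txt'])
+const data = stream.read() // the whole archive is already buffered
+```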
+
+To filter entries, add `filter: <function>` to the options.
+Tar-creating methods call the filter with `filter(path, stat)`.
+Tar-reading methods (including extraction) call the filter with
+`filter(path, entry)`. The filter is called in the `this`-context of
+the `Pack` or `Unpack` stream object.
+
+The arguments list to `tar t` and `tar x` specify a list of filenames
+to extract or list, so they're equivalent to a filter that tests if
+the file is in the list.
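+
+A small sketch of a reading-side filter (the filename is
+illustrative):
+
+```js
+tar.x({
+  file: 'my-tarball.tgz',
+  filter: (path, entry) => entry.type === 'File' && path.endsWith('.js')
+})
+```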
+
+For those who _aren't_ fans of tar's single-character command names:
+
+```
+tar.c === tar.create
+tar.r === tar.replace (appends to archive, file is required)
+tar.u === tar.update (appends if newer, file is required)
+tar.x === tar.extract
+tar.t === tar.list
+```
+
+Keep reading for all the command descriptions and options, as well as
+the low-level API that they are built on.
+
+### tar.c(options, fileList, callback) [alias: tar.create]
+
+Create a tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
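+
+For example (archive and file names are illustrative), this copies the
+entries of an existing archive and adds a file whose name starts with
+`@`:
+
+```js
+tar.c({ file: 'combined.tar' }, ['@old-archive.tar', './@weird-file'])
+```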
+
+The following options are supported:
+
+- `file` Write the tarball archive to the specified filename. If this
+ is specified, then the callback will be fired when the file has been
+ written, and a promise will be returned that resolves when the file
+ is written. If a filename is not specified, then a Readable Stream
+ will be returned which will emit the file data. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.c`. If this is set,
+ and a file is not provided, then the resulting stream will already
+ have the data ready to `read` or `emit('data')` as soon as you
+ request it.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `mode` The mode to set on the created file archive
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
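+
+A sketch of re-using those caches between runs (paths are
+illustrative); the second call can skip `lstat` and `readdir` work
+already done by the first:
+
+```js
+const statCache = new Map()
+const readdirCache = new Map()
+tar.c({ sync: true, file: 'a.tar', statCache, readdirCache }, ['src'])
+tar.c({ sync: true, file: 'b.tar', statCache, readdirCache }, ['src'])
+```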
+
+### tar.x(options, fileList, callback) [alias: tar.extract]
+
+Extract a tarball archive.
+
+The `fileList` is an array of paths to extract from the tarball. If
+no paths are provided, then all the entries are extracted.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+Most extraction errors will cause a `warn` event to be emitted. If
+the `cwd` is missing, or not a directory, then the extraction will
+fail completely.
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory. [Alias: `C`]
+- `file` The archive file to extract. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Create files and directories synchronously.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive. [Alias: `keep-newer`,
+ `keep-newer-files`]
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies. [Alias: `k`, `keep-existing`]
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+ [Alias: `P`]
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file. [Alias:
+ `U`]
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks. [Alias: `strip-components`, `stripComponents`]
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+ [Alias: `p`]
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
+
+The following options are mostly internal, but can be modified in some
+advanced use cases, such as re-using caches between runs.
+
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories.
+- `fmode` Default mode for files.
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
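+
+A sketch combining these options (names illustrative); note that
+`filter` sees the path before `strip` edits it:
+
+```js
+tar.x({
+  file: 'pkg.tgz',
+  cwd: 'output',  // must already exist and be a directory
+  strip: 1,       // package/index.js is extracted as index.js
+  filter: path => path.startsWith('package/')
+})
+```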
+
+### tar.t(options, fileList, callback) [alias: tar.list]
+
+List the contents of a tarball archive.
+
+The `fileList` is an array of paths to list from the tarball. If
+no paths are provided, then all the entries are listed.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'`
+events. (If you want to get actual readable entries, use the
+`tar.Parse` class instead.)
+
+The following options are supported:
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. [Alias: `C`]
+- `file` The archive file to list. If not specified, then a
+ Writable stream is returned where the archive data should be
+ written. [Alias: `f`]
+- `sync` Read the specified file synchronously. (This has no effect
+ when a file option isn't specified, because entries are emitted as
+ fast as they are parsed from the stream anyway.)
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter. This is important for when both `file` and
+ `sync` are set, because it will be called synchronously.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noResume` By default, `entry` streams are resumed immediately after
+ the call to `onentry`. Set `noResume: true` to suppress this
+ behavior. Note that by opting into this, the stream will never
+ complete until the entry data is consumed.
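+
+A sketch of consuming entry data under `noResume` (filename
+illustrative); each entry must be drained or the stream never
+completes:
+
+```js
+tar.t({
+  file: 'my-tarball.tgz',
+  noResume: true,
+  onentry: entry => {
+    let bytes = 0
+    entry.on('data', chunk => bytes += chunk.length)
+    entry.on('end', _ => console.log(entry.path, bytes))
+  }
+})
+```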
+
+### tar.u(options, fileList, callback) [alias: tar.update]
+
+Add files to an archive if they are newer than the entry already in
+the tarball archive.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.u`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
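+
+A minimal sketch (filenames illustrative); only entries newer than
+what is already in the archive get appended:
+
+```js
+tar.u({ file: 'my-tarball.tgz' }, ['some', 'files'])
+  .then(_ => { .. archive updated .. })
+```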
+
+### tar.r(options, fileList, callback) [alias: tar.replace]
+
+Add files to an existing archive. Because later entries override
+earlier entries, this effectively replaces any existing entries.
+
+The `fileList` is an array of paths to add to the tarball. Adding a
+directory also adds its children recursively.
+
+An entry in `fileList` that starts with an `@` symbol is a tar archive
+whose entries will be added. To add a file that starts with `@`,
+prepend it with `./`.
+
+The following options are supported:
+
+- `file` Required. Write the tarball archive to the specified
+ filename. [Alias: `f`]
+- `sync` Act synchronously. If this is set, then any provided file
+ will be fully written after the call to `tar.r`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for adding entries to the
+ archive. Defaults to `process.cwd()`. [Alias: `C`]
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()` [Alias: `z`]
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths. [Alias: `P`]
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories. [Alias: `n`]
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such. [Alias: `L`, `h`]
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
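+
+A minimal sketch (filenames illustrative); the appended entry shadows
+any earlier entry with the same path:
+
+```js
+tar.r({ file: 'my-tarball.tgz', sync: true }, ['changed-file.txt'])
+```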
+
+## Low-Level API
+
+### class tar.Pack
+
+A readable tar stream.
+
+Has all the standard readable stream interface stuff. `'data'` and
+`'end'` events, `read()` method, `pause()` and `resume()`, etc.
+
+#### constructor(options)
+
+The following options are supported:
+
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `prefix` A path portion to prefix onto the entries in the archive.
+- `gzip` Set to any truthy value to create a gzipped archive, or an
+ object with settings for `zlib.Gzip()`
+- `filter` A function that gets called with `(path, stat)` for each
+ entry being added. Return `true` to add the entry to the archive,
+ or `false` to omit it.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `readdirCache` A Map object that caches calls to `readdir`.
+- `jobs` A number specifying how many concurrent jobs to run.
+ Defaults to 4.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 16 MB.
+- `noDirRecurse` Do not recursively archive the contents of
+ directories.
+- `follow` Set to true to pack the targets of symbolic links. Without
+ this option, symbolic links are archived as such.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+#### add(path)
+
+Adds an entry to the archive. Returns the Pack stream.
+
+#### write(path)
+
+Adds an entry to the archive. Returns true if flushed.
+
+#### end()
+
+Finishes the archive.
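+
+A sketch of driving a Pack stream by hand (paths illustrative):
+
+```js
+const fs = require('fs')
+const tar = require('tar')
+
+const pack = new tar.Pack({ cwd: 'src' })
+pack.pipe(fs.createWriteStream('out.tar'))
+pack.add('index.js')
+pack.add('lib') // directories recurse unless noDirRecurse is set
+pack.end()
+```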
+
+### class tar.Pack.Sync
+
+Synchronous version of `tar.Pack`.
+
+### class tar.Unpack
+
+A writable stream that unpacks a tar archive onto the file system.
+
+All the normal writable stream stuff is supported. `write()` and
+`end()` methods, `'drain'` events, etc.
+
+Note that all directories that are created will be forced to be
+writable, readable, and listable by their owner, to avoid cases where
+a directory prevents extraction of child entries by virtue of its
+mode.
+
+`'close'` is emitted when it's done writing stuff to the file system.
+
+Most unpack errors will cause a `warn` event to be emitted. If the
+`cwd` is missing, or not a directory, then an error will be emitted.
+
+#### constructor(options)
+
+- `cwd` Extract files relative to the specified directory. Defaults
+ to `process.cwd()`. If provided, this must exist and must be a
+ directory.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being unpacked. Return `true` to unpack the entry from the
+ archive, or `false` to skip it.
+- `newer` Set to true to keep the existing file on disk if it's newer
+ than the file in the archive.
+- `keep` Do not overwrite existing files. In particular, if a file
+ appears more than once in an archive, later copies will not
+ overwrite earlier copies.
+- `preservePaths` Allow absolute paths, paths containing `..`, and
+ extracting through symbolic links. By default, `/` is stripped from
+ absolute paths, `..` paths are not extracted, and any file whose
+ location would be modified by a symbolic link is not extracted.
+- `unlink` Unlink files before creating them. Without this option,
+ tar overwrites existing files, which preserves existing hardlinks.
+ With this option, existing hardlinks will be broken, as will any
+ symlink that would affect the location of an extracted file.
+- `strip` Remove the specified number of leading path elements.
+ Pathnames with fewer elements will be silently skipped. Note that
+ the pathname is edited after applying the filter, but before
+ security checks.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+- `umask` Filter the modes of entries like `process.umask()`.
+- `dmode` Default mode for directories.
+- `fmode` Default mode for files.
+- `dirCache` A Map object of which directories exist.
+- `maxMetaEntrySize` The maximum size of meta entries that is
+ supported. Defaults to 1 MB.
+- `preserveOwner` If true, tar will set the `uid` and `gid` of
+ extracted entries to the `uid` and `gid` fields in the archive.
+ This defaults to true when run as root, and false otherwise. If
+ false, then files and directories will be set with the owner and
+ group of the user running the process. This is similar to `-p` in
+ `tar(1)`, but ACLs and other system-specific data is never unpacked
+ in this implementation, and modes are set by default already.
+- `win32` True if on a windows platform. Causes behavior where
+ filenames containing `<|>?` chars are converted to
+ windows-compatible values while being unpacked.
+- `uid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified user id, regardless of the `uid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `gid` option.
+- `gid` Set to a number to force ownership of all extracted files and
+ folders, and all implicitly created directories, to be owned by the
+ specified group id, regardless of the `gid` field in the archive.
+ Cannot be used along with `preserveOwner`. Requires also setting a
+ `uid` option.
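+
+A sketch of unpacking a raw (non-gzipped) archive stream (paths
+illustrative; the `cwd` must already exist):
+
+```js
+fs.createReadStream('archive.tar')
+  .pipe(new tar.Unpack({ cwd: 'dest' }))
+  .on('close', _ => { .. everything written to disk .. })
+```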
+
+### class tar.Unpack.Sync
+
+Synchronous version of `tar.Unpack`.
+
+### class tar.Parse
+
+A writable stream that parses a tar archive stream. All the standard
+writable stream stuff is supported.
+
+If the archive is gzipped, then tar will detect this and unzip it.
+
+Emits `'entry'` events with `tar.ReadEntry` objects, which are
+themselves readable streams that you can pipe wherever.
+
+Each `entry` will not emit until the one before it is flushed through,
+so make sure to either consume the data (with `on('data', ...)` or
+`.pipe(...)`) or throw it away with `.resume()` to keep the stream
+flowing.
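+
+For example (filename illustrative), listing paths while discarding
+the entry bodies so the parser keeps flowing:
+
+```js
+fs.createReadStream('my-tarball.tgz')
+  .pipe(new tar.Parse())
+  .on('entry', entry => {
+    console.log(entry.path)
+    entry.resume() // throw the body away
+  })
+```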
+
+#### constructor(options)
+
+Returns an event emitter that emits `entry` events with
+`tar.ReadEntry` objects.
+
+The following options are supported:
+
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `filter` A function that gets called with `(path, entry)` for each
+ entry being listed. Return `true` to emit the entry from the
+ archive, or `false` to skip it.
+- `onentry` A function that gets called with `(entry)` for each entry
+ that passes the filter.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+#### abort(message, error)
+
+Stop all parsing activities. This is called when there are zlib
+errors. It also emits a warning with the message and error provided.
+
+### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being read out of a tar archive.
+
+It has the following fields:
+
+- `extended` The extended metadata object provided to the constructor.
+- `globalExtended` The global extended metadata object provided to the
+ constructor.
+- `remain` The number of bytes remaining to be written into the
+ stream.
+- `blockRemain` The number of 512-byte blocks remaining to be written
+ into the stream.
+- `ignore` Whether this entry should be ignored.
+- `meta` True if this represents metadata about the next entry, false
+ if it represents a filesystem object.
+- All the fields from the header, extended header, and global extended
+ header are added to the ReadEntry object. So it has `path`, `type`,
+ `size`, `mode`, and so on.
+
+#### constructor(header, extended, globalExtended)
+
+Create a new ReadEntry object with the specified header, extended
+header, and global extended header values.
+
+### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass)
+
+A representation of an entry that is being written from the file
+system into a tar archive.
+
+Emits data for the Header, and for the Pax Extended Header if one is
+required, as well as any body data.
+
+Creating a WriteEntry for a directory does not also create
+WriteEntry objects for all of the directory contents.
+
+It has the following fields:
+
+- `path` The path field that will be written to the archive. By
+ default, this is also the path from the cwd to the file system
+ object.
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `myuid` If supported, the uid of the user running the current
+ process.
+- `myuser` The `env.USER` string if set, or `''`. Set as the entry
+ `uname` field if the file's `uid` matches `this.myuid`.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/` and filenames containing the windows-compatible
+ forms of `<|>?:` characters are converted to actual `<|>?:` characters
+ in the archive.
+- `noPax` Suppress pax extended headers. Note that this means that
+ long paths and linkpaths will be truncated, and large or negative
+ numeric values may be interpreted incorrectly.
+
+#### constructor(path, options)
+
+`path` is the path of the entry as it is written in the archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `maxReadSize` The maximum buffer size for `fs.read()` operations.
+ Defaults to 1 MB.
+- `linkCache` A Map object containing the device and inode value for
+ any file whose nlink is > 1, to identify hard links.
+- `statCache` A Map object that caches calls to `lstat`.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `cwd` The current working directory for creating the archive.
+ Defaults to `process.cwd()`.
+- `absolute` The absolute path to the entry on the filesystem. By
+ default, this is `path.resolve(this.cwd, this.path)`, but it can be
+ overridden explicitly.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `win32` True if on a windows platform. Causes behavior where paths
+ replace `\` with `/`.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
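+
+A sketch of using a WriteEntry directly (path and cwd illustrative):
+
+```js
+const we = new tar.WriteEntry('hello.txt', { cwd: 'src' })
+we.on('data', chunk => { .. raw 512-byte tar blocks .. })
+```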
+
+#### warn(message, data)
+
+If strict, emit an error with the provided message.
+
+Otherwise, emit a `'warn'` event with the provided message and data.
+
+### class tar.WriteEntry.Sync
+
+Synchronous version of `tar.WriteEntry`.
+
+### class tar.WriteEntry.Tar
+
+A version of tar.WriteEntry that gets its data from a tar.ReadEntry
+instead of from the filesystem.
+
+#### constructor(readEntry, options)
+
+`readEntry` is the entry being read out of another archive.
+
+The following options are supported:
+
+- `portable` Omit metadata that is system-specific: `ctime`, `atime`,
+ `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note
+ that `mtime` is still included, because this is necessary for other
+ time-based operations.
+- `preservePaths` Allow absolute paths. By default, `/` is stripped
+ from absolute paths.
+- `strict` Treat warnings as crash-worthy errors. Default false.
+- `onwarn` A function that will get called with `(message, data)` for
+ any warnings encountered.
+
+### class tar.Header
+
+A class for reading and writing header blocks.
+
+It has the following fields:
+
+- `nullBlock` True if decoding a block which is entirely composed of
+ `0x00` null bytes. (Useful because tar files are terminated by
+ at least 2 null blocks.)
+- `cksumValid` True if the checksum in the header is valid, false
+ otherwise.
+- `needPax` True if the values, as encoded, will require a Pax
+ extended header.
+- `path` The path of the entry.
+- `mode` The 4 lowest-order octal digits of the file mode. That is,
+ read/write/execute permissions for world, group, and owner, and the
+ setuid, setgid, and sticky bits.
+- `uid` Numeric user id of the file owner
+- `gid` Numeric group id of the file owner
+- `size` Size of the file in bytes
+- `mtime` Modified time of the file
+- `cksum` The checksum of the header. This is generated by adding all
+ the bytes of the header block, treating the checksum field itself as
+ all ascii space characters (that is, `0x20`).
+- `type` The human-readable name of the type of entry this represents,
+ or the alphanumeric key if unknown.
+- `typeKey` The alphanumeric key for the type of entry this header
+ represents.
+- `linkpath` The target of Link and SymbolicLink entries.
+- `uname` Human-readable user name of the file owner
+- `gname` Human-readable group name of the file owner
+- `devmaj` The major portion of the device number. Always `0` for
+ files, directories, and links.
+- `devmin` The minor portion of the device number. Always `0` for
+ files, directories, and links.
+- `atime` File access time.
+- `ctime` File change time.
+
+#### constructor(data, [offset=0])
+
+`data` is optional. It is either a Buffer that should be interpreted
+as a tar Header starting at the specified offset and continuing for
+512 bytes, or a data object of keys and values to set on the header
+object, and eventually encode as a tar Header.
+
+#### decode(block, offset)
+
+Decode the provided buffer starting at the specified offset.
+
+The buffer must contain at least 512 bytes beyond the specified
+offset.
+
+#### set(data)
+
+Set the fields in the data object.
+
+#### encode(buffer, offset)
+
+Encode the header fields into the buffer at the specified offset.
+
+Returns `this.needPax` to indicate whether a Pax Extended Header is
+required to properly encode the specified data.
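+
+A sketch of encoding into a fresh block (all field values
+illustrative):
+
+```js
+const block = Buffer.alloc(512)
+const h = new tar.Header({
+  path: 'hello.txt',
+  mode: 0o644,
+  size: 11,
+  mtime: new Date(),
+  type: 'File'
+})
+const needPax = h.encode(block, 0) // false: everything fit in ustar fields
+```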
+
+### class tar.Pax
+
+An object representing a set of key-value pairs in a Pax extended
+header entry.
+
+It has the following fields. Where the same name is used, they have
+the same semantics as the tar.Header field of the same name.
+
+- `global` True if this represents a global extended header, or false
+ if it is for a single entry.
+- `atime`
+- `charset`
+- `comment`
+- `ctime`
+- `gid`
+- `gname`
+- `linkpath`
+- `mtime`
+- `path`
+- `size`
+- `uid`
+- `uname`
+- `dev`
+- `ino`
+- `nlink`
+
+#### constructor(object, global)
+
+Set the fields given in the provided object. `global` is a boolean
+that defaults to false.
+
+#### encode()
+
+Return a Buffer containing the header and body for the Pax extended
+header entry, or `null` if there is nothing to encode.
+
+#### encodeBody()
+
+Return a string representing the body of the pax extended header
+entry.
+
+#### encodeField(fieldName)
+
+Return a string representing the key/value encoding for the specified
+fieldName, or `''` if the field is unset.
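+
+A sketch of the encoding methods (the path is illustrative):
+
+```js
+const pax = new tar.Pax({ path: 'some/extremely/long/path.txt' })
+pax.encodeBody()         // '<len> path=some/extremely/long/path.txt\n'
+const buf = pax.encode() // full header block plus padded body, or null
+```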
+
+### tar.Pax.parse(string, extended, global)
+
+Return a new Pax object created by parsing the contents of the string
+provided.
+
+If the `extended` object is set, then also add the fields from that
+object. (This is necessary because multiple metadata entries can
+occur in sequence.)
+
+### tar.types
+
+A translation table for the `type` field in tar headers.
+
+#### tar.types.name.get(code)
+
+Get the human-readable name for a given alphanumeric code.
-### tar.Parse()
+#### tar.types.code.get(name)
-Returns a writable stream. Write tar data to it and it will emit
-`entry` events for each entry parsed from the tarball. This is used by
-`tar.Extract`.
+Get the alphanumeric code for a given human-readable name.
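+
+For example (assuming `'0'` is the ustar code for a regular file):
+
+```js
+tar.types.name.get('0')    // 'File'
+tar.types.code.get('File') // '0'
+```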
diff --git a/deps/npm/node_modules/tar/examples/extracter.js b/deps/npm/node_modules/tar/examples/extracter.js
deleted file mode 100644
index f6253a72c5..0000000000
--- a/deps/npm/node_modules/tar/examples/extracter.js
+++ /dev/null
@@ -1,19 +0,0 @@
-var tar = require("../tar.js")
- , fs = require("fs")
-
-
-function onError(err) {
- console.error('An error occurred:', err)
-}
-
-function onEnd() {
- console.log('Extracted!')
-}
-
-var extractor = tar.Extract({path: __dirname + "/extract"})
- .on('error', onError)
- .on('end', onEnd);
-
-fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
- .on('error', onError)
- .pipe(extractor);
diff --git a/deps/npm/node_modules/tar/examples/packer.js b/deps/npm/node_modules/tar/examples/packer.js
deleted file mode 100644
index 039969ce30..0000000000
--- a/deps/npm/node_modules/tar/examples/packer.js
+++ /dev/null
@@ -1,24 +0,0 @@
-var tar = require("../tar.js")
- , fstream = require("fstream")
- , fs = require("fs")
-
-var dirDest = fs.createWriteStream('dir.tar')
-
-
-function onError(err) {
- console.error('An error occurred:', err)
-}
-
-function onEnd() {
- console.log('Packed!')
-}
-
-var packer = tar.Pack({ noProprietary: true })
- .on('error', onError)
- .on('end', onEnd);
-
-// This must be a "directory"
-fstream.Reader({ path: __dirname, type: "Directory" })
- .on('error', onError)
- .pipe(packer)
- .pipe(dirDest)
diff --git a/deps/npm/node_modules/tar/examples/reader.js b/deps/npm/node_modules/tar/examples/reader.js
deleted file mode 100644
index 39f3f0888a..0000000000
--- a/deps/npm/node_modules/tar/examples/reader.js
+++ /dev/null
@@ -1,36 +0,0 @@
-var tar = require("../tar.js")
- , fs = require("fs")
-
-fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
- .pipe(tar.Parse())
- .on("extendedHeader", function (e) {
- console.error("extended pax header", e.props)
- e.on("end", function () {
- console.error("extended pax fields:", e.fields)
- })
- })
- .on("ignoredEntry", function (e) {
- console.error("ignoredEntry?!?", e.props)
- })
- .on("longLinkpath", function (e) {
- console.error("longLinkpath entry", e.props)
- e.on("end", function () {
- console.error("value=%j", e.body.toString())
- })
- })
- .on("longPath", function (e) {
- console.error("longPath entry", e.props)
- e.on("end", function () {
- console.error("value=%j", e.body.toString())
- })
- })
- .on("entry", function (e) {
- console.error("entry", e.props)
- e.on("data", function (c) {
- console.error(" >>>" + c.toString().replace(/\n/g, "\\n"))
- })
- e.on("end", function () {
- console.error(" <<<EOF")
- })
- })
-
diff --git a/deps/npm/node_modules/tar/index.js b/deps/npm/node_modules/tar/index.js
new file mode 100644
index 0000000000..c9ae06e790
--- /dev/null
+++ b/deps/npm/node_modules/tar/index.js
@@ -0,0 +1,18 @@
+'use strict'
+
+// high-level commands
+exports.c = exports.create = require('./lib/create.js')
+exports.r = exports.replace = require('./lib/replace.js')
+exports.t = exports.list = require('./lib/list.js')
+exports.u = exports.update = require('./lib/update.js')
+exports.x = exports.extract = require('./lib/extract.js')
+
+// classes
+exports.Pack = require('./lib/pack.js')
+exports.Unpack = require('./lib/unpack.js')
+exports.Parse = require('./lib/parse.js')
+exports.ReadEntry = require('./lib/read-entry.js')
+exports.WriteEntry = require('./lib/write-entry.js')
+exports.Header = require('./lib/header.js')
+exports.Pax = require('./lib/pax.js')
+exports.types = require('./lib/types.js')
diff --git a/deps/npm/node_modules/tar/lib/buffer-entry.js b/deps/npm/node_modules/tar/lib/buffer-entry.js
deleted file mode 100644
index 6c1da2373a..0000000000
--- a/deps/npm/node_modules/tar/lib/buffer-entry.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// just like the Entry class, but it buffers the contents
-//
-// XXX It would be good to set a maximum BufferEntry filesize,
-// since it eats up memory. In normal operation,
-// these are only for long filenames or link names, which are
-// rarely very big.
-
-module.exports = BufferEntry
-
-var inherits = require("inherits")
- , Entry = require("./entry.js")
-
-function BufferEntry () {
- Entry.apply(this, arguments)
- this._buffer = new Buffer(this.props.size)
- this._offset = 0
- this.body = ""
- this.on("end", function () {
- this.body = this._buffer.toString().slice(0, -1)
- })
-}
-
-inherits(BufferEntry, Entry)
-
-// collect the bytes as they come in.
-BufferEntry.prototype.write = function (c) {
- c.copy(this._buffer, this._offset)
- this._offset += c.length
- Entry.prototype.write.call(this, c)
-}
diff --git a/deps/npm/node_modules/tar/lib/create.js b/deps/npm/node_modules/tar/lib/create.js
new file mode 100644
index 0000000000..5d46b3ba70
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/create.js
@@ -0,0 +1,110 @@
+'use strict'
+
+// tar -c
+const hlo = require('./high-level-opt.js')
+
+const Pack = require('./pack.js')
+const fs = require('fs')
+const t = require('./list.js')
+const path = require('path')
+
+const c = module.exports = (opt_, files, cb) => {
+ if (typeof files === 'function')
+ cb = files
+
+ if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ return opt.file && opt.sync ? createFileSync(opt, files)
+ : opt.file ? createFile(opt, files, cb)
+ : opt.sync ? createSync(opt, files)
+ : create(opt, files)
+}
+
+const createFileSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
+ let threw = true
+ let fd
+ try {
+ fd = fs.openSync(opt.file, 'w', opt.mode || 0o666)
+ p.on('data', chunk => fs.writeSync(fd, chunk, 0, chunk.length))
+ p.on('end', _ => fs.closeSync(fd))
+ addFilesSync(p, files)
+ threw = false
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const createFile = (opt, files, cb) => {
+ const p = new Pack(opt)
+ const stream = fs.createWriteStream(opt.file, { mode: opt.mode || 0o666 })
+ p.pipe(stream)
+
+ const promise = new Promise((res, rej) => {
+ stream.on('error', rej)
+ stream.on('close', res)
+ p.on('error', rej)
+ })
+
+ addFilesAsync(p, files)
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
+
+const createSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ addFilesSync(p, files)
+ return p
+}
+
+const create = (opt, files) => {
+ const p = new Pack(opt)
+ addFilesAsync(p, files)
+ return p
+}
diff --git a/deps/npm/node_modules/tar/lib/entry-writer.js b/deps/npm/node_modules/tar/lib/entry-writer.js
deleted file mode 100644
index 8e09042d01..0000000000
--- a/deps/npm/node_modules/tar/lib/entry-writer.js
+++ /dev/null
@@ -1,169 +0,0 @@
-module.exports = EntryWriter
-
-var tar = require("../tar.js")
- , TarHeader = require("./header.js")
- , Entry = require("./entry.js")
- , inherits = require("inherits")
- , BlockStream = require("block-stream")
- , ExtendedHeaderWriter
- , Stream = require("stream").Stream
- , EOF = {}
-
-inherits(EntryWriter, Stream)
-
-function EntryWriter (props) {
- var me = this
-
- if (!(me instanceof EntryWriter)) {
- return new EntryWriter(props)
- }
-
- Stream.apply(this)
-
- me.writable = true
- me.readable = true
-
- me._stream = new BlockStream(512)
-
- me._stream.on("data", function (c) {
- me.emit("data", c)
- })
-
- me._stream.on("drain", function () {
- me.emit("drain")
- })
-
- me._stream.on("end", function () {
- me.emit("end")
- me.emit("close")
- })
-
- me.props = props
- if (props.type === "Directory") {
- props.size = 0
- }
- props.ustar = "ustar\0"
- props.ustarver = "00"
- me.path = props.path
-
- me._buffer = []
- me._didHeader = false
- me._meta = false
-
- me.on("pipe", function () {
- me._process()
- })
-}
-
-EntryWriter.prototype.write = function (c) {
- // console.error(".. ew write")
- if (this._ended) return this.emit("error", new Error("write after end"))
- this._buffer.push(c)
- this._process()
- this._needDrain = this._buffer.length > 0
- return !this._needDrain
-}
-
-EntryWriter.prototype.end = function (c) {
- // console.error(".. ew end")
- if (c) this._buffer.push(c)
- this._buffer.push(EOF)
- this._ended = true
- this._process()
- this._needDrain = this._buffer.length > 0
-}
-
-EntryWriter.prototype.pause = function () {
- // console.error(".. ew pause")
- this._paused = true
- this.emit("pause")
-}
-
-EntryWriter.prototype.resume = function () {
- // console.error(".. ew resume")
- this._paused = false
- this.emit("resume")
- this._process()
-}
-
-EntryWriter.prototype.add = function (entry) {
- // console.error(".. ew add")
- if (!this.parent) return this.emit("error", new Error("no parent"))
-
- // make sure that the _header and such is emitted, and clear out
- // the _currentEntry link on the parent.
- if (!this._ended) this.end()
-
- return this.parent.add(entry)
-}
-
-EntryWriter.prototype._header = function () {
- // console.error(".. ew header")
- if (this._didHeader) return
- this._didHeader = true
-
- var headerBlock = TarHeader.encode(this.props)
-
- if (this.props.needExtended && !this._meta) {
- var me = this
-
- ExtendedHeaderWriter = ExtendedHeaderWriter ||
- require("./extended-header-writer.js")
-
- ExtendedHeaderWriter(this.props)
- .on("data", function (c) {
- me.emit("data", c)
- })
- .on("error", function (er) {
- me.emit("error", er)
- })
- .end()
- }
-
- // console.error(".. .. ew headerBlock emitting")
- this.emit("data", headerBlock)
- this.emit("header")
-}
-
-EntryWriter.prototype._process = function () {
- // console.error(".. .. ew process")
- if (!this._didHeader && !this._meta) {
- this._header()
- }
-
- if (this._paused || this._processing) {
- // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing)
- return
- }
-
- this._processing = true
-
- var buf = this._buffer
- for (var i = 0; i < buf.length; i ++) {
- // console.error(".. .. .. i=%d", i)
-
- var c = buf[i]
-
- if (c === EOF) this._stream.end()
- else this._stream.write(c)
-
- if (this._paused) {
- // console.error(".. .. .. paused mid-emission")
- this._processing = false
- if (i < buf.length) {
- this._needDrain = true
- this._buffer = buf.slice(i + 1)
- }
- return
- }
- }
-
- // console.error(".. .. .. emitted")
- this._buffer.length = 0
- this._processing = false
-
- // console.error(".. .. .. emitting drain")
- this.emit("drain")
-}
-
-EntryWriter.prototype.destroy = function () {}
diff --git a/deps/npm/node_modules/tar/lib/entry.js b/deps/npm/node_modules/tar/lib/entry.js
deleted file mode 100644
index 591202bd3b..0000000000
--- a/deps/npm/node_modules/tar/lib/entry.js
+++ /dev/null
@@ -1,220 +0,0 @@
-// A passthrough read/write stream that sets its properties
-// based on a header, extendedHeader, and globalHeader
-//
-// Can be either a file system object of some sort, or
-// a pax/ustar metadata entry.
-
-module.exports = Entry
-
-var TarHeader = require("./header.js")
- , tar = require("../tar")
- , assert = require("assert").ok
- , Stream = require("stream").Stream
- , inherits = require("inherits")
- , fstream = require("fstream").Abstract
-
-function Entry (header, extended, global) {
- Stream.call(this)
- this.readable = true
- this.writable = true
-
- this._needDrain = false
- this._paused = false
- this._reading = false
- this._ending = false
- this._ended = false
- this._remaining = 0
- this._abort = false
- this._queue = []
- this._index = 0
- this._queueLen = 0
-
- this._read = this._read.bind(this)
-
- this.props = {}
- this._header = header
- this._extended = extended || {}
-
- // globals can change throughout the course of
- // a file parse operation. Freeze it at its current state.
- this._global = {}
- var me = this
- Object.keys(global || {}).forEach(function (g) {
- me._global[g] = global[g]
- })
-
- this._setProps()
-}
-
-inherits(Entry, Stream)
-
-Entry.prototype.write = function (c) {
- if (this._ending) this.error("write() after end()", null, true)
- if (this._remaining === 0) {
- this.error("invalid bytes past eof")
- }
-
- // often we'll get a bunch of \0 at the end of the last write,
- // since chunks will always be 512 bytes when reading a tarball.
- if (c.length > this._remaining) {
- c = c.slice(0, this._remaining)
- }
- this._remaining -= c.length
-
- // put it on the stack.
- var ql = this._queueLen
- this._queue.push(c)
- this._queueLen ++
-
- this._read()
-
- // either paused, or buffered
- if (this._paused || ql > 0) {
- this._needDrain = true
- return false
- }
-
- return true
-}
-
-Entry.prototype.end = function (c) {
- if (c) this.write(c)
- this._ending = true
- this._read()
-}
-
-Entry.prototype.pause = function () {
- this._paused = true
- this.emit("pause")
-}
-
-Entry.prototype.resume = function () {
- // console.error(" Tar Entry resume", this.path)
- this.emit("resume")
- this._paused = false
- this._read()
- return this._queueLen - this._index > 1
-}
-
- // This is bound to the instance
-Entry.prototype._read = function () {
- // console.error(" Tar Entry _read", this.path)
-
- if (this._paused || this._reading || this._ended) return
-
- // set this flag so that event handlers don't inadvertently
- // get multiple _read() calls running.
- this._reading = true
-
- // have any data to emit?
- while (this._index < this._queueLen && !this._paused) {
- var chunk = this._queue[this._index ++]
- this.emit("data", chunk)
- }
-
- // check if we're drained
- if (this._index >= this._queueLen) {
- this._queue.length = this._queueLen = this._index = 0
- if (this._needDrain) {
- this._needDrain = false
- this.emit("drain")
- }
- if (this._ending) {
- this._ended = true
- this.emit("end")
- }
- }
-
- // if the queue gets too big, then pluck off whatever we can.
- // this should be fairly rare.
- var mql = this._maxQueueLen
- if (this._queueLen > mql && this._index > 0) {
- mql = Math.min(this._index, mql)
- this._index -= mql
- this._queueLen -= mql
- this._queue = this._queue.slice(mql)
- }
-
- this._reading = false
-}
-
-Entry.prototype._setProps = function () {
- // props = extended->global->header->{}
- var header = this._header
- , extended = this._extended
- , global = this._global
- , props = this.props
-
- // first get the values from the normal header.
- var fields = tar.fields
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , val = header[field]
- if (typeof val !== "undefined") props[field] = val
- }
-
- // next, the global header for this file.
- // numeric values, etc, will have already been parsed.
- ;[global, extended].forEach(function (p) {
- Object.keys(p).forEach(function (f) {
- if (typeof p[f] !== "undefined") props[f] = p[f]
- })
- })
-
- // no nulls allowed in path or linkpath
- ;["path", "linkpath"].forEach(function (p) {
- if (props.hasOwnProperty(p)) {
- props[p] = props[p].split("\0")[0]
- }
- })
-
-
- // set date fields to be a proper date
- ;["mtime", "ctime", "atime"].forEach(function (p) {
- if (props.hasOwnProperty(p)) {
- props[p] = new Date(props[p] * 1000)
- }
- })
-
- // set the type so that we know what kind of file to create
- var type
- switch (tar.types[props.type]) {
- case "OldFile":
- case "ContiguousFile":
- type = "File"
- break
-
- case "GNUDumpDir":
- type = "Directory"
- break
-
- case undefined:
- type = "Unknown"
- break
-
- case "Link":
- case "SymbolicLink":
- case "CharacterDevice":
- case "BlockDevice":
- case "Directory":
- case "FIFO":
- default:
- type = tar.types[props.type]
- }
-
- this.type = type
- this.path = props.path
- this.size = props.size
-
- // size is special, since it signals when the file needs to end.
- this._remaining = props.size
-}
-
-// the parser must not call write() while _abort is true.
-// useful for quickly skipping the data of unwanted entries.
-Entry.prototype.abort = function(){
- this._abort = true
-}
-
-Entry.prototype.warn = fstream.warn
-Entry.prototype.error = fstream.error
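
The precedence in _setProps() is easy to miss in the loops: ustar header values are overlaid by the frozen global pax header, which is in turn overlaid by the per-entry extended header, and then paths are NUL-truncated. A sketch of the same merge with made-up field values:

'use strict'
// precedence: header < global pax < per-entry pax (last one wins)
const header = { path: 'a.txt\0\0\0', size: 3, mtime: 1500000000 }
const globalPax = { uname: 'build' }
const extendedPax = { path: 'some/longer/real/path/a.txt', mtime: 1500000123 }

const props = Object.assign({}, header, globalPax, extendedPax)
// no NULs allowed in path/linkpath, as _setProps() enforces
props.path = props.path.split('\0')[0]

console.log(props)
// { path: 'some/longer/real/path/a.txt',
//   size: 3, mtime: 1500000123, uname: 'build' }
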
diff --git a/deps/npm/node_modules/tar/lib/extended-header-writer.js b/deps/npm/node_modules/tar/lib/extended-header-writer.js
deleted file mode 100644
index 1728c4583a..0000000000
--- a/deps/npm/node_modules/tar/lib/extended-header-writer.js
+++ /dev/null
@@ -1,191 +0,0 @@
-
-module.exports = ExtendedHeaderWriter
-
-var inherits = require("inherits")
- , EntryWriter = require("./entry-writer.js")
-
-inherits(ExtendedHeaderWriter, EntryWriter)
-
-var tar = require("../tar.js")
- , path = require("path")
- , TarHeader = require("./header.js")
-
-// props is the props of the thing we need to write an
-// extended header for.
-// Don't be shy with it. Just encode everything.
-function ExtendedHeaderWriter (props) {
- // console.error(">> ehw ctor")
- var me = this
-
- if (!(me instanceof ExtendedHeaderWriter)) {
- return new ExtendedHeaderWriter(props)
- }
-
- me.fields = props
-
- var p =
- { path : ("PaxHeader" + path.join("/", props.path || ""))
- .replace(/\\/g, "/").substr(0, 100)
- , mode : props.mode || 0666
- , uid : props.uid || 0
- , gid : props.gid || 0
- , size : 0 // will be set later
- , mtime : props.mtime || Date.now() / 1000
- , type : "x"
- , linkpath : ""
- , ustar : "ustar\0"
- , ustarver : "00"
- , uname : props.uname || ""
- , gname : props.gname || ""
- , devmaj : props.devmaj || 0
- , devmin : props.devmin || 0
- }
-
-
- EntryWriter.call(me, p)
- // console.error(">> ehw props", me.props)
- me.props = p
-
- me._meta = true
-}
-
-ExtendedHeaderWriter.prototype.end = function () {
- // console.error(">> ehw end")
- var me = this
-
- if (me._ended) return
- me._ended = true
-
- me._encodeFields()
-
- if (me.props.size === 0) {
- // nothing to write!
- me._ready = true
- me._stream.end()
- return
- }
-
- me._stream.write(TarHeader.encode(me.props))
- me.body.forEach(function (l) {
- me._stream.write(l)
- })
- me._ready = true
-
- // console.error(">> ehw _process calling end()", me.props)
- this._stream.end()
-}
-
-ExtendedHeaderWriter.prototype._encodeFields = function () {
- // console.error(">> ehw _encodeFields")
- this.body = []
- if (this.fields.prefix) {
- this.fields.path = this.fields.prefix + "/" + this.fields.path
- this.fields.prefix = ""
- }
- encodeFields(this.fields, "", this.body, this.fields.noProprietary)
- var me = this
- this.body.forEach(function (l) {
- me.props.size += l.length
- })
-}
-
-function encodeFields (fields, prefix, body, nop) {
- // console.error(">> >> ehw encodeFields")
- // "%d %s=%s\n", <length>, <keyword>, <value>
- // The length is a decimal number, and includes itself and the \n
- // Numeric values are decimal strings.
-
- Object.keys(fields).forEach(function (k) {
- var val = fields[k]
- , numeric = tar.numeric[k]
-
- if (prefix) k = prefix + "." + k
-
- // already including NODETAR.type, don't need File=true also
- if (k === fields.type && val === true) return
-
- switch (k) {
- // don't include anything that's always handled just fine
- // in the normal header, or only meaningful in the context
- // of nodetar
- case "mode":
- case "cksum":
- case "ustar":
- case "ustarver":
- case "prefix":
- case "basename":
- case "dirname":
- case "needExtended":
- case "block":
- case "filter":
- return
-
- case "rdev":
- if (val === 0) return
- break
-
- case "nlink":
- case "dev": // Truly a hero among men, Creator of Star!
- case "ino": // Speak his name with reverent awe! It is:
- k = "SCHILY." + k
- break
-
- default: break
- }
-
- if (val && typeof val === "object" &&
- !Buffer.isBuffer(val)) encodeFields(val, k, body, nop)
- else if (val === null || val === undefined) return
- else body.push.apply(body, encodeField(k, val, nop))
- })
-
- return body
-}
-
-function encodeField (k, v, nop) {
- // lowercase keys must be valid, otherwise prefix with
- // "NODETAR."
- if (k.charAt(0) === k.charAt(0).toLowerCase()) {
- var m = k.split(".")[0]
- if (!tar.knownExtended[m]) k = "NODETAR." + k
- }
-
- // no proprietary
- if (nop && k.charAt(0) !== k.charAt(0).toLowerCase()) {
- return []
- }
-
-  if (typeof v === "number") v = v.toString(10)
-
- var s = new Buffer(" " + k + "=" + v + "\n")
- , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1
-
- // console.error("1 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
-
-  // if writing that many digits pushes the total past what those
-  // digits can express, then add one more. For example, " foo=bar\n"
-  // is 9 characters, and prepending the single digit "9" brings the
-  // total to 10, so "9" no longer states the true length. With two
-  // digits the record is "11 foo=bar\n": the length grew by another
-  // character, and the length includes the number itself. In that
-  // case, just bump the digit count up again.
- if (s.length + digits >= Math.pow(10, digits)) digits += 1
- // console.error("2 s=%j digits=%j s.length=%d", s.toString(), digits, s.length)
-
- var len = digits + s.length
- // console.error("3 s=%j digits=%j s.length=%d len=%d", s.toString(), digits, s.length, len)
- var lenBuf = new Buffer("" + len)
- if (lenBuf.length + s.length !== len) {
- throw new Error("Bad length calculation\n"+
- "len="+len+"\n"+
- "lenBuf="+JSON.stringify(lenBuf.toString())+"\n"+
- "lenBuf.length="+lenBuf.length+"\n"+
- "digits="+digits+"\n"+
- "s="+JSON.stringify(s.toString())+"\n"+
- "s.length="+s.length)
- }
-
- return [lenBuf, s]
-}
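
The digit arithmetic in encodeField() exists because a pax record's leading decimal length counts the record including the length digits themselves, so writing the digits can change how many digits are needed. Computed directly (a sketch, not this module's API):

'use strict'
// "%d %s=%s\n" -- the decimal length includes itself and the \n
const paxRecord = (k, v) => {
  const body = ' ' + k + '=' + v + '\n'
  const bodyLen = Buffer.byteLength(body)
  let digits = 1
  // grow the digit count until it is consistent with the total
  while (bodyLen + digits >= Math.pow(10, digits)) digits++
  return String(bodyLen + digits) + body
}

console.log(JSON.stringify(paxRecord('foo', 'bar')))
// "11 foo=bar\n" -- 9 body bytes + 2 length digits = 11
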
diff --git a/deps/npm/node_modules/tar/lib/extended-header.js b/deps/npm/node_modules/tar/lib/extended-header.js
deleted file mode 100644
index 74f432ceee..0000000000
--- a/deps/npm/node_modules/tar/lib/extended-header.js
+++ /dev/null
@@ -1,140 +0,0 @@
-// An Entry consisting of:
-//
-// "%d %s=%s\n", <length>, <keyword>, <value>
-//
-// The length is a decimal number, and includes itself and the \n
-// \0 does not terminate anything. Only the length terminates the string.
-// Numeric values are decimal strings.
-
-module.exports = ExtendedHeader
-
-var Entry = require("./entry.js")
- , inherits = require("inherits")
- , tar = require("../tar.js")
- , numeric = tar.numeric
- , keyTrans = { "SCHILY.dev": "dev"
- , "SCHILY.ino": "ino"
- , "SCHILY.nlink": "nlink" }
-
-function ExtendedHeader () {
- Entry.apply(this, arguments)
- this.on("data", this._parse)
- this.fields = {}
- this._position = 0
- this._fieldPos = 0
- this._state = SIZE
- this._sizeBuf = []
- this._keyBuf = []
- this._valBuf = []
- this._size = -1
- this._key = ""
-}
-
-inherits(ExtendedHeader, Entry)
-ExtendedHeader.prototype._parse = parse
-
-var s = 0
- , states = ExtendedHeader.states = {}
- , SIZE = states.SIZE = s++
- , KEY = states.KEY = s++
- , VAL = states.VAL = s++
- , ERR = states.ERR = s++
-
-Object.keys(states).forEach(function (s) {
- states[states[s]] = states[s]
-})
-
-states[s] = null
-
-// char code values for comparison
-var _0 = "0".charCodeAt(0)
- , _9 = "9".charCodeAt(0)
- , point = ".".charCodeAt(0)
-  , A = "A".charCodeAt(0)
- , Z = "Z".charCodeAt(0)
- , a = "a".charCodeAt(0)
- , z = "z".charCodeAt(0)
- , space = " ".charCodeAt(0)
- , eq = "=".charCodeAt(0)
- , cr = "\n".charCodeAt(0)
-
-function parse (c) {
- if (this._state === ERR) return
-
- for ( var i = 0, l = c.length
- ; i < l
- ; this._position++, this._fieldPos++, i++) {
- // console.error("top of loop, size="+this._size)
-
- var b = c[i]
-
- if (this._size >= 0 && this._fieldPos > this._size) {
- error(this, "field exceeds length="+this._size)
- return
- }
-
- switch (this._state) {
- case ERR: return
-
- case SIZE:
- // console.error("parsing size, b=%d, rest=%j", b, c.slice(i).toString())
- if (b === space) {
- this._state = KEY
- // this._fieldPos = this._sizeBuf.length
- this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
- this._sizeBuf.length = 0
- continue
- }
- if (b < _0 || b > _9) {
- error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
- return
- }
- this._sizeBuf.push(b)
- continue
-
- case KEY:
- // can be any char except =, not > size.
- if (b === eq) {
- this._state = VAL
- this._key = new Buffer(this._keyBuf).toString()
- if (keyTrans[this._key]) this._key = keyTrans[this._key]
- this._keyBuf.length = 0
- continue
- }
- this._keyBuf.push(b)
- continue
-
- case VAL:
- // field must end with cr
- if (this._fieldPos === this._size - 1) {
- // console.error("finished with "+this._key)
- if (b !== cr) {
- error(this, "expected \\n at end of field")
- return
- }
- var val = new Buffer(this._valBuf).toString()
- if (numeric[this._key]) {
- val = parseFloat(val)
- }
- this.fields[this._key] = val
-
- this._valBuf.length = 0
- this._state = SIZE
- this._size = -1
- this._fieldPos = -1
- continue
- }
- this._valBuf.push(b)
- continue
- }
- }
-}
-
-function error (me, msg) {
- msg = "invalid header: " + msg
- + "\nposition=" + me._position
- + "\nfield position=" + me._fieldPos
-
- me.error(msg)
-  me._state = ERR
-}
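
The SIZE/KEY/VAL machine above is the same record format going the other way. When the whole body is already in memory the parse is a short loop; a simplified sketch that skips the streaming and error states:

'use strict'
// parse concatenated "%d %s=%s\n" records from one buffer
const parsePax = buf => {
  const fields = {}
  for (let pos = 0; pos < buf.length;) {
    const sp = buf.indexOf(' ', pos)
    const len = parseInt(buf.slice(pos, sp).toString(), 10)
    const rec = buf.slice(sp + 1, pos + len).toString('utf8')
    const eq = rec.indexOf('=')
    fields[rec.slice(0, eq)] = rec.slice(eq + 1, -1) // drop trailing \n
    pos += len
  }
  return fields
}

console.log(parsePax(Buffer.from('11 foo=bar\n20 mtime=1500000123\n')))
// { foo: 'bar', mtime: '1500000123' }
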
diff --git a/deps/npm/node_modules/tar/lib/extract.js b/deps/npm/node_modules/tar/lib/extract.js
index fe1bb976eb..53ecf67894 100644
--- a/deps/npm/node_modules/tar/lib/extract.js
+++ b/deps/npm/node_modules/tar/lib/extract.js
@@ -1,94 +1,127 @@
-// give it a tarball and a path, and it'll dump the contents
+'use strict'
-module.exports = Extract
+// tar -x
+const hlo = require('./high-level-opt.js')
+const Unpack = require('./unpack.js')
+const fs = require('fs')
+const path = require('path')
-var tar = require("../tar.js")
- , fstream = require("fstream")
- , inherits = require("inherits")
- , path = require("path")
+const x = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
-function Extract (opts) {
- if (!(this instanceof Extract)) return new Extract(opts)
- tar.Parse.apply(this)
+ if (typeof files === 'function')
+ cb = files, files = null
- if (typeof opts !== "object") {
- opts = { path: opts }
- }
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
- // better to drop in cwd? seems more standard.
- opts.path = opts.path || path.resolve("node-tar-extract")
- opts.type = "Directory"
- opts.Directory = true
-
- // similar to --strip or --strip-components
- opts.strip = +opts.strip
- if (!opts.strip || opts.strip <= 0) opts.strip = 0
-
- this._fst = fstream.Writer(opts)
-
- this.pause()
- var me = this
-
- // Hardlinks in tarballs are relative to the root
- // of the tarball. So, they need to be resolved against
- // the target directory in order to be created properly.
- me.on("entry", function (entry) {
- // if there's a "strip" argument, then strip off that many
- // path components.
- if (opts.strip) {
- var p = entry.path.split("/").slice(opts.strip).join("/")
- entry.path = entry.props.path = p
- if (entry.linkpath) {
- var lp = entry.linkpath.split("/").slice(opts.strip).join("/")
- entry.linkpath = entry.props.linkpath = lp
- }
- }
- if (entry.type === "Link") {
- entry.linkpath = entry.props.linkpath =
- path.join(opts.path, path.join("/", entry.props.linkpath))
- }
+ const opt = hlo(opt_)
- if (entry.type === "SymbolicLink") {
- var dn = path.dirname(entry.path) || ""
- var linkpath = entry.props.linkpath
- var target = path.resolve(opts.path, dn, linkpath)
- if (target.indexOf(opts.path) !== 0) {
- linkpath = path.join(opts.path, path.join("/", linkpath))
- }
- entry.linkpath = entry.props.linkpath = linkpath
- }
- })
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
- this._fst.on("ready", function () {
- me.pipe(me._fst, { end: false })
- me.resume()
- })
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
- this._fst.on('error', function(err) {
- me.emit('error', err)
- })
+ if (files.length)
+ filesFilter(opt, files)
- this._fst.on('drain', function() {
- me.emit('drain')
- })
+ return opt.file && opt.sync ? extractFileSync(opt)
+ : opt.file ? extractFile(opt, cb)
+ : opt.sync ? extractSync(opt)
+ : extract(opt)
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
- // this._fst.on("end", function () {
- // console.error("\nEEEE Extract End", me._fst.path)
- // })
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
- this._fst.on("close", function () {
- // console.error("\nEEEE Extract End", me._fst.path)
- me.emit("finish")
- me.emit("end")
- me.emit("close")
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const extractFileSync = opt => {
+ const u = new Unpack.Sync(opt)
+
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize)
+ u.end(fs.readFileSync(file))
+ else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ u.write(buf.slice(0, bytesRead))
+ }
+ u.end()
+ fs.closeSync(fd)
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const extractFile = (opt, cb) => {
+ const u = new Unpack(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ u.on('error', reject)
+ u.on('close', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else if (stat.size < readSize)
+ fs.readFile(file, (er, data) => {
+ if (er)
+ return reject(er)
+ u.end(data)
+ })
+ else {
+ const stream = fs.createReadStream(file, {
+ highWaterMark: readSize
+ })
+ stream.on('error', reject)
+ stream.pipe(u)
+ }
+ })
})
+ return cb ? p.then(cb, cb) : p
}
-inherits(Extract, tar.Parse)
+const extractSync = opt => {
+ return new Unpack.Sync(opt)
+}
-Extract.prototype._streamEnd = function () {
- var me = this
- if (!me._ended || me._entry) me.error("unexpected eof")
- me._fst.end()
- // my .end() is coming later.
+const extract = opt => {
+ return new Unpack(opt)
}
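
Taken together, the new extract entry point dispatches purely on options: file plus sync reads the archive synchronously, file alone returns a promise (or invokes the callback), and no file at all hands back the Unpack stream for piping. A usage sketch, assuming an archive.tgz exists and that the package's top-level export exposes this as tar.x:

'use strict'
const fs = require('fs')
const tar = require('tar')

// file + sync: fully extracted (or thrown) by the time this returns
tar.x({ file: 'archive.tgz', cwd: 'dest', sync: true })

// file only: promise-based, with an optional entry whitelist
tar.x({ file: 'archive.tgz', cwd: 'dest' }, ['package/README.md'])
  .then(() => console.log('done'))

// no file: a writable Unpack stream; gzip is detected from the data
fs.createReadStream('archive.tgz').pipe(tar.x({ cwd: 'dest' }))
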
diff --git a/deps/npm/node_modules/tar/lib/global-header-writer.js b/deps/npm/node_modules/tar/lib/global-header-writer.js
deleted file mode 100644
index 0bfc7b80aa..0000000000
--- a/deps/npm/node_modules/tar/lib/global-header-writer.js
+++ /dev/null
@@ -1,14 +0,0 @@
-module.exports = GlobalHeaderWriter
-
-var ExtendedHeaderWriter = require("./extended-header-writer.js")
- , inherits = require("inherits")
-
-inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
-
-function GlobalHeaderWriter (props) {
- if (!(this instanceof GlobalHeaderWriter)) {
- return new GlobalHeaderWriter(props)
- }
- ExtendedHeaderWriter.call(this, props)
- this.props.type = "g"
-}
diff --git a/deps/npm/node_modules/tar/lib/header.js b/deps/npm/node_modules/tar/lib/header.js
index 05b237c0c7..db002e8c18 100644
--- a/deps/npm/node_modules/tar/lib/header.js
+++ b/deps/npm/node_modules/tar/lib/header.js
@@ -1,385 +1,272 @@
+'use strict'
// parse a 512-byte header block to a data object, or vice-versa
-// If the data won't fit nicely in a simple header, then generate
-// the appropriate extended header file, and return that.
-
-module.exports = TarHeader
-
-var tar = require("../tar.js")
- , fields = tar.fields
- , fieldOffs = tar.fieldOffs
- , fieldEnds = tar.fieldEnds
- , fieldSize = tar.fieldSize
- , numeric = tar.numeric
- , assert = require("assert").ok
- , space = " ".charCodeAt(0)
- , slash = "/".charCodeAt(0)
- , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
-
-function TarHeader (block) {
- if (!(this instanceof TarHeader)) return new TarHeader(block)
- if (block) this.decode(block)
-}
-
-TarHeader.prototype =
- { decode : decode
- , encode: encode
- , calcSum: calcSum
- , checkSum: checkSum
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+
+const types = require('./types.js')
+const pathModule = require('path')
+const large = require('./large-numbers.js')
+
+const TYPE = Symbol('type')
+
+class Header {
+ constructor (data, off) {
+ this.cksumValid = false
+ this.needPax = false
+ this.nullBlock = false
+
+ this.block = null
+ this.path = null
+ this.mode = null
+ this.uid = null
+ this.gid = null
+ this.size = null
+ this.mtime = null
+ this.cksum = null
+ this[TYPE] = '0'
+ this.linkpath = null
+ this.uname = null
+ this.gname = null
+ this.devmaj = 0
+ this.devmin = 0
+ this.atime = null
+ this.ctime = null
+
+ if (Buffer.isBuffer(data)) {
+ this.decode(data, off || 0)
+ } else if (data)
+ this.set(data)
}
-TarHeader.parseNumeric = parseNumeric
-TarHeader.encode = encode
-TarHeader.decode = decode
-
-// note that this will only do the normal ustar header, not any kind
-// of extended posix header file. If something doesn't fit comfortably,
-// then it will set obj.needExtended = true, and set the block to
-// the closest approximation.
-function encode (obj) {
- if (!obj && !(this instanceof TarHeader)) throw new Error(
- "encode must be called on a TarHeader, or supplied an object")
-
- obj = obj || this
- var block = obj.block = new Buffer(512)
-
- // if the object has a "prefix", then that's actually an extension of
- // the path field.
- if (obj.prefix) {
- // console.error("%% header encoding, got a prefix", obj.prefix)
- obj.path = obj.prefix + "/" + obj.path
- // console.error("%% header encoding, prefixed path", obj.path)
- obj.prefix = ""
- }
-
- obj.needExtended = false
-
- if (obj.mode) {
- if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
- obj.mode = obj.mode & 0777
- }
+ decode (buf, off) {
+ if (!off)
+ off = 0
+
+ if (!buf || !(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ this.path = decString(buf, off, 100)
+ this.mode = decNumber(buf, off + 100, 8)
+ this.uid = decNumber(buf, off + 108, 8)
+ this.gid = decNumber(buf, off + 116, 8)
+ this.size = decNumber(buf, off + 124, 12)
+ this.mtime = decDate(buf, off + 136, 12)
+ this.cksum = decNumber(buf, off + 148, 12)
+
+ // old tar versions marked dirs as a file with a trailing /
+ this[TYPE] = decString(buf, off + 156, 1)
+ if (this[TYPE] === '')
+ this[TYPE] = '0'
+ if (this[TYPE] === '0' && this.path.substr(-1) === '/')
+ this[TYPE] = '5'
+
+ // tar implementations sometimes incorrectly put the stat(dir).size
+ // as the size in the tarball, even though Directory entries are
+ // not able to have any body at all. In the very rare chance that
+ // it actually DOES have a body, we weren't going to do anything with
+ // it anyway, and it'll just be a warning about an invalid header.
+ if (this[TYPE] === '5')
+ this.size = 0
+
+ this.linkpath = decString(buf, off + 157, 100)
+ if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
+ this.uname = decString(buf, off + 265, 32)
+ this.gname = decString(buf, off + 297, 32)
+ this.devmaj = decNumber(buf, off + 329, 8)
+ this.devmin = decNumber(buf, off + 337, 8)
+ if (buf[off + 475] !== 0) {
+ // definitely a prefix, definitely >130 chars.
+ const prefix = decString(buf, off + 345, 155)
+ this.path = prefix + '/' + this.path
+ } else {
+ const prefix = decString(buf, off + 345, 130)
+ if (prefix)
+ this.path = prefix + '/' + this.path
+ this.atime = decDate(buf, off + 476, 12)
+ this.ctime = decDate(buf, off + 488, 12)
+ }
+ }
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , off = fieldOffs[f]
- , end = fieldEnds[f]
- , ret
-
- switch (field) {
- case "cksum":
- // special, done below, after all the others
- break
-
- case "prefix":
- // special, this is an extension of the "path" field.
- // console.error("%% header encoding, skip prefix later")
- break
-
- case "type":
- // convert from long name to a single char.
- var type = obj.type || "0"
- if (type.length > 1) {
- type = tar.types[obj.type]
- if (!type) type = "0"
- }
- writeText(block, off, end, type)
- break
-
- case "path":
- // uses the "prefix" field if > 100 bytes, but <= 255
- var pathLen = Buffer.byteLength(obj.path)
- , pathFSize = fieldSize[fields.path]
- , prefFSize = fieldSize[fields.prefix]
-
- // paths between 100 and 255 should use the prefix field.
- // longer than 255
- if (pathLen > pathFSize &&
- pathLen <= pathFSize + prefFSize) {
- // need to find a slash somewhere in the middle so that
- // path and prefix both fit in their respective fields
- var searchStart = pathLen - 1 - pathFSize
- , searchEnd = prefFSize
- , found = false
- , pathBuf = new Buffer(obj.path)
-
- for ( var s = searchStart
- ; (s <= searchEnd)
- ; s ++ ) {
- if (pathBuf[s] === slash || pathBuf[s] === bslash) {
- found = s
- break
- }
- }
-
- if (found !== false) {
-            var prefix = pathBuf.slice(0, found).toString("utf8")
-              , path = pathBuf.slice(found + 1).toString("utf8")
-
- ret = writeText(block, off, end, path)
- off = fieldOffs[fields.prefix]
- end = fieldEnds[fields.prefix]
- // console.error("%% header writing prefix", off, end, prefix)
- ret = writeText(block, off, end, prefix) || ret
- break
- }
- }
-
- // paths less than 100 chars don't need a prefix
- // and paths longer than 255 need an extended header and will fail
- // on old implementations no matter what we do here.
- // Null out the prefix, and fallthrough to default.
- // console.error("%% header writing no prefix")
- var poff = fieldOffs[fields.prefix]
- , pend = fieldEnds[fields.prefix]
- writeText(block, poff, pend, "")
- // fallthrough
-
- // all other fields are numeric or text
- default:
- ret = numeric[field]
- ? writeNumeric(block, off, end, obj[field])
- : writeText(block, off, end, obj[field] || "")
- break
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
}
- obj.needExtended = obj.needExtended || ret
+ this.cksumValid = sum === this.cksum
+ if (this.cksum === null && sum === 8 * 0x20)
+ this.nullBlock = true
}
- var off = fieldOffs[fields.cksum]
- , end = fieldEnds[fields.cksum]
-
- writeNumeric(block, off, end, calcSum.call(this, block))
+ encode (buf, off) {
+ if (!buf) {
+ buf = this.block = Buffer.alloc(512)
+ off = 0
+ }
- return block
-}
+ if (!off)
+ off = 0
+
+ if (!(buf.length >= off + 512))
+ throw new Error('need 512 bytes for header')
+
+ const prefixSize = this.ctime || this.atime ? 130 : 155
+ const split = splitPrefix(this.path || '', prefixSize)
+ const path = split[0]
+ const prefix = split[1]
+ this.needPax = split[2]
+
+ this.needPax = encString(buf, off, 100, path) || this.needPax
+ this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
+ this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
+ this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
+ this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
+ this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
+ buf[off + 156] = this[TYPE].charCodeAt(0)
+ this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
+ buf.write('ustar\u000000', off + 257, 8)
+ this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
+ this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
+ this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
+ this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
+ this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
+ if (buf[off + 475] !== 0)
+ this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
+ else {
+ this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
+ this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
+ this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
+ }
-// if it's a negative number, or greater than will fit,
-// then use write256.
-var MAXNUM = { 12: 077777777777
- , 11: 07777777777
- , 8 : 07777777
- , 7 : 0777777 }
-function writeNumeric (block, off, end, num) {
- var writeLen = end - off
- , maxNum = MAXNUM[writeLen] || 0
-
- num = num || 0
- // console.error(" numeric", num)
-
- if (num instanceof Date ||
- Object.prototype.toString.call(num) === "[object Date]") {
- num = num.getTime() / 1000
- }
+ let sum = 8 * 0x20
+ for (let i = off; i < off + 148; i++) {
+ sum += buf[i]
+ }
+ for (let i = off + 156; i < off + 512; i++) {
+ sum += buf[i]
+ }
+ this.cksum = sum
+ encNumber(buf, off + 148, 8, this.cksum)
+ this.cksumValid = true
- if (num > maxNum || num < 0) {
- write256(block, off, end, num)
- // need an extended header if negative or too big.
- return true
+ return this.needPax
}
- // god, tar is so annoying
- // if the string is small enough, you should put a space
- // between the octal string and the \0, but if it doesn't
- // fit, then don't.
- var numStr = Math.floor(num).toString(8)
- if (num < MAXNUM[writeLen - 1]) numStr += " "
-
- // pad with "0" chars
- if (numStr.length < writeLen) {
- numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
+ set (data) {
+ for (let i in data) {
+ if (data[i] !== null && data[i] !== undefined)
+ this[i] = data[i]
+ }
}
- if (numStr.length !== writeLen - 1) {
- throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
- "expected: "+writeLen)
+ get type () {
+ return types.name.get(this[TYPE]) || this[TYPE]
}
- block.write(numStr, off, writeLen, "utf8")
- block[end - 1] = 0
-}
-function write256 (block, off, end, num) {
- var buf = block.slice(off, end)
- var positive = num >= 0
- buf[0] = positive ? 0x80 : 0xFF
-
- // get the number as a base-256 tuple
- if (!positive) num *= -1
- var tuple = []
- do {
- var n = num % 256
- tuple.push(n)
- num = (num - n) / 256
- } while (num)
-
- var bytes = tuple.length
-
- var fill = buf.length - bytes
- for (var i = 1; i < fill; i ++) {
- buf[i] = positive ? 0 : 0xFF
+ get typeKey () {
+ return this[TYPE]
}
- // tuple is a base256 number, with [0] as the *least* significant byte
- // if it's negative, then we need to flip all the bits once we hit the
- // first non-zero bit. The 2's-complement is (0x100 - n), and the 1's-
- // complement is (0xFF - n).
- var zero = true
- for (i = bytes; i > 0; i --) {
- var byte = tuple[bytes - i]
- if (positive) buf[fill + i] = byte
- else if (zero && byte === 0) buf[fill + i] = 0
- else if (zero) {
- zero = false
- buf[fill + i] = 0x100 - byte
- } else buf[fill + i] = 0xFF - byte
+ set type (type) {
+ if (types.code.has(type))
+ this[TYPE] = types.code.get(type)
+ else
+ this[TYPE] = type
}
}
-function writeText (block, off, end, str) {
- // strings are written as utf8, then padded with \0
- var strLen = Buffer.byteLength(str)
- , writeLen = Math.min(strLen, end - off)
- // non-ascii fields need extended headers
- // long fields get truncated
- , needExtended = strLen !== str.length || strLen > writeLen
-
- // write the string, and null-pad
- if (writeLen > 0) block.write(str, off, writeLen, "utf8")
- for (var i = off + writeLen; i < end; i ++) block[i] = 0
-
- return needExtended
-}
-
-function calcSum (block) {
- block = block || this.block
- assert(Buffer.isBuffer(block) && block.length === 512)
-
- if (!block) throw new Error("Need block to checksum")
-
- // now figure out what it would be if the cksum was " "
- var sum = 0
- , start = fieldOffs[fields.cksum]
- , end = fieldEnds[fields.cksum]
-
- for (var i = 0; i < fieldOffs[fields.cksum]; i ++) {
- sum += block[i]
+const splitPrefix = (p, prefixSize) => {
+ const pathSize = 100
+ let pp = p
+ let prefix = ''
+ let ret
+ const root = pathModule.parse(p).root || '.'
+
+ if (Buffer.byteLength(pp) < pathSize)
+ ret = [pp, prefix, false]
+ else {
+ // first set prefix to the dir, and path to the base
+ prefix = pathModule.dirname(pp)
+ pp = pathModule.basename(pp)
+
+ do {
+ // both fit!
+ if (Buffer.byteLength(pp) <= pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp, prefix, false]
+
+ // prefix fits in prefix, but path doesn't fit in path
+ else if (Buffer.byteLength(pp) > pathSize &&
+ Buffer.byteLength(prefix) <= prefixSize)
+ ret = [pp.substr(0, pathSize - 1), prefix, true]
+
+ else {
+ // make path take a bit from prefix
+ pp = pathModule.join(pathModule.basename(prefix), pp)
+ prefix = pathModule.dirname(prefix)
+ }
+ } while (prefix !== root && !ret)
+
+ // at this point, found no resolution, just truncate
+ if (!ret)
+ ret = [p.substr(0, pathSize - 1), '', true]
}
-
- for (var i = start; i < end; i ++) {
- sum += space
- }
-
- for (var i = end; i < 512; i ++) {
- sum += block[i]
- }
-
- return sum
+ return ret
}
+const decString = (buf, off, size) =>
+ buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
-function checkSum (block) {
- var sum = calcSum.call(this, block)
- block = block || this.block
+const decDate = (buf, off, size) =>
+ numToDate(decNumber(buf, off, size))
- var cksum = block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum])
- cksum = parseNumeric(cksum)
+const numToDate = num => num === null ? null : new Date(num * 1000)
- return cksum === sum
-}
+const decNumber = (buf, off, size) =>
+ buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
+ : decSmallNumber(buf, off, size)
-function decode (block) {
- block = block || this.block
- assert(Buffer.isBuffer(block) && block.length === 512)
-
- this.block = block
- this.cksumValid = this.checkSum()
-
- var prefix = null
-
- // slice off each field.
- for (var f = 0; fields[f] !== null; f ++) {
- var field = fields[f]
- , val = block.slice(fieldOffs[f], fieldEnds[f])
-
- switch (field) {
- case "ustar":
- // if not ustar, then everything after that is just padding.
- if (val.toString() !== "ustar\0") {
- this.ustar = false
- return
- } else {
- // console.error("ustar:", val, val.toString())
- this.ustar = val.toString()
- }
- break
-
- // prefix is special, since it might signal the xstar header
- case "prefix":
- var atime = parseNumeric(val.slice(131, 131 + 12))
- , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
- if ((val[130] === 0 || val[130] === space) &&
- typeof atime === "number" &&
- typeof ctime === "number" &&
- val[131 + 12] === space &&
- val[131 + 12 + 12] === space) {
- this.atime = atime
- this.ctime = ctime
- val = val.slice(0, 130)
- }
- prefix = val.toString("utf8").replace(/\0+$/, "")
- // console.error("%% header reading prefix", prefix)
- break
-
- // all other fields are null-padding text
- // or a number.
- default:
- if (numeric[field]) {
- this[field] = parseNumeric(val)
- } else {
- this[field] = val.toString("utf8").replace(/\0+$/, "")
- }
- break
- }
- }
+const nanNull = value => isNaN(value) ? null : value
- // if we got a prefix, then prepend it to the path.
- if (prefix) {
- this.path = prefix + "/" + this.path
- // console.error("%% header got a prefix", this.path)
- }
+const decSmallNumber = (buf, off, size) =>
+ nanNull(parseInt(
+ buf.slice(off, off + size)
+ .toString('utf8').replace(/\0.*$/, '').trim(), 8))
+
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+ 12: 0o77777777777,
+ 8 : 0o7777777
}
-function parse256 (buf) {
- // first byte MUST be either 80 or FF
- // 80 for positive, FF for 2's comp
- var positive
- if (buf[0] === 0x80) positive = true
- else if (buf[0] === 0xFF) positive = false
- else return null
-
- // build up a base-256 tuple from the least sig to the highest
- var zero = false
- , tuple = []
- for (var i = buf.length - 1; i > 0; i --) {
- var byte = buf[i]
- if (positive) tuple.push(byte)
- else if (zero && byte === 0) tuple.push(0)
- else if (zero) {
- zero = false
- tuple.push(0x100 - byte)
- } else tuple.push(0xFF - byte)
- }
+const encNumber = (buf, off, size, number) =>
+ number === null ? false :
+ number > MAXNUM[size] || number < 0
+ ? (large.encode(number, buf.slice(off, off + size)), true)
+ : (encSmallNumber(buf, off, size, number), false)
- for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
- sum += tuple[i] * Math.pow(256, i)
- }
+const encSmallNumber = (buf, off, size, number) =>
+ buf.write(octalString(number, size), off, size, 'ascii')
- return positive ? sum : -1 * sum
-}
+const octalString = (number, size) =>
+ padOctal(Math.floor(number).toString(8), size)
-function parseNumeric (f) {
- if (f[0] & 0x80) return parse256(f)
+const padOctal = (string, size) =>
+ (string.length === size - 1 ? string
+ : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
- var str = f.toString("utf8").split("\0")[0].trim()
- , res = parseInt(str, 8)
+const encDate = (buf, off, size, date) =>
+ date === null ? false :
+ encNumber(buf, off, size, date.getTime() / 1000)
- return isNaN(res) ? null : res
-}
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0')
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, string) =>
+ string === null ? false :
+ (buf.write(string + NULLS, off, size, 'utf8'),
+ string.length !== Buffer.byteLength(string) || string.length > size)
+module.exports = Header
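
Both decode() and encode() above compute the ustar checksum the same way: sum every byte of the 512-byte block, counting the eight cksum bytes (offsets 148-155) as ASCII spaces, which is exactly what the 8 * 0x20 seed plus the gap between the two loops expresses. Stated on its own:

'use strict'
// checksum of a 512-byte header block, per the ustar rule:
// sum all bytes, treating the 8 cksum bytes as 0x20 (space)
const headerCksum = block => {
  let sum = 8 * 0x20
  for (let i = 0; i < 512; i++) {
    if (i < 148 || i >= 156)
      sum += block[i]
  }
  return sum
}

// a zeroed block sums to just the eight spaces: 256
console.log(headerCksum(Buffer.alloc(512))) // 256
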
diff --git a/deps/npm/node_modules/tar/lib/high-level-opt.js b/deps/npm/node_modules/tar/lib/high-level-opt.js
new file mode 100644
index 0000000000..7333db915c
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/high-level-opt.js
@@ -0,0 +1,29 @@
+'use strict'
+
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+
+const argmap = new Map([
+ ['C', 'cwd'],
+ ['f', 'file'],
+ ['z', 'gzip'],
+ ['P', 'preservePaths'],
+ ['U', 'unlink'],
+ ['strip-components', 'strip'],
+ ['stripComponents', 'strip'],
+ ['keep-newer', 'newer'],
+ ['keepNewer', 'newer'],
+ ['keep-newer-files', 'newer'],
+ ['keepNewerFiles', 'newer'],
+ ['k', 'keep'],
+ ['keep-existing', 'keep'],
+ ['keepExisting', 'keep'],
+ ['m', 'noMtime'],
+ ['no-mtime', 'noMtime'],
+ ['p', 'preserveOwner'],
+ ['L', 'follow'],
+ ['h', 'follow']
+])
+
+const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
+ argmap.has(k) ? argmap.get(k) : k, opt[k]
+]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
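
So callers can hand any of the high-level functions tar(1)-flavored short options and get the verbose spelling back; keys with no mapping pass through untouched. For example (the require path assumes you are in the lib directory):

'use strict'
const hlo = require('./high-level-opt.js')

console.log(hlo({ C: '/tmp/dest', f: 'out.tgz', z: true, strip: 1 }))
// { cwd: '/tmp/dest', file: 'out.tgz', gzip: true, strip: 1 }
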
diff --git a/deps/npm/node_modules/tar/lib/large-numbers.js b/deps/npm/node_modules/tar/lib/large-numbers.js
new file mode 100644
index 0000000000..ff49992630
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/large-numbers.js
@@ -0,0 +1,92 @@
+'use strict'
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive. The trailing byte in the
+// section will always be 0x20, or in some implementations 0x00.
+// this module encodes and decodes these things.
+
+const encode = exports.encode = (num, buf) => {
+ buf[buf.length - 1] = 0x20
+ if (num < 0)
+ encodeNegative(num, buf)
+ else
+ encodePositive(num, buf)
+ return buf
+}
+
+const encodePositive = (num, buf) => {
+ buf[0] = 0x80
+ for (var i = buf.length - 2; i > 0; i--) {
+ if (num === 0)
+ buf[i] = 0
+ else {
+ buf[i] = num % 0x100
+ num = Math.floor(num / 0x100)
+ }
+ }
+}
+
+const encodeNegative = (num, buf) => {
+ buf[0] = 0xff
+ var flipped = false
+ num = num * -1
+ for (var i = buf.length - 2; i > 0; i--) {
+ var byte
+ if (num === 0)
+ byte = 0
+ else {
+ byte = num % 0x100
+ num = Math.floor(num / 0x100)
+ }
+ if (flipped)
+ buf[i] = onesComp(byte)
+ else if (byte === 0)
+ buf[i] = 0
+ else {
+ flipped = true
+ buf[i] = twosComp(byte)
+ }
+ }
+}
+
+const parse = exports.parse = (buf) => {
+ var pre = buf[0]
+ return pre === 0x80 ? pos(buf.slice(1, buf.length - 1))
+ : twos(buf.slice(1, buf.length - 1))
+}
+
+const twos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ var flipped = false
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ var f
+ if (flipped)
+ f = onesComp(byte)
+ else if (byte === 0)
+ f = byte
+ else {
+ flipped = true
+ f = twosComp(byte)
+ }
+ if (f !== 0)
+ sum += f * Math.pow(256, len - i - 1)
+ }
+ return sum * -1
+}
+
+const pos = (buf) => {
+ var len = buf.length
+ var sum = 0
+ for (var i = len - 1; i > -1; i--) {
+ var byte = buf[i]
+ if (byte !== 0)
+ sum += byte * Math.pow(256, len - i - 1)
+ }
+ return sum
+}
+
+const onesComp = byte => (0xff ^ byte) & 0xff
+
+const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
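
The encoding is base-256 with a marker byte: 0x80 then the magnitude for positive values, 0xff then two's-complement bytes for negatives. A round-trip check using the module's own exports (the require path assumes the lib directory):

'use strict'
const large = require('./large-numbers.js')

// 12-byte field, as used for the size and mtime header slots
const buf = Buffer.alloc(12)

large.encode(8589934592, buf)     // 8 GiB, too big for 11 octal digits? no,
                                  // but big enough to show the format
console.log(buf[0].toString(16))  // '80' -- positive marker
console.log(large.parse(buf))     // 8589934592

large.encode(-1, buf)
console.log(buf[0].toString(16))  // 'ff' -- negative marker
console.log(large.parse(buf))     // -1
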
diff --git a/deps/npm/node_modules/tar/lib/list.js b/deps/npm/node_modules/tar/lib/list.js
new file mode 100644
index 0000000000..1f5e70bd36
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/list.js
@@ -0,0 +1,132 @@
+'use strict'
+
+// XXX: This shares a lot in common with extract.js
+// maybe some DRY opportunity here?
+
+// tar -t
+const hlo = require('./high-level-opt.js')
+const Parser = require('./parse.js')
+const fs = require('fs')
+const path = require('path')
+
+const t = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ if (!opt.noResume)
+ onentryFunction(opt)
+
+ return opt.file && opt.sync ? listFileSync(opt)
+ : opt.file ? listFile(opt, cb)
+ : list(opt)
+}
+
+const onentryFunction = opt => {
+ const onentry = opt.onentry
+ opt.onentry = onentry ? e => {
+ onentry(e)
+ e.resume()
+ } : e => e.resume()
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const listFileSync = opt => {
+ const p = list(opt)
+ const file = opt.file
+ let threw = true
+ let fd
+ try {
+ const stat = fs.statSync(file)
+ const readSize = opt.maxReadSize || 16*1024*1024
+ if (stat.size < readSize) {
+ p.end(fs.readFileSync(file))
+ } else {
+ let pos = 0
+ const buf = Buffer.allocUnsafe(readSize)
+ fd = fs.openSync(file, 'r')
+ while (pos < stat.size) {
+ let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
+ pos += bytesRead
+ p.write(buf.slice(0, bytesRead))
+ }
+ p.end()
+ }
+ threw = false
+ } finally {
+ if (threw && fd)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const listFile = (opt, cb) => {
+ const parse = new Parser(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ parse.on('error', reject)
+ parse.on('end', resolve)
+
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else if (stat.size < readSize)
+ fs.readFile(file, (er, data) => {
+ if (er)
+ return reject(er)
+ parse.end(data)
+ })
+ else {
+ const stream = fs.createReadStream(file, {
+ highWaterMark: readSize
+ })
+ stream.on('error', reject)
+ stream.pipe(parse)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const list = opt => new Parser(opt)
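
Listing mirrors the extract dispatch; note that unless noResume is set, every entry is resumed after the onentry callback returns, so the parse never stalls waiting on entry bodies nobody reads. A sketch, again assuming the top-level export exposes this as tar.t:

'use strict'
const tar = require('tar')

tar.t({
  file: 'archive.tgz',
  onentry: entry => console.log(entry.type, entry.path)
}).then(() => console.log('listing done'))
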
diff --git a/deps/npm/node_modules/tar/lib/mkdir.js b/deps/npm/node_modules/tar/lib/mkdir.js
new file mode 100644
index 0000000000..2a8f461afe
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/mkdir.js
@@ -0,0 +1,207 @@
+'use strict'
+// wrapper around mkdirp for tar's needs.
+
+// TODO: This should probably be a class, not functionally
+// passing around state in a gazillion args.
+
+const mkdirp = require('mkdirp')
+const fs = require('fs')
+const path = require('path')
+const chownr = require('chownr')
+
+class SymlinkError extends Error {
+ constructor (symlink, path) {
+ super('Cannot extract through symbolic link')
+ this.path = path
+ this.symlink = symlink
+ }
+
+ get name () {
+    return 'SymlinkError'
+ }
+}
+
+class CwdError extends Error {
+ constructor (path, code) {
+ super(code + ': Cannot cd into \'' + path + '\'')
+ this.path = path
+ this.code = code
+ }
+
+ get name () {
+ return 'CwdError'
+ }
+}
+
+const mkdir = module.exports = (dir, opt, cb) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (er, created) => {
+ if (er)
+ cb(er)
+ else {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr(created, uid, gid, er => done(er))
+ else if (needChmod)
+ fs.chmod(dir, mode, cb)
+ else
+ cb()
+ }
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd)
+ return fs.lstat(dir, (er, st) => {
+ if (er || !st.isDirectory())
+ er = new CwdError(dir, er && er.code || 'ENOTDIR')
+ done(er)
+ })
+
+ if (preserve)
+ return mkdirp(dir, mode, done)
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
+}
+
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+ if (!parts.length)
+ return cb(null, created)
+ const p = parts.shift()
+ const part = base + '/' + p
+ if (cache.get(part))
+ return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+}
+
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
+ if (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return cb(new CwdError(cwd, er.code))
+
+ fs.lstat(part, (statEr, st) => {
+ if (statEr)
+ cb(statEr)
+ else if (st.isDirectory())
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ else if (unlink)
+ fs.unlink(part, er => {
+ if (er)
+ return cb(er)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+ })
+ else if (st.isSymbolicLink())
+ return cb(new SymlinkError(part, part + '/' + parts.join('/')))
+ else
+ cb(er)
+ })
+ } else {
+ created = created || part
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ }
+}
+
+const mkdirSync = module.exports.sync = (dir, opt) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (created) => {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr.sync(created, uid, gid)
+ if (needChmod)
+ fs.chmodSync(dir, mode)
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd) {
+ let ok = false
+ let code = 'ENOTDIR'
+ try {
+ ok = fs.lstatSync(dir).isDirectory()
+ } catch (er) {
+ code = er.code
+ } finally {
+ if (!ok)
+ throw new CwdError(dir, code)
+ }
+ done()
+ return
+ }
+
+ if (preserve)
+ return done(mkdirp.sync(dir, mode))
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ let created = null
+ for (let p = parts.shift(), part = cwd;
+ p && (part += '/' + p);
+ p = parts.shift()) {
+
+ if (cache.get(part))
+ continue
+
+ try {
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ } catch (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return new CwdError(cwd, er.code)
+
+ const st = fs.lstatSync(part)
+ if (st.isDirectory()) {
+ cache.set(part, true)
+ continue
+ } else if (unlink) {
+ fs.unlinkSync(part)
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ continue
+ } else if (st.isSymbolicLink())
+ return new SymlinkError(part, part + '/' + parts.join('/'))
+ }
+ }
+
+ return done(created)
+}
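
The needChmod test at the top of both variants is the subtle bit: fs.mkdir filters the requested mode through the process umask, so any overlap between mode and umask means the directory comes out with fewer bits than asked for, and an explicit chmod must follow. Worked through with an assumed umask of 0o022:

'use strict'
const umask = 0o022
const mode = 0o775 | 0o700             // still 0o775: owner bits already set
const needChmod = (mode & umask) !== 0 // true: the group-write bit overlaps

// what a bare fs.mkdir(dir, mode) would actually produce
console.log((mode & ~umask).toString(8)) // '755'
// so tar follows up with fs.chmod(dir, 0o775) to restore the mode
console.log(needChmod)                   // true
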
diff --git a/deps/npm/node_modules/tar/lib/pack.js b/deps/npm/node_modules/tar/lib/pack.js
index 5a3bb95a12..09b6ac590b 100644
--- a/deps/npm/node_modules/tar/lib/pack.js
+++ b/deps/npm/node_modules/tar/lib/pack.js
@@ -1,236 +1,399 @@
-// pipe in an fstream, and it'll make a tarball.
-// key-value pair argument is global extended header props.
-
-module.exports = Pack
-
-var EntryWriter = require("./entry-writer.js")
- , Stream = require("stream").Stream
- , path = require("path")
- , inherits = require("inherits")
- , GlobalHeaderWriter = require("./global-header-writer.js")
- , collect = require("fstream").collect
- , eof = new Buffer(512)
-
-for (var i = 0; i < 512; i ++) eof[i] = 0
-
-inherits(Pack, Stream)
-
-function Pack (props) {
- // console.error("-- p ctor")
- var me = this
- if (!(me instanceof Pack)) return new Pack(props)
-
- if (props) me._noProprietary = props.noProprietary
- else me._noProprietary = false
-
- me._global = props
-
- me.readable = true
- me.writable = true
- me._buffer = []
- // console.error("-- -- set current to null in ctor")
- me._currentEntry = null
- me._processing = false
-
- me._pipeRoot = null
- me.on("pipe", function (src) {
- if (src.root === me._pipeRoot) return
- me._pipeRoot = src
- src.on("end", function () {
- me._pipeRoot = null
- })
- me.add(src)
- })
+'use strict'
+
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and `end()` returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
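+//
+// For instance (a hypothetical sketch; `output` is any writable
+// stream, and `files/` and `dir/` are assumed to exist under cwd):
+//
+//   const output = fs.createWriteStream('out.tgz')
+//   new Pack({ gzip: true }).add('files').add('dir').end().pipe(output)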
+
+class PackJob {
+ constructor (path, absolute) {
+ this.path = path || './'
+ this.absolute = absolute
+ this.entry = null
+ this.stat = null
+ this.readdir = null
+ this.pending = false
+ this.ignore = false
+ this.piped = false
+ }
}
-Pack.prototype.addGlobal = function (props) {
- // console.error("-- p addGlobal")
- if (this._didGlobal) return
- this._didGlobal = true
-
- var me = this
- GlobalHeaderWriter(props)
- .on("data", function (c) {
- me.emit("data", c)
- })
- .end()
-}
+const MiniPass = require('minipass')
+const zlib = require('minizlib')
+const ReadEntry = require('./read-entry.js')
+const WriteEntry = require('./write-entry.js')
+const WriteEntrySync = WriteEntry.Sync
+const WriteEntryTar = WriteEntry.Tar
+const Yallist = require('yallist')
+const EOF = Buffer.alloc(1024)
+const ONSTAT = Symbol('onStat')
+const ENDED = Symbol('ended')
+const QUEUE = Symbol('queue')
+const CURRENT = Symbol('current')
+const PROCESS = Symbol('process')
+const PROCESSING = Symbol('processing')
+const PROCESSJOB = Symbol('processJob')
+const JOBS = Symbol('jobs')
+const JOBDONE = Symbol('jobDone')
+const ADDFSENTRY = Symbol('addFSEntry')
+const ADDTARENTRY = Symbol('addTarEntry')
+const STAT = Symbol('stat')
+const READDIR = Symbol('readdir')
+const ONREADDIR = Symbol('onreaddir')
+const PIPE = Symbol('pipe')
+const ENTRY = Symbol('entry')
+const ENTRYOPT = Symbol('entryOpt')
+const WRITEENTRYCLASS = Symbol('writeEntryClass')
+const WRITE = Symbol('write')
+const ONDRAIN = Symbol('ondrain')
+
+const fs = require('fs')
+const path = require('path')
+const warner = require('./warn-mixin.js')
+
+const Pack = warner(class Pack extends MiniPass {
+ constructor (opt) {
+ super(opt)
+ opt = opt || Object.create(null)
+ this.opt = opt
+ this.cwd = opt.cwd || process.cwd()
+ this.maxReadSize = opt.maxReadSize
+ this.preservePaths = !!opt.preservePaths
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.readdirCache = opt.readdirCache || new Map()
+ this[WRITEENTRYCLASS] = WriteEntry
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ this.zip = null
+ if (opt.gzip) {
+ if (typeof opt.gzip !== 'object')
+ opt.gzip = {}
+ this.zip = new zlib.Gzip(opt.gzip)
+ this.zip.on('data', chunk => super.write(chunk))
+ this.zip.on('end', _ => super.end())
+ this.zip.on('drain', _ => this[ONDRAIN]())
+ this.on('resume', _ => this.zip.resume())
+ } else
+ this.on('drain', this[ONDRAIN])
+
+ this.portable = !!opt.portable
+ this.noDirRecurse = !!opt.noDirRecurse
+ this.follow = !!opt.follow
+
+ this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
+
+ this[QUEUE] = new Yallist
+ this[JOBS] = 0
+ this.jobs = +opt.jobs || 4
+ this[PROCESSING] = false
+ this[ENDED] = false
+ }
-Pack.prototype.add = function (stream) {
- if (this._global && !this._didGlobal) this.addGlobal(this._global)
+ [WRITE] (chunk) {
+ return super.write(chunk)
+ }
- if (this._ended) return this.emit("error", new Error("add after end"))
+ add (path) {
+ this.write(path)
+ return this
+ }
- collect(stream)
- this._buffer.push(stream)
- this._process()
- this._needDrain = this._buffer.length > 0
- return !this._needDrain
-}
+ end (path) {
+ if (path)
+ this.write(path)
+ this[ENDED] = true
+ this[PROCESS]()
+ return this
+ }
-Pack.prototype.pause = function () {
- this._paused = true
- if (this._currentEntry) this._currentEntry.pause()
- this.emit("pause")
-}
+ write (path) {
+ if (this[ENDED])
+ throw new Error('write after end')
-Pack.prototype.resume = function () {
- this._paused = false
- if (this._currentEntry) this._currentEntry.resume()
- this.emit("resume")
- this._process()
-}
+ if (path instanceof ReadEntry)
+ this[ADDTARENTRY](path)
+ else
+ this[ADDFSENTRY](path)
+ return this.flowing
+ }
-Pack.prototype.end = function () {
- this._ended = true
- this._buffer.push(eof)
- this._process()
-}
+ [ADDTARENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p.path)
+ if (this.prefix)
+ p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
+
+ // in this case, we don't have to wait for the stat
+ if (!this.filter(p.path, p))
+ p.resume()
+ else {
+      const job = new PackJob(p.path, absolute)
+ job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
+ job.entry.on('end', _ => this[JOBDONE](job))
+ this[JOBS] += 1
+ this[QUEUE].push(job)
+ }
-Pack.prototype._process = function () {
- var me = this
- if (me._paused || me._processing) {
- return
+ this[PROCESS]()
}
- var entry = me._buffer.shift()
+ [ADDFSENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p)
+ if (this.prefix)
+ p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
- if (!entry) {
- if (me._needDrain) {
- me.emit("drain")
- }
- return
+ this[QUEUE].push(new PackJob(p, absolute))
+ this[PROCESS]()
}
- if (entry.ready === false) {
- // console.error("-- entry is not ready", entry)
- me._buffer.unshift(entry)
- entry.on("ready", function () {
- // console.error("-- -- ready!", entry)
- me._process()
+ [STAT] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ const stat = this.follow ? 'stat' : 'lstat'
+ fs[stat](job.absolute, (er, stat) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ this.emit('error', er)
+ else
+ this[ONSTAT](job, stat)
})
- return
}
- me._processing = true
+ [ONSTAT] (job, stat) {
+ this.statCache.set(job.absolute, stat)
+ job.stat = stat
+
+ // now we have the stat, we can filter it.
+ if (!this.filter(job.path, stat))
+ job.ignore = true
- if (entry === eof) {
- // need 2 ending null blocks.
- me.emit("data", eof)
- me.emit("data", eof)
- me.emit("end")
- me.emit("close")
- return
+ this[PROCESS]()
}
- // Change the path to be relative to the root dir that was
- // added to the tarball.
- //
- // XXX This should be more like how -C works, so you can
- // explicitly set a root dir, and also explicitly set a pathname
- // in the tarball to use. That way we can skip a lot of extra
- // work when resolving symlinks for bundled dependencies in npm.
+ [READDIR] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ fs.readdir(job.absolute, (er, entries) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ return this.emit('error', er)
+ this[ONREADDIR](job, entries)
+ })
+ }
- var root = path.dirname((entry.root || entry).path);
- if (me._global && me._global.fromBase && entry.root && entry.root.path) {
- // user set 'fromBase: true' indicating tar root should be directory itself
- root = entry.root.path;
+ [ONREADDIR] (job, entries) {
+ this.readdirCache.set(job.absolute, entries)
+ job.readdir = entries
+ this[PROCESS]()
}
- var wprops = {}
+ [PROCESS] () {
+ if (this[PROCESSING])
+ return
- Object.keys(entry.props || {}).forEach(function (k) {
- wprops[k] = entry.props[k]
- })
+ this[PROCESSING] = true
+ for (let w = this[QUEUE].head;
+ w !== null && this[JOBS] < this.jobs;
+ w = w.next) {
+ this[PROCESSJOB](w.value)
+ if (w.value.ignore) {
+ const p = w.next
+ this[QUEUE].removeNode(w)
+ w.next = p
+ }
+ }
- if (me._noProprietary) wprops.noProprietary = true
+ this[PROCESSING] = false
- wprops.path = path.relative(root, entry.path || '')
+ if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+ if (this.zip)
+ this.zip.end(EOF)
+ else {
+ super.write(EOF)
+ super.end()
+ }
+ }
+ }
- // actually not a matter of opinion or taste.
- if (process.platform === "win32") {
- wprops.path = wprops.path.replace(/\\/g, "/")
+ get [CURRENT] () {
+ return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
}
- if (!wprops.type)
- wprops.type = 'Directory'
+ [JOBDONE] (job) {
+ this[QUEUE].shift()
+ this[JOBS] -= 1
+ this[PROCESS]()
+ }
- switch (wprops.type) {
- // sockets not supported
- case "Socket":
+ [PROCESSJOB] (job) {
+ if (job.pending)
return
- case "Directory":
- wprops.path += "/"
- wprops.size = 0
- break
-
- case "Link":
- var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
- wprops.linkpath = path.relative(root, lp) || "."
- wprops.size = 0
- break
+ if (job.entry) {
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ return
+ }
- case "SymbolicLink":
- var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
- wprops.linkpath = path.relative(path.dirname(entry.path), lp) || "."
- wprops.size = 0
- break
- }
+ if (!job.stat) {
+ if (this.statCache.has(job.absolute))
+ this[ONSTAT](job, this.statCache.get(job.absolute))
+ else
+ this[STAT](job)
+ }
+ if (!job.stat)
+ return
- // console.error("-- new writer", wprops)
- // if (!wprops.type) {
- // // console.error("-- no type?", entry.constructor.name, entry)
- // }
+ // filtered out!
+ if (job.ignore)
+ return
- // console.error("-- -- set current to new writer", wprops.path)
- var writer = me._currentEntry = EntryWriter(wprops)
+ if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
+ if (this.readdirCache.has(job.absolute))
+ this[ONREADDIR](job, this.readdirCache.get(job.absolute))
+ else
+ this[READDIR](job)
+ if (!job.readdir)
+ return
+ }
- writer.parent = me
+ // we know it doesn't have an entry, because that got checked above
+ job.entry = this[ENTRY](job)
+ if (!job.entry) {
+ job.ignore = true
+ return
+ }
- // writer.on("end", function () {
- // // console.error("-- -- writer end", writer.path)
- // })
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ }
- writer.on("data", function (c) {
- me.emit("data", c)
- })
+ [ENTRYOPT] (job) {
+ return {
+ onwarn: (msg, data) => {
+ this.warn(msg, data)
+ },
+ noPax: this.noPax,
+ cwd: this.cwd,
+ absolute: job.absolute,
+ preservePaths: this.preservePaths,
+ maxReadSize: this.maxReadSize,
+ strict: this.strict,
+ portable: this.portable,
+ linkCache: this.linkCache,
+ statCache: this.statCache
+ }
+ }
- writer.on("header", function () {
- Buffer.prototype.toJSON = function () {
- return this.toString().split(/\0/).join(".")
+ [ENTRY] (job) {
+ this[JOBS] += 1
+ try {
+ return new this[WRITEENTRYCLASS](
+ job.path, this[ENTRYOPT](job)).on('end', _ => {
+ this[JOBDONE](job)
+ }).on('error', er => this.emit('error', er))
+ } catch (er) {
+ this.emit('error', er)
}
- // console.error("-- -- writer header %j", writer.props)
- if (writer.props.size === 0) nextEntry()
- })
- writer.on("close", nextEntry)
+ }
+
+ [ONDRAIN] () {
+ if (this[CURRENT] && this[CURRENT].entry)
+ this[CURRENT].entry.resume()
+ }
+
+ // like .pipe() but using super, because our write() is special
+ [PIPE] (job) {
+ job.piped = true
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ const source = job.entry
+ const zip = this.zip
+
+ if (zip)
+ source.on('data', chunk => {
+ if (!zip.write(chunk))
+ source.pause()
+ })
+ else
+ source.on('data', chunk => {
+ if (!super.write(chunk))
+ source.pause()
+ })
+ }
- var ended = false
- function nextEntry () {
- if (ended) return
- ended = true
+ pause () {
+ if (this.zip)
+ this.zip.pause()
+ return super.pause()
+ }
+})
- // console.error("-- -- writer close", writer.path)
- // console.error("-- -- set current to null", wprops.path)
- me._currentEntry = null
- me._processing = false
- me._process()
+class PackSync extends Pack {
+ constructor (opt) {
+ super(opt)
+ this[WRITEENTRYCLASS] = WriteEntrySync
}
- writer.on("error", function (er) {
- // console.error("-- -- writer error", writer.path)
- me.emit("error", er)
- })
+ // pause/resume are no-ops in sync streams.
+ pause () {}
+ resume () {}
+
+ [STAT] (job) {
+ const stat = this.follow ? 'statSync' : 'lstatSync'
+ this[ONSTAT](job, fs[stat](job.absolute))
+ }
- // if it's the root, then there's no need to add its entries,
- // or data, since they'll be added directly.
- if (entry === me._pipeRoot) {
- // console.error("-- is the root, don't auto-add")
- writer.add = null
+ [READDIR] (job, stat) {
+ this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
- entry.pipe(writer)
+ // gotta get it all in this tick
+ [PIPE] (job) {
+ const source = job.entry
+ const zip = this.zip
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ if (zip)
+ source.on('data', chunk => {
+ zip.write(chunk)
+ })
+ else
+ source.on('data', chunk => {
+ super[WRITE](chunk)
+ })
+ }
}
-Pack.prototype.destroy = function () {}
-Pack.prototype.write = function () {}
+Pack.Sync = PackSync
+
+module.exports = Pack
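
The Pack class above is a Minipass-based stream: paths written (or add()ed) to it become PackJob entries that are stat()ed, readdir()ed for recursion, and serialized into tar blocks, with the number of in-flight jobs bounded by opt.jobs. A minimal usage sketch, assuming the lib/ layout from this patch; the option names are ones the class reads, and the paths are placeholders:

    const fs = require('fs')
    const Pack = require('./lib/pack.js')

    const p = new Pack({ cwd: process.cwd(), portable: true })
    p.pipe(fs.createWriteStream('out.tar'))  // tar blocks flow out of the Pack
    p.add('README.md')                       // queues a PackJob for this path
    p.end('lib')                             // directories recurse via readdir
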
diff --git a/deps/npm/node_modules/tar/lib/parse.js b/deps/npm/node_modules/tar/lib/parse.js
index 600ad782f0..63c7ee9cef 100644
--- a/deps/npm/node_modules/tar/lib/parse.js
+++ b/deps/npm/node_modules/tar/lib/parse.js
@@ -1,275 +1,415 @@
+'use strict'
+
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in. We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet. This can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry. The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
+
+const warner = require('./warn-mixin.js')
+const path = require('path')
+const Header = require('./header.js')
+const EE = require('events')
+const Yallist = require('yallist')
+const maxMetaEntrySize = 1024 * 1024
+const Entry = require('./read-entry.js')
+const Pax = require('./pax.js')
+const zlib = require('minizlib')
+
+const gzipHeader = new Buffer([0x1f, 0x8b])
+const STATE = Symbol('state')
+const WRITEENTRY = Symbol('writeEntry')
+const READENTRY = Symbol('readEntry')
+const NEXTENTRY = Symbol('nextEntry')
+const PROCESSENTRY = Symbol('processEntry')
+const EX = Symbol('extendedHeader')
+const GEX = Symbol('globalExtendedHeader')
+const META = Symbol('meta')
+const EMITMETA = Symbol('emitMeta')
+const BUFFER = Symbol('buffer')
+const QUEUE = Symbol('queue')
+const ENDED = Symbol('ended')
+const EMITTEDEND = Symbol('emittedEnd')
+const EMIT = Symbol('emit')
+const UNZIP = Symbol('unzip')
+const CONSUMECHUNK = Symbol('consumeChunk')
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
+const CONSUMEBODY = Symbol('consumeBody')
+const CONSUMEMETA = Symbol('consumeMeta')
+const CONSUMEHEADER = Symbol('consumeHeader')
+const CONSUMING = Symbol('consuming')
+const BUFFERCONCAT = Symbol('bufferConcat')
+const MAYBEEND = Symbol('maybeEnd')
+const WRITING = Symbol('writing')
+const ABORTED = Symbol('aborted')
+const DONE = Symbol('onDone')
+
+const noop = _ => true
+
+module.exports = warner(class Parser extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
+
+ if (opt.ondone)
+ this.on(DONE, opt.ondone)
+ else
+ this.on(DONE, _ => {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ })
+
+ this.strict = !!opt.strict
+ this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
+ this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+
+ // have to set this so that streams are ok piping into it
+ this.writable = true
+ this.readable = false
+
+ this[QUEUE] = new Yallist()
+ this[BUFFER] = null
+ this[READENTRY] = null
+ this[WRITEENTRY] = null
+ this[STATE] = 'begin'
+ this[META] = ''
+ this[EX] = null
+ this[GEX] = null
+ this[ENDED] = false
+ this[UNZIP] = null
+ this[ABORTED] = false
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+ if (typeof opt.onentry === 'function')
+ this.on('entry', opt.onentry)
+ }
+
+ [CONSUMEHEADER] (chunk, position) {
+ const header = new Header(chunk, position)
-// A writable stream.
-// It emits "entry" events, which provide a readable stream that has
-// header info attached.
-
-module.exports = Parse.create = Parse
-
-var stream = require("stream")
- , Stream = stream.Stream
- , BlockStream = require("block-stream")
- , tar = require("../tar.js")
- , TarHeader = require("./header.js")
- , Entry = require("./entry.js")
- , BufferEntry = require("./buffer-entry.js")
- , ExtendedHeader = require("./extended-header.js")
- , assert = require("assert").ok
- , inherits = require("inherits")
- , fstream = require("fstream")
-
-// reading a tar is a lot like reading a directory
-// However, we're actually not going to run the ctor,
-// since it does a stat and various other stuff.
-// This inheritance gives us the pause/resume/pipe
-// behavior that is desired.
-inherits(Parse, fstream.Reader)
-
-function Parse () {
- var me = this
- if (!(me instanceof Parse)) return new Parse()
-
- // doesn't apply fstream.Reader ctor?
- // no, because we don't want to stat/etc, we just
- // want to get the entry/add logic from .pipe()
- Stream.apply(me)
-
- me.writable = true
- me.readable = true
- me._stream = new BlockStream(512)
- me.position = 0
- me._ended = false
-
- me._stream.on("error", function (e) {
- me.emit("error", e)
- })
-
- me._stream.on("data", function (c) {
- me._process(c)
- })
-
- me._stream.on("end", function () {
- me._streamEnd()
- })
-
- me._stream.on("drain", function () {
- me.emit("drain")
- })
-}
-
-// overridden in Extract class, since it needs to
-// wait for its DirWriter part to finish before
-// emitting "end"
-Parse.prototype._streamEnd = function () {
- var me = this
- if (!me._ended || me._entry) me.error("unexpected eof")
- me.emit("end")
-}
-
-// a tar reader is actually a filter, not just a readable stream.
-// So, you should pipe a tarball stream into it, and it needs these
-// write/end methods to do that.
-Parse.prototype.write = function (c) {
- if (this._ended) {
- // gnutar puts a LOT of nulls at the end.
- // you can keep writing these things forever.
- // Just ignore them.
- for (var i = 0, l = c.length; i > l; i ++) {
- if (c[i] !== 0) return this.error("write() after end()")
+ if (header.nullBlock)
+ this[EMIT]('nullBlock')
+ else if (!header.cksumValid)
+ this.warn('invalid entry', header)
+ else if (!header.path)
+ this.warn('invalid: path is required', header)
+ else {
+ const type = header.type
+ if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
+ this.warn('invalid: linkpath required', header)
+ else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
+ this.warn('invalid: linkpath forbidden', header)
+ else {
+ const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
+
+ if (entry.meta) {
+ if (entry.size > this.maxMetaEntrySize) {
+ entry.ignore = true
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = 'ignore'
+ } else if (entry.size > 0) {
+ this[META] = ''
+ entry.on('data', c => this[META] += c)
+ this[STATE] = 'meta'
+ }
+ } else {
+ this[EX] = null
+ entry.ignore = entry.ignore || !this.filter(entry.path, entry)
+ if (entry.ignore) {
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = entry.remain ? 'ignore' : 'begin'
+ } else {
+ if (entry.remain)
+ this[STATE] = 'body'
+ else {
+ this[STATE] = 'begin'
+ entry.end()
+ }
+
+ if (!this[READENTRY]) {
+ this[QUEUE].push(entry)
+ this[NEXTENTRY]()
+ } else
+ this[QUEUE].push(entry)
+ }
+ }
+ }
}
- return
}
- return this._stream.write(c)
-}
-
-Parse.prototype.end = function (c) {
- this._ended = true
- return this._stream.end(c)
-}
-
-// don't need to do anything, since we're just
-// proxying the data up from the _stream.
-// Just need to override the parent's "Not Implemented"
-// error-thrower.
-Parse.prototype._read = function () {}
-
-Parse.prototype._process = function (c) {
- assert(c && c.length === 512, "block size should be 512")
-
- // one of three cases.
- // 1. A new header
- // 2. A part of a file/extended header
- // 3. One of two or more EOF null blocks
-
- if (this._entry) {
- var entry = this._entry
- if(!entry._abort) entry.write(c)
+
+ [PROCESSENTRY] (entry) {
+ let go = true
+
+ if (!entry) {
+ this[READENTRY] = null
+ go = false
+ } else if (Array.isArray(entry))
+ this.emit.apply(this, entry)
else {
- entry._remaining -= c.length
- if(entry._remaining < 0) entry._remaining = 0
+ this[READENTRY] = entry
+ this.emit('entry', entry)
+ if (!entry.emittedEnd) {
+ entry.on('end', _ => this[NEXTENTRY]())
+ go = false
+ }
}
- if (entry._remaining === 0) {
+
+ return go
+ }
+
+ [NEXTENTRY] () {
+ do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
+
+ if (!this[QUEUE].length) {
+ // At this point, there's nothing in the queue, but we may have an
+ // entry which is being consumed (readEntry).
+ // If we don't, then we definitely can handle more data.
+ // If we do, and either it's flowing, or it has never had any data
+ // written to it, then it needs more.
+ // The only other possibility is that it has returned false from a
+ // write() call, so we wait for the next drain to continue.
+ const re = this[READENTRY]
+ const drainNow = !re || re.flowing || re.size === re.remain
+ if (drainNow) {
+ if (!this[WRITING])
+ this.emit('drain')
+ } else
+ re.once('drain', _ => this.emit('drain'))
+ }
+ }
+
+ [CONSUMEBODY] (chunk, position) {
+ // write up to but no more than writeEntry.blockRemain
+ const entry = this[WRITEENTRY]
+ const br = entry.blockRemain
+ const c = (br >= chunk.length && position === 0) ? chunk
+ : chunk.slice(position, position + br)
+
+ entry.write(c)
+
+ if (!entry.blockRemain) {
+ this[STATE] = 'begin'
+ this[WRITEENTRY] = null
entry.end()
- this._entry = null
- }
- } else {
- // either zeroes or a header
- var zero = true
- for (var i = 0; i < 512 && zero; i ++) {
- zero = c[i] === 0
}
- // eof is *at least* 2 blocks of nulls, and then the end of the
- // file. you can put blocks of nulls between entries anywhere,
- // so appending one tarball to another is technically valid.
- // ending without the eof null blocks is not allowed, however.
- if (zero) {
- if (this._eofStarted)
- this._ended = true
- this._eofStarted = true
- } else {
- this._eofStarted = false
- this._startEntry(c)
+ return c.length
+ }
+
+ [CONSUMEMETA] (chunk, position) {
+ const entry = this[WRITEENTRY]
+ const ret = this[CONSUMEBODY](chunk, position)
+
+ // if we finished, then the entry is reset
+ if (!this[WRITEENTRY])
+ this[EMITMETA](entry)
+
+ return ret
+ }
+
+ [EMIT] (ev, data, extra) {
+ if (!this[QUEUE].length && !this[READENTRY])
+ this.emit(ev, data, extra)
+ else
+ this[QUEUE].push([ev, data, extra])
+ }
+
+ [EMITMETA] (entry) {
+ this[EMIT]('meta', this[META])
+ switch (entry.type) {
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this[EX] = Pax.parse(this[META], this[EX], false)
+ break
+
+ case 'GlobalExtendedHeader':
+ this[GEX] = Pax.parse(this[META], this[GEX], true)
+ break
+
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].path = this[META].replace(/\0.*/, '')
+ break
+
+ case 'NextFileHasLongLinkpath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].linkpath = this[META].replace(/\0.*/, '')
+ break
+
+ /* istanbul ignore next */
+ default: throw new Error('unknown meta: ' + entry.type)
}
}
- this.position += 512
-}
-
-// take a header chunk, start the right kind of entry.
-Parse.prototype._startEntry = function (c) {
- var header = new TarHeader(c)
- , self = this
- , entry
- , ev
- , EntryType
- , onend
- , meta = false
-
- if (null === header.size || !header.cksumValid) {
- var e = new Error("invalid tar file")
- e.header = header
- e.tar_file_offset = this.position
- e.tar_block = this.position / 512
- return this.emit("error", e)
+ abort (msg, error) {
+ this[ABORTED] = true
+ this.warn(msg, error)
+ this.emit('abort')
}
- switch (tar.types[header.type]) {
- case "File":
- case "OldFile":
- case "Link":
- case "SymbolicLink":
- case "CharacterDevice":
- case "BlockDevice":
- case "Directory":
- case "FIFO":
- case "ContiguousFile":
- case "GNUDumpDir":
- // start a file.
- // pass in any extended headers
- // These ones consumers are typically most interested in.
- EntryType = Entry
- ev = "entry"
- break
-
- case "GlobalExtendedHeader":
- // extended headers that apply to the rest of the tarball
- EntryType = ExtendedHeader
- onend = function () {
- self._global = self._global || {}
- Object.keys(entry.fields).forEach(function (k) {
- self._global[k] = entry.fields[k]
- })
+ write (chunk) {
+ if (this[ABORTED])
+ return
+
+ // first write, might be gzipped
+ if (this[UNZIP] === null && chunk) {
+ if (this[BUFFER]) {
+ chunk = Buffer.concat([this[BUFFER], chunk])
+ this[BUFFER] = null
}
- ev = "globalExtendedHeader"
- meta = true
- break
-
- case "ExtendedHeader":
- case "OldExtendedHeader":
- // extended headers that apply to the next entry
- EntryType = ExtendedHeader
- onend = function () {
- self._extended = entry.fields
+ if (chunk.length < gzipHeader.length) {
+ this[BUFFER] = chunk
+ return true
}
- ev = "extendedHeader"
- meta = true
- break
-
- case "NextFileHasLongLinkpath":
- // set linkpath=<contents> in extended header
- EntryType = BufferEntry
- onend = function () {
- self._extended = self._extended || {}
- self._extended.linkpath = entry.body
+ for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
+ if (chunk[i] !== gzipHeader[i])
+ this[UNZIP] = false
}
- ev = "longLinkpath"
- meta = true
- break
-
- case "NextFileHasLongPath":
- case "OldGnuLongPath":
- // set path=<contents> in file-extended header
- EntryType = BufferEntry
- onend = function () {
- self._extended = self._extended || {}
- self._extended.path = entry.body
+ if (this[UNZIP] === null) {
+ const ended = this[ENDED]
+ this[ENDED] = false
+ this[UNZIP] = new zlib.Unzip()
+ this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+ this[UNZIP].on('error', er =>
+ this.abort('zlib error: ' + er.message, er))
+ this[UNZIP].on('end', _ => {
+ this[ENDED] = true
+ this[CONSUMECHUNK]()
+ })
+ return ended ? this[UNZIP].end(chunk) : this[UNZIP].write(chunk)
}
- ev = "longPath"
- meta = true
- break
-
- default:
- // all the rest we skip, but still set the _entry
- // member, so that we can skip over their data appropriately.
- // emit an event to say that this is an ignored entry type?
- EntryType = Entry
- ev = "ignoredEntry"
- break
- }
+ }
- var global, extended
- if (meta) {
- global = extended = null
- } else {
- var global = this._global
- var extended = this._extended
+ this[WRITING] = true
+ if (this[UNZIP])
+ this[UNZIP].write(chunk)
+ else
+ this[CONSUMECHUNK](chunk)
+ this[WRITING] = false
- // extendedHeader only applies to one entry, so once we start
- // an entry, it's over.
- this._extended = null
- }
- entry = new EntryType(header, extended, global)
- entry.meta = meta
-
- // only proxy data events of normal files.
- if (!meta) {
- entry.on("data", function (c) {
- me.emit("data", c)
- })
+ // return false if there's a queue, or if the current entry isn't flowing
+ const ret =
+ this[QUEUE].length ? false :
+ this[READENTRY] ? this[READENTRY].flowing :
+ true
+
+ // if we have no queue, then that means a clogged READENTRY
+ if (!ret && !this[QUEUE].length)
+ this[READENTRY].once('drain', _ => this.emit('drain'))
+
+ return ret
}
- if (onend) entry.on("end", onend)
+ [BUFFERCONCAT] (c) {
+ if (c && !this[ABORTED])
+ this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
+ }
- this._entry = entry
- var me = this
+ [MAYBEEND] () {
+ if (this[ENDED] && !this[EMITTEDEND] && !this[ABORTED]) {
+ this[EMITTEDEND] = true
+ const entry = this[WRITEENTRY]
+ if (entry && entry.blockRemain) {
+ const have = this[BUFFER] ? this[BUFFER].length : 0
+ this.warn('Truncated input (needed ' + entry.blockRemain +
+ ' more bytes, only ' + have + ' available)', entry)
+ if (this[BUFFER])
+ entry.write(this[BUFFER])
+ entry.end()
+ }
+ this[EMIT](DONE)
+ }
+ }
- entry.on("pause", function () {
- me.pause()
- })
+ [CONSUMECHUNK] (chunk) {
+ if (this[CONSUMING]) {
+ this[BUFFERCONCAT](chunk)
+ } else if (!chunk && !this[BUFFER]) {
+ this[MAYBEEND]()
+ } else {
+ this[CONSUMING] = true
+ if (this[BUFFER]) {
+ this[BUFFERCONCAT](chunk)
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ } else {
+ this[CONSUMECHUNKSUB](chunk)
+ }
- entry.on("resume", function () {
- me.resume()
- })
+ while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ }
+ this[CONSUMING] = false
+ }
- if (this.listeners("*").length) {
- this.emit("*", ev, entry)
+ if (!this[BUFFER] || this[ENDED])
+ this[MAYBEEND]()
}
- this.emit(ev, entry)
+ [CONSUMECHUNKSUB] (chunk) {
+ // we know that we are in CONSUMING mode, so anything written goes into
+ // the buffer. Advance the position and put any remainder in the buffer.
+ let position = 0
+ let length = chunk.length
+ while (position + 512 <= length && !this[ABORTED]) {
+ switch (this[STATE]) {
+ case 'begin':
+ this[CONSUMEHEADER](chunk, position)
+ position += 512
+ break
+
+ case 'ignore':
+ case 'body':
+ position += this[CONSUMEBODY](chunk, position)
+ break
+
+ case 'meta':
+ position += this[CONSUMEMETA](chunk, position)
+ break
+
+ /* istanbul ignore next */
+ default:
+ throw new Error('invalid state: ' + this[STATE])
+ }
+ }
- // Zero-byte entry. End immediately.
- if (entry.props.size === 0) {
- entry.end()
- this._entry = null
+ if (position < length) {
+ if (this[BUFFER])
+ this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
+ else
+ this[BUFFER] = chunk.slice(position)
+ }
+ }
+
+ end (chunk) {
+ if (!this[ABORTED]) {
+ if (this[UNZIP])
+ this[UNZIP].end(chunk)
+ else {
+ this[ENDED] = true
+ this.write(chunk)
+ }
+ }
}
-}
+})
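
The rewritten Parser is a bare EventEmitter that accepts write()/end() like a writable stream: input is sniffed for the two-byte gzip magic, consumed 512 bytes at a time through the begin/body/meta/ignore state machine, and each file surfaces as a ReadEntry on the 'entry' event. A listing sketch, assuming a local archive.tar:

    const fs = require('fs')
    const Parser = require('./lib/parse.js')

    const parser = new Parser({
      filter: (path, entry) => !/\.tmp$/.test(path), // these become ignoredEntry
      onentry: entry => {
        console.log(entry.type, entry.path, entry.size)
        entry.resume() // drain the body; we only want the listing
      }
    })

    fs.createReadStream('archive.tar').pipe(parser)
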
diff --git a/deps/npm/node_modules/tar/lib/pax.js b/deps/npm/node_modules/tar/lib/pax.js
new file mode 100644
index 0000000000..214a459f3b
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/pax.js
@@ -0,0 +1,145 @@
+'use strict'
+const Header = require('./header.js')
+const path = require('path')
+
+class Pax {
+ constructor (obj, global) {
+ this.atime = obj.atime || null
+ this.charset = obj.charset || null
+ this.comment = obj.comment || null
+ this.ctime = obj.ctime || null
+ this.gid = obj.gid || null
+ this.gname = obj.gname || null
+ this.linkpath = obj.linkpath || null
+ this.mtime = obj.mtime || null
+ this.path = obj.path || null
+ this.size = obj.size || null
+ this.uid = obj.uid || null
+ this.uname = obj.uname || null
+ this.dev = obj.dev || null
+ this.ino = obj.ino || null
+ this.nlink = obj.nlink || null
+ this.global = global || false
+ }
+
+ encode () {
+ const body = this.encodeBody()
+ if (body === '')
+ return null
+
+ const bodyLen = Buffer.byteLength(body)
+ // round up to 512 bytes
+ // add 512 for header
+ const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
+ const buf = Buffer.allocUnsafe(bufLen)
+
+ // 0-fill the header section, it might not hit every field
+ for (let i = 0; i < 512; i++) {
+ buf[i] = 0
+ }
+
+ new Header({
+      // XXX ideally, split the path: the header path should be
+      // dirname + '/PaxHeader/' + basename, kept under 99 chars,
+      // rather than dropping the dirname entirely as we do here
+ path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
+ mode: this.mode || 0o644,
+ uid: this.uid || null,
+ gid: this.gid || null,
+ size: bodyLen,
+ mtime: this.mtime || null,
+ type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+ linkpath: '',
+ uname: this.uname || '',
+ gname: this.gname || '',
+ devmaj: 0,
+ devmin: 0,
+ atime: this.atime || null,
+ ctime: this.ctime || null
+ }).encode(buf)
+
+ buf.write(body, 512, bodyLen, 'utf8')
+
+ // null pad after the body
+ for (let i = bodyLen + 512; i < buf.length; i++) {
+ buf[i] = 0
+ }
+
+ return buf
+ }
+
+ encodeBody () {
+ return (
+ this.encodeField('path') +
+ this.encodeField('ctime') +
+ this.encodeField('atime') +
+ this.encodeField('dev') +
+ this.encodeField('ino') +
+ this.encodeField('nlink') +
+ this.encodeField('charset') +
+ this.encodeField('comment') +
+ this.encodeField('gid') +
+ this.encodeField('gname') +
+ this.encodeField('linkpath') +
+ this.encodeField('mtime') +
+ this.encodeField('size') +
+ this.encodeField('uid') +
+ this.encodeField('uname')
+ )
+ }
+
+ encodeField (field) {
+ if (this[field] === null || this[field] === undefined)
+ return ''
+ const v = this[field] instanceof Date ? this[field].getTime() / 1000
+ : this[field]
+ const s = ' ' +
+ (field === 'dev' || field === 'ino' || field === 'nlink'
+ ? 'SCHILY.' : '') +
+ field + '=' + v + '\n'
+ const byteLen = Buffer.byteLength(s)
+    // the length prefix counts its own ascii base-10 digits as part
+    // of the record, so a 9-byte payload plus a 1-digit prefix would
+    // be 10 bytes, which needs two digits, making the record 11 bytes.
+ let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
+ if (byteLen + digits >= Math.pow(10, digits))
+ digits += 1
+ const len = digits + byteLen
+ return len + s
+ }
+}
+
+Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
+
+const merge = (a, b) =>
+ b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
+
+const parseKV = string =>
+ string
+ .replace(/\n$/, '')
+ .split('\n')
+ .reduce(parseKVLine, Object.create(null))
+
+const parseKVLine = (set, line) => {
+ const n = parseInt(line, 10)
+
+ // XXX Values with \n in them will fail this.
+ // Refactor to not be a naive line-by-line parse.
+ if (n !== Buffer.byteLength(line) + 1)
+ return set
+
+ line = line.substr((n + ' ').length)
+ const kv = line.split('=')
+ const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
+ if (!k)
+ return set
+
+ const v = kv.join('=')
+ set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
+ ? new Date(v * 1000)
+ : /^[0-9]+$/.test(v) ? +v
+ : v
+ return set
+}
+
+module.exports = Pax
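
Each field encodes as '<len> <key>=<value>\n', where len counts the entire record including its own decimal digits; that self-reference is what the digit-bump in encodeField handles. A standalone restatement of the arithmetic, for checking by hand:

    const recordLen = s => {                 // s is ' key=value\n'
      const byteLen = Buffer.byteLength(s)
      let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
      if (byteLen + digits >= Math.pow(10, digits))
        digits += 1                          // the prefix itself grew a digit
      return digits + byteLen
    }

    recordLen(' path=foo\n') // 10-byte payload -> '12 path=foo\n', 12 bytes
    recordLen(' k=abcde\n')  // 9-byte payload  -> '11 k=abcde\n', 11 bytes

In the second case a naive 9 + 1 would be 10, which needs two digits, so the record grows to 11 bytes, which is exactly the bump the comment in encodeField describes.
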
diff --git a/deps/npm/node_modules/tar/lib/read-entry.js b/deps/npm/node_modules/tar/lib/read-entry.js
new file mode 100644
index 0000000000..aa369c74f5
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/read-entry.js
@@ -0,0 +1,94 @@
+'use strict'
+const types = require('./types.js')
+const MiniPass = require('minipass')
+
+const SLURP = Symbol('slurp')
+module.exports = class ReadEntry extends MiniPass {
+ constructor (header, ex, gex) {
+ super()
+ this.extended = ex
+ this.globalExtended = gex
+ this.header = header
+ this.startBlockSize = 512 * Math.ceil(header.size / 512)
+ this.blockRemain = this.startBlockSize
+ this.remain = header.size
+ this.type = header.type
+ this.meta = false
+ this.ignore = false
+ switch (this.type) {
+ case 'File':
+ case 'OldFile':
+ case 'Link':
+ case 'SymbolicLink':
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'Directory':
+ case 'FIFO':
+ case 'ContiguousFile':
+ case 'GNUDumpDir':
+ break
+
+ case 'NextFileHasLongLinkpath':
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ case 'GlobalExtendedHeader':
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this.meta = true
+ break
+
+ // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+ // it may be worth doing the same, but with a warning.
+ default:
+ this.ignore = true
+ }
+
+ this.path = header.path
+ this.mode = header.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = header.uid
+ this.gid = header.gid
+ this.uname = header.uname
+ this.gname = header.gname
+ this.size = header.size
+ this.mtime = header.mtime
+ this.atime = header.atime
+ this.ctime = header.ctime
+ this.linkpath = header.linkpath
+ this.uname = header.uname
+ this.gname = header.gname
+
+ if (ex) this[SLURP](ex)
+ if (gex) this[SLURP](gex, true)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+
+ const r = this.remain
+ const br = this.blockRemain
+ this.remain = Math.max(0, r - writeLen)
+ this.blockRemain = Math.max(0, br - writeLen)
+ if (this.ignore)
+ return true
+
+ if (r >= writeLen)
+ return super.write(data)
+
+ // r < writeLen
+ return super.write(data.slice(0, r))
+ }
+
+ [SLURP] (ex, global) {
+ for (let k in ex) {
+ // we slurp in everything except for the path attribute in
+ // a global extended header, because that's weird.
+ if (ex[k] !== null && ex[k] !== undefined &&
+ !(global && k === 'path'))
+ this[k] = ex[k]
+ }
+ }
+}
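
ReadEntry is handed block-aligned data by the parser but re-emits only the real file bytes: remain tracks the true size while blockRemain tracks the 512-padded size, and anything past remain is sliced off. A small sketch, assuming Header accepts a plain field object the same way pax.js constructs one above:

    const Header = require('./lib/header.js')
    const ReadEntry = require('./lib/read-entry.js')

    const h = new Header({ path: 'hi.txt', type: 'File', size: 5 })
    const entry = new ReadEntry(h)
    entry.on('data', c => console.log('emitted %d bytes', c.length))

    entry.write(Buffer.from('hello')) // emits the 5 real bytes
    entry.write(Buffer.alloc(507))    // block padding, sliced to nothing
    console.log(entry.blockRemain)    // 0, so the parser returns to 'begin'
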
diff --git a/deps/npm/node_modules/tar/lib/replace.js b/deps/npm/node_modules/tar/lib/replace.js
new file mode 100644
index 0000000000..aac6b57fa8
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/replace.js
@@ -0,0 +1,211 @@
+'use strict'
+
+// tar -r
+const hlo = require('./high-level-opt.js')
+const Pack = require('./pack.js')
+const Parse = require('./parse.js')
+const fs = require('fs')
+const t = require('./list.js')
+const path = require('path')
+
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is, jump forward by the specified size (round up to 512)
+// and try again.
+// Write the new Pack stream starting there.
+
+const Header = require('./header.js')
+
+const r = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ return opt.sync ? replaceSync(opt, files)
+ : replace(opt, files, cb)
+}
+
+const replaceSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+
+ let threw = true
+ let fd
+ try {
+ try {
+ fd = fs.openSync(opt.file, 'r+')
+ } catch (er) {
+ if (er.code === 'ENOENT')
+ fd = fs.openSync(opt.file, 'w+')
+ else
+ throw er
+ }
+
+ const st = fs.fstatSync(fd)
+ const headBuf = Buffer.alloc(512)
+ let position
+
+ POSITION: for (position = 0; position < st.size; position += 512) {
+ for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+ bytes = fs.readSync(
+ fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ throw new Error('cannot append to compressed archives')
+
+ if (!bytes)
+ break POSITION
+ }
+
+ let h = new Header(headBuf)
+ if (!h.cksumValid)
+ break
+ let entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > st.size)
+ break
+      // the 512 for the header we just parsed is added by the loop's
+      // own increment; here we also jump ahead over all the body blocks
+ position += entryBlockSize
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ }
+
+ p.on('data', c => {
+ fs.writeSync(fd, c, 0, c.length, position)
+ position += c.length
+ })
+ p.on('end', _ => fs.closeSync(fd))
+
+ addFilesSync(p, files)
+ threw = false
+ } finally {
+ if (threw)
+ try { fs.closeSync(fd) } catch (er) {}
+ }
+}
+
+const replace = (opt, files, cb) => {
+ files = Array.from(files)
+ const p = new Pack(opt)
+
+ const getPos = (fd, size, cb_) => {
+ const cb = (er, pos) => {
+ if (er)
+ fs.close(fd, _ => cb_(er))
+ else
+ cb_(null, pos)
+ }
+
+ let position = 0
+ if (size === 0)
+ return cb(null, 0)
+
+ let bufPos = 0
+ const headBuf = Buffer.alloc(512)
+ const onread = (er, bytes) => {
+ if (er)
+ return cb(er)
+ bufPos += bytes
+ if (bufPos < 512 && bytes)
+ return fs.read(
+ fd, headBuf, bufPos, headBuf.length - bufPos,
+ position + bufPos, onread
+ )
+
+ if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
+ return cb(new Error('cannot append to compressed archives'))
+
+ // truncated header
+ if (bufPos < 512)
+ return cb(null, position)
+
+ const h = new Header(headBuf)
+ if (!h.cksumValid)
+ return cb(null, position)
+
+ const entryBlockSize = 512 * Math.ceil(h.size / 512)
+ if (position + entryBlockSize + 512 > size)
+ return cb(null, position)
+
+ position += entryBlockSize + 512
+ if (position >= size)
+ return cb(null, position)
+
+ if (opt.mtimeCache)
+ opt.mtimeCache.set(h.path, h.mtime)
+ bufPos = 0
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+ fs.read(fd, headBuf, 0, 512, position, onread)
+ }
+
+ const promise = new Promise((resolve, reject) => {
+ p.on('error', reject)
+ const onopen = (er, fd) => {
+ if (er) {
+ if (er.code === 'ENOENT')
+ return fs.open(opt.file, 'w+', onopen)
+ return reject(er)
+ }
+ fs.fstat(fd, (er, st) => {
+ if (er)
+ return reject(er)
+ getPos(fd, st.size, (er, position) => {
+ if (er)
+ return reject(er)
+ const stream = fs.createWriteStream(opt.file, {
+ fd: fd,
+ flags: 'r+',
+ start: position
+ })
+ p.pipe(stream)
+ stream.on('error', reject)
+ stream.on('close', resolve)
+ addFilesAsync(p, files)
+ })
+ })
+ }
+ fs.open(opt.file, 'r+', onopen)
+ })
+
+ return cb ? promise.then(cb, cb) : promise
+}
+
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
+
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
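
Both the sync and async paths implement the scan described in the comment at the top of this file: walk header by header until a block fails the checksum (the EOF null blocks, or junk), and start writing there. A simplified standalone version of the same scan for a tar held in memory; it omits the truncated-body guard the real code applies against the file size:

    const Header = require('./lib/header.js')

    const appendPosition = buf => {
      let position = 0
      while (position + 512 <= buf.length) {
        const h = new Header(buf, position)
        if (!h.cksumValid)  // null block or garbage: this is where we write
          break
        // skip the header block plus the body, rounded up to 512
        position += 512 + 512 * Math.ceil(h.size / 512)
      }
      return position
    }
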
diff --git a/deps/npm/node_modules/tar/lib/types.js b/deps/npm/node_modules/tar/lib/types.js
new file mode 100644
index 0000000000..df425652b5
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/types.js
@@ -0,0 +1,44 @@
+'use strict'
+// map types from key to human-friendly name
+exports.name = new Map([
+ ['0', 'File'],
+ // same as File
+ ['', 'OldFile'],
+ ['1', 'Link'],
+ ['2', 'SymbolicLink'],
+ // Devices and FIFOs aren't fully supported
+ // they are parsed, but skipped when unpacking
+ ['3', 'CharacterDevice'],
+ ['4', 'BlockDevice'],
+ ['5', 'Directory'],
+ ['6', 'FIFO'],
+ // same as File
+ ['7', 'ContiguousFile'],
+ // pax headers
+ ['g', 'GlobalExtendedHeader'],
+ ['x', 'ExtendedHeader'],
+ // vendor-specific stuff
+ // skip
+ ['A', 'SolarisACL'],
+ // like 5, but with data, which should be skipped
+ ['D', 'GNUDumpDir'],
+ // metadata only, skip
+ ['I', 'Inode'],
+ // data = link path of next file
+ ['K', 'NextFileHasLongLinkpath'],
+ // data = path of next file
+ ['L', 'NextFileHasLongPath'],
+ // skip
+ ['M', 'ContinuationFile'],
+ // like L
+ ['N', 'OldGnuLongPath'],
+ // skip
+ ['S', 'SparseFile'],
+ // skip
+ ['V', 'TapeVolumeHeader'],
+ // like x
+ ['X', 'OldExtendedHeader']
+])
+
+// map the other direction
+exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
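
The two maps give constant-time translation in both directions, and anything absent from them falls through to the default case in read-entry.js and is ignored:

    const types = require('./lib/types.js')

    types.name.get('5')          // 'Directory'
    types.code.get('Directory')  // '5'
    types.name.get('Z')          // undefined, so entry.ignore becomes true
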
diff --git a/deps/npm/node_modules/tar/lib/unpack.js b/deps/npm/node_modules/tar/lib/unpack.js
new file mode 100644
index 0000000000..e8c80c6fd5
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/unpack.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const assert = require('assert')
+const EE = require('events').EventEmitter
+const Parser = require('./parse.js')
+const fs = require('fs')
+const path = require('path')
+const mkdir = require('./mkdir.js')
+const mkdirSync = mkdir.sync
+const wc = require('./winchars.js')
+
+const ONENTRY = Symbol('onEntry')
+const CHECKFS = Symbol('checkFs')
+const MAKEFS = Symbol('makeFs')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const LINK = Symbol('link')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const UNSUPPORTED = Symbol('unsupported')
+const UNKNOWN = Symbol('unknown')
+const CHECKPATH = Symbol('checkPath')
+const MKDIR = Symbol('mkdir')
+const ONERROR = Symbol('onError')
+const PENDING = Symbol('pending')
+const PEND = Symbol('pend')
+const UNPEND = Symbol('unpend')
+const ENDED = Symbol('ended')
+const MAYBECLOSE = Symbol('maybeClose')
+const SKIP = Symbol('skip')
+const DOCHOWN = Symbol('doChown')
+const UID = Symbol('uid')
+const GID = Symbol('gid')
+
+class Unpack extends Parser {
+ constructor (opt) {
+ if (!opt)
+ opt = {}
+
+ opt.ondone = _ => {
+ this[ENDED] = true
+ this[MAYBECLOSE]()
+ }
+
+ super(opt)
+
+ this.writable = true
+ this.readable = false
+
+ this[PENDING] = 0
+ this[ENDED] = false
+
+ this.dirCache = opt.dirCache || new Map()
+
+ if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+ // need both or neither
+ if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
+ throw new TypeError('cannot set owner without number uid and gid')
+ if (opt.preserveOwner)
+ throw new TypeError(
+ 'cannot preserve owner in archive and also set owner explicitly')
+ this.uid = opt.uid
+ this.gid = opt.gid
+ this.setOwner = true
+ } else {
+ this.uid = null
+ this.gid = null
+ this.setOwner = false
+ }
+
+ // default true for root
+ if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
+ this.preserveOwner = process.getuid && process.getuid() === 0
+ else
+ this.preserveOwner = !!opt.preserveOwner
+
+ this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
+ process.getuid() : null
+ this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
+ process.getgid() : null
+
+    // turn <>?|: in filenames into 0xf000-higher encoded forms
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+
+ // do not unpack over files that are newer than what's in the archive
+ this.newer = !!opt.newer
+
+ // do not unpack over ANY files
+ this.keep = !!opt.keep
+
+ // do not set mtime/atime of extracted entries
+ this.noMtime = !!opt.noMtime
+
+ // allow .., absolute path entries, and unpacking through symlinks
+ // without this, warn and skip .., relativize absolutes, and error
+ // on symlinks in extraction path
+ this.preservePaths = !!opt.preservePaths
+
+ // unlink files and links before writing. This breaks existing hard
+ // links, and removes symlink directories rather than erroring
+ this.unlink = !!opt.unlink
+
+ this.cwd = path.resolve(opt.cwd || process.cwd())
+ this.strip = +opt.strip || 0
+ this.processUmask = process.umask()
+ this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
+ // default mode for dirs created as parents
+ this.dmode = opt.dmode || (0o0777 & (~this.umask))
+ this.fmode = opt.fmode || (0o0666 & (~this.umask))
+ this.on('entry', entry => this[ONENTRY](entry))
+ }
+
+ [MAYBECLOSE] () {
+ if (this[ENDED] && this[PENDING] === 0) {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ }
+ }
+
+ [CHECKPATH] (entry) {
+ if (this.strip) {
+ const parts = entry.path.split(/\/|\\/)
+ if (parts.length < this.strip)
+ return false
+ entry.path = parts.slice(this.strip).join('/')
+ }
+
+ if (!this.preservePaths) {
+ const p = entry.path
+ if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
+ this.warn('path contains \'..\'', p)
+ return false
+ }
+
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ if (path.win32.isAbsolute(p)) {
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ entry.path = p.substr(parsed.root.length)
+ }
+ }
+
+ // only encode : chars that aren't drive letter indicators
+ if (this.win32) {
+ const parsed = path.win32.parse(entry.path)
+ entry.path = parsed.root === '' ? wc.encode(entry.path)
+ : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
+ }
+
+ if (path.isAbsolute(entry.path))
+ entry.absolute = entry.path
+ else
+ entry.absolute = path.resolve(this.cwd, entry.path)
+
+ return true
+ }
+
+ [ONENTRY] (entry) {
+ if (!this[CHECKPATH](entry))
+ return entry.resume()
+
+ assert.equal(typeof entry.absolute, 'string')
+
+ switch (entry.type) {
+ case 'Directory':
+ case 'GNUDumpDir':
+ if (entry.mode)
+ entry.mode = entry.mode | 0o700
+
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ case 'Link':
+ case 'SymbolicLink':
+ return this[CHECKFS](entry)
+
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'FIFO':
+ return this[UNSUPPORTED](entry)
+ }
+ }
+
+ [ONERROR] (er, entry) {
+ // Cwd has to exist, or else nothing works. That's serious.
+ // Other errors are warnings, which raise the error in strict
+ // mode, but otherwise continue on.
+ if (er.name === 'CwdError')
+ this.emit('error', er)
+ else {
+ this.warn(er.message, er)
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ [MKDIR] (dir, mode, cb) {
+ mkdir(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ }, cb)
+ }
+
+ [DOCHOWN] (entry) {
+ // in preserve owner mode, chown if the entry doesn't match process
+ // in set owner mode, chown if setting doesn't match process
+ return this.preserveOwner &&
+ ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
+ typeof entry.gid === 'number' && entry.gid !== this.processGid )
+ ||
+ ( typeof this.uid === 'number' && this.uid !== this.processUid ||
+ typeof this.gid === 'number' && this.gid !== this.processGid )
+ }
+
+ [UID] (entry) {
+ return typeof this.uid === 'number' ? this.uid
+ : typeof entry.uid === 'number' ? entry.uid
+ : this.processUid
+ }
+
+ [GID] (entry) {
+ return typeof this.gid === 'number' ? this.gid
+ : typeof entry.gid === 'number' ? entry.gid
+ : this.processGid
+ }
+
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ const stream = fs.createWriteStream(entry.absolute, { mode: mode })
+ stream.on('error', er => this[ONERROR](er, entry))
+
+ const queue = []
+ const processQueue = _ => {
+ const action = queue.shift()
+ if (action)
+ action(processQueue)
+ else
+ this[UNPEND]()
+ }
+
+ stream.on('close', _ => {
+ if (entry.mtime && !this.noMtime)
+ queue.push(cb =>
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb))
+ if (this[DOCHOWN](entry))
+ queue.push(cb =>
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb))
+ processQueue()
+ })
+ entry.pipe(stream)
+ }
+
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ this[MKDIR](entry.absolute, mode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ const queue = []
+ const processQueue = _ => {
+ const action = queue.shift()
+ if (action)
+ action(processQueue)
+ else {
+ this[UNPEND]()
+ entry.resume()
+ }
+ }
+
+ if (entry.mtime && !this.noMtime)
+ queue.push(cb =>
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb))
+ if (this[DOCHOWN](entry))
+ queue.push(cb =>
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb))
+
+ processQueue()
+ })
+ }
+
+ [UNSUPPORTED] (entry) {
+ this.warn('unsupported entry type: ' + entry.type, entry)
+ entry.resume()
+ }
+
+ [SYMLINK] (entry) {
+ this[LINK](entry, entry.linkpath, 'symlink')
+ }
+
+ [HARDLINK] (entry) {
+ this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
+ }
+
+ [PEND] () {
+ this[PENDING]++
+ }
+
+ [UNPEND] () {
+ this[PENDING]--
+ this[MAYBECLOSE]()
+ }
+
+ [SKIP] (entry) {
+ this[UNPEND]()
+ entry.resume()
+ }
+
+ // check if a thing is there, and if so, try to clobber it
+ [CHECKFS] (entry) {
+ this[PEND]()
+ this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ fs.lstat(entry.absolute, (er, st) => {
+ if (st && (this.keep || this.newer && st.mtime > entry.mtime))
+ this[SKIP](entry)
+ else if (er || (entry.type === 'File' && !this.unlink && st.isFile()))
+ this[MAKEFS](null, entry)
+ else if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (!entry.mode || (st.mode & 0o7777) === entry.mode)
+ this[MAKEFS](null, entry)
+ else
+ fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
+ } else
+ fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
+ } else
+ fs.unlink(entry.absolute, er => this[MAKEFS](er, entry))
+ })
+ })
+ }
+
+ [MAKEFS] (er, entry) {
+ if (er)
+ return this[ONERROR](er, entry)
+
+ switch (entry.type) {
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ return this[FILE](entry)
+
+ case 'Link':
+ return this[HARDLINK](entry)
+
+ case 'SymbolicLink':
+ return this[SYMLINK](entry)
+
+ case 'Directory':
+ case 'GNUDumpDir':
+ return this[DIRECTORY](entry)
+ }
+ }
+
+ [LINK] (entry, linkpath, link) {
+ // XXX: get the type ('file' or 'dir') for windows
+ fs[link](linkpath, entry.absolute, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ this[UNPEND]()
+ entry.resume()
+ })
+ }
+}
+
+class UnpackSync extends Unpack {
+ constructor (opt) {
+ super(opt)
+ }
+
+ [CHECKFS] (entry) {
+ const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
+ if (er)
+ return this[ONERROR](er, entry)
+ try {
+ const st = fs.lstatSync(entry.absolute)
+ if (this.keep || this.newer && st.mtime > entry.mtime)
+ return this[SKIP](entry)
+ else if (entry.type === 'File' && !this.unlink && st.isFile())
+ return this[MAKEFS](null, entry)
+ else {
+ try {
+ if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (entry.mode && (st.mode & 0o7777) !== entry.mode)
+ fs.chmodSync(entry.absolute, entry.mode)
+ } else
+ fs.rmdirSync(entry.absolute)
+ } else
+ fs.unlinkSync(entry.absolute)
+ return this[MAKEFS](null, entry)
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+ } catch (er) {
+ return this[MAKEFS](null, entry)
+ }
+ }
+
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ try {
+ const fd = fs.openSync(entry.absolute, 'w', mode)
+ entry.on('data', buf => fs.writeSync(fd, buf, 0, buf.length, null))
+ entry.on('end', _ => {
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.futimesSync(fd, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.fchownSync(fd, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ try { fs.closeSync(fd) } catch (er) { this[ONERROR](er, entry) }
+ })
+ } catch (er) { this[ONERROR](er, entry) }
+ }
+
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ const er = this[MKDIR](entry.absolute, mode)
+ if (er)
+ return this[ONERROR](er, entry)
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
+ }
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ entry.resume()
+ }
+
+ [MKDIR] (dir, mode) {
+ try {
+ return mkdir.sync(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ })
+ } catch (er) {
+ return er
+ }
+ }
+
+ [LINK] (entry, linkpath, link) {
+ try {
+ fs[link + 'Sync'](linkpath, entry.absolute)
+ entry.resume()
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+}
+
+Unpack.Sync = UnpackSync
+module.exports = Unpack
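
Unpack extends the Parser, so a tarball can be piped straight into it; each entry is routed through [CHECKPATH] and [CHECKFS] and written to disk relative to cwd. A sketch, assuming ./out already exists (a missing cwd surfaces as a fatal CwdError from mkdir):

    const fs = require('fs')
    const Unpack = require('./lib/unpack.js')

    fs.createReadStream('archive.tar')
      .pipe(new Unpack({ cwd: 'out', strip: 1, keep: true }))
      .on('close', () => console.log('extracted'))

Here strip: 1 drops the leading path component from every entry, and keep: true skips anything that already exists on disk.
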
diff --git a/deps/npm/node_modules/tar/lib/update.js b/deps/npm/node_modules/tar/lib/update.js
new file mode 100644
index 0000000000..16c3e93ed5
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/update.js
@@ -0,0 +1,36 @@
+'use strict'
+
+// tar -u
+
+const hlo = require('./high-level-opt.js')
+const r = require('./replace.js')
+// just call tar.r with the filter and mtimeCache
+
+const u = module.exports = (opt_, files, cb) => {
+ const opt = hlo(opt_)
+
+ if (!opt.file)
+ throw new TypeError('file is required')
+
+ if (opt.gzip)
+ throw new TypeError('cannot append to compressed archives')
+
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
+
+ files = Array.from(files)
+
+ mtimeFilter(opt)
+ return r(opt, files, cb)
+}
+
+const mtimeFilter = opt => {
+ const filter = opt.filter
+
+ if (!opt.mtimeCache)
+ opt.mtimeCache = new Map()
+
+ opt.filter = filter ? (path, stat) =>
+ filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
+ : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
+}
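
So tar -u is just tar -r plus a predicate: replace() fills mtimeCache with the mtimes already recorded in the archive, and the wrapped filter drops any file whose on-disk mtime is not strictly newer. The composed filter is equivalent to this standalone sketch:

    const newerThanCache = (mtimeCache, userFilter) => (path, stat) =>
      (!userFilter || userFilter(path, stat)) &&
      !(mtimeCache.get(path) > stat.mtime)

    const cache = new Map([['a.txt', new Date('2017-01-01')]])
    const keep = newerThanCache(cache, null)
    keep('a.txt', { mtime: new Date('2016-06-01') }) // false: archive copy newer
    keep('a.txt', { mtime: new Date('2017-06-01') }) // true: file changed since
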
diff --git a/deps/npm/node_modules/tar/lib/warn-mixin.js b/deps/npm/node_modules/tar/lib/warn-mixin.js
new file mode 100644
index 0000000000..94a4b9b990
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/warn-mixin.js
@@ -0,0 +1,14 @@
+'use strict'
+module.exports = Base => class extends Base {
+ warn (msg, data) {
+ if (!this.strict)
+ this.emit('warn', msg, data)
+ else if (data instanceof Error)
+ this.emit('error', data)
+ else {
+ const er = new Error(msg)
+ er.data = data
+ this.emit('error', er)
+ }
+ }
+}
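
The mixin gives every stream class in the package a single warn() with two behaviors: in loose mode warnings are advisory 'warn' events; in strict mode they are upgraded to 'error' events, wrapping non-Error data in an Error that carries a .data property. For example:

    const warner = require('./lib/warn-mixin.js')
    const EE = require('events')

    class Demo extends warner(EE) {}

    const loose = new Demo()
    loose.on('warn', (msg, data) => console.log('warn:', msg, data))
    loose.warn('odd block', { offset: 512 })  // 'warn' event, keeps going

    const strict = new Demo()
    strict.strict = true
    strict.on('error', er => console.log('error:', er.message, er.data))
    strict.warn('odd block', { offset: 512 }) // Error with .data attached
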
diff --git a/deps/npm/node_modules/tar/lib/winchars.js b/deps/npm/node_modules/tar/lib/winchars.js
new file mode 100644
index 0000000000..cf6ea06061
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/winchars.js
@@ -0,0 +1,23 @@
+'use strict'
+
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+
+const raw = [
+ '|',
+ '<',
+ '>',
+ '?',
+ ':'
+]
+
+const win = raw.map(char =>
+ String.fromCharCode(0xf000 + char.charCodeAt(0)))
+
+const toWin = new Map(raw.map((char, i) => [char, win[i]]))
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
+
+module.exports = {
+ encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
+ decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s)
+}
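
The round trip is lossless because each reserved character maps to the unique private-use code point 0xf000 above it:

    const wc = require('./lib/winchars.js')

    const encoded = wc.encode('logs<2017>.txt')
    console.log(encoded === 'logs\uf03c2017\uf03e.txt') // true
    console.log(wc.decode(encoded))                     // 'logs<2017>.txt'
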
diff --git a/deps/npm/node_modules/tar/lib/write-entry.js b/deps/npm/node_modules/tar/lib/write-entry.js
new file mode 100644
index 0000000000..f562bf138a
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/write-entry.js
@@ -0,0 +1,395 @@
+'use strict'
+const MiniPass = require('minipass')
+const Pax = require('./pax.js')
+const Header = require('./header.js')
+const ReadEntry = require('./read-entry.js')
+const fs = require('fs')
+const path = require('path')
+
+const types = require('./types.js')
+const maxReadSize = 16 * 1024 * 1024
+const PROCESS = Symbol('process')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const HEADER = Symbol('header')
+const READ = Symbol('read')
+const LSTAT = Symbol('lstat')
+const ONLSTAT = Symbol('onlstat')
+const ONREAD = Symbol('onread')
+const ONREADLINK = Symbol('onreadlink')
+const OPENFILE = Symbol('openfile')
+const ONOPENFILE = Symbol('onopenfile')
+const CLOSE = Symbol('close')
+const warner = require('./warn-mixin.js')
+const winchars = require('./winchars.js')
+
+const WriteEntry = warner(class WriteEntry extends MiniPass {
+ constructor (p, opt) {
+ opt = opt || {}
+ super(opt)
+ if (typeof p !== 'string')
+ throw new TypeError('path is required')
+ this.path = p
+ // suppress atime, ctime, uid, gid, uname, gname
+ this.portable = !!opt.portable
+ // until node has builtin pwnam functions, this'll have to do
+ this.myuid = process.getuid && process.getuid()
+ this.myuser = process.env.USER || ''
+ this.maxReadSize = opt.maxReadSize || maxReadSize
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.preservePaths = !!opt.preservePaths
+ this.cwd = opt.cwd || process.cwd()
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ if (!this.preservePaths && path.win32.isAbsolute(p)) {
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ this.path = p.substr(parsed.root.length)
+ }
+
+ this.win32 = !!opt.win32 || process.platform === 'win32'
+ if (this.win32) {
+ this.path = winchars.decode(this.path.replace(/\\/g, '/'))
+ p = p.replace(/\\/g, '/')
+ }
+
+ this.absolute = opt.absolute || path.resolve(this.cwd, p)
+
+ if (this.path === '')
+ this.path = './'
+
+ if (this.statCache.has(this.absolute))
+ this[ONLSTAT](this.statCache.get(this.absolute))
+ else
+ this[LSTAT]()
+ }
+
+ [LSTAT] () {
+ fs.lstat(this.absolute, (er, stat) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONLSTAT](stat)
+ })
+ }
+
+ [ONLSTAT] (stat) {
+ this.statCache.set(this.absolute, stat)
+ this.stat = stat
+ if (!stat.isFile())
+ stat.size = 0
+ this.type = getType(stat)
+ this.emit('stat', stat)
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ switch (this.type) {
+ case 'File': return this[FILE]()
+ case 'Directory': return this[DIRECTORY]()
+ case 'SymbolicLink': return this[SYMLINK]()
+ // unsupported types are ignored.
+ default: return this.end()
+ }
+ }
+
+ [HEADER] () {
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.stat.mode & 0o7777,
+ uid: this.portable ? null : this.stat.uid,
+ gid: this.portable ? null : this.stat.gid,
+ size: this.stat.size,
+ mtime: this.type === 'Directory' && this.portable
+ ? null : this.stat.mtime,
+ type: this.type,
+ uname: this.portable ? null :
+ this.stat.uid === this.myuid ? this.myuser : '',
+ atime: this.portable ? null : this.stat.atime,
+ ctime: this.portable ? null : this.stat.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ this.write(new Pax({
+ atime: this.portable ? null : this.header.atime,
+ ctime: this.portable ? null : this.header.ctime,
+ gid: this.portable ? null : this.header.gid,
+ mtime: this.header.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.header.size,
+ uid: this.portable ? null : this.header.uid,
+ uname: this.portable ? null : this.header.uname,
+ dev: this.portable ? null : this.stat.dev,
+ ino: this.portable ? null : this.stat.ino,
+ nlink: this.portable ? null : this.stat.nlink
+ }).encode())
+ this.write(this.header.block)
+ }
+
+ [DIRECTORY] () {
+ if (this.path.substr(-1) !== '/')
+ this.path += '/'
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [SYMLINK] () {
+ fs.readlink(this.absolute, (er, linkpath) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONREADLINK](linkpath)
+ })
+ }
+
+ [ONREADLINK] (linkpath) {
+ this.linkpath = linkpath
+ this[HEADER]()
+ this.end()
+ }
+
+ [HARDLINK] (linkpath) {
+ this.type = 'Link'
+ this.linkpath = path.relative(this.cwd, linkpath)
+ this.stat.size = 0
+ this[HEADER]()
+ this.end()
+ }
+
+ [FILE] () {
+ if (this.stat.nlink > 1) {
+ const linkKey = this.stat.dev + ':' + this.stat.ino
+ if (this.linkCache.has(linkKey)) {
+ const linkpath = this.linkCache.get(linkKey)
+ if (linkpath.indexOf(this.cwd) === 0)
+ return this[HARDLINK](linkpath)
+ }
+ this.linkCache.set(linkKey, this.absolute)
+ }
+
+ this[HEADER]()
+ if (this.stat.size === 0)
+ return this.end()
+
+ this[OPENFILE]()
+ }
+
+ [OPENFILE] () {
+ fs.open(this.absolute, 'r', (er, fd) => {
+ if (er)
+ return this.emit('error', er)
+ this[ONOPENFILE](fd)
+ })
+ }
+
+ [ONOPENFILE] (fd) {
+ const blockLen = 512 * Math.ceil(this.stat.size / 512)
+ const bufLen = Math.min(blockLen, this.maxReadSize)
+ const buf = Buffer.allocUnsafe(bufLen)
+ this[READ](fd, buf, 0, buf.length, 0, this.stat.size, blockLen)
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+ if (er)
+ return this[CLOSE](fd, _ => this.emit('error', er))
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ })
+ }
+
+ [CLOSE] (fd, cb) {
+ fs.close(fd, cb)
+ }
+
+ [ONREAD] (fd, buf, offset, length, pos, remain, blockRemain, bytesRead) {
+ if (bytesRead <= 0 && remain > 0) {
+ const er = new Error('unexpected EOF')
+ er.path = this.absolute
+ er.syscall = 'read'
+ er.code = 'EOF'
+ this.emit('error', er)
+ }
+
+ // null out the rest of the buffer, if we could fit the block padding
+ if (bytesRead === remain) {
+ for (let i = bytesRead; i < length && bytesRead < blockRemain; i++) {
+ buf[i + offset] = 0
+ bytesRead ++
+ remain ++
+ }
+ }
+
+ const writeBuf = offset === 0 && bytesRead === buf.length ?
+ buf : buf.slice(offset, offset + bytesRead)
+ remain -= bytesRead
+ blockRemain -= bytesRead
+ pos += bytesRead
+ offset += bytesRead
+
+ this.write(writeBuf)
+
+ if (!remain) {
+ if (blockRemain)
+ this.write(Buffer.alloc(blockRemain))
+ this.end()
+ this[CLOSE](fd, _ => _)
+ return
+ }
+
+ if (offset >= length) {
+ buf = Buffer.allocUnsafe(length)
+ offset = 0
+ }
+ length = buf.length - offset
+ this[READ](fd, buf, offset, length, pos, remain, blockRemain)
+ }
+})
+
+class WriteEntrySync extends WriteEntry {
+ constructor (path, opt) {
+ super(path, opt)
+ }
+
+ [LSTAT] () {
+ this[ONLSTAT](fs.lstatSync(this.absolute))
+ }
+
+ [SYMLINK] () {
+ this[ONREADLINK](fs.readlinkSync(this.absolute))
+ }
+
+ [OPENFILE] () {
+ this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
+ }
+
+ [READ] (fd, buf, offset, length, pos, remain, blockRemain) {
+ let threw = true
+ try {
+ const bytesRead = fs.readSync(fd, buf, offset, length, pos)
+ this[ONREAD](fd, buf, offset, length, pos, remain, blockRemain, bytesRead)
+ threw = false
+ } finally {
+ if (threw)
+ try { this[CLOSE](fd) } catch (er) {}
+ }
+ }
+
+ [CLOSE] (fd) {
+ fs.closeSync(fd)
+ }
+}
+
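+// repack an entry read from an existing archive into a new archive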
+const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
+ constructor (readEntry, opt) {
+ opt = opt || {}
+ super(opt)
+ this.readEntry = readEntry
+ this.type = readEntry.type
+ this.path = readEntry.path
+ this.mode = readEntry.mode
+ if (this.mode)
+ this.mode = this.mode & 0o7777
+ this.uid = readEntry.uid
+ this.gid = readEntry.gid
+ this.uname = readEntry.uname
+ this.gname = readEntry.gname
+ this.size = readEntry.size
+ this.mtime = readEntry.mtime
+ this.atime = readEntry.atime
+ this.ctime = readEntry.ctime
+ this.linkpath = readEntry.linkpath
+
+ this.preservePaths = !!opt.preservePaths
+ this.portable = !!opt.portable
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ if (path.isAbsolute(this.path) && !this.preservePaths) {
+ const parsed = path.parse(this.path)
+ this.warn(
+ 'stripping ' + parsed.root + ' from absolute path',
+ this.path
+ )
+ this.path = this.path.substr(parsed.root.length)
+ }
+
+ this.remain = readEntry.size
+ this.blockRemain = readEntry.startBlockSize
+
+ this.header = new Header({
+ path: this.path,
+ linkpath: this.linkpath,
+ // only the permissions and setuid/setgid/sticky bitflags
+ // not the higher-order bits that specify file type
+ mode: this.mode,
+ uid: this.portable ? null : this.uid,
+ gid: this.portable ? null : this.gid,
+ size: this.size,
+ mtime: this.mtime,
+ type: this.type,
+ uname: this.portable ? null : this.uname,
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime
+ })
+
+ if (this.header.encode() && !this.noPax)
+ super.write(new Pax({
+ atime: this.portable ? null : this.atime,
+ ctime: this.portable ? null : this.ctime,
+ gid: this.portable ? null : this.gid,
+ mtime: this.mtime,
+ path: this.path,
+ linkpath: this.linkpath,
+ size: this.size,
+ uid: this.portable ? null : this.uid,
+ uname: this.portable ? null : this.uname,
+ dev: this.portable ? null : this.readEntry.dev,
+ ino: this.portable ? null : this.readEntry.ino,
+ nlink: this.portable ? null : this.readEntry.nlink
+ }).encode())
+
+ super.write(this.header.block)
+ readEntry.pipe(this)
+ }
+
+ write (data) {
+ const writeLen = data.length
+ if (writeLen > this.blockRemain)
+ throw new Error('writing more to entry than is appropriate')
+ this.blockRemain -= writeLen
+ return super.write(data)
+ }
+
+ end () {
+ if (this.blockRemain)
+ this.write(Buffer.alloc(this.blockRemain))
+ return super.end()
+ }
+})
+
+WriteEntry.Sync = WriteEntrySync
+WriteEntry.Tar = WriteEntryTar
+
+const getType = stat =>
+ stat.isFile() ? 'File'
+ : stat.isDirectory() ? 'Directory'
+ : stat.isSymbolicLink() ? 'SymbolicLink'
+ : 'Unsupported'
+
+module.exports = WriteEntry
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/LICENCE b/deps/npm/node_modules/tar/node_modules/block-stream/LICENCE
deleted file mode 100644
index 74489e2e26..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/LICENCE
+++ /dev/null
@@ -1,25 +0,0 @@
-Copyright (c) Isaac Z. Schlueter
-All rights reserved.
-
-The BSD License
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
-``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
-BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/README.md b/deps/npm/node_modules/tar/node_modules/block-stream/README.md
deleted file mode 100644
index c16e9c4688..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# block-stream
-
-A stream of blocks.
-
-Write data into it, and it'll output data in buffer blocks the size you
-specify, padding with zeroes if necessary.
-
-```javascript
-var block = new BlockStream(512)
-fs.createReadStream("some-file").pipe(block)
-block.pipe(fs.createWriteStream("block-file"))
-```
-
-When `.end()` or `.flush()` is called, it'll pad the block with zeroes.
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/block-stream.js b/deps/npm/node_modules/tar/node_modules/block-stream/block-stream.js
deleted file mode 100644
index 008de035c2..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/block-stream.js
+++ /dev/null
@@ -1,209 +0,0 @@
-// write data to it, and it'll emit data in 512 byte blocks.
-// if you .end() or .flush(), it'll emit whatever it's got,
-// padded with nulls to 512 bytes.
-
-module.exports = BlockStream
-
-var Stream = require("stream").Stream
- , inherits = require("inherits")
- , assert = require("assert").ok
- , debug = process.env.DEBUG ? console.error : function () {}
-
-function BlockStream (size, opt) {
- this.writable = this.readable = true
- this._opt = opt || {}
- this._chunkSize = size || 512
- this._offset = 0
- this._buffer = []
- this._bufferLength = 0
- if (this._opt.nopad) this._zeroes = false
- else {
- this._zeroes = new Buffer(this._chunkSize)
- for (var i = 0; i < this._chunkSize; i ++) {
- this._zeroes[i] = 0
- }
- }
-}
-
-inherits(BlockStream, Stream)
-
-BlockStream.prototype.write = function (c) {
- // debug(" BS write", c)
- if (this._ended) throw new Error("BlockStream: write after end")
- if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "")
- if (c.length) {
- this._buffer.push(c)
- this._bufferLength += c.length
- }
- // debug("pushed onto buffer", this._bufferLength)
- if (this._bufferLength >= this._chunkSize) {
- if (this._paused) {
- // debug(" BS paused, return false, need drain")
- this._needDrain = true
- return false
- }
- this._emitChunk()
- }
- return true
-}
-
-BlockStream.prototype.pause = function () {
- // debug(" BS pausing")
- this._paused = true
-}
-
-BlockStream.prototype.resume = function () {
- // debug(" BS resume")
- this._paused = false
- return this._emitChunk()
-}
-
-BlockStream.prototype.end = function (chunk) {
- // debug("end", chunk)
- if (typeof chunk === "function") cb = chunk, chunk = null
- if (chunk) this.write(chunk)
- this._ended = true
- this.flush()
-}
-
-BlockStream.prototype.flush = function () {
- this._emitChunk(true)
-}
-
-BlockStream.prototype._emitChunk = function (flush) {
- // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused)
-
- // emit a <chunkSize> chunk
- if (flush && this._zeroes) {
- // debug(" BS push zeroes", this._bufferLength)
- // push a chunk of zeroes
- var padBytes = (this._bufferLength % this._chunkSize)
- if (padBytes !== 0) padBytes = this._chunkSize - padBytes
- if (padBytes > 0) {
- // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes))
- this._buffer.push(this._zeroes.slice(0, padBytes))
- this._bufferLength += padBytes
- // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength)
- }
- }
-
- if (this._emitting || this._paused) return
- this._emitting = true
-
- // debug(" BS entering loops")
- var bufferIndex = 0
- while (this._bufferLength >= this._chunkSize &&
- (flush || !this._paused)) {
- // debug(" BS data emission loop", this._bufferLength)
-
- var out
- , outOffset = 0
- , outHas = this._chunkSize
-
- while (outHas > 0 && (flush || !this._paused) ) {
- // debug(" BS data inner emit loop", this._bufferLength)
- var cur = this._buffer[bufferIndex]
- , curHas = cur.length - this._offset
- // debug("cur=", cur)
- // debug("curHas=%j", curHas)
- // If it's not big enough to fill the whole thing, then we'll need
- // to copy multiple buffers into one. However, if it is big enough,
- // then just slice out the part we want, to save unnecessary copying.
- // Also, need to copy if we've already done some copying, since buffers
- // can't be joined like cons strings.
- if (out || curHas < outHas) {
- out = out || new Buffer(this._chunkSize)
- cur.copy(out, outOffset,
- this._offset, this._offset + Math.min(curHas, outHas))
- } else if (cur.length === outHas && this._offset === 0) {
- // shortcut -- cur is exactly long enough, and no offset.
- out = cur
- } else {
- // slice out the piece of cur that we need.
- out = cur.slice(this._offset, this._offset + outHas)
- }
-
- if (curHas > outHas) {
- // means that the current buffer couldn't be completely output
- // update this._offset to reflect how much WAS written
- this._offset += outHas
- outHas = 0
- } else {
- // output the entire current chunk.
- // toss it away
- outHas -= curHas
- outOffset += curHas
- bufferIndex ++
- this._offset = 0
- }
- }
-
- this._bufferLength -= this._chunkSize
- assert(out.length === this._chunkSize)
- // debug("emitting data", out)
- // debug(" BS emitting, paused=%j", this._paused, this._bufferLength)
- this.emit("data", out)
- out = null
- }
- // debug(" BS out of loops", this._bufferLength)
-
- // whatever is left, it's not enough to fill up a block, or we're paused
- this._buffer = this._buffer.slice(bufferIndex)
- if (this._paused) {
- // debug(" BS paused, leaving", this._bufferLength)
- this._needsDrain = true
- this._emitting = false
- return
- }
-
- // if flushing, and not using null-padding, then need to emit the last
- // chunk(s) sitting in the queue. We know that it's not enough to
- // fill up a whole block, because otherwise it would have been emitted
- // above, but there may be some offset.
- var l = this._buffer.length
- if (flush && !this._zeroes && l) {
- if (l === 1) {
- if (this._offset) {
- this.emit("data", this._buffer[0].slice(this._offset))
- } else {
- this.emit("data", this._buffer[0])
- }
- } else {
- var outHas = this._bufferLength
- , out = new Buffer(outHas)
- , outOffset = 0
- for (var i = 0; i < l; i ++) {
- var cur = this._buffer[i]
- , curHas = cur.length - this._offset
- cur.copy(out, outOffset, this._offset)
- this._offset = 0
- outOffset += curHas
- this._bufferLength -= curHas
- }
- this.emit("data", out)
- }
- // truncate
- this._buffer.length = 0
- this._bufferLength = 0
- this._offset = 0
- }
-
- // now either drained or ended
- // debug("either draining, or ended", this._bufferLength, this._ended)
- // means that we've flushed out all that we can so far.
- if (this._needDrain) {
- // debug("emitting drain", this._bufferLength)
- this._needDrain = false
- this.emit("drain")
- }
-
- if ((this._bufferLength === 0) && this._ended && !this._endEmitted) {
- // debug("emitting end", this._bufferLength)
- this._endEmitted = true
- this.emit("end")
- }
-
- this._emitting = false
-
- // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize)
-}
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/package.json b/deps/npm/node_modules/tar/node_modules/block-stream/package.json
deleted file mode 100644
index 045ca8d243..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
- "_from": "block-stream@*",
- "_id": "block-stream@0.0.9",
- "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=",
- "_location": "/tar/block-stream",
- "_phantomChildren": {},
- "_requested": {
- "type": "range",
- "registry": true,
- "raw": "block-stream@*",
- "name": "block-stream",
- "escapedName": "block-stream",
- "rawSpec": "*",
- "saveSpec": null,
- "fetchSpec": "*"
- },
- "_requiredBy": [
- "/tar"
- ],
- "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz",
- "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a",
- "_shrinkwrap": null,
- "_spec": "block-stream@*",
- "_where": "/Users/zkat/Documents/code/npm/node_modules/tar",
- "author": {
- "name": "Isaac Z. Schlueter",
- "email": "i@izs.me",
- "url": "http://blog.izs.me/"
- },
- "bin": null,
- "bugs": {
- "url": "https://github.com/isaacs/block-stream/issues"
- },
- "bundleDependencies": false,
- "dependencies": {
- "inherits": "~2.0.0"
- },
- "deprecated": false,
- "description": "a stream of blocks",
- "devDependencies": {
- "tap": "^5.7.1"
- },
- "engines": {
- "node": "0.4 || >=0.5.8"
- },
- "files": [
- "block-stream.js"
- ],
- "homepage": "https://github.com/isaacs/block-stream#readme",
- "license": "ISC",
- "main": "block-stream.js",
- "name": "block-stream",
- "optionalDependencies": {},
- "peerDependencies": {},
- "repository": {
- "type": "git",
- "url": "git://github.com/isaacs/block-stream.git"
- },
- "scripts": {
- "test": "tap test/*.js --cov"
- },
- "version": "0.0.9"
-}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/.npmignore b/deps/npm/node_modules/tar/node_modules/minipass/.npmignore
new file mode 100644
index 0000000000..183822a7ff
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/.npmignore
@@ -0,0 +1,4 @@
+.*.swp
+node_modules
+.nyc_output/
+coverage/
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/.travis.yml b/deps/npm/node_modules/tar/node_modules/minipass/.travis.yml
new file mode 100644
index 0000000000..59410a36d0
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/.travis.yml
@@ -0,0 +1,7 @@
+language: node_js
+sudo: false
+node_js:
+ - 7
+cache:
+ directories:
+ - /Users/isaacs/.npm
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/README.md b/deps/npm/node_modules/tar/node_modules/minipass/README.md
new file mode 100644
index 0000000000..97eadeaeae
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/README.md
@@ -0,0 +1,46 @@
+# minipass
+
+A _very_ minimal implementation of a [PassThrough
+stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough).
+
+[It's very
+fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
+for objects, strings, and buffers.
+
+Supports pipe()ing (including multi-pipe() and backpressure
+transmission), buffering data until either a `data` event handler or
+`pipe()` is added (so you don't lose the first chunk), and most other
+cases where PassThrough is a good idea.
+
+There is a `read()` method, but it's much more efficient to consume
+data from this stream via `'data'` events or by calling `pipe()` into
+some other stream. Calling `read()` requires the buffer to be
+flattened in some cases, which requires copying memory. Also,
+`read()` returns a string when an `encoding` option is specified, and
+a Buffer otherwise.
+
+There is also no `unpipe()` method. Once you start piping, there is
+no stopping it!
+
+If you set `objectMode: true` in the options, then whatever is written
+will be emitted. Otherwise, it'll do a minimal amount of Buffer
+copying to ensure proper Streams semantics when `read(n)` is called.
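+
+For example (a minimal sketch):
+
+```js
+const MiniPass = require('minipass')
+const mp = new MiniPass({ objectMode: true })
+mp.on('data', obj => console.log(obj.a)) // prints: 1
+mp.write({ a: 1 }) // the object is emitted as-is, not copied or encoded
+mp.end()
+```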
+
+This is not a `through` or `through2` stream. It doesn't transform
+the data; it just passes it right through. If you want to transform
+the data, extend the class, and override the `write()` method. Once
+you're done transforming the data however you want, call
+`super.write()` with the transform output.
+
+For an example of a stream that extends MiniPass to provide transform
+capabilities, check out [minizlib](http://npm.im/minizlib).
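+
+As an illustration of the pattern (this `Upper` class is a sketch, not
+part of the package):
+
+```js
+const MiniPass = require('minipass')
+
+class Upper extends MiniPass {
+ write (chunk, encoding, cb) {
+ // transform the data, then hand the result to the base class
+ return super.write(chunk.toString().toUpperCase(), 'utf8', cb)
+ }
+}
+
+const up = new Upper({ encoding: 'utf8' })
+up.on('data', c => console.log(c)) // 'HELLO'
+up.end('hello')
+```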
+
+## USAGE
+
+```js
+const MiniPass = require('minipass')
+const mp = new MiniPass(options) // optional: { encoding }
+mp.write('foo')
+mp.pipe(someOtherStream)
+mp.end('bar')
+```
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/b.js b/deps/npm/node_modules/tar/node_modules/minipass/b.js
new file mode 100644
index 0000000000..324c4190a0
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/b.js
@@ -0,0 +1,12 @@
+const MiniPass = require('./')
+const butterfly = '🦋'
+var mp = new MiniPass({ encoding: 'utf8' })
+mp.on('data', chunk => {
+ console.error('data %s', chunk)
+})
+var butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+mp.write(butterbuf.slice(0, 1))
+mp.write(butterbuf.slice(1, 2))
+mp.write(butterbuf.slice(2, 3))
+mp.write(butterbuf.slice(3, 4))
+mp.end()
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js
new file mode 100644
index 0000000000..8e7841a87c
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-minipass.js
@@ -0,0 +1,11 @@
+'use strict'
+const MiniPass = require('../..')
+
+module.exports = class ExtendMiniPass extends MiniPass {
+ constructor (opts) {
+ super(opts)
+ }
+ write (data, encoding) {
+ return super.write(data, encoding)
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js
new file mode 100644
index 0000000000..6a021084c4
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-through2.js
@@ -0,0 +1,12 @@
+'use strict'
+const through2 = require('through2')
+module.exports = function (opt) {
+ return opt.objectMode
+ ? through2.obj(func)
+ : through2(func)
+
+ function func (data, enc, done) {
+ this.push(data, enc)
+ done()
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js
new file mode 100644
index 0000000000..1d2d24026d
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/extend-transform.js
@@ -0,0 +1,11 @@
+'use strict'
+const stream = require('stream')
+module.exports = class ExtendTransform extends stream.Transform {
+ constructor (opts) {
+ super(opts)
+ }
+ _transform (data, enc, done) {
+ this.push(data, enc)
+ done()
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js
new file mode 100644
index 0000000000..13f6e916b9
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/nullsink.js
@@ -0,0 +1,12 @@
+'use strict'
+const EE = require('events').EventEmitter
+
+module.exports = class NullSink extends EE {
+ write (data, encoding, next) {
+ if (next) next()
+ return true
+ }
+ end () {
+ this.emit('finish')
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js
new file mode 100644
index 0000000000..bd1593299a
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/numbers.js
@@ -0,0 +1,41 @@
+'use strict'
+const stream = require('stream')
+
+const numbers = new Array(1000).join(',').split(',').map((v, k) => k)
+let acc = ''
+const strings = numbers.map(n => acc += n)
+const bufs = strings.map(s => new Buffer(s))
+const objs = strings.map(s => ({ str: s }))
+
+module.exports = class Numbers {
+ constructor (opt) {
+ this.objectMode = opt.objectMode
+ this.encoding = opt.encoding
+ this.ii = 0
+ this.done = false
+ }
+ pipe (dest) {
+ this.dest = dest
+ this.go()
+ return dest
+ }
+
+ go () {
+ let flowing = true
+ while (flowing) {
+ if (this.ii >= 1000) {
+ this.dest.end()
+ this.done = true
+ flowing = false
+ } else {
+ flowing = this.dest.write(
+ (this.objectMode ? objs
+ : this.encoding ? strings
+ : bufs)[this.ii++])
+ }
+ }
+
+ if (!this.done)
+ this.dest.once('drain', _ => this.go())
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js
new file mode 100644
index 0000000000..8d8fe3d80d
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/lib/timer.js
@@ -0,0 +1,15 @@
+'use strict'
+module.exports = _ => {
+ const start = process.hrtime()
+ return _ => {
+ const end = process.hrtime(start)
+ const ms = Math.round(end[0]*1e6 + end[1]/1e3)/1e3
+ if (!process.env.isTTY)
+ console.log(ms)
+ else {
+ const s = Math.round(end[0]*10 + end[1]/1e8)/10
+ const ss = s <= 1 ? '' : ' (' + s + 's)'
+ console.log('%d%s', ms, ss)
+ }
+ }
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/bench/test.js b/deps/npm/node_modules/tar/node_modules/minipass/bench/test.js
new file mode 100644
index 0000000000..29c9fd07d6
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/bench/test.js
@@ -0,0 +1,160 @@
+'use strict'
+
+const iterations = +process.env.BENCH_TEST_ITERATION || 100
+const testCount = +process.env.BENCH_TEST_COUNT || 20
+
+const tests = [
+ 'baseline',
+ 'minipass',
+ 'extend-minipass',
+ 'through2',
+ 'extend-through2',
+ 'passthrough',
+ 'extend-transform'
+]
+
+const manyOpts = [ 'many', 'single' ]
+const typeOpts = [ 'buffer', 'string', 'object' ]
+
+const main = () => {
+ const spawn = require('child_process').spawn
+ const node = process.execPath
+
+ const results = {}
+
+ const testSet = []
+ tests.forEach(t =>
+ manyOpts.forEach(many =>
+ typeOpts.forEach(type =>
+ new Array(testCount).join(',').split(',').forEach(() =>
+ t !== 'baseline' || (many === 'single' && type === 'object')
+ ? testSet.push([t, many, type]) : null))))
+
+ let didFirst = false
+ const mainRunTest = t => {
+ if (!t)
+ return afterMain(results)
+
+ const k = t.join('\t')
+ if (!results[k]) {
+ results[k] = []
+ if (!didFirst)
+ didFirst = true
+ else
+ process.stderr.write('\n')
+
+ process.stderr.write(k + ' #')
+ } else {
+ process.stderr.write('#')
+ }
+
+ const c = spawn(node, [__filename].concat(t), {
+ stdio: [ 'ignore', 'pipe', 2 ]
+ })
+ let out = ''
+ c.stdout.on('data', c => out += c)
+ c.on('close', (code, signal) => {
+ if (code || signal)
+ throw new Error('failed: ' + code + ' ' + signal)
+ results[k].push(+out)
+ mainRunTest(testSet.shift())
+ })
+ }
+
+ mainRunTest(testSet.shift())
+}
+
+const afterMain = results => {
+ console.log('test\tmany\ttype\tops/s\tmean\tmedian\tmax\tmin' +
+ '\tstdev\trange\traw')
+ // get the mean, median, stddev, and range of each test
+ Object.keys(results).forEach(test => {
+ const k = results[test].sort((a, b) => a - b)
+ const min = k[0]
+ const max = k[ k.length - 1 ]
+ const range = max - min
+ const sum = k.reduce((a,b) => a + b, 0)
+ const mean = sum / k.length
+ const ops = iterations / mean * 1000
+ const devs = k.map(n => n - mean).map(n => n * n)
+ const avgdev = devs.reduce((a,b) => a + b, 0) / k.length
+ const stdev = Math.pow(avgdev, 0.5)
+ const median = k.length % 2 ? k[Math.floor(k.length / 2)] :
+ (k[k.length/2] + k[k.length/2+1])/2
+ console.log(
+ '%s\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%s', test, round(ops),
+ round(mean), round(median),
+ max, min, round(stdev), round(range),
+ k.join('\t'))
+ })
+}
+
+const round = num => Math.round(num * 1000)/1000
+
+const test = (testname, many, type) => {
+ const timer = require('./lib/timer.js')
+ const Class = getClass(testname)
+
+ const done = timer()
+ runTest(Class, many, type, iterations, done)
+}
+
+// don't blow up the stack! loop unless deferred
+const runTest = (Class, many, type, iterations, done) => {
+ const Nullsink = require('./lib/nullsink.js')
+ const Numbers = require('./lib/numbers.js')
+ const opt = {}
+ if (type === 'string')
+ opt.encoding = 'utf8'
+ else if (type === 'object')
+ opt.objectMode = true
+
+ while (iterations--) {
+ let finished = false
+ let inloop = true
+ const after = iterations === 0 ? done
+ : () => {
+ if (iterations === 0)
+ done()
+ else if (inloop)
+ finished = true
+ else
+ runTest(Class, many, type, iterations, done)
+ }
+
+ const out = new Nullsink().on('finish', after)
+ let sink = Class ? new Class(opt) : out
+
+ if (many && Class)
+ sink = sink
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+ .pipe(new Class(opt))
+
+ if (sink !== out)
+ sink.pipe(out)
+
+ new Numbers(opt).pipe(sink)
+
+ // keep tight-looping if the stream is done already
+ if (!finished) {
+ inloop = false
+ break
+ }
+ }
+}
+
+const getClass = testname =>
+ testname === 'through2' ? require('through2').obj
+ : testname === 'extend-through2' ? require('./lib/extend-through2.js')
+ : testname === 'minipass' ? require('../')
+ : testname === 'extend-minipass' ? require('./lib/extend-minipass.js')
+ : testname === 'passthrough' ? require('stream').PassThrough
+ : testname === 'extend-transform' ? require('./lib/extend-transform.js')
+ : null
+
+if (!process.argv[2])
+ main()
+else
+ test(process.argv[2], process.argv[3] === 'many', process.argv[4])
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/d.js b/deps/npm/node_modules/tar/node_modules/minipass/d.js
new file mode 100644
index 0000000000..ceea513960
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/d.js
@@ -0,0 +1,7 @@
+var MD = require('./')
+var d = new MD()
+console.log(d.write('hello'))
+console.log(d.write('goodbye'))
+d.pipe(process.stderr)
+console.log(d.write('the end'))
+console.log(d.end())
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/e.js b/deps/npm/node_modules/tar/node_modules/minipass/e.js
new file mode 100644
index 0000000000..f1da6c7460
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/e.js
@@ -0,0 +1,17 @@
+const MP = require('stream').PassThrough // require('./')
+const mp = new MP()
+const wait = (n) => new Promise(resolve => setTimeout(resolve, n))
+const t = require('tap')
+
+t.test('end ordering', async t => {
+ mp.on('end', _ => console.log('end'))
+ mp.end()
+ console.log('called end')
+ // mp.resume()
+ // console.log('called resume()')
+ // mp.read()
+ // console.log('called read')
+ mp.on('data', _=>_)
+ console.log('added data handler')
+ await wait(1)
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/eos.js b/deps/npm/node_modules/tar/node_modules/minipass/eos.js
new file mode 100644
index 0000000000..2250720940
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/eos.js
@@ -0,0 +1,12 @@
+const EE = require('events').EventEmitter
+const eos = require('end-of-stream')
+const ee = new EE()
+ee.readable = ee.writable = true
+eos(ee, er => {
+ if (er)
+ throw er
+ console.log('stream ended')
+})
+ee.emit('finish')
+ee.emit('close')
+ee.emit('end')
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/foo b/deps/npm/node_modules/tar/node_modules/minipass/foo
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/foo
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/index.js b/deps/npm/node_modules/tar/node_modules/minipass/index.js
new file mode 100644
index 0000000000..3a3ad412b5
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/index.js
@@ -0,0 +1,295 @@
+'use strict'
+const EE = require('events')
+const Yallist = require('yallist')
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const SD = require('string_decoder').StringDecoder
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const RESUME = Symbol('resume')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+
+class MiniPass extends EE {
+ constructor (options) {
+ super()
+ this[FLOWING] = false
+ this.pipes = new Yallist()
+ this.buffer = new Yallist()
+ this[OBJECTMODE] = options && options.objectMode || false
+ if (this[OBJECTMODE])
+ this[ENCODING] = null
+ else
+ this[ENCODING] = options && options.encoding || null
+ if (this[ENCODING] === 'buffer')
+ this[ENCODING] = null
+ this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+ this[EOF] = false
+ this[EMITTED_END] = false
+ this[CLOSED] = false
+ this.writable = true
+ this.readable = true
+ this[BUFFERLENGTH] = 0
+ }
+
+ get bufferLength () { return this[BUFFERLENGTH] }
+
+ get encoding () { return this[ENCODING] }
+ set encoding (enc) {
+ if (this[OBJECTMODE])
+ throw new Error('cannot set encoding in objectMode')
+
+ if (this[ENCODING] && enc !== this[ENCODING] &&
+ (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
+ throw new Error('cannot change encoding')
+
+ if (this[ENCODING] !== enc) {
+ this[DECODER] = enc ? new SD(enc) : null
+ if (this.buffer.length)
+ this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
+ }
+
+ this[ENCODING] = enc
+ }
+
+ setEncoding (enc) {
+ this.encoding = enc
+ }
+
+ write (chunk, encoding, cb) {
+ if (this[EOF])
+ throw new Error('write after end')
+
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+
+ if (!encoding)
+ encoding = 'utf8'
+
+ // fast-path writing strings of same encoding to a stream with
+ // an empty buffer, skipping the buffer/decoder dance
+ if (typeof chunk === 'string' && !this[OBJECTMODE] &&
+ // unless it is a string already ready for us to use
+ !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
+ chunk = new Buffer(chunk, encoding)
+ }
+
+ if (Buffer.isBuffer(chunk) && this[ENCODING])
+ chunk = this[DECODER].write(chunk)
+
+ try {
+ return this.flowing
+ ? (this.emit('data', chunk), this.flowing)
+ : (this[BUFFERPUSH](chunk), false)
+ } finally {
+ this.emit('readable')
+ if (cb)
+ cb()
+ }
+ }
+
+ read (n) {
+ try {
+ if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
+ return null
+
+ if (this[OBJECTMODE])
+ n = null
+
+ if (this.buffer.length > 1 && !this[OBJECTMODE]) {
+ if (this.encoding)
+ this.buffer = new Yallist([
+ Array.from(this.buffer).join('')
+ ])
+ else
+ this.buffer = new Yallist([
+ Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH])
+ ])
+ }
+
+ return this[READ](n || null, this.buffer.head.value)
+ } finally {
+ this[MAYBE_EMIT_END]()
+ }
+ }
+
+ [READ] (n, chunk) {
+ if (n === chunk.length || n === null)
+ this[BUFFERSHIFT]()
+ else {
+ this.buffer.head.value = chunk.slice(n)
+ chunk = chunk.slice(0, n)
+ this[BUFFERLENGTH] -= n
+ }
+
+ this.emit('data', chunk)
+
+ if (!this.buffer.length && !this[EOF])
+ this.emit('drain')
+
+ return chunk
+ }
+
+ end (chunk, encoding, cb) {
+ if (typeof chunk === 'function')
+ cb = chunk, chunk = null
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+ if (chunk)
+ this.write(chunk, encoding)
+ if (cb)
+ this.once('end', cb)
+ this[EOF] = true
+ this.writable = false
+ if (this.flowing)
+ this[MAYBE_EMIT_END]()
+ }
+
+ // don't let the internal resume be overwritten
+ [RESUME] () {
+ this[FLOWING] = true
+ this.emit('resume')
+ if (this.buffer.length)
+ this[FLUSH]()
+ else if (this[EOF])
+ this[MAYBE_EMIT_END]()
+ else
+ this.emit('drain')
+ }
+
+ resume () {
+ return this[RESUME]()
+ }
+
+ pause () {
+ this[FLOWING] = false
+ }
+
+ get flowing () {
+ return this[FLOWING]
+ }
+
+ [BUFFERPUSH] (chunk) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] += 1
+ else
+ this[BUFFERLENGTH] += chunk.length
+ return this.buffer.push(chunk)
+ }
+
+ [BUFFERSHIFT] () {
+ if (this.buffer.length) {
+ if (this[OBJECTMODE])
+ this[BUFFERLENGTH] -= 1
+ else
+ this[BUFFERLENGTH] -= this.buffer.head.value.length
+ }
+ return this.buffer.shift()
+ }
+
+ [FLUSH] () {
+ do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
+
+ if (!this.buffer.length && !this[EOF])
+ this.emit('drain')
+ }
+
+ [FLUSHCHUNK] (chunk) {
+ return chunk ? (this.emit('data', chunk), this.flowing) : false
+ }
+
+ pipe (dest, opts) {
+ if (dest === process.stdout || dest === process.stderr)
+ (opts = opts || {}).end = false
+ const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
+ this.pipes.push(p)
+
+ dest.on('drain', p.ondrain)
+ this[RESUME]()
+ return dest
+ }
+
+ addEventHandler (ev, fn) {
+ return this.on(ev, fn)
+ }
+
+ on (ev, fn) {
+ try {
+ return super.on(ev, fn)
+ } finally {
+ if (ev === 'data' && !this.pipes.length && !this.flowing) {
+ this[RESUME]()
+ }
+ }
+ }
+
+ get emittedEnd () {
+ return this[EMITTED_END]
+ }
+
+ [MAYBE_EMIT_END] () {
+ if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) {
+ this.emit('end')
+ this.emit('prefinish')
+ this.emit('finish')
+ if (this[CLOSED])
+ this.emit('close')
+ }
+ }
+
+ emit (ev, data) {
+ if (ev === 'data') {
+ if (!data)
+ return
+
+ if (this.pipes.length)
+ this.pipes.forEach(p => p.dest.write(data) || this.pause())
+ } else if (ev === 'end') {
+ if (this[DECODER]) {
+ data = this[DECODER].end()
+ if (data) {
+ this.pipes.forEach(p => p.dest.write(data))
+ super.emit('data', data)
+ }
+ }
+ this.pipes.forEach(p => {
+ p.dest.removeListener('drain', p.ondrain)
+ if (!p.opts || p.opts.end !== false)
+ p.dest.end()
+ })
+ this[EMITTED_END] = true
+ this.readable = false
+ } else if (ev === 'close') {
+ this[CLOSED] = true
+ // don't emit close before 'end' and 'finish'
+ if (!this[EMITTED_END])
+ return
+ }
+
+ const args = new Array(arguments.length)
+ args[0] = ev
+ args[1] = data
+ if (arguments.length > 2) {
+ for (let i = 2; i < arguments.length; i++) {
+ args[i] = arguments[i]
+ }
+ }
+
+ try {
+ return super.emit.apply(this, args)
+ } finally {
+ if (ev !== 'end')
+ this[MAYBE_EMIT_END]()
+ }
+ }
+}
+
+module.exports = MiniPass
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx b/deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx
new file mode 100644
index 0000000000..05e19a41b7
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/minipass-benchmarks.xlsx
Binary files differ
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/package.json b/deps/npm/node_modules/tar/node_modules/minipass/package.json
new file mode 100644
index 0000000000..52856521fc
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/package.json
@@ -0,0 +1,64 @@
+{
+ "_from": "minipass@^2.0.2",
+ "_id": "minipass@2.2.1",
+ "_inBundle": false,
+ "_integrity": "sha512-u1aUllxPJUI07cOqzR7reGmQxmCqlH88uIIsf6XZFEWgw7gXKpJdR+5R9Y3KEDmWYkdIz9wXZs3C0jOPxejk/Q==",
+ "_location": "/tar/minipass",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minipass@^2.0.2",
+ "name": "minipass",
+ "escapedName": "minipass",
+ "rawSpec": "^2.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^2.0.2"
+ },
+ "_requiredBy": [
+ "/tar",
+ "/tar/minizlib"
+ ],
+ "_resolved": "https://registry.npmjs.org/minipass/-/minipass-2.2.1.tgz",
+ "_shasum": "5ada97538b1027b4cf7213432428578cb564011f",
+ "_spec": "minipass@^2.0.2",
+ "_where": "/Users/rebecca/code/npm/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minipass/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "yallist": "^3.0.0"
+ },
+ "deprecated": false,
+ "description": "minimal implementation of a PassThrough stream",
+ "devDependencies": {
+ "end-of-stream": "^1.4.0",
+ "tap": "^10.7.0",
+ "through2": "^2.0.3"
+ },
+ "homepage": "https://github.com/isaacs/minipass#readme",
+ "keywords": [
+ "passthrough",
+ "stream"
+ ],
+ "license": "ISC",
+ "main": "index.js",
+ "name": "minipass",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minipass.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "2.2.1"
+}
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/test/basic.js b/deps/npm/node_modules/tar/node_modules/minipass/test/basic.js
new file mode 100644
index 0000000000..e3885c808b
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/test/basic.js
@@ -0,0 +1,438 @@
+const MiniPass = require('../')
+const t = require('tap')
+const EE = require('events').EventEmitter
+
+t.test('some basic piping and writing', async t => {
+ let mp = new MiniPass({ encoding: 'base64' })
+ t.notOk(mp.flowing)
+ mp.flowing = true
+ t.notOk(mp.flowing)
+ t.equal(mp.encoding, 'base64')
+ mp.encoding = null
+ t.equal(mp.encoding, null)
+ t.equal(mp.readable, true)
+ t.equal(mp.writable, true)
+ t.equal(mp.write('hello'), false)
+ let dest = new MiniPass()
+ let sawDestData = false
+ dest.once('data', chunk => {
+ sawDestData = true
+ t.isa(chunk, Buffer)
+ })
+ t.equal(mp.pipe(dest), dest, 'pipe returns dest')
+ t.ok(sawDestData, 'got data because pipe() flushes')
+ t.equal(mp.write('bye'), true, 'write() returns true when flowing')
+ dest.pause()
+ t.equal(mp.write('after pause'), false, 'false when dest is paused')
+ t.equal(mp.write('after false'), false, 'false when not flowing')
+ t.equal(dest.buffer.length, 1, '1 item is buffered in dest')
+ t.equal(mp.buffer.length, 1, '1 item buffered in src')
+ dest.resume()
+ t.equal(dest.buffer.length, 0, 'nothing is buffered in dest')
+ t.equal(mp.buffer.length, 0, 'nothing buffered in src')
+})
+
+t.test('unicode splitting', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ t.plan(2)
+ t.equal(mp.encoding, 'utf8')
+ mp.on('data', chunk => {
+ t.equal(chunk, butterfly)
+ })
+ const butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ mp.end()
+})
+
+t.test('unicode splitting with setEncoding', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'hex' })
+ t.plan(4)
+ t.equal(mp.encoding, 'hex')
+ mp.setEncoding('hex')
+ t.equal(mp.encoding, 'hex')
+ mp.setEncoding('utf8')
+ t.equal(mp.encoding, 'utf8')
+ mp.on('data', chunk => {
+ t.equal(chunk, butterfly)
+ })
+ const butterbuf = new Buffer([0xf0, 0x9f, 0xa6, 0x8b])
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ mp.end()
+})
+
+t.test('base64 -> utf8 piping', t => {
+ t.plan(1)
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'base64' })
+ const dest = new MiniPass({ encoding: 'utf8' })
+ mp.pipe(dest)
+ let out = ''
+ dest.on('data', c => out += c)
+ dest.on('end', _ =>
+ t.equal(new Buffer(out, 'base64').toString('utf8'), butterfly))
+ mp.write(butterfly)
+ mp.end()
+})
+
+t.test('utf8 -> base64 piping', t => {
+ t.plan(1)
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ const dest = new MiniPass({ encoding: 'base64' })
+ mp.pipe(dest)
+ let out = ''
+ dest.on('data', c => out += c)
+ dest.on('end', _ =>
+ t.equal(new Buffer(out, 'base64').toString('utf8'), butterfly))
+ mp.write(butterfly)
+ mp.end()
+})
+
+t.test('read method', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ mp.write(new Buffer(butterfly))
+ t.equal(mp.read(5), null)
+ t.equal(mp.read(0), null)
+ t.same(mp.read(2), butterfly)
+})
+
+t.test('read with no args', async t => {
+ t.test('buffer -> string', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 2))
+ mp.write(butterbuf.slice(2))
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('buffer -> buffer', async t => {
+ const butterfly = new Buffer('🦋')
+ const mp = new MiniPass()
+ mp.on('data', c => t.same(c, butterfly))
+ mp.pause()
+ mp.write(butterfly.slice(0, 2))
+ mp.write(butterfly.slice(2))
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('string -> buffer', async t => {
+ const butterfly = '🦋'
+ const butterbuf = new Buffer(butterfly)
+ const mp = new MiniPass()
+ mp.on('data', c => t.same(c, butterbuf))
+ mp.pause()
+ mp.write(butterfly)
+ t.same(mp.read(), butterbuf)
+ t.equal(mp.read(), null)
+ })
+
+ t.test('string -> string', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass({ encoding: 'utf8' })
+ mp.on('data', c => t.equal(c, butterfly))
+ mp.pause()
+ mp.write(butterfly[0])
+ mp.write(butterfly[1])
+ t.same(mp.read(), butterfly)
+ t.equal(mp.read(), null)
+ })
+})
+
+t.test('partial read', async t => {
+ const butterfly = '🦋'
+ const mp = new MiniPass()
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.write(butterbuf.slice(2, 3))
+ mp.write(butterbuf.slice(3, 4))
+ t.equal(mp.read(5), null)
+ t.equal(mp.read(0), null)
+ t.same(mp.read(2), butterbuf.slice(0, 2))
+ t.same(mp.read(2), butterbuf.slice(2, 4))
+})
+
+t.test('write after end', async t => {
+ const mp = new MiniPass()
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ mp.end()
+ t.throws(_ => mp.write('nope'))
+ t.notOk(sawEnd, 'should not get end event yet (not flowing)')
+ mp.resume()
+ t.ok(sawEnd, 'should get end event after resume()')
+})
+
+t.test('write cb', async t => {
+ const mp = new MiniPass()
+ let calledCb = false
+ mp.write('ok', () => calledCb = true)
+ t.ok(calledCb)
+})
+
+t.test('end with chunk', async t => {
+ let out = ''
+ const mp = new MiniPass({ encoding: 'utf8' })
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ mp.addEventHandler('data', c => out += c)
+ let endCb = false
+ mp.end('ok', _ => endCb = true)
+ t.equal(out, 'ok')
+ t.ok(sawEnd, 'should see end event')
+ t.ok(endCb, 'end cb should get called')
+})
+
+t.test('no drain if could not entirely drain on resume', async t => {
+ const mp = new MiniPass()
+ const dest = new MiniPass({ encoding: 'buffer' })
+ t.equal(mp.write('foo'), false)
+ t.equal(mp.write('bar'), false)
+ t.equal(mp.write('baz'), false)
+ t.equal(mp.write('qux'), false)
+ mp.on('drain', _ => t.fail('should not drain'))
+ mp.pipe(dest)
+})
+
+t.test('end with chunk pending', async t => {
+ const mp = new MiniPass()
+ t.equal(mp.write('foo'), false)
+ t.equal(mp.write('626172', 'hex'), false)
+ t.equal(mp.write('baz'), false)
+ t.equal(mp.write('qux'), false)
+ let sawEnd = false
+ mp.on('end', _ => sawEnd = true)
+ let endCb = false
+ mp.end(_ => endCb = true)
+ t.notOk(endCb, 'endcb should not happen yet')
+ t.notOk(sawEnd, 'should not see end yet')
+ let out = ''
+ mp.on('data', c => out += c)
+ t.ok(sawEnd, 'see end after flush')
+ t.ok(endCb, 'end cb after flush')
+ t.equal(out, 'foobarbazqux')
+})
+
+t.test('pipe to stderr does not throw', t => {
+ const spawn = require('child_process').spawn
+ const module = JSON.stringify(require.resolve('../'))
+ const fs = require('fs')
+ const file = __dirname + '/prog.js'
+ fs.writeFileSync(file, `
+ const MP = require(${module})
+ const mp = new MP()
+ mp.pipe(process.stderr)
+ mp.end("hello")
+ `)
+ let err = ''
+ return new Promise(res => {
+ const child = spawn(process.execPath, [file])
+ child.stderr.on('data', c => err += c)
+ child.on('close', (code, signal) => {
+ t.equal(code, 0)
+ t.equal(signal, null)
+ t.equal(err, 'hello')
+ fs.unlinkSync(file)
+ res()
+ })
+ })
+})
+
+t.test('emit works with many args', t => {
+ const mp = new MiniPass()
+ t.plan(2)
+ mp.on('foo', function (a, b, c, d, e, f, g) {
+ t.same([a,b,c,d,e,f,g], [1,2,3,4,5,6,7])
+ t.equal(arguments.length, 7)
+ })
+ mp.emit('foo', 1, 2, 3, 4, 5, 6, 7)
+})
+
+t.test('emit drain on resume, even if no flush', t => {
+ const mp = new MiniPass()
+ mp.encoding = 'utf8'
+
+ const chunks = []
+ class SlowStream extends EE {
+ write (chunk) {
+ chunks.push(chunk)
+ setTimeout(_ => this.emit('drain'))
+ return false
+ }
+ end () { return this.write() }
+ }
+
+ const ss = new SlowStream()
+
+ mp.pipe(ss)
+ t.ok(mp.flowing, 'flowing, because piped')
+ t.equal(mp.write('foo'), false, 'write() returns false, backpressure')
+ t.equal(mp.buffer.length, 0, 'buffer len is 0')
+ t.equal(mp.flowing, false, 'flowing false, awaiting drain')
+ t.same(chunks, ['foo'], 'chunk made it through')
+ mp.once('drain', _ => {
+ t.pass('received mp drain event')
+ t.end()
+ })
+})
+
+t.test('save close for end', t => {
+ const mp = new MiniPass()
+ let ended = false
+ mp.on('close', _ => {
+ t.equal(ended, true, 'end before close')
+ t.end()
+ })
+ mp.on('end', _ => {
+ t.equal(ended, false, 'only end once')
+ ended = true
+ })
+
+ mp.emit('close')
+ mp.end('foo')
+ t.equal(ended, false, 'no end until flushed')
+ mp.resume()
+})
+
+t.test('eos works', t => {
+ const eos = require('end-of-stream')
+ const mp = new MiniPass()
+
+ eos(mp, er => {
+ if (er)
+ throw er
+ t.end()
+ })
+
+ mp.emit('close')
+ mp.end('foo')
+ mp.resume()
+})
+
+t.test('bufferLength property', t => {
+ const eos = require('end-of-stream')
+ const mp = new MiniPass()
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+ mp.write('a')
+
+ t.equal(mp.bufferLength, 6)
+ t.equal(mp.read(7), null)
+ t.equal(mp.read(3).toString(), 'aaa')
+ t.equal(mp.bufferLength, 3)
+ t.equal(mp.read().toString(), 'aaa')
+ t.equal(mp.bufferLength, 0)
+ t.end()
+})
+
+t.test('emit resume event on resume', t => {
+ const mp = new MiniPass()
+ t.plan(3)
+ mp.on('resume', _ => t.pass('got resume event'))
+ mp.end('asdf')
+ t.equal(mp.flowing, false, 'not flowing yet')
+ mp.resume()
+ t.equal(mp.flowing, true, 'flowing after resume')
+})
+
+t.test('objectMode', t => {
+ const mp = new MiniPass({ objectMode: true })
+ const a = { a: 1 }
+ const b = { b: 1 }
+ const out = []
+ mp.on('data', c => out.push(c))
+ mp.on('end', _ => {
+ t.equal(out.length, 2)
+ t.equal(out[0], a)
+ t.equal(out[1], b)
+ t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
+ t.end()
+ })
+ t.ok(mp.write(a))
+ t.ok(mp.write(b))
+ mp.end()
+})
+
+t.test('objectMode no encoding', t => {
+ const mp = new MiniPass({
+ objectMode: true,
+ encoding: 'utf8'
+ })
+ t.equal(mp.encoding, null)
+ const a = { a: 1 }
+ const b = { b: 1 }
+ const out = []
+ mp.on('data', c => out.push(c))
+ mp.on('end', _ => {
+ t.equal(out.length, 2)
+ t.equal(out[0], a)
+ t.equal(out[1], b)
+ t.same(out, [ { a: 1 }, { b: 1 } ], 'objs not munged')
+ t.end()
+ })
+ t.ok(mp.write(a))
+ t.ok(mp.write(b))
+ mp.end()
+})
+
+t.test('objectMode read() and buffering', t => {
+ const mp = new MiniPass({ objectMode: true })
+ const a = { a: 1 }
+ const b = { b: 1 }
+ t.notOk(mp.write(a))
+ t.notOk(mp.write(b))
+ t.equal(mp.read(2), a)
+ t.equal(mp.read(), b)
+ t.end()
+})
+
+t.test('set encoding in object mode throws', async t =>
+ t.throws(_ => new MiniPass({ objectMode: true }).encoding = 'utf8',
+ new Error('cannot set encoding in objectMode')))
+
+t.test('set encoding again throws', async t =>
+ t.throws(_ => {
+ const mp = new MiniPass({ encoding: 'hex' })
+ mp.write('ok')
+ mp.encoding = 'utf8'
+ }, new Error('cannot change encoding')))
+
+t.test('set encoding with existing buffer', async t => {
+ const mp = new MiniPass()
+ const butterfly = '🦋'
+ const butterbuf = new Buffer(butterfly)
+ mp.write(butterbuf.slice(0, 1))
+ mp.write(butterbuf.slice(1, 2))
+ mp.setEncoding('utf8')
+ mp.write(butterbuf.slice(2))
+ t.equal(mp.read(), butterfly)
+})
+
+t.test('end:false', async t => {
+ t.plan(1)
+ const mp = new MiniPass({ encoding: 'utf8' })
+ const d = new MiniPass({ encoding: 'utf8' })
+ d.end = () => t.threw(new Error('no end no exit no way out'))
+ d.on('data', c => t.equal(c, 'this is fine'))
+ mp.pipe(d, { end: false })
+ mp.end('this is fine')
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js b/deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js
new file mode 100644
index 0000000000..42387d51af
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minipass/test/empty-end.js
@@ -0,0 +1,38 @@
+const t = require('tap')
+const MP = require('../')
+
+t.test('emit end on resume', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ t.notOk(mp.emittedEnd)
+ list.push('called end')
+ mp.resume()
+ t.ok(mp.emittedEnd)
+ list.push('called resume')
+ t.same(list, ['called end', 'end', 'called resume'])
+})
+
+t.test('emit end on read()', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ list.push('called end')
+
+ mp.read()
+ list.push('called read()')
+ t.same(list, ['called end', 'end', 'called read()'])
+})
+
+t.test('emit end on data handler', async t => {
+ const list = []
+ const mp = new MP()
+ mp.on('end', _ => list.push('end'))
+ mp.end()
+ list.push('called end')
+ mp.on('data', _=>_)
+ list.push('added data handler')
+ t.same(list, ['called end', 'end', 'added data handler'])
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE b/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
new file mode 100644
index 0000000000..ffce7383f5
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright Isaac Z. Schlueter and Contributors
+Copyright Node.js contributors. All rights reserved.
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/README.md b/deps/npm/node_modules/tar/node_modules/minizlib/README.md
new file mode 100644
index 0000000000..2b585545ef
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/README.md
@@ -0,0 +1,44 @@
+# minizlib
+
+A tiny fast zlib stream built on [minipass](http://npm.im/minipass)
+and Node.js's zlib binding.
+
+This module was created to serve the needs of
+[node-tar](http://npm.im/tar) v2. If your needs are different, then
+it may not be for you.
+
+## How does this differ from the streams in `require('zlib')`?
+
+First, there are no convenience methods to compress or decompress a
+buffer. If you want those, use the built-in `zlib` module. This is
+only streams.
+
+This module compresses and decompresses the data as fast as you feed
+it in. It is synchronous, and runs on the main process thread. Zlib
+operations can be CPU-intensive, but they're very fast, and doing it
+this way means much less bookkeeping and artificial deferral.
+
+Node's built in zlib streams are built on top of `stream.Transform`.
+They do the maximally safe thing with respect to consistent
+asynchrony, buffering, and backpressure.
+
+This module _does_ support backpressure, and will buffer output chunks
+that are not consumed, but is less of a mediator between the input and
+output. There are no high or low watermarks, no state objects, and no
+artificial async deferrals. It will not protect you from Zalgo.
+
+If you write, data will be emitted right away. If you write
+everything synchronously in one tick, and you are listening to the
+`data` event to consume it, then it'll all be emitted right away in
+that same tick. If you want data to be emitted in the next tick, then
+write it in the next tick.
+
+It is thus the responsibility of the reader and writer to manage their
+own consumption and process execution flow.
+
+The goal is to compress and decompress as fast as possible, even for
+files that are too large to store all in one buffer.
+
+The API is very similar to the built-in zlib module. There are
+classes that you instantiate with `new` and they are streams that can
+be piped together.
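+
+A sketch of that usage (assuming the class names mirror core `zlib`,
+e.g. `Gzip`; verify against the module's exports before relying on it):
+
+```js
+const zlib = require('minizlib')
+const fs = require('fs')
+
+// compress a file by piping it through a Gzip stream
+fs.createReadStream('file.tar')
+ .pipe(new zlib.Gzip())
+ .pipe(fs.createWriteStream('file.tar.gz'))
+```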
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/constants.js b/deps/npm/node_modules/tar/node_modules/minizlib/constants.js
new file mode 100644
index 0000000000..4edffde86f
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/constants.js
@@ -0,0 +1,46 @@
+module.exports = Object.freeze({
+ Z_NO_FLUSH: 0,
+ Z_PARTIAL_FLUSH: 1,
+ Z_SYNC_FLUSH: 2,
+ Z_FULL_FLUSH: 3,
+ Z_FINISH: 4,
+ Z_BLOCK: 5,
+ Z_OK: 0,
+ Z_STREAM_END: 1,
+ Z_NEED_DICT: 2,
+ Z_ERRNO: -1,
+ Z_STREAM_ERROR: -2,
+ Z_DATA_ERROR: -3,
+ Z_MEM_ERROR: -4,
+ Z_BUF_ERROR: -5,
+ Z_VERSION_ERROR: -6,
+ Z_NO_COMPRESSION: 0,
+ Z_BEST_SPEED: 1,
+ Z_BEST_COMPRESSION: 9,
+ Z_DEFAULT_COMPRESSION: -1,
+ Z_FILTERED: 1,
+ Z_HUFFMAN_ONLY: 2,
+ Z_RLE: 3,
+ Z_FIXED: 4,
+ Z_DEFAULT_STRATEGY: 0,
+ ZLIB_VERNUM: 4736,
+ DEFLATE: 1,
+ INFLATE: 2,
+ GZIP: 3,
+ GUNZIP: 4,
+ DEFLATERAW: 5,
+ INFLATERAW: 6,
+ UNZIP: 7,
+ Z_MIN_WINDOWBITS: 8,
+ Z_MAX_WINDOWBITS: 15,
+ Z_DEFAULT_WINDOWBITS: 15,
+ Z_MIN_CHUNK: 64,
+ Z_MAX_CHUNK: Infinity,
+ Z_DEFAULT_CHUNK: 16384,
+ Z_MIN_MEMLEVEL: 1,
+ Z_MAX_MEMLEVEL: 9,
+ Z_DEFAULT_MEMLEVEL: 8,
+ Z_MIN_LEVEL: -1,
+ Z_MAX_LEVEL: 9,
+ Z_DEFAULT_LEVEL: -1
+})
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/index.js b/deps/npm/node_modules/tar/node_modules/minizlib/index.js
new file mode 100644
index 0000000000..7d595dec4f
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/index.js
@@ -0,0 +1,333 @@
+'use strict'
+
+const assert = require('assert')
+const Buffer = require('buffer').Buffer
+const binding = process.binding('zlib')
+
+const constants = exports.constants = require('./constants.js')
+const MiniPass = require('minipass')
+
+// translation table for return codes.
+const codes = new Map([
+ [constants.Z_OK, 'Z_OK'],
+ [constants.Z_STREAM_END, 'Z_STREAM_END'],
+ [constants.Z_NEED_DICT, 'Z_NEED_DICT'],
+ [constants.Z_ERRNO, 'Z_ERRNO'],
+ [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'],
+ [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'],
+ [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'],
+ [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'],
+ [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR']
+])
+
+const validFlushFlags = new Set([
+ constants.Z_NO_FLUSH,
+ constants.Z_PARTIAL_FLUSH,
+ constants.Z_SYNC_FLUSH,
+ constants.Z_FULL_FLUSH,
+ constants.Z_FINISH,
+ constants.Z_BLOCK
+])
+
+const strategies = new Set([
+ constants.Z_FILTERED,
+ constants.Z_HUFFMAN_ONLY,
+ constants.Z_RLE,
+ constants.Z_FIXED,
+ constants.Z_DEFAULT_STRATEGY
+])
+
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _opts = Symbol('opts')
+const _chunkSize = Symbol('chunkSize')
+const _flushFlag = Symbol('flushFlag')
+const _finishFlush = Symbol('finishFlush')
+const _handle = Symbol('handle')
+const _hadError = Symbol('hadError')
+const _buffer = Symbol('buffer')
+const _offset = Symbol('offset')
+const _level = Symbol('level')
+const _strategy = Symbol('strategy')
+const _ended = Symbol('ended')
+
+class Zlib extends MiniPass {
+ constructor (opts, mode) {
+ super(opts)
+ this[_ended] = false
+ this[_opts] = opts = opts || {}
+ this[_chunkSize] = opts.chunkSize || constants.Z_DEFAULT_CHUNK
+ if (opts.flush && !validFlushFlags.has(opts.flush)) {
+ throw new Error('Invalid flush flag: ' + opts.flush)
+ }
+ if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) {
+ throw new Error('Invalid flush flag: ' + opts.finishFlush)
+ }
+
+ this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH
+ this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ?
+ opts.finishFlush : constants.Z_FINISH
+
+ if (opts.chunkSize) {
+ if (opts.chunkSize < constants.Z_MIN_CHUNK) {
+ throw new Error('Invalid chunk size: ' + opts.chunkSize)
+ }
+ }
+
+ if (opts.windowBits) {
+ if (opts.windowBits < constants.Z_MIN_WINDOWBITS ||
+ opts.windowBits > constants.Z_MAX_WINDOWBITS) {
+ throw new Error('Invalid windowBits: ' + opts.windowBits)
+ }
+ }
+
+ if (opts.level) {
+ if (opts.level < constants.Z_MIN_LEVEL ||
+ opts.level > constants.Z_MAX_LEVEL) {
+ throw new Error('Invalid compression level: ' + opts.level)
+ }
+ }
+
+ if (opts.memLevel) {
+ if (opts.memLevel < constants.Z_MIN_MEMLEVEL ||
+ opts.memLevel > constants.Z_MAX_MEMLEVEL) {
+ throw new Error('Invalid memLevel: ' + opts.memLevel)
+ }
+ }
+
+ if (opts.strategy && !(strategies.has(opts.strategy)))
+ throw new Error('Invalid strategy: ' + opts.strategy)
+
+ if (opts.dictionary) {
+ if (!(opts.dictionary instanceof Buffer)) {
+ throw new Error('Invalid dictionary: it should be a Buffer instance')
+ }
+ }
+
+ this[_handle] = new binding.Zlib(mode)
+
+ this[_hadError] = false
+ this[_handle].onerror = (message, errno) => {
+ // there is no way to cleanly recover.
+ // continuing only obscures problems.
+ this.close()
+ this[_hadError] = true
+
+ const error = new Error(message)
+ error.errno = errno
+ error.code = codes.get(errno)
+ this.emit('error', error)
+ }
+
+ const level = typeof opts.level === 'number' ? opts.level
+ : constants.Z_DEFAULT_COMPRESSION
+
+ var strategy = typeof opts.strategy === 'number' ? opts.strategy
+ : constants.Z_DEFAULT_STRATEGY
+
+ this[_handle].init(opts.windowBits || constants.Z_DEFAULT_WINDOWBITS,
+ level,
+ opts.memLevel || constants.Z_DEFAULT_MEMLEVEL,
+ strategy,
+ opts.dictionary)
+
+ this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
+ this[_offset] = 0
+ this[_level] = level
+ this[_strategy] = strategy
+
+ this.once('end', this.close)
+ }
+
+ close () {
+ if (this[_handle]) {
+ this[_handle].close()
+ this[_handle] = null
+ this.emit('close')
+ }
+ }
+
+ params (level, strategy) {
+ if (!this[_handle])
+ throw new Error('cannot switch params when binding is closed')
+
+ // no way to test this without also not supporting params at all
+ /* istanbul ignore if */
+ if (!this[_handle].params)
+ throw new Error('not supported in this implementation')
+
+ if (level < constants.Z_MIN_LEVEL ||
+ level > constants.Z_MAX_LEVEL) {
+ throw new RangeError('Invalid compression level: ' + level)
+ }
+
+ if (!(strategies.has(strategy)))
+ throw new TypeError('Invalid strategy: ' + strategy)
+
+ if (this[_level] !== level || this[_strategy] !== strategy) {
+ this.flush(constants.Z_SYNC_FLUSH)
+ assert(this[_handle], 'zlib binding closed')
+ this[_handle].params(level, strategy)
+ /* istanbul ignore else */
+ if (!this[_hadError]) {
+ this[_level] = level
+ this[_strategy] = strategy
+ }
+ }
+ }
+
+ reset () {
+ assert(this[_handle], 'zlib binding closed')
+ return this[_handle].reset()
+ }
+
+ flush (kind) {
+ if (kind === undefined)
+ kind = constants.Z_FULL_FLUSH
+
+ if (this.ended)
+ return
+
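+    // temporarily swap in the requested flush flag, push a zero-length
+    // write through the synchronous engine, then restore the old flag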
+ const flushFlag = this[_flushFlag]
+ this[_flushFlag] = kind
+ this.write(Buffer.alloc(0))
+ this[_flushFlag] = flushFlag
+ }
+
+ end (chunk, encoding, cb) {
+ if (chunk)
+ this.write(chunk, encoding)
+ this.flush(this[_finishFlush])
+ this[_ended] = true
+ return super.end(null, null, cb)
+ }
+
+ get ended () {
+ return this[_ended]
+ }
+
+ write (chunk, encoding, cb) {
+ // process the chunk using the sync process
+ // then super.write() all the outputted chunks
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+
+ if (typeof chunk === 'string')
+ chunk = new Buffer(chunk, encoding)
+
+ let availInBefore = chunk && chunk.length
+ let availOutBefore = this[_chunkSize] - this[_offset]
+ let inOff = 0 // the offset of the input buffer
+ const flushFlag = this[_flushFlag]
+ let writeReturn = true
+
+ assert(this[_handle], 'zlib binding closed')
+ do {
+ let res = this[_handle].writeSync(
+ flushFlag,
+ chunk, // in
+ inOff, // in_off
+ availInBefore, // in_len
+ this[_buffer], // out
+      this[_offset], // out_off
+ availOutBefore // out_len
+ )
+ if (this[_hadError])
+ break
+
+ let availInAfter = res[0]
+ let availOutAfter = res[1]
+
+ const have = availOutBefore - availOutAfter
+ assert(have >= 0, 'have should not go down')
+
+ if (have > 0) {
+ const out = this[_buffer].slice(
+ this[_offset], this[_offset] + have
+ )
+
+ this[_offset] += have
+ // serve some output to the consumer.
+ writeReturn = super.write(out) && writeReturn
+ }
+
+      // exhausted the output buffer, or used up the chunk; allocate a new one.
+ if (availOutAfter === 0 || this[_offset] >= this[_chunkSize]) {
+ availOutBefore = this[_chunkSize]
+ this[_offset] = 0
+ this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
+ }
+
+ if (availOutAfter === 0) {
+ // Not actually done. Need to reprocess.
+ // Also, update the availInBefore to the availInAfter value,
+ // so that if we have to hit it a third (fourth, etc.) time,
+ // it'll have the correct byte counts.
+ inOff += (availInBefore - availInAfter)
+ availInBefore = availInAfter
+ continue
+ }
+ break
+ } while (!this[_hadError])
+
+ if (cb)
+ cb()
+ return writeReturn
+ }
+}
+
+// minimal 2-byte header
+class Deflate extends Zlib {
+ constructor (opts) {
+ super(opts, constants.DEFLATE)
+ }
+}
+
+class Inflate extends Zlib {
+ constructor (opts) {
+ super(opts, constants.INFLATE)
+ }
+}
+
+// gzip - bigger header, same deflate compression
+class Gzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.GZIP)
+ }
+}
+
+class Gunzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.GUNZIP)
+ }
+}
+
+// raw - no header
+class DeflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, constants.DEFLATERAW)
+ }
+}
+
+class InflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, constants.INFLATERAW)
+ }
+}
+
+// auto-detect header.
+class Unzip extends Zlib {
+ constructor (opts) {
+ super(opts, constants.UNZIP)
+ }
+}
+
+exports.Deflate = Deflate
+exports.Inflate = Inflate
+exports.Gzip = Gzip
+exports.Gunzip = Gunzip
+exports.DeflateRaw = DeflateRaw
+exports.InflateRaw = InflateRaw
+exports.Unzip = Unzip
diff --git a/deps/npm/node_modules/tar/node_modules/minizlib/package.json b/deps/npm/node_modules/tar/node_modules/minizlib/package.json
new file mode 100644
index 0000000000..ae7fb898d4
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/minizlib/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "minizlib@^1.0.3",
+ "_id": "minizlib@1.0.3",
+ "_inBundle": false,
+ "_integrity": "sha1-1cGr93vhVGGZUuJTM27Mq5sqMvU=",
+ "_location": "/tar/minizlib",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "minizlib@^1.0.3",
+ "name": "minizlib",
+ "escapedName": "minizlib",
+ "rawSpec": "^1.0.3",
+ "saveSpec": null,
+ "fetchSpec": "^1.0.3"
+ },
+ "_requiredBy": [
+ "/tar"
+ ],
+ "_resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.0.3.tgz",
+ "_shasum": "d5c1abf77be154619952e253336eccab9b2a32f5",
+ "_spec": "minizlib@^1.0.3",
+ "_where": "/Users/rebecca/code/npm/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/minizlib/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "minipass": "^2.0.0"
+ },
+ "deprecated": false,
+ "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+ "devDependencies": {
+ "tap": "^10.3.0"
+ },
+ "files": [
+ "index.js",
+ "constants.js"
+ ],
+ "homepage": "https://github.com/isaacs/minizlib#readme",
+ "keywords": [
+ "zlib",
+ "gzip",
+ "gunzip",
+ "deflate",
+ "inflate",
+ "compression",
+ "zip",
+ "unzip"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "minizlib",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/minizlib.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J"
+ },
+ "version": "1.0.3"
+}
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/LICENSE b/deps/npm/node_modules/tar/node_modules/yallist/LICENSE
index 19129e315f..19129e315f 100644
--- a/deps/npm/node_modules/tar/node_modules/block-stream/LICENSE
+++ b/deps/npm/node_modules/tar/node_modules/yallist/LICENSE
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/README.md b/deps/npm/node_modules/tar/node_modules/yallist/README.md
new file mode 100644
index 0000000000..f586101869
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/README.md
@@ -0,0 +1,204 @@
+# yallist
+
+Yet Another Linked List
+
+There are many doubly-linked list implementations like it, but this
+one is mine.
+
+For when an array would be too big, and a Map can't be iterated in
+reverse order.
+
+
+[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
+
+## basic usage
+
+```javascript
+var yallist = require('yallist')
+var myList = yallist.create([1, 2, 3])
+myList.push('foo')
+myList.unshift('bar')
+// of course pop() and shift() are there, too
+console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
+myList.forEach(function (k) {
+ // walk the list head to tail
+})
+myList.forEachReverse(function (k, index, list) {
+ // walk the list tail to head
+})
+var myDoubledList = myList.map(function (k) {
+ return k + k
+})
+// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
+// mapReverse is also a thing
+var myDoubledListReverse = myList.mapReverse(function (k) {
+ return k + k
+}) // ['foofoo', 6, 4, 2, 'barbar']
+
+var reduced = myList.reduce(function (set, entry) {
+ set += entry
+ return set
+}, 'start')
+console.log(reduced) // 'startfoo123bar'
+```
+
+## api
+
+The whole API is considered "public".
+
+Functions with the same name as an Array method work more or less the
+same way.
+
+There are reverse versions of most things because that's the point.
+
+### Yallist
+
+Default export, the class that holds and manages a list.
+
+Call it with either a forEach-able (like an array) or a set of
+arguments, to initialize the list.
+
+The Array-ish methods all act like you'd expect. No magic length,
+though, so if you change that it won't automatically prune or add
+empty spots.
+
+### Yallist.create(..)
+
+Alias for Yallist function. Some people like factories.
+
+#### yallist.head
+
+The first node in the list
+
+#### yallist.tail
+
+The last node in the list
+
+#### yallist.length
+
+The number of nodes in the list. (Change this at your peril. It is
+not magic like Array length.)
+
+#### yallist.forEach(fn, [thisp])
+
+Call a function on each item in the list.
+
+#### yallist.forEachReverse(fn, [thisp])
+
+Call a function on each item in the list, in reverse order.
+
+#### yallist.get(n)
+
+Get the data at position `n` in the list. If you use this a lot,
+you're probably better off just using an Array.
+
+#### yallist.getReverse(n)
+
+Get the data at position `n`, counting from the tail.
+
+#### yallist.map(fn, thisp)
+
+Create a new Yallist with the result of calling the function on each
+item.
+
+#### yallist.mapReverse(fn, thisp)
+
+Same as `map`, but in reverse.
+
+#### yallist.pop()
+
+Get the data from the list tail, and remove the tail from the list.
+
+#### yallist.push(item, ...)
+
+Insert one or more items to the tail of the list.
+
+#### yallist.reduce(fn, initialValue)
+
+Like Array.reduce.
+
+#### yallist.reduceReverse
+
+Like Array.reduce, but in reverse.
+
+#### yallist.reverse
+
+Reverse the list in place.
+
+#### yallist.shift()
+
+Get the data from the list head, and remove the head from the list.
+
+#### yallist.slice([from], [to])
+
+Just like Array.slice, but returns a new Yallist.
+
+#### yallist.sliceReverse([from], [to])
+
+Just like yallist.slice, but the result is returned in reverse.
+
+#### yallist.toArray()
+
+Create an array representation of the list.
+
+#### yallist.toArrayReverse()
+
+Create a reversed array representation of the list.
+
+#### yallist.unshift(item, ...)
+
+Insert one or more items to the head of the list.
+
+#### yallist.unshiftNode(node)
+
+Move a Node object to the front of the list. (That is, pull it out of
+wherever it lives, and make it the new head.)
+
+If the node belongs to a different list, then that list will remove it
+first.
+
+#### yallist.pushNode(node)
+
+Move a Node object to the end of the list. (That is, pull it out of
+wherever it lives, and make it the new tail.)
+
+If the node belongs to a list already, then that list will remove it
+first.
+
+#### yallist.removeNode(node)
+
+Remove a node from the list, preserving referential integrity of head
+and tail and other nodes.
+
+Will throw an error if you try to have a list remove a node that
+doesn't belong to it.
+
+### Yallist.Node
+
+The class that holds the data and is actually the list.
+
+Call with `var n = new Node(value, previousNode, nextNode)`
+
+Note that if you do direct operations on Nodes themselves, it's very
+easy to get into weird states where the list is broken. Be careful :)
+
+#### node.next
+
+The next node in the list.
+
+#### node.prev
+
+The previous node in the list.
+
+#### node.value
+
+The data the node contains.
+
+#### node.list
+
+The list to which this node belongs. (Null if it does not belong to
+any list.)
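+
+A short sketch of the node-level operations above (the names are
+reused from the basic usage example; this is illustrative, not part
+of the API):
+
+```javascript
+var Yallist = require('yallist')
+var list = Yallist.create([1, 2, 3])
+var mid = list.head.next      // the node holding 2
+list.pushNode(mid)            // move it to the tail: [1, 3, 2]
+list.removeNode(mid)          // detach it entirely: [1, 3]
+console.log(mid.list)         // null
+console.log(list.toArray())   // [1, 3]
+```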
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/iterator.js b/deps/npm/node_modules/tar/node_modules/yallist/iterator.js
new file mode 100644
index 0000000000..9149b36488
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+var Yallist = require('./yallist.js')
+
+Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+}
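+
+// usage sketch: yallist.js requires this file automatically when
+// Symbol.iterator is supported, after which lists are iterable:
+//   for (const value of Yallist.create([1, 2, 3])) console.log(value)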
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/package.json b/deps/npm/node_modules/tar/node_modules/yallist/package.json
new file mode 100644
index 0000000000..c2a8e0d399
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/package.json
@@ -0,0 +1,63 @@
+{
+ "_from": "yallist@^3.0.2",
+ "_id": "yallist@3.0.2",
+ "_inBundle": false,
+ "_integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
+ "_location": "/tar/yallist",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "yallist@^3.0.2",
+ "name": "yallist",
+ "escapedName": "yallist",
+ "rawSpec": "^3.0.2",
+ "saveSpec": null,
+ "fetchSpec": "^3.0.2"
+ },
+ "_requiredBy": [
+ "/tar",
+ "/tar/minipass"
+ ],
+ "_resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz",
+ "_shasum": "8452b4bb7e83c7c188d8041c1a837c773d6d8bb9",
+ "_spec": "yallist@^3.0.2",
+ "_where": "/Users/rebecca/code/npm/node_modules/tar",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me/"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/yallist/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "Yet Another Linked List",
+ "devDependencies": {
+ "tap": "^10.3.0"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "files": [
+ "yallist.js",
+ "iterator.js"
+ ],
+ "homepage": "https://github.com/isaacs/yallist#readme",
+ "license": "ISC",
+ "main": "yallist.js",
+ "name": "yallist",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/isaacs/yallist.git"
+ },
+ "scripts": {
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100"
+ },
+ "version": "3.0.2"
+}
diff --git a/deps/npm/node_modules/tar/node_modules/yallist/yallist.js b/deps/npm/node_modules/tar/node_modules/yallist/yallist.js
new file mode 100644
index 0000000000..4805bc69fa
--- /dev/null
+++ b/deps/npm/node_modules/tar/node_modules/yallist/yallist.js
@@ -0,0 +1,376 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+ var self = this
+ if (!(self instanceof Yallist)) {
+ self = new Yallist()
+ }
+
+ self.tail = null
+ self.head = null
+ self.length = 0
+
+ if (list && typeof list.forEach === 'function') {
+ list.forEach(function (item) {
+ self.push(item)
+ })
+ } else if (arguments.length > 0) {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ self.push(arguments[i])
+ }
+ }
+
+ return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+ if (node.list !== this) {
+ throw new Error('removing node which does not belong to this list')
+ }
+
+ var next = node.next
+ var prev = node.prev
+
+ if (next) {
+ next.prev = prev
+ }
+
+ if (prev) {
+ prev.next = next
+ }
+
+ if (node === this.head) {
+ this.head = next
+ }
+ if (node === this.tail) {
+ this.tail = prev
+ }
+
+ node.list.length--
+ node.next = null
+ node.prev = null
+ node.list = null
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+ if (node === this.head) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var head = this.head
+ node.list = this
+ node.next = head
+ if (head) {
+ head.prev = node
+ }
+
+ this.head = node
+ if (!this.tail) {
+ this.tail = node
+ }
+ this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+ if (node === this.tail) {
+ return
+ }
+
+ if (node.list) {
+ node.list.removeNode(node)
+ }
+
+ var tail = this.tail
+ node.list = this
+ node.prev = tail
+ if (tail) {
+ tail.next = node
+ }
+
+ this.tail = node
+ if (!this.head) {
+ this.head = node
+ }
+ this.length++
+}
+
+Yallist.prototype.push = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ push(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.unshift = function () {
+ for (var i = 0, l = arguments.length; i < l; i++) {
+ unshift(this, arguments[i])
+ }
+ return this.length
+}
+
+Yallist.prototype.pop = function () {
+ if (!this.tail) {
+ return undefined
+ }
+
+ var res = this.tail.value
+ this.tail = this.tail.prev
+ if (this.tail) {
+ this.tail.next = null
+ } else {
+ this.head = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.shift = function () {
+ if (!this.head) {
+ return undefined
+ }
+
+ var res = this.head.value
+ this.head = this.head.next
+ if (this.head) {
+ this.head.prev = null
+ } else {
+ this.tail = null
+ }
+ this.length--
+ return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.head, i = 0; walker !== null; i++) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.next
+ }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+ thisp = thisp || this
+ for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+ fn.call(thisp, walker.value, i, this)
+ walker = walker.prev
+ }
+}
+
+Yallist.prototype.get = function (n) {
+ for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.next
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.getReverse = function (n) {
+ for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+ // abort out of the list early if we hit a cycle
+ walker = walker.prev
+ }
+ if (i === n && walker !== null) {
+ return walker.value
+ }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.head; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.next
+ }
+ return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+ thisp = thisp || this
+ var res = new Yallist()
+ for (var walker = this.tail; walker !== null;) {
+ res.push(fn.call(thisp, walker.value, this))
+ walker = walker.prev
+ }
+ return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+ var acc
+ var walker = this.head
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.head) {
+ walker = this.head.next
+ acc = this.head.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = 0; walker !== null; i++) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.next
+ }
+
+ return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+ var acc
+ var walker = this.tail
+ if (arguments.length > 1) {
+ acc = initial
+ } else if (this.tail) {
+ walker = this.tail.prev
+ acc = this.tail.value
+ } else {
+ throw new TypeError('Reduce of empty list with no initial value')
+ }
+
+ for (var i = this.length - 1; walker !== null; i--) {
+ acc = fn(acc, walker.value, i)
+ walker = walker.prev
+ }
+
+ return acc
+}
+
+Yallist.prototype.toArray = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.head; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.next
+ }
+ return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+ var arr = new Array(this.length)
+ for (var i = 0, walker = this.tail; walker !== null; i++) {
+ arr[i] = walker.value
+ walker = walker.prev
+ }
+ return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+ walker = walker.next
+ }
+ for (; walker !== null && i < to; i++, walker = walker.next) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+ to = to || this.length
+ if (to < 0) {
+ to += this.length
+ }
+ from = from || 0
+ if (from < 0) {
+ from += this.length
+ }
+ var ret = new Yallist()
+ if (to < from || to < 0) {
+ return ret
+ }
+ if (from < 0) {
+ from = 0
+ }
+ if (to > this.length) {
+ to = this.length
+ }
+ for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+ walker = walker.prev
+ }
+ for (; walker !== null && i > from; i--, walker = walker.prev) {
+ ret.push(walker.value)
+ }
+ return ret
+}
+
+Yallist.prototype.reverse = function () {
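+  // walk the list swapping each node's prev/next pointers, then swap head and tail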
+ var head = this.head
+ var tail = this.tail
+ for (var walker = head; walker !== null; walker = walker.prev) {
+ var p = walker.prev
+ walker.prev = walker.next
+ walker.next = p
+ }
+ this.head = tail
+ this.tail = head
+ return this
+}
+
+function push (self, item) {
+ self.tail = new Node(item, self.tail, null, self)
+ if (!self.head) {
+ self.head = self.tail
+ }
+ self.length++
+}
+
+function unshift (self, item) {
+ self.head = new Node(item, null, self.head, self)
+ if (!self.tail) {
+ self.tail = self.head
+ }
+ self.length++
+}
+
+function Node (value, prev, next, list) {
+ if (!(this instanceof Node)) {
+ return new Node(value, prev, next, list)
+ }
+
+ this.list = list
+ this.value = value
+
+ if (prev) {
+ prev.next = this
+ this.prev = prev
+ } else {
+ this.prev = null
+ }
+
+ if (next) {
+ next.prev = this
+ this.next = next
+ } else {
+ this.next = null
+ }
+}
+
+try {
+  // add if support for Symbol.iterator is present
+ require('./iterator.js')
+} catch (er) {}
diff --git a/deps/npm/node_modules/tar/package.json b/deps/npm/node_modules/tar/package.json
index 4dc0c151cb..4d6683463a 100644
--- a/deps/npm/node_modules/tar/package.json
+++ b/deps/npm/node_modules/tar/package.json
@@ -1,65 +1,78 @@
{
- "_from": "tar@~2.2.1",
- "_id": "tar@2.2.1",
- "_integrity": "sha1-jk0qJWwOIYXGsYrWlK7JaLg8sdE=",
+ "_from": "tar@latest",
+ "_id": "tar@4.0.1",
+ "_inBundle": false,
+ "_integrity": "sha512-XBpU+/azPOMvE5m2Tn7Sl6U1ahpGfe77LkdrAlFilwrgHZsR+2iy0l8klQtfJNM+DACZO2Xrw10MTyQRB4du5A==",
"_location": "/tar",
- "_phantomChildren": {
- "inherits": "2.0.3"
- },
+ "_phantomChildren": {},
"_requested": {
- "type": "range",
+ "type": "tag",
"registry": true,
- "raw": "tar@~2.2.1",
+ "raw": "tar@latest",
"name": "tar",
"escapedName": "tar",
- "rawSpec": "~2.2.1",
+ "rawSpec": "latest",
"saveSpec": null,
- "fetchSpec": "~2.2.1"
+ "fetchSpec": "latest"
},
"_requiredBy": [
+ "#USER",
"/",
- "/node-gyp"
+ "/pacote"
],
- "_resolved": "https://registry.npmjs.org/tar/-/tar-2.2.1.tgz",
- "_shasum": "8e4d2a256c0e2185c6b18ad694aec968b83cb1d1",
- "_shrinkwrap": null,
- "_spec": "tar@~2.2.1",
- "_where": "/Users/zkat/Documents/code/npm",
+ "_resolved": "https://registry.npmjs.org/tar/-/tar-4.0.1.tgz",
+ "_shasum": "3f5b2e5289db30c2abe4c960f43d0d9fff96aaf0",
+ "_spec": "tar@latest",
+ "_where": "/Users/rebecca/code/npm",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
- "bin": null,
"bugs": {
- "url": "https://github.com/isaacs/node-tar/issues"
+ "url": "https://github.com/npm/node-tar/issues"
},
"bundleDependencies": false,
"dependencies": {
- "block-stream": "*",
- "fstream": "^1.0.2",
- "inherits": "2"
+ "chownr": "^1.0.1",
+ "minipass": "^2.0.2",
+ "minizlib": "^1.0.3",
+ "mkdirp": "^0.5.0",
+ "yallist": "^3.0.2"
},
"deprecated": false,
"description": "tar for node",
"devDependencies": {
- "graceful-fs": "^4.1.2",
- "mkdirp": "^0.5.0",
+ "chmodr": "^1.0.2",
+ "end-of-stream": "^1.4.0",
+ "events-to-array": "^1.1.2",
+ "mutate-fs": "^1.1.0",
"rimraf": "1.x",
- "tap": "0.x"
+ "tap": "^10.3.3",
+ "tar-fs": "^1.15.2",
+ "tar-stream": "^1.5.2"
},
- "homepage": "https://github.com/isaacs/node-tar#readme",
+ "engines": {
+ "node": ">=4.5"
+ },
+ "files": [
+ "index.js",
+ "lib/"
+ ],
+ "homepage": "https://github.com/npm/node-tar#readme",
"license": "ISC",
- "main": "tar.js",
"name": "tar",
- "optionalDependencies": {},
- "peerDependencies": {},
"repository": {
"type": "git",
- "url": "git://github.com/isaacs/node-tar.git"
+ "url": "git+https://github.com/npm/node-tar.git"
},
"scripts": {
- "test": "tap test/*.js"
+ "bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done",
+ "genparse": "node scripts/generate-parse-fixtures.js",
+ "postpublish": "git push origin --all; git push origin --tags",
+ "postversion": "npm publish",
+ "preversion": "npm test",
+ "test": "tap test/*.js --100 -J --coverage-report=text"
},
- "version": "2.2.1"
+ "version": "4.0.1"
}
diff --git a/deps/npm/node_modules/tar/tar.js b/deps/npm/node_modules/tar/tar.js
deleted file mode 100644
index a81298b9a0..0000000000
--- a/deps/npm/node_modules/tar/tar.js
+++ /dev/null
@@ -1,173 +0,0 @@
-// field paths that every tar file must have.
-// header is padded to 512 bytes.
-var f = 0
- , fields = {}
- , path = fields.path = f++
- , mode = fields.mode = f++
- , uid = fields.uid = f++
- , gid = fields.gid = f++
- , size = fields.size = f++
- , mtime = fields.mtime = f++
- , cksum = fields.cksum = f++
- , type = fields.type = f++
- , linkpath = fields.linkpath = f++
- , headerSize = 512
- , blockSize = 512
- , fieldSize = []
-
-fieldSize[path] = 100
-fieldSize[mode] = 8
-fieldSize[uid] = 8
-fieldSize[gid] = 8
-fieldSize[size] = 12
-fieldSize[mtime] = 12
-fieldSize[cksum] = 8
-fieldSize[type] = 1
-fieldSize[linkpath] = 100
-
-// "ustar\0" may introduce another bunch of headers.
-// these are optional, and will be nulled out if not present.
-
-var ustar = fields.ustar = f++
- , ustarver = fields.ustarver = f++
- , uname = fields.uname = f++
- , gname = fields.gname = f++
- , devmaj = fields.devmaj = f++
- , devmin = fields.devmin = f++
- , prefix = fields.prefix = f++
- , fill = fields.fill = f++
-
-// terminate fields.
-fields[f] = null
-
-fieldSize[ustar] = 6
-fieldSize[ustarver] = 2
-fieldSize[uname] = 32
-fieldSize[gname] = 32
-fieldSize[devmaj] = 8
-fieldSize[devmin] = 8
-fieldSize[prefix] = 155
-fieldSize[fill] = 12
-
-// nb: prefix field may in fact be 130 bytes of prefix,
-// a null char, 12 bytes for atime, 12 bytes for ctime.
-//
-// To recognize this format:
-// 1. prefix[130] === ' ' or '\0'
-// 2. atime and ctime are octal numeric values
-// 3. atime and ctime have ' ' in their last byte
-
-var fieldEnds = {}
- , fieldOffs = {}
- , fe = 0
-for (var i = 0; i < f; i ++) {
- fieldOffs[i] = fe
- fieldEnds[i] = (fe += fieldSize[i])
-}
-
-// build a translation table of field paths.
-Object.keys(fields).forEach(function (f) {
- if (fields[f] !== null) fields[fields[f]] = f
-})
-
-// different values of the 'type' field
-// paths match the values of Stats.isX() functions, where appropriate
-var types =
- { 0: "File"
- , "\0": "OldFile" // like 0
- , "": "OldFile"
- , 1: "Link"
- , 2: "SymbolicLink"
- , 3: "CharacterDevice"
- , 4: "BlockDevice"
- , 5: "Directory"
- , 6: "FIFO"
- , 7: "ContiguousFile" // like 0
- // posix headers
- , g: "GlobalExtendedHeader" // k=v for the rest of the archive
- , x: "ExtendedHeader" // k=v for the next file
- // vendor-specific stuff
- , A: "SolarisACL" // skip
- , D: "GNUDumpDir" // like 5, but with data, which should be skipped
- , I: "Inode" // metadata only, skip
- , K: "NextFileHasLongLinkpath" // data = link path of next file
- , L: "NextFileHasLongPath" // data = path of next file
- , M: "ContinuationFile" // skip
- , N: "OldGnuLongPath" // like L
- , S: "SparseFile" // skip
- , V: "TapeVolumeHeader" // skip
- , X: "OldExtendedHeader" // like x
- }
-
-Object.keys(types).forEach(function (t) {
- types[types[t]] = types[types[t]] || t
-})
-
-// values for the mode field
-var modes =
- { suid: 04000 // set uid on extraction
- , sgid: 02000 // set gid on extraction
- , svtx: 01000 // set restricted deletion flag on dirs on extraction
- , uread: 0400
- , uwrite: 0200
- , uexec: 0100
- , gread: 040
- , gwrite: 020
- , gexec: 010
- , oread: 4
- , owrite: 2
- , oexec: 1
- , all: 07777
- }
-
-var numeric =
- { mode: true
- , uid: true
- , gid: true
- , size: true
- , mtime: true
- , devmaj: true
- , devmin: true
- , cksum: true
- , atime: true
- , ctime: true
- , dev: true
- , ino: true
- , nlink: true
- }
-
-Object.keys(modes).forEach(function (t) {
- modes[modes[t]] = modes[modes[t]] || t
-})
-
-var knownExtended =
- { atime: true
- , charset: true
- , comment: true
- , ctime: true
- , gid: true
- , gname: true
- , linkpath: true
- , mtime: true
- , path: true
- , realtime: true
- , security: true
- , size: true
- , uid: true
- , uname: true }
-
-
-exports.fields = fields
-exports.fieldSize = fieldSize
-exports.fieldOffs = fieldOffs
-exports.fieldEnds = fieldEnds
-exports.types = types
-exports.modes = modes
-exports.numeric = numeric
-exports.headerSize = headerSize
-exports.blockSize = blockSize
-exports.knownExtended = knownExtended
-
-exports.Pack = require("./lib/pack.js")
-exports.Parse = require("./lib/parse.js")
-exports.Extract = require("./lib/extract.js")
diff --git a/deps/npm/node_modules/tar/test/00-setup-fixtures.js b/deps/npm/node_modules/tar/test/00-setup-fixtures.js
deleted file mode 100644
index 1524ff7af0..0000000000
--- a/deps/npm/node_modules/tar/test/00-setup-fixtures.js
+++ /dev/null
@@ -1,53 +0,0 @@
-// the fixtures have some weird stuff that is painful
-// to include directly in the repo for various reasons.
-//
-// So, unpack the fixtures with the system tar first.
-//
-// This means, of course, that it'll only work if you
-// already have a tar implementation, and some of them
-// will not properly unpack the fixtures anyway.
-//
-// But, since usually those tests will fail on Windows
-// and other systems with less capable filesystems anyway,
-// at least this way we don't cause inconveniences by
-// merely cloning the repo or installing the package.
-
-var tap = require("tap")
-, child_process = require("child_process")
-, rimraf = require("rimraf")
-, test = tap.test
-, path = require("path")
-
-test("clean fixtures", function (t) {
- rimraf(path.resolve(__dirname, "fixtures"), function (er) {
- t.ifError(er, "rimraf ./fixtures/")
- t.end()
- })
-})
-
-test("clean tmp", function (t) {
- rimraf(path.resolve(__dirname, "tmp"), function (er) {
- t.ifError(er, "rimraf ./tmp/")
- t.end()
- })
-})
-
-test("extract fixtures", function (t) {
- var c = child_process.spawn("tar"
- ,["xzvf", "fixtures.tgz"]
- ,{ cwd: __dirname })
-
- c.stdout.on("data", errwrite)
- c.stderr.on("data", errwrite)
- function errwrite (chunk) {
- process.stderr.write(chunk)
- }
-
- c.on("exit", function (code) {
- t.equal(code, 0, "extract fixtures should exit with 0")
- if (code) {
- t.comment("Note, all tests from here on out will fail because of this.")
- }
- t.end()
- })
-})
diff --git a/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz b/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz
deleted file mode 100644
index 9e7014d85a..0000000000
--- a/deps/npm/node_modules/tar/test/cb-never-called-1.0.1.tgz
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/dir-normalization.js b/deps/npm/node_modules/tar/test/dir-normalization.js
deleted file mode 100644
index 9719c42f35..0000000000
--- a/deps/npm/node_modules/tar/test/dir-normalization.js
+++ /dev/null
@@ -1,177 +0,0 @@
-// Set the umask, so that it works the same everywhere.
-process.umask(parseInt('22', 8))
-
-var fs = require('fs')
-var path = require('path')
-
-var fstream = require('fstream')
-var test = require('tap').test
-
-var tar = require('../tar.js')
-var file = path.resolve(__dirname, 'dir-normalization.tar')
-var target = path.resolve(__dirname, 'tmp/dir-normalization-test')
-var ee = 0
-
-var expectEntries = [
- { path: 'fixtures/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/a/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/the-chumbler',
- mode: '755',
- type: '2',
- linkpath: path.resolve(target, 'a/b/c/d/the-chumbler'),
- },
- { path: 'fixtures/a/b/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/a/x',
- mode: '644',
- type: '0',
- linkpath: ''
- },
- { path: 'fixtures/a/b/c/',
- mode: '755',
- type: '5',
- linkpath: ''
- },
- { path: 'fixtures/a/b/c/y',
- mode: '755',
- type: '2',
- linkpath: '../../x',
- }
-]
-
-var ef = 0
-var expectFiles = [
- { path: '',
- mode: '40755',
- type: 'Directory',
- depth: 0,
- linkpath: undefined
- },
- { path: '/fixtures',
- mode: '40755',
- type: 'Directory',
- depth: 1,
- linkpath: undefined
- },
- { path: '/fixtures/a',
- mode: '40755',
- type: 'Directory',
- depth: 2,
- linkpath: undefined
- },
- { path: '/fixtures/a/b',
- mode: '40755',
- type: 'Directory',
- depth: 3,
- linkpath: undefined
- },
- { path: '/fixtures/a/b/c',
- mode: '40755',
- type: 'Directory',
- depth: 4,
- linkpath: undefined
- },
- { path: '/fixtures/a/b/c/y',
- mode: '120755',
- type: 'SymbolicLink',
- depth: 5,
- linkpath: '../../x'
- },
- { path: '/fixtures/a/x',
- mode: '100644',
- type: 'File',
- depth: 3,
- linkpath: undefined
- },
- { path: '/fixtures/the-chumbler',
- mode: '120755',
- type: 'SymbolicLink',
- depth: 2,
- linkpath: path.resolve(target, 'a/b/c/d/the-chumbler')
- }
-]
-
-test('preclean', function (t) {
- require('rimraf').sync(path.join(__dirname, '/tmp/dir-normalization-test'))
- t.pass('cleaned!')
- t.end()
-})
-
-test('extract test', function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- inp.pipe(extract)
-
- extract.on('end', function () {
- t.equal(ee, expectEntries.length, 'should see ' + expectEntries.length + ' entries')
-
- // should get no more entries after end
- extract.removeAllListeners('entry')
- extract.on('entry', function (e) {
- t.fail('Should not get entries after end!')
- })
-
- next()
- })
-
- extract.on('entry', function (entry) {
- var mode = entry.props.mode & (~parseInt('22', 8))
- var found = {
- path: entry.path,
- mode: mode.toString(8),
- type: entry.props.type,
- linkpath: entry.props.linkpath,
- }
-
- var wanted = expectEntries[ee++]
- t.equivalent(found, wanted, 'tar entry ' + ee + ' ' + (wanted && wanted.path))
- })
-
- function next () {
- var r = fstream.Reader({
- path: target,
- type: 'Directory',
- sort: 'alpha'
- })
-
- r.on('ready', function () {
- foundEntry(r)
- })
-
- r.on('end', finish)
-
- function foundEntry (entry) {
- var p = entry.path.substr(target.length)
- var mode = entry.props.mode & (~parseInt('22', 8))
- var found = {
- path: p,
- mode: mode.toString(8),
- type: entry.props.type,
- depth: entry.props.depth,
- linkpath: entry.props.linkpath
- }
-
- var wanted = expectFiles[ef++]
- t.equivalent(found, wanted, 'unpacked file ' + ef + ' ' + (wanted && wanted.path))
-
- entry.on('entry', foundEntry)
- }
-
- function finish () {
- t.equal(ef, expectFiles.length, 'should have ' + ef + ' items')
- t.end()
- }
- }
-})
diff --git a/deps/npm/node_modules/tar/test/dir-normalization.tar b/deps/npm/node_modules/tar/test/dir-normalization.tar
deleted file mode 100644
index 3c4845356c..0000000000
--- a/deps/npm/node_modules/tar/test/dir-normalization.tar
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/error-on-broken.js b/deps/npm/node_modules/tar/test/error-on-broken.js
deleted file mode 100644
index e484920fd9..0000000000
--- a/deps/npm/node_modules/tar/test/error-on-broken.js
+++ /dev/null
@@ -1,33 +0,0 @@
-var fs = require('fs')
-var path = require('path')
-var zlib = require('zlib')
-
-var tap = require('tap')
-
-var tar = require('../tar.js')
-
-var file = path.join(__dirname, 'cb-never-called-1.0.1.tgz')
-var target = path.join(__dirname, 'tmp/extract-test')
-
-tap.test('preclean', function (t) {
- require('rimraf').sync(__dirname + '/tmp/extract-test')
- t.pass('cleaned!')
- t.end()
-})
-
-tap.test('extract test', function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- inp.pipe(zlib.createGunzip()).pipe(extract)
-
- extract.on('error', function (er) {
- t.equal(er.message, 'unexpected eof', 'error noticed')
- t.end()
- })
-
- extract.on('end', function () {
- t.fail('shouldn\'t reach this point due to errors')
- t.end()
- })
-})
diff --git a/deps/npm/node_modules/tar/test/extract-move.js b/deps/npm/node_modules/tar/test/extract-move.js
deleted file mode 100644
index 45400cd9bb..0000000000
--- a/deps/npm/node_modules/tar/test/extract-move.js
+++ /dev/null
@@ -1,132 +0,0 @@
-// Set the umask, so that it works the same everywhere.
-process.umask(parseInt('22', 8))
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , gfs = require("graceful-fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/dir.tar")
- , target = path.resolve(__dirname, "tmp/extract-test")
- , index = 0
- , fstream = require("fstream")
- , rimraf = require("rimraf")
- , mkdirp = require("mkdirp")
-
- , ee = 0
- , expectEntries = [
- {
- "path" : "dir/",
- "mode" : "750",
- "type" : "5",
- "depth" : undefined,
- "size" : 0,
- "linkpath" : "",
- "nlink" : undefined,
- "dev" : undefined,
- "ino" : undefined
- },
- {
- "path" : "dir/sub/",
- "mode" : "750",
- "type" : "5",
- "depth" : undefined,
- "size" : 0,
- "linkpath" : "",
- "nlink" : undefined,
- "dev" : undefined,
- "ino" : undefined
- } ]
-
-function slow (fs, method, t1, t2) {
- var orig = fs[method]
- if (!orig) return null
- fs[method] = function () {
- var args = [].slice.call(arguments)
- console.error("slow", method, args[0])
- var cb = args.pop()
-
- setTimeout(function () {
- orig.apply(fs, args.concat(function(er, data) {
- setTimeout(function() {
- cb(er, data)
- }, t2)
- }))
- }, t1)
- }
-}
-
-// Make sure we get the graceful-fs that fstream is using.
-var gfs2
-try {
- gfs2 = require("fstream/node_modules/graceful-fs")
-} catch (er) {}
-
-var slowMethods = ["chown", "chmod", "utimes", "lutimes"]
-slowMethods.forEach(function (method) {
- var t1 = 500
- var t2 = 0
- slow(fs, method, t1, t2)
- slow(gfs, method, t1, t2)
- if (gfs2) {
- slow(gfs2, method, t1, t2)
- }
-})
-
-
-
-// The extract class basically just pipes the input
-// to a Reader, and then to a fstream.DirWriter
-
-// So, this is as much a test of fstream.Reader and fstream.Writer
-// as it is of tar.Extract, but it sort of makes sense.
-
-tap.test("preclean", function (t) {
- rimraf.sync(target)
- mkdirp.sync(target)
- t.pass("cleaned!")
- t.end()
-})
-
-tap.test("extract test", function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- // give it a weird buffer size to try to break in odd places
- inp.bufferSize = 1234
-
- inp.pipe(extract)
-
- extract.on("end", function () {
- rimraf.sync(target)
-
- t.equal(ee, expectEntries.length, "should see "+ee+" entries")
-
- // should get no more entries after end
- extract.removeAllListeners("entry")
- extract.on("entry", function (e) {
- t.fail("Should not get entries after end!")
- })
-
- t.end()
- })
-
-
- extract.on("entry", function (entry) {
- var found =
- { path: entry.path
- , mode: entry.props.mode.toString(8)
- , type: entry.props.type
- , depth: entry.props.depth
- , size: entry.props.size
- , linkpath: entry.props.linkpath
- , nlink: entry.props.nlink
- , dev: entry.props.dev
- , ino: entry.props.ino
- }
-
- var wanted = expectEntries[ee ++]
-
- t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
- })
-})
diff --git a/deps/npm/node_modules/tar/test/extract.js b/deps/npm/node_modules/tar/test/extract.js
deleted file mode 100644
index eca4e7cc96..0000000000
--- a/deps/npm/node_modules/tar/test/extract.js
+++ /dev/null
@@ -1,367 +0,0 @@
-// Set the umask, so that it works the same everywhere.
-process.umask(parseInt('22', 8))
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/c.tar")
- , target = path.resolve(__dirname, "tmp/extract-test")
- , index = 0
- , fstream = require("fstream")
-
- , ee = 0
- , expectEntries =
-[ { path: 'c.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 513,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'cc.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 513,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 100,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'Ω.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 2,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: 'Ω.txt',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 2,
- linkpath: '',
- nlink: 1,
- dev: 234881026,
- ino: 51693379 },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 200,
- linkpath: '',
- nlink: 1,
- dev: 234881026,
- ino: 51681874 },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 201,
- linkpath: '',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
- mode: '777',
- type: '2',
- depth: undefined,
- size: 0,
- linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- nlink: undefined,
- dev: undefined,
- ino: undefined },
- { path: '200-hard',
- mode: '644',
- type: '0',
- depth: undefined,
- size: 200,
- linkpath: '',
- nlink: 2,
- dev: 234881026,
- ino: 51681874 },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '644',
- type: '1',
- depth: undefined,
- size: 0,
- linkpath: path.resolve(target, '200-hard'),
- nlink: 2,
- dev: 234881026,
- ino: 51681874 } ]
-
- , ef = 0
- , expectFiles =
-[ { path: '',
- mode: '40755',
- type: 'Directory',
- depth: 0,
- linkpath: undefined },
- { path: '/200-hard',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 200,
- linkpath: undefined,
- nlink: 2 },
- { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
- mode: '120777',
- type: 'SymbolicLink',
- depth: 1,
- size: 200,
- linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- nlink: 1 },
- { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '100644',
- type: 'Link',
- depth: 1,
- size: 200,
- linkpath: path.join(target, '200-hard'),
- nlink: 2 },
- { path: '/c.txt',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 513,
- linkpath: undefined,
- nlink: 1 },
- { path: '/cc.txt',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 513,
- linkpath: undefined,
- nlink: 1 },
- { path: '/r',
- mode: '40755',
- type: 'Directory',
- depth: 1,
- linkpath: undefined },
- { path: '/r/e',
- mode: '40755',
- type: 'Directory',
- depth: 2,
- linkpath: undefined },
- { path: '/r/e/a',
- mode: '40755',
- type: 'Directory',
- depth: 3,
- linkpath: undefined },
- { path: '/r/e/a/l',
- mode: '40755',
- type: 'Directory',
- depth: 4,
- linkpath: undefined },
- { path: '/r/e/a/l/l',
- mode: '40755',
- type: 'Directory',
- depth: 5,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y',
- mode: '40755',
- type: 'Directory',
- depth: 6,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-',
- mode: '40755',
- type: 'Directory',
- depth: 7,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d',
- mode: '40755',
- type: 'Directory',
- depth: 8,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e',
- mode: '40755',
- type: 'Directory',
- depth: 9,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e',
- mode: '40755',
- type: 'Directory',
- depth: 10,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p',
- mode: '40755',
- type: 'Directory',
- depth: 11,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-',
- mode: '40755',
- type: 'Directory',
- depth: 12,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f',
- mode: '40755',
- type: 'Directory',
- depth: 13,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o',
- mode: '40755',
- type: 'Directory',
- depth: 14,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l',
- mode: '40755',
- type: 'Directory',
- depth: 15,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d',
- mode: '40755',
- type: 'Directory',
- depth: 16,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e',
- mode: '40755',
- type: 'Directory',
- depth: 17,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r',
- mode: '40755',
- type: 'Directory',
- depth: 18,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-',
- mode: '40755',
- type: 'Directory',
- depth: 19,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p',
- mode: '40755',
- type: 'Directory',
- depth: 20,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a',
- mode: '40755',
- type: 'Directory',
- depth: 21,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t',
- mode: '40755',
- type: 'Directory',
- depth: 22,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h',
- mode: '40755',
- type: 'Directory',
- depth: 23,
- linkpath: undefined },
- { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: '100644',
- type: 'File',
- depth: 24,
- size: 100,
- linkpath: undefined,
- nlink: 1 },
- { path: '/Ω.txt',
- mode: '100644',
- type: 'File',
- depth: 1,
- size: 2,
- linkpath: undefined,
- nlink: 1 } ]
-
-
-
-// The extract class basically just pipes the input
-// to a Reader, and then to a fstream.DirWriter
-
-// So, this is as much a test of fstream.Reader and fstream.Writer
-// as it is of tar.Extract, but it sort of makes sense.
-
-tap.test("preclean", function (t) {
- require("rimraf").sync(__dirname + "/tmp/extract-test")
- t.pass("cleaned!")
- t.end()
-})
-
-tap.test("extract test", function (t) {
- var extract = tar.Extract(target)
- var inp = fs.createReadStream(file)
-
- // give it a weird buffer size to try to break in odd places
- inp.bufferSize = 1234
-
- inp.pipe(extract)
-
- extract.on("end", function () {
- t.equal(ee, expectEntries.length, "should see "+ee+" entries")
-
- // should get no more entries after end
- extract.removeAllListeners("entry")
- extract.on("entry", function (e) {
- t.fail("Should not get entries after end!")
- })
-
- next()
- })
-
- extract.on("entry", function (entry) {
- var found =
- { path: entry.path
- , mode: entry.props.mode.toString(8)
- , type: entry.props.type
- , depth: entry.props.depth
- , size: entry.props.size
- , linkpath: entry.props.linkpath
- , nlink: entry.props.nlink
- , dev: entry.props.dev
- , ino: entry.props.ino
- }
-
- var wanted = expectEntries[ee ++]
-
- t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
- })
-
- function next () {
- var r = fstream.Reader({ path: target
- , type: "Directory"
- // this is just to encourage consistency
- , sort: "alpha" })
-
- r.on("ready", function () {
- foundEntry(r)
- })
-
- r.on("end", finish)
-
- function foundEntry (entry) {
- var p = entry.path.substr(target.length)
- var found =
- { path: p
- , mode: entry.props.mode.toString(8)
- , type: entry.props.type
- , depth: entry.props.depth
- , size: entry.props.size
- , linkpath: entry.props.linkpath
- , nlink: entry.props.nlink
- }
-
- var wanted = expectFiles[ef ++]
-
- t.has(found, wanted, "unpacked file " + ef + " " + wanted.path)
-
- entry.on("entry", foundEntry)
- }
-
- function finish () {
- t.equal(ef, expectFiles.length, "should have "+ef+" items")
- t.end()
- }
- }
-})
diff --git a/deps/npm/node_modules/tar/test/fixtures.tgz b/deps/npm/node_modules/tar/test/fixtures.tgz
deleted file mode 100644
index f1676023af..0000000000
--- a/deps/npm/node_modules/tar/test/fixtures.tgz
+++ /dev/null
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/header.js b/deps/npm/node_modules/tar/test/header.js
deleted file mode 100644
index 8ea6f79500..0000000000
--- a/deps/npm/node_modules/tar/test/header.js
+++ /dev/null
@@ -1,183 +0,0 @@
-var tap = require("tap")
-var TarHeader = require("../lib/header.js")
-var tar = require("../tar.js")
-var fs = require("fs")
-
-
-var headers =
- { "a.txt file header":
- [ "612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'a.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 257
- , mtime: 1319493851
- , cksum: 5417
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
-
- , "omega pax": // the extended header from omega tar.
- [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'PaxHeader/Ω.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 120
- , mtime: 1301254537
- , cksum: 6697
- , type: 'x'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' } ]
-
- , "omega file header":
- [ "cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'Ω.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 2
- , mtime: 1301254537
- , cksum: 5690
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' } ]
-
- , "foo.js file header":
- [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'foo.js'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 4
- , mtime: 1301246433
- , cksum: 5519
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
-
- , "b.txt file header":
- [ "622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true
- , path: 'b.txt'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 512
- , mtime: 1319494079
- , cksum: 5425
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
-
- , "deep nested file":
- [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
- , { cksumValid: true,
- path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
- , mode: 420
- , uid: 24561
- , gid: 20
- , size: 100
- , mtime: 1319687003
- , cksum: 18124
- , type: '0'
- , linkpath: ''
- , ustar: 'ustar\0'
- , ustarver: '00'
- , uname: 'isaacs'
- , gname: 'staff'
- , devmaj: 0
- , devmin: 0
- , fill: '' }
- ]
- }
-
-tap.test("parsing", function (t) {
- Object.keys(headers).forEach(function (name) {
- var h = headers[name]
- , header = new Buffer(h[0], "hex")
- , expect = h[1]
- , parsed = new TarHeader(header)
-
- // console.error(parsed)
- t.has(parsed, expect, "parse " + name)
- })
- t.end()
-})
-
-tap.test("encoding", function (t) {
- Object.keys(headers).forEach(function (name) {
- var h = headers[name]
- , expect = new Buffer(h[0], "hex")
- , encoded = TarHeader.encode(h[1])
-
- // might have slightly different bytes, since the standard
- // isn't very strict, but should have the same semantics
- // checkSum will be different, but cksumValid will be true
-
- var th = new TarHeader(encoded)
- delete h[1].block
- delete h[1].needExtended
- delete h[1].cksum
- t.has(th, h[1], "fields "+name)
- })
- t.end()
-})
-
-// test these manually. they're a bit rare to find in the wild
-tap.test("parseNumeric tests", function (t) {
- var parseNumeric = TarHeader.parseNumeric
- , numbers =
- { "303737373737373700": 2097151
- , "30373737373737373737373700": 8589934591
- , "303030303036343400": 420
- , "800000ffffffffffff": 281474976710655
- , "ffffff000000000001": -281474976710654
- , "ffffff000000000000": -281474976710655
- , "800000000000200000": 2097152
- , "8000000000001544c5": 1393861
- , "ffffffffffff1544c5": -15383354 }
- Object.keys(numbers).forEach(function (n) {
- var b = new Buffer(n, "hex")
- t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n])
- })
- t.end()
-})
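
A minimal sketch (not part of the deleted file) of the numeric decoding these
parseNumeric fixtures exercise: header fields are NUL/space-terminated octal
ASCII by default, and the GNU/star base-256 extension flags binary values with
a leading 0x80 (positive) or 0xff (negative) byte. Note that the negative
expectations above invert the bytes without the two's-complement +1, so this
sketch does the same:

function parseNumeric (buf) {
  if (buf[0] & 0x80) {
    // base-256 extension: 0x80 flags positive, 0xff flags negative
    var positive = buf[0] === 0x80
      , sum = 0
    for (var i = positive ? 1 : 0; i < buf.length; i ++) {
      sum = sum * 256 + (positive ? buf[i] : 0xff - buf[i])
    }
    return positive ? sum : -sum
  }
  // default: octal ASCII, e.g. "00000644\0" === 420
  var str = buf.toString("ascii").split("\0")[0].trim()
  return parseInt(str, 8) || 0
}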
diff --git a/deps/npm/node_modules/tar/test/pack-no-proprietary.js b/deps/npm/node_modules/tar/test/pack-no-proprietary.js
deleted file mode 100644
index d4b03a1fe9..0000000000
--- a/deps/npm/node_modules/tar/test/pack-no-proprietary.js
+++ /dev/null
@@ -1,886 +0,0 @@
-// This is exactly like test/pack.js, except that it's excluding
-// any proprietary headers.
-//
-// This loses some information about the filesystem, but creates
-// tarballs that are supported by more versions of tar, especially
-// old non-spec-compliant copies of gnutar.
-
-// the symlink file is excluded from git, because it makes
-// windows freak the hell out.
-var fs = require("fs")
- , path = require("path")
- , symlink = path.resolve(__dirname, "fixtures/symlink")
-try { fs.unlinkSync(symlink) } catch (e) {}
-fs.symlinkSync("./hardlink-1", symlink)
-process.on("exit", function () {
- fs.unlinkSync(symlink)
-})
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , pkg = require("../package.json")
- , Pack = tar.Pack
- , fstream = require("fstream")
- , Reader = fstream.Reader
- , Writer = fstream.Writer
- , input = path.resolve(__dirname, "fixtures/")
- , target = path.resolve(__dirname, "tmp/pack.tar")
- , uid = process.getuid ? process.getuid() : 0
- , gid = process.getgid ? process.getgid() : 0
-
- , entries =
-
- // the global header and root fixtures/ dir are going to get
- // a different date each time, so omit that bit.
- // Also, dev/ino values differ across machines, so that's not
- // included.
- [ [ 'entry',
- { path: 'fixtures/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- uid: uid,
- gid: gid,
- size: 200 } ]
-
- , [ 'entry',
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/a.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 257,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/b.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 512,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/c.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/cc.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/sub/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/foo.js',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 4,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-1',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-2',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 0,
- type: '1',
- linkpath: 'fixtures/hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/star.4.html',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 54081,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/packtest/Ω.txt',
- uid: uid,
- gid: gid,
- size: 2 } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 100,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/symlink',
- uid: uid,
- gid: gid,
- size: 0,
- type: '2',
- linkpath: 'hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: "fixtures/Ω.txt"
- , uid: uid
- , gid: gid
- , size: 2 } ]
-
- , [ 'entry',
- { path: 'fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
- ]
-
-
-// first, make sure that the hardlinks are actually hardlinks, or this
-// won't work. Git has a way of replacing them with a copy.
-var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
- , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
- , fs = require("fs")
-
-try { fs.unlinkSync(hard2) } catch (e) {}
-fs.linkSync(hard1, hard2)
-
-tap.test("with global header", { timeout: 10000 }, function (t) {
- runTest(t, true)
-})
-
-tap.test("without global header", { timeout: 10000 }, function (t) {
- runTest(t, false)
-})
-
-function alphasort (a, b) {
- return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
-}
-
-
-function runTest (t, doGH) {
- var reader = Reader({ path: input
- , filter: function () {
- return !this.path.match(/\.(tar|hex)$/)
- }
- , sort: alphasort
- })
-
- var props = doGH ? pkg : {}
- props.noProprietary = true
- var pack = Pack(props)
- var writer = Writer(target)
-
- // global header should be skipped regardless, since it has no content.
- var entry = 0
-
- t.ok(reader, "reader ok")
- t.ok(pack, "pack ok")
- t.ok(writer, "writer ok")
-
- pack.pipe(writer)
-
- var parse = tar.Parse()
- t.ok(parse, "parser should be ok")
-
- pack.on("data", function (c) {
- // console.error("PACK DATA")
- if (c.length !== 512) {
- // this one is too noisy, only assert if it'll be relevant
- t.equal(c.length, 512, "parser should emit data in 512byte blocks")
- }
- parse.write(c)
- })
-
- pack.on("end", function () {
- // console.error("PACK END")
- t.pass("parser ends")
- parse.end()
- })
-
- pack.on("error", function (er) {
- t.fail("pack error", er)
- })
-
- parse.on("error", function (er) {
- t.fail("parse error", er)
- })
-
- writer.on("error", function (er) {
- t.fail("writer error", er)
- })
-
- reader.on("error", function (er) {
- t.fail("reader error", er)
- })
-
- parse.on("*", function (ev, e) {
- var wanted = entries[entry++]
- if (!wanted) {
- t.fail("unexpected event: "+ev)
- return
- }
- t.equal(ev, wanted[0], "event type should be "+wanted[0])
-
- if (ev !== wanted[0] || e.path !== wanted[1].path) {
- console.error("wanted", wanted)
- console.error([ev, e.props])
- e.on("end", function () {
- console.error(e.fields)
- throw "break"
- })
- }
-
- t.has(e.props, wanted[1], "properties "+wanted[1].path)
- if (wanted[2]) {
- e.on("end", function () {
- if (!e.fields) {
- t.ok(e.fields, "should get fields")
- } else {
- t.has(e.fields, wanted[2], "should get expected fields")
- }
- })
- }
- })
-
- reader.pipe(pack)
-
- writer.on("close", function () {
- t.equal(entry, entries.length, "should get all expected entries")
- t.pass("it finished")
- t.end()
- })
-
-}
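
A hedged usage sketch of the option this test covered (paths below are
placeholders, not fixtures): packing with noProprietary: true drops the
NODETAR.* pax records and the global header, which is why the expected
entries above carry none of them, unlike test/pack.js that follows.

var tar = require("tar")
  , fstream = require("fstream")

fstream.Reader({ path: "some/dir" })
  .pipe(tar.Pack({ noProprietary: true }))
  .pipe(fstream.Writer("out.tar"))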
diff --git a/deps/npm/node_modules/tar/test/pack.js b/deps/npm/node_modules/tar/test/pack.js
deleted file mode 100644
index 0f16c07bb0..0000000000
--- a/deps/npm/node_modules/tar/test/pack.js
+++ /dev/null
@@ -1,952 +0,0 @@
-
-// the symlink file is excluded from git, because it makes
-// windows freak the hell out.
-var fs = require("fs")
- , path = require("path")
- , symlink = path.resolve(__dirname, "fixtures/symlink")
-try { fs.unlinkSync(symlink) } catch (e) {}
-fs.symlinkSync("./hardlink-1", symlink)
-process.on("exit", function () {
- fs.unlinkSync(symlink)
-})
-
-
-var tap = require("tap")
- , tar = require("../tar.js")
- , pkg = require("../package.json")
- , Pack = tar.Pack
- , fstream = require("fstream")
- , Reader = fstream.Reader
- , Writer = fstream.Writer
- , input = path.resolve(__dirname, "fixtures/")
- , target = path.resolve(__dirname, "tmp/pack.tar")
- , uid = process.getuid ? process.getuid() : 0
- , gid = process.getgid ? process.getgid() : 0
-
- , entries =
-
- // the global header and root fixtures/ dir are going to get
- // a different date each time, so omit that bit.
- // Also, dev/ino values differ across machines, so that's not
- // included.
- [ [ 'globalExtendedHeader',
- { path: 'PaxHeader/',
- mode: 438,
- uid: 0,
- gid: 0,
- type: 'g',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { "NODETAR.author": pkg.author,
- "NODETAR.name": pkg.name,
- "NODETAR.description": pkg.description,
- "NODETAR.version": pkg.version,
- "NODETAR.repository.type": pkg.repository.type,
- "NODETAR.repository.url": pkg.repository.url,
- "NODETAR.main": pkg.main,
- "NODETAR.scripts.test": pkg.scripts.test } ]
-
- , [ 'entry',
- { path: 'fixtures/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- 'NODETAR.depth': '1',
- 'NODETAR.type': 'File',
- nlink: 1,
- uid: uid,
- gid: gid,
- size: 200,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '',
- 'NODETAR.depth': '1',
- 'NODETAR.type': 'File',
- nlink: 1,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/a.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 257,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/b.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 512,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/c.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/cc.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 513,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/dir/sub/',
- mode: 488,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
-
- , [ 'entry',
- { path: 'fixtures/foo.js',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 4,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-1',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 200,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/hardlink-2',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 0,
- type: '1',
- linkpath: 'fixtures/hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/omega.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/star.4.html',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 54081,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'fixtures/packtest/Ω.txt',
- 'NODETAR.depth': '2',
- 'NODETAR.type': 'File',
- nlink: 1,
- uid: uid,
- gid: gid,
- size: 2,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/packtest/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '',
- 'NODETAR.depth': '2',
- 'NODETAR.type': 'File',
- nlink: 1,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
- mode: 493,
- uid: uid,
- gid: gid,
- size: 0,
- type: '5',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 100,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'entry',
- { path: 'fixtures/symlink',
- uid: uid,
- gid: gid,
- size: 0,
- type: '2',
- linkpath: 'hardlink-1',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' } ]
-
- , [ 'extendedHeader',
- { path: 'PaxHeader/fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: "fixtures/Ω.txt"
- , "NODETAR.depth": "1"
- , "NODETAR.type": "File"
- , nlink: 1
- , uid: uid
- , gid: gid
- , size: 2
- , "NODETAR.blksize": "4096"
- , "NODETAR.blocks": "8" } ]
-
- , [ 'entry',
- { path: 'fixtures/Ω.txt',
- mode: 420,
- uid: uid,
- gid: gid,
- size: 2,
- type: '0',
- linkpath: '',
- ustar: 'ustar\u0000',
- ustarver: '00',
- uname: '',
- gname: '',
- devmaj: 0,
- devmin: 0,
- fill: '',
- 'NODETAR.depth': '1',
- 'NODETAR.type': 'File',
- nlink: 1,
- 'NODETAR.blksize': '4096',
- 'NODETAR.blocks': '8' } ]
- ]
-
-
-// first, make sure that the hardlinks are actually hardlinks, or this
-// won't work. Git has a way of replacing them with a copy.
-var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
- , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
- , fs = require("fs")
-
-try { fs.unlinkSync(hard2) } catch (e) {}
-fs.linkSync(hard1, hard2)
-
-tap.test("with global header", { timeout: 10000 }, function (t) {
- runTest(t, true)
-})
-
-tap.test("without global header", { timeout: 10000 }, function (t) {
- runTest(t, false)
-})
-
-tap.test("with from base", { timeout: 10000 }, function (t) {
- runTest(t, true, true)
-})
-
-function alphasort (a, b) {
- return a === b ? 0
- : a.toLowerCase() > b.toLowerCase() ? 1
- : a.toLowerCase() < b.toLowerCase() ? -1
- : a > b ? 1
- : -1
-}
-
-
-function runTest (t, doGH, doFromBase) {
- var reader = Reader({ path: input
- , filter: function () {
- return !this.path.match(/\.(tar|hex)$/)
- }
- , sort: alphasort
- })
-
- var props = doGH ? pkg : {}
- if(doFromBase) props.fromBase = true;
-
- var pack = Pack(props)
- var writer = Writer(target)
-
- // skip the global header if we're not doing that.
- var entry = doGH ? 0 : 1
-
- t.ok(reader, "reader ok")
- t.ok(pack, "pack ok")
- t.ok(writer, "writer ok")
-
- pack.pipe(writer)
-
- var parse = tar.Parse()
- t.ok(parse, "parser should be ok")
-
- pack.on("data", function (c) {
- // console.error("PACK DATA")
- if (c.length !== 512) {
- // this one is too noisy, only assert if it'll be relevant
- t.equal(c.length, 512, "parser should emit data in 512byte blocks")
- }
- parse.write(c)
- })
-
- pack.on("end", function () {
- // console.error("PACK END")
- t.pass("parser ends")
- parse.end()
- })
-
- pack.on("error", function (er) {
- t.fail("pack error", er)
- })
-
- parse.on("error", function (er) {
- t.fail("parse error", er)
- })
-
- writer.on("error", function (er) {
- t.fail("writer error", er)
- })
-
- reader.on("error", function (er) {
- t.fail("reader error", er)
- })
-
- parse.on("*", function (ev, e) {
- var wanted = entries[entry++]
- if (!wanted) {
- t.fail("unexpected event: "+ev)
- return
- }
- t.equal(ev, wanted[0], "event type should be "+wanted[0])
-
- if(doFromBase) {
- if(wanted[1].path.indexOf('fixtures/') && wanted[1].path.length == 100)
- wanted[1].path = wanted[1].path.replace('fixtures/', '') + 'ccccccccc'
-
- if(wanted[1]) wanted[1].path = wanted[1].path.replace('fixtures/', '').replace('//', '/')
- if(wanted[1].path == '') wanted[1].path = '/'
- if(wanted[2] && wanted[2].path) wanted[2].path = wanted[2].path.replace('fixtures', '').replace(/^\//, '')
-
- wanted[1].linkpath = wanted[1].linkpath.replace('fixtures/', '')
- }
-
- if (ev !== wanted[0] || e.path !== wanted[1].path) {
- console.error("wanted", wanted)
- console.error([ev, e.props])
- e.on("end", function () {
- console.error(e.fields)
- throw "break"
- })
- }
-
-
- t.has(e.props, wanted[1], "properties "+wanted[1].path)
- if (wanted[2]) {
- e.on("end", function () {
- if (!e.fields) {
- t.ok(e.fields, "should get fields")
- } else {
- t.has(e.fields, wanted[2], "should get expected fields")
- }
- })
- }
- })
-
- reader.pipe(pack)
-
- writer.on("close", function () {
- t.equal(entry, entries.length, "should get all expected entries")
- t.pass("it finished")
- t.end()
- })
-
-}
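
The globalExtendedHeader expectation at the top is what this test adds over
pack-no-proprietary.js: passing package.json properties to Pack() emits one
pax global header (type 'g') of NODETAR.* records ahead of all entries. A
small sketch of watching for it on the parse side, assuming the parser emits
named events matching the "*" event names asserted above:

var tar = require("tar")
  , parse = tar.Parse()

parse.on("globalExtendedHeader", function (e) {
  e.on("end", function () {
    // decoded pax records, e.g. "NODETAR.name", "NODETAR.version"
    console.error(e.fields)
  })
})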
diff --git a/deps/npm/node_modules/tar/test/parse-discard.js b/deps/npm/node_modules/tar/test/parse-discard.js
deleted file mode 100644
index da01a65ccc..0000000000
--- a/deps/npm/node_modules/tar/test/parse-discard.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/c.tar")
-
-tap.test("parser test", function (t) {
- var parser = tar.Parse()
- var total = 0
- var dataTotal = 0
-
- parser.on("end", function () {
-
- t.equals(total-513,dataTotal,'should have discarded only c.txt')
-
- t.end()
- })
-
- fs.createReadStream(file)
- .pipe(parser)
- .on('entry',function(entry){
- if(entry.path === 'c.txt') entry.abort()
-
- total += entry.size;
- entry.on('data',function(data){
- dataTotal += data.length
- })
- })
-})
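
A sketch of the discard pattern verified above, with placeholder names:
entry.abort() silences that entry's "data" events while the surrounding
parse continues, so exactly c.txt's 513 bytes go unreported (hence the
total - 513 assertion).

var tar = require("tar")
  , fs = require("fs")

fs.createReadStream("some.tar")
  .pipe(tar.Parse())
  .on("entry", function (entry) {
    if (entry.path === "unwanted.txt") return entry.abort()
    entry.on("data", function (chunk) {
      // only non-aborted entries deliver data here
    })
  })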
diff --git a/deps/npm/node_modules/tar/test/parse.js b/deps/npm/node_modules/tar/test/parse.js
deleted file mode 100644
index f765a50129..0000000000
--- a/deps/npm/node_modules/tar/test/parse.js
+++ /dev/null
@@ -1,359 +0,0 @@
-var tap = require("tap")
- , tar = require("../tar.js")
- , fs = require("fs")
- , path = require("path")
- , file = path.resolve(__dirname, "fixtures/c.tar")
- , index = 0
-
- , expect =
-[ [ 'entry',
- { path: 'c.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 513,
- mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'),
- cksum: 5422,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'entry',
- { path: 'cc.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 513,
- mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'),
- cksum: 5525,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'entry',
- { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 100,
- mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
- cksum: 18124,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'entry',
- { path: 'Ω.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 2,
- mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- cksum: 5695,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/Ω.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 120,
- mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- cksum: 6702,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: 'Ω.txt',
- ctime: 1319737909,
- atime: 1319739061,
- dev: 234881026,
- ino: 51693379,
- nlink: 1 } ],
- [ 'entry',
- { path: 'Ω.txt',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 2,
- mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- cksum: 5695,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
- atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'),
- dev: 234881026,
- ino: 51693379,
- nlink: 1 },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 353,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 14488,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- ctime: 1319686868,
- atime: 1319741254,
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 1 } ],
- [ 'entry',
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 200,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 14570,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'),
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 1 },
- undefined ],
- [ 'longPath',
- { path: '././@LongLink',
- mode: 0,
- uid: 0,
- gid: 0,
- size: 201,
- mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
- cksum: 4976,
- type: 'L',
- linkpath: '',
- ustar: false },
- '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
- [ 'entry',
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 1000,
- gid: 1000,
- size: 201,
- mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'),
- cksum: 14086,
- type: '0',
- linkpath: '',
- ustar: false },
- undefined ],
- [ 'longLinkpath',
- { path: '././@LongLink',
- mode: 0,
- uid: 0,
- gid: 0,
- size: 201,
- mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
- cksum: 4975,
- type: 'K',
- linkpath: '',
- ustar: false },
- '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
- [ 'longPath',
- { path: '././@LongLink',
- mode: 0,
- uid: 0,
- gid: 0,
- size: 201,
- mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
- cksum: 4976,
- type: 'L',
- linkpath: '',
- ustar: false },
- '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ],
- [ 'entry',
- { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
- mode: 511,
- uid: 1000,
- gid: 1000,
- size: 0,
- mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'),
- cksum: 21603,
- type: '2',
- linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- ustar: false },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/200-hard',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 143,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 6533,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { ctime: 1320617144,
- atime: 1320617232,
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 } ],
- [ 'entry',
- { path: '200-hard',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 200,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 5526,
- type: '0',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
- atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'),
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 },
- undefined ],
- [ 'extendedHeader',
- { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 353,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 14488,
- type: 'x',
- linkpath: '',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '' },
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- ctime: 1320617144,
- atime: 1320617406,
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 } ],
- [ 'entry',
- { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
- mode: 420,
- uid: 24561,
- gid: 20,
- size: 0,
- mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
- cksum: 15173,
- type: '1',
- linkpath: '200-hard',
- ustar: 'ustar\0',
- ustarver: '00',
- uname: 'isaacs',
- gname: 'staff',
- devmaj: 0,
- devmin: 0,
- fill: '',
- ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
- atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'),
- 'LIBARCHIVE.creationtime': '1319686852',
- dev: 234881026,
- ino: 51681874,
- nlink: 2 },
- undefined ] ]
-
-
-tap.test("parser test", function (t) {
- var parser = tar.Parse()
-
- parser.on("end", function () {
- t.equal(index, expect.length, "saw all expected events")
- t.end()
- })
-
- fs.createReadStream(file)
- .pipe(parser)
- .on("*", function (ev, entry) {
- var wanted = expect[index]
- if (!wanted) {
- return t.fail("Unexpected event: " + ev)
- }
- var result = [ev, entry.props]
- entry.on("end", function () {
- result.push(entry.fields || entry.body)
-
- t.equal(ev, wanted[0], index + " event type")
- t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties")
- if (wanted[2]) {
- t.equivalent(result[2], wanted[2], "metadata values")
- }
- index ++
- })
- })
-})
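
The extendedHeader bodies expected above (sizes 120, 353, 143) are sequences
of pax records, each "<len> <key>=<value>\n" where <len> counts the whole
record in bytes, its own digits included; the '././@LongLink' entries of type
'L' and 'K', by contrast, carry a full path or linkpath as the entry body,
applied to the entry that follows. A minimal pax-record encoder sketch:

function paxRecord (key, value) {
  var kv = " " + key + "=" + value + "\n"
    , kvLen = Buffer.byteLength(kv)
    , len = kvLen + String(kvLen).length
  // the prefix counts itself, so adjust if adding the digits
  // pushed the total into one more digit
  if (String(len).length !== String(kvLen).length) {
    len = kvLen + String(len).length
  }
  return String(len) + kv
}

// e.g. paxRecord("path", "Ω.txt") === "15 path=Ω.txt\n" (Ω is 2 bytes)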
diff --git a/deps/npm/node_modules/tar/test/zz-cleanup.js b/deps/npm/node_modules/tar/test/zz-cleanup.js
deleted file mode 100644
index a00ff7faa0..0000000000
--- a/deps/npm/node_modules/tar/test/zz-cleanup.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// clean up the fixtures
-
-var tap = require("tap")
-, rimraf = require("rimraf")
-, test = tap.test
-, path = require("path")
-
-test("clean fixtures", function (t) {
- rimraf(path.resolve(__dirname, "fixtures"), function (er) {
- t.ifError(er, "rimraf ./fixtures/")
- t.end()
- })
-})
-
-test("clean tmp", function (t) {
- rimraf(path.resolve(__dirname, "tmp"), function (er) {
- t.ifError(er, "rimraf ./tmp/")
- t.end()
- })
-})